Adds module docstrings

Ask Solem 13 years ago
parent
commit
27caaad22c
53 changed files with 746 additions and 96 deletions
  1. celery/beat.py (+11 -0)
  2. celery/contrib/batches.py (+25 -2)
  3. celery/datastructures.py (+8 -31)
  4. celery/decorators.py (+2 -0)
  5. celery/events/__init__.py (+12 -0)
  6. celery/events/cursesmon.py (+9 -0)
  7. celery/events/dumper.py (+11 -0)
  8. celery/events/snapshot.py (+14 -0)
  9. celery/events/state.py (+21 -0)
  10. celery/exceptions.py (+18 -0)
  11. celery/execute/trace.py (+11 -0)
  12. celery/loaders/__init__.py (+11 -0)
  13. celery/loaders/app.py (+10 -0)
  14. celery/loaders/base.py (+11 -0)
  15. celery/loaders/default.py (+10 -0)
  16. celery/local.py (+16 -1)
  17. celery/log.py (+13 -1)
  18. celery/messaging.py (+8 -0)
  19. celery/platforms.py (+146 -20)
  20. celery/registry.py (+10 -1)
  21. celery/result.py (+10 -0)
  22. celery/routes.py (+10 -0)
  23. celery/schedules.py (+10 -0)
  24. celery/signals.py (+0 -3)
  25. celery/states.py (+2 -4)
  26. celery/task/__init__.py (+8 -0)
  27. celery/task/base.py (+10 -0)
  28. celery/task/chords.py (+10 -0)
  29. celery/task/control.py (+13 -0)
  30. celery/task/http.py (+11 -0)
  31. celery/task/sets.py (+10 -0)
  32. celery/tests/test_utils/test_datastructures.py (+1 -15)
  33. celery/utils/__init__.py (+17 -0)
  34. celery/utils/compat.py (+8 -0)
  35. celery/utils/encoding.py (+12 -1)
  36. celery/utils/functional.py (+10 -0)
  37. celery/utils/mail.py (+11 -0)
  38. celery/utils/patch.py (+10 -0)
  39. celery/utils/serialization.py (+12 -1)
  40. celery/utils/term.py (+17 -6)
  41. celery/utils/timer2.py (+5 -0)
  42. celery/utils/timeutils.py (+27 -7)
  43. celery/worker/__init__.py (+10 -0)
  44. celery/worker/autoscale.py (+15 -0)
  45. celery/worker/buckets.py (+17 -0)
  46. celery/worker/consumer.py (+8 -3)
  47. celery/worker/control/__init__.py (+9 -0)
  48. celery/worker/control/builtins.py (+16 -0)
  49. celery/worker/control/registry.py (+11 -0)
  50. celery/worker/heartbeat.py (+11 -0)
  51. celery/worker/job.py (+14 -0)
  52. celery/worker/mediator.py (+17 -0)
  53. celery/worker/state.py (+17 -0)

+ 11 - 0
celery/beat.py

@@ -1,3 +1,11 @@
+"""
+
+celery.beat
+===========
+
+The Celery periodic task scheduler.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import errno
 import errno
@@ -24,6 +32,9 @@ from .schedules import maybe_schedule, crontab
 from .utils import cached_property, instantiate, maybe_promise
 from .utils import cached_property, instantiate, maybe_promise
 from .utils.timeutils import humanize_seconds
 from .utils.timeutils import humanize_seconds
 
 
+__all__ = ["SchedulingError", "ScheduleEntry", "Scheduler",
+           "Service", "EmbeddedService"]
+
 
 
 class SchedulingError(Exception):
 class SchedulingError(Exception):
     """An error occured while scheduling a task."""
     """An error occured while scheduling a task."""

+ 25 - 2
celery/contrib/batches.py

@@ -42,14 +42,37 @@ Registering the click is done as follows:
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from itertools import count
 from itertools import count
-from Queue import Queue
+from Queue import Empty, Queue
 
 
-from celery.datastructures import consume_queue
 from celery.task import Task
 from celery.task import Task
 from celery.utils import cached_property, timer2
 from celery.utils import cached_property, timer2
 from celery.worker import state
 from celery.worker import state
 
 
 
 
+def consume_queue(queue):
+    """Iterator yielding all immediately available items in a
+    :class:`Queue.Queue`.
+
+    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
+
+    *Examples*
+
+        >>> q = Queue()
+        >>> map(q.put, range(4))
+        >>> list(consume_queue(q))
+        [0, 1, 2, 3]
+        >>> list(consume_queue(q))
+        []
+
+    """
+    get = queue.get_nowait
+    while 1:
+        try:
+            yield get()
+        except Empty:
+            break
+
+
 def apply_batches_task(task, args, loglevel, logfile):
 def apply_batches_task(task, args, loglevel, logfile):
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     try:
     try:

+ 8 - 31
celery/datastructures.py

@@ -2,10 +2,7 @@
 celery.datastructures
 celery.datastructures
 =====================
 =====================
 
 
-Custom data structures.
-
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+Custom types and data structures.
 
 
 """
 """
 from __future__ import absolute_import
 from __future__ import absolute_import
@@ -21,6 +18,10 @@ from threading import RLock
 
 
 from .utils.compat import UserDict, OrderedDict
 from .utils.compat import UserDict, OrderedDict
 
 
+__all__ = ["AttributeDictMixin", "AttributeDict", "DictAttribute",
+           "ConfigurationView", "ExceptionInfo", "LimitedSet",
+           "LRUCache", "TokenBucket"]
+
 
 
 class AttributeDictMixin(object):
 class AttributeDictMixin(object):
     """Adds attribute access to mappings.
     """Adds attribute access to mappings.
@@ -193,7 +194,7 @@ class _Frame(object):
         self.f_code = self.Code(frame.f_code)
         self.f_code = self.Code(frame.f_code)
 
 
 
 
-class _Traceback(object):
+class Traceback(object):
     Frame = _Frame
     Frame = _Frame
 
 
     def __init__(self, tb):
     def __init__(self, tb):
@@ -202,7 +203,7 @@ class _Traceback(object):
         if tb.tb_next is None:
         if tb.tb_next is None:
             self.tb_next = None
             self.tb_next = None
         else:
         else:
-            self.tb_next = _Traceback(tb.tb_next)
+            self.tb_next = Traceback(tb.tb_next)
 
 
 
 
 class ExceptionInfo(object):
 class ExceptionInfo(object):
@@ -227,7 +228,7 @@ class ExceptionInfo(object):
 
 
     def __init__(self, exc_info):
     def __init__(self, exc_info):
         self.type, self.exception, tb = exc_info
         self.type, self.exception, tb = exc_info
-        self.tb = _Traceback(tb)
+        self.tb = Traceback(tb)
         self.traceback = ''.join(traceback.format_exception(*exc_info))
         self.traceback = ''.join(traceback.format_exception(*exc_info))
 
 
     def __str__(self):
     def __str__(self):
@@ -241,30 +242,6 @@ class ExceptionInfo(object):
         return self.type, self.exception, self.tb
         return self.type, self.exception, self.tb
 
 
 
 
-def consume_queue(queue):
-    """Iterator yielding all immediately available items in a
-    :class:`Queue.Queue`.
-
-    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
-
-    *Examples*
-
-        >>> q = Queue()
-        >>> map(q.put, range(4))
-        >>> list(consume_queue(q))
-        [0, 1, 2, 3]
-        >>> list(consume_queue(q))
-        []
-
-    """
-    get = queue.get_nowait
-    while 1:
-        try:
-            yield get()
-        except Empty:
-            break
-
-
 class LimitedSet(object):
 class LimitedSet(object):
     """Kind-of Set with limitations.
     """Kind-of Set with limitations.
 
 

+ 2 - 0
celery/decorators.py

@@ -19,6 +19,8 @@ import warnings
 from . import task as _task
 from . import task as _task
 from .exceptions import CDeprecationWarning
 from .exceptions import CDeprecationWarning
 
 
+__all__ = ["task", "periodic_task"]
+
 
 
 warnings.warn(CDeprecationWarning("""
 warnings.warn(CDeprecationWarning("""
 The `celery.decorators` module and the magic keyword arguments
 The `celery.decorators` module and the magic keyword arguments

+ 12 - 0
celery/events/__init__.py

@@ -1,3 +1,13 @@
+"""
+
+celery.events
+=============
+
+Events are messages sent for actions happening
+in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT` is
+enabled).  These events can be used for monitoring.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -15,6 +25,8 @@ from kombu.messaging import Consumer, Producer
 from ..app import app_or_default
 from ..app import app_or_default
 from ..utils import uuid
 from ..utils import uuid
 
 
+__all__ = ["event_exchange", "Event", "EventDispatcher", "EventReceiver"]
+
 event_exchange = Exchange("celeryev", type="topic")
 event_exchange = Exchange("celeryev", type="topic")
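
For context, a minimal sketch of consuming these events with EventReceiver (the broker settings and the on_event handler below are illustrative, not part of this commit):

    from celery.app import app_or_default
    from celery.events import EventReceiver

    app = app_or_default()

    def on_event(event):
        # Every event is a dict carrying at least "type", "hostname" and "timestamp".
        print("EVENT: %s from %s" % (event["type"], event["hostname"]))

    connection = app.broker_connection()
    try:
        # "*" acts as a catch-all handler for every event type.
        receiver = EventReceiver(connection, handlers={"*": on_event})
        receiver.capture(limit=10)    # stop after ten events
    finally:
        connection.close()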
 
 
 
 

+ 9 - 0
celery/events/cursesmon.py

@@ -1,3 +1,10 @@
+"""
+celery.events.cursesmon
+=======================
+
+celeryev is a monitor written in curses using celery events.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import curses
 import curses
@@ -15,6 +22,8 @@ from .. import states
 from ..app import app_or_default
 from ..app import app_or_default
 from ..utils import abbr, abbrtask
 from ..utils import abbr, abbrtask
 
 
+__all__ = ["CursesMonitor", "evtop"]
+
 BORDER_SPACING = 4
 BORDER_SPACING = 4
 LEFT_BORDER_OFFSET = 3
 LEFT_BORDER_OFFSET = 3
 UUID_WIDTH = 36
 UUID_WIDTH = 36

+ 11 - 0
celery/events/dumper.py

@@ -1,3 +1,12 @@
+"""
+
+celery.events.dumper
+====================
+
+This is a simple program used to show events as they are happening.
+Like tcpdump just for Celery events.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
@@ -7,6 +16,8 @@ from datetime import datetime
 from ..app import app_or_default
 from ..app import app_or_default
 from ..datastructures import LRUCache
 from ..datastructures import LRUCache
 
 
+__all__ = ["Dumper", "evdump"]
+
 
 
 TASK_NAMES = LRUCache(limit=0xFFF)
 TASK_NAMES = LRUCache(limit=0xFFF)
 
 

+ 14 - 0
celery/events/snapshot.py

@@ -1,3 +1,15 @@
+"""
+
+celery.events.snapshot
+======================
+
+Consuming the events as a stream is not always suitable,
+so this module implements a system to take snapshots of the
+state of a cluster.  There is a full implementation of this
+writing the snapshots to a database in ``django-celery``.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import atexit
 import atexit
@@ -9,6 +21,8 @@ from ..utils import timer2, instantiate, LOG_LEVELS
 from ..utils.dispatch import Signal
 from ..utils.dispatch import Signal
 from ..utils.timeutils import rate
 from ..utils.timeutils import rate
 
 
+__all__ = ["Polaroid", "evcam"]
+
 
 
 class Polaroid(object):
 class Polaroid(object):
     timer = timer2
     timer = timer2

+ 21 - 0
celery/events/state.py

@@ -1,3 +1,22 @@
+"""
+
+celery.events.state
+===================
+
+This module implements a way to keep track of the
+state of a cluster of workers and the tasks it is working on
+by consuming events.
+
+For every event consumed the state is updated, so
+it represents the state of the cluster at the time
+of the last event.
+
+Snapshots (:mod:`celery.events.snapshot`) can be used
+to take pictures of this state at regular intervals
+and e.g. store it inside a database.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -10,6 +29,8 @@ from .. import states
 from ..datastructures import AttributeDict, LRUCache
 from ..datastructures import AttributeDict, LRUCache
 from ..utils import kwdict
 from ..utils import kwdict
 
 
+__all__ = ["HEARTBEAT_EXPIRE", "Worker", "Task", "State", "state"]
+
 #: Hartbeat expiry time in seconds.  The worker will be considered offline
 #: Hartbeat expiry time in seconds.  The worker will be considered offline
 #: if no heartbeat is received within this time.
 #: if no heartbeat is received within this time.
 #: Default is 2:30 minutes.
 #: Default is 2:30 minutes.
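
As the new docstring says, the State object is driven purely by events; a rough sketch of feeding it by hand (the event dicts are hand-written here just to show their shape):

    import time

    from celery.events.state import State

    state = State()
    state.event({"type": "worker-online",
                 "hostname": "worker1.example.com",
                 "timestamp": time.time()})
    state.event({"type": "task-received",
                 "uuid": "hypothetical-task-id",
                 "name": "tasks.add",
                 "hostname": "worker1.example.com",
                 "timestamp": time.time()})

    print(state.workers.keys())   # ["worker1.example.com"]
    print(state.tasks.keys())     # ids of the tasks seen so far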

+ 18 - 0
celery/exceptions.py

@@ -1,5 +1,23 @@
+"""
+
+celery.exceptions
+=================
+
+This module contains Celery-specific exceptions.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
+__all__ = ["SystemTerminate", "QueueNotFound",
+           "TimeLimitExceeded", "SoftTimeLimitExceeded",
+           "WorkerLostError", "ImproperlyConfigured",
+           "NotRegistered", "AlreadyRegistered",
+           "TimeoutError", "MaxRetriesExceededError",
+           "RetryTaskError", "TaskRevokedError",
+           "NotConfigured", "CPendingDeprecationWarning",
+           "CDeprecationWarning"]
+
 
 
 UNREGISTERED_FMT = """\
 UNREGISTERED_FMT = """\
 Task of kind %s is not registered, please make sure it's imported.\
 Task of kind %s is not registered, please make sure it's imported.\
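
Several of these exceptions are meant to be caught in user code, for example SoftTimeLimitExceeded when :setting:`CELERYD_TASK_SOFT_TIME_LIMIT` is in effect. A sketch (render and cleanup are hypothetical helpers):

    from celery.task import task
    from celery.exceptions import SoftTimeLimitExceeded

    @task
    def process_document(doc_id):
        try:
            return render(doc_id)      # hypothetical long-running work
        except SoftTimeLimitExceeded:
            cleanup(doc_id)            # hypothetical cleanup before the hard limit hits
            raise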

+ 11 - 0
celery/execute/trace.py

@@ -1,3 +1,12 @@
+"""
+celery.execute.trace
+====================
+
+This module defines how the task execution is traced,
+errors are recorded, handlers are applied and so on.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
@@ -8,6 +17,8 @@ from ..datastructures import ExceptionInfo
 from ..exceptions import RetryTaskError
 from ..exceptions import RetryTaskError
 from ..registry import tasks
 from ..registry import tasks
 
 
+__all__ = ["TraceInfo", "TaskTrace"]
+
 
 
 class TraceInfo(object):
 class TraceInfo(object):
 
 

+ 11 - 0
celery/loaders/__init__.py

@@ -1,3 +1,12 @@
+"""
+
+celery.loaders
+==============
+
+Loaders define how configuration is read, what happens
+when workers start, when tasks are executed, and so on.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .. import current_app
 from .. import current_app
@@ -7,6 +16,8 @@ LOADER_ALIASES = {"app": "celery.loaders.app.AppLoader",
                   "default": "celery.loaders.default.Loader",
                   "default": "celery.loaders.default.Loader",
                   "django": "djcelery.loaders.DjangoLoader"}
                   "django": "djcelery.loaders.DjangoLoader"}
 
 
+__all__ = ["get_loader_cls"]
+
 
 
 def get_loader_cls(loader):
 def get_loader_cls(loader):
     """Get loader class by name/alias"""
     """Get loader class by name/alias"""

+ 10 - 0
celery/loaders/app.py

@@ -1,7 +1,17 @@
+"""
+
+celery.loaders.app
+==================
+
+The default loader used with custom app instances.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .base import BaseLoader
 from .base import BaseLoader
 
 
+__all__ = ["AppLoader"]
+
 
 
 class AppLoader(BaseLoader):
 class AppLoader(BaseLoader):
 
 

+ 11 - 0
celery/loaders/base.py

@@ -1,3 +1,12 @@
+"""
+
+celery.loaders.base
+===================
+
+Loader base class.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import importlib
 import importlib
@@ -13,6 +22,8 @@ from ..utils import (cached_property, get_cls_by_name,
                      import_from_cwd as _import_from_cwd)
                      import_from_cwd as _import_from_cwd)
 from ..utils.functional import maybe_list
 from ..utils.functional import maybe_list
 
 
+__all__ = ["BaseLoader"]
+
 BUILTIN_MODULES = frozenset(["celery.task"])
 BUILTIN_MODULES = frozenset(["celery.task"])
 
 
 ERROR_ENVVAR_NOT_SET = (
 ERROR_ENVVAR_NOT_SET = (

+ 10 - 0
celery/loaders/default.py

@@ -1,3 +1,11 @@
+"""
+
+celery.loaders.default
+======================
+
+The default loader used when no custom app has been initialized.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import os
 import os
@@ -11,6 +19,8 @@ from .base import BaseLoader
 
 
 DEFAULT_CONFIG_MODULE = "celeryconfig"
 DEFAULT_CONFIG_MODULE = "celeryconfig"
 
 
+__all__ = ["Loader"]
+
 
 
 class Loader(BaseLoader):
 class Loader(BaseLoader):
     """The loader used by the default app."""
     """The loader used by the default app."""

+ 16 - 1
celery/local.py

@@ -1,7 +1,20 @@
+"""
+
+celery.local
+============
+
+This module contains critical utilities that
+need to be loaded as soon as possible, and that
+should not load any third party modules.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 
 
 def try_import(module):
 def try_import(module):
+    """Try to import and return module, or return
+    None if the module does not exist."""
     from importlib import import_module
     from importlib import import_module
     try:
     try:
         return import_module(module)
         return import_module(module)
@@ -10,7 +23,9 @@ def try_import(module):
 
 
 
 
 class Proxy(object):
 class Proxy(object):
-    """Code stolen from werkzeug.local.Proxy."""
+    """Proxy to another object."""
+
+    # Code stolen from werkzeug.local.Proxy.
     __slots__ = ('__local', '__dict__', '__name__')
     __slots__ = ('__local', '__dict__', '__name__')
 
 
     def __init__(self, local, name=None):
     def __init__(self, local, name=None):

+ 13 - 1
celery/log.py

@@ -1,4 +1,11 @@
-"""celery.log"""
+"""
+
+celery.log
+==========
+
+Logging utilities.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import logging
 import logging
@@ -23,6 +30,11 @@ from .utils.term import colored
 
 
 is_py3k = sys.version_info >= (3, 0)
 is_py3k = sys.version_info >= (3, 0)
 
 
+__all__ = ["ColorFormatter", "Logging", "get_default_logger",
+           "setup_logger", "setup_task_logger", "get_task_logger",
+           "setup_logging_subsystem", "redirect_stdouts_to_logger",
+           "LoggingProxy"]
+
 
 
 class ColorFormatter(logging.Formatter):
 class ColorFormatter(logging.Formatter):
     #: Loglevel -> Color mapping.
     #: Loglevel -> Color mapping.

+ 8 - 0
celery/messaging.py

@@ -1,3 +1,11 @@
+"""
+
+celery.messaging
+================
+
+This module is deprecated, use ``current_app.amqp`` instead.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from . import current_app
 from . import current_app

+ 146 - 20
celery/platforms.py

@@ -1,3 +1,12 @@
+"""
+
+celery.platforms
+================
+
+Utilities dealing with platform specifics: signals, daemonization, users &
+groups, etc.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import os
 import os
@@ -16,12 +25,25 @@ DAEMON_UMASK = 0
 DAEMON_WORKDIR = "/"
 DAEMON_WORKDIR = "/"
 DAEMON_REDIRECT_TO = getattr(os, "devnull", "/dev/null")
 DAEMON_REDIRECT_TO = getattr(os, "devnull", "/dev/null")
 
 
+__all__ = ["LockFailed", "get_fdmax", "create_pidlock",
+           "DaemonContext", "detached", "parse_uid", "parse_gid",
+           "setegid", "seteuid", "set_effective_user", "Signals",
+           "set_process_title", "set_mp_process_title"]
+
 
 
 class LockFailed(Exception):
 class LockFailed(Exception):
+    """Raised if a pidlock can't be acquired."""
     pass
     pass
 
 
 
 
 def get_fdmax(default=None):
 def get_fdmax(default=None):
+    """Returns the maximum number of open file descriptors
+    on this system.
+
+    :keyword default: Value returned if there's no file
+                      descriptor limit.
+
+    """
     fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
     fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
     if fdmax == resource.RLIM_INFINITY:
     if fdmax == resource.RLIM_INFINITY:
         return default
         return default
@@ -29,22 +51,23 @@ def get_fdmax(default=None):
 
 
 
 
 class PIDFile(object):
 class PIDFile(object):
+    """PID lock file.
+
+    This is the type returned by :func:`create_pidlock`.
+
+    **Should not be used directly, use the :func:`create_pidlock`
+    context instead**
+
+    """
+
+    #: Path to the pid lock file.
+    path = None
 
 
     def __init__(self, path):
     def __init__(self, path):
         self.path = os.path.abspath(path)
         self.path = os.path.abspath(path)
 
 
-    def write_pid(self):
-        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
-        open_mode = (((os.R_OK | os.W_OK) << 6) |
-                        ((os.R_OK) << 3) |
-                        ((os.R_OK)))
-        pidfile_fd = os.open(self.path, open_flags, open_mode)
-        pidfile = os.fdopen(pidfile_fd, "w")
-        pid = os.getpid()
-        pidfile.write("%d\n" % (pid, ))
-        pidfile.close()
-
     def acquire(self):
     def acquire(self):
+        """Acquire lock."""
         try:
         try:
             self.write_pid()
             self.write_pid()
         except OSError, exc:
         except OSError, exc:
@@ -53,13 +76,16 @@ class PIDFile(object):
     __enter__ = acquire
     __enter__ = acquire
 
 
     def is_locked(self):
     def is_locked(self):
+        """Returns true if the pid lock exists."""
         return os.path.exists(self.path)
         return os.path.exists(self.path)
 
 
     def release(self, *args):
     def release(self, *args):
+        """Release lock."""
         self.remove()
         self.remove()
     __exit__ = release
     __exit__ = release
 
 
     def read_pid(self):
     def read_pid(self):
+        """Reads and returns the current pid."""
         try:
         try:
             fh = open(self.path, "r")
             fh = open(self.path, "r")
         except IOError, exc:
         except IOError, exc:
@@ -76,6 +102,7 @@ class PIDFile(object):
             raise ValueError("PID file %r contents invalid." % self.path)
             raise ValueError("PID file %r contents invalid." % self.path)
 
 
     def remove(self):
     def remove(self):
+        """Removes the lock."""
         try:
         try:
             os.unlink(self.path)
             os.unlink(self.path)
         except OSError, exc:
         except OSError, exc:
@@ -84,6 +111,8 @@ class PIDFile(object):
             raise
             raise
 
 
     def remove_if_stale(self):
     def remove_if_stale(self):
+        """Removes the lock if the process is not running.
+        (does not respond to signals)."""
         try:
         try:
             pid = self.read_pid()
             pid = self.read_pid()
         except ValueError, exc:
         except ValueError, exc:
@@ -103,13 +132,39 @@ class PIDFile(object):
                 return True
                 return True
         return False
         return False
 
 
+    def write_pid(self):
+        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+        open_mode = (((os.R_OK | os.W_OK) << 6) |
+                        ((os.R_OK) << 3) |
+                        ((os.R_OK)))
+        pidfile_fd = os.open(self.path, open_flags, open_mode)
+        pidfile = os.fdopen(pidfile_fd, "w")
+        try:
+            pid = os.getpid()
+            pidfile.write("%d\n" % (pid, ))
+        finally:
+            pidfile.close()
+
 
 
 def create_pidlock(pidfile):
 def create_pidlock(pidfile):
-    """Create and verify pidfile.
+    """Create and verify pid file.
+
+    If the pid file already exists the program exits with an error message,
+    however if the process it refers to is not running anymore, the pid file
+    is deleted and the program continues.
+
+    The caller is responsible for releasing the lock before the program
+    exits.
+
+    :returns: :class:`PIDFile`.
+
+    **Example**:
 
 
-    If the pidfile already exists the program exits with an error message,
-    however if the process it refers to is not running anymore, the pidfile
-    is just deleted.
+    .. code-block:: python
+
+        import atexit
+        pidlock = create_pidlock("/var/run/app.pid").acquire()
+        atexit.register(pidlock.release)
 
 
     """
     """
 
 
@@ -168,6 +223,41 @@ class DaemonContext(object):
 
 
 def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
 def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
              workdir=None, **opts):
              workdir=None, **opts):
+    """Detach the current process in the background (daemonize).
+
+    :keyword logfile: Optional log file.  The ability to write to this file
+       will be verified before the process is detached.
+    :keyword pidfile: Optional pid file.  The pid file will not be created,
+      as this is the responsibility of the child.  But the process will
+      exit if the pid lock exists and the pid written is still running.
+    :keyword uid: Optional user id or user name to change
+      effective privileges to.
+    :keyword gid: Optional group id or group name to change effective
+      privileges to.
+    :keyword umask: Optional umask that will be effective in the child process.
+    :keyword workdir: Optional new working directory.
+    :keyword \*\*opts: Ignored.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        from celery.platforms import detached, create_pidlock
+
+        with detached(logfile="/var/log/app.log", pidfile="/var/run/app.pid",
+                      uid="nobody"):
+            # Now in detached child process with effective user set to nobody,
+            # and we know that our logfile can be written to, and that
+            # the pidfile is not locked.
+            pidlock = create_pidlock("/var/run/app.pid").acquire()
+            atexit.register(pidlock.release)
+
+            # Run the program
+            program.run(logfile="/var/log/app.log")
+
+    """
+
     if not resource:
     if not resource:
         raise RuntimeError("This platform does not support detach.")
         raise RuntimeError("This platform does not support detach.")
     workdir = os.getcwd() if workdir is None else workdir
     workdir = os.getcwd() if workdir is None else workdir
@@ -187,7 +277,7 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
 def parse_uid(uid):
 def parse_uid(uid):
     """Parse user id.
     """Parse user id.
 
 
-    uid can be an interger (uid) or a string (username), if a username
+    uid can be an integer (uid) or a string (user name), if a user name
     the uid is taken from the password file.
     the uid is taken from the password file.
 
 
     """
     """
@@ -237,19 +327,19 @@ def seteuid(uid):
 def set_effective_user(uid=None, gid=None):
 def set_effective_user(uid=None, gid=None):
     """Change process privileges to new user/group.
     """Change process privileges to new user/group.
 
 
-    If uid and gid is set the effective user/group is set.
+    If UID and GID is set the effective user/group is set.
 
 
-    If only uid is set, the effective uer is set, and the group is
+    If only UID is set, the effective user is set, and the group is
     set to the users primary group.
     set to the users primary group.
 
 
-    If only gid is set, the effective group is set.
+    If only GID is set, the effective group is set.
 
 
     """
     """
     uid = uid and parse_uid(uid)
     uid = uid and parse_uid(uid)
     gid = gid and parse_gid(gid)
     gid = gid and parse_gid(gid)
 
 
     if uid:
     if uid:
-        # If gid isn't defined, get the primary gid of the uer.
+        # If GID isn't defined, get the primary GID of the user.
         if not gid and pwd:
         if not gid and pwd:
             gid = pwd.getpwuid(uid).pw_gid
             gid = pwd.getpwuid(uid).pw_gid
         setegid(gid)
         setegid(gid)
@@ -259,6 +349,42 @@ def set_effective_user(uid=None, gid=None):
 
 
 
 
 class Signals(object):
 class Signals(object):
+    """Convenience interface to :mod:`signals`.
+
+    If the requested signal is not supported on the current platform,
+    the operation will be ignored.
+
+    **Examples**:
+
+    .. code-block:: python
+
+        >>> from celery.platforms import signals
+
+        >>> signals["INT"] = my_handler
+
+        >>> signals["INT"]
+        my_handler
+
+        >>> signals.supported("INT")
+        True
+
+        >>> signals.signum("INT")
+        2
+
+        >>> signals.ignore("USR1")
+        >>> signals["USR1"] == signals.ignored
+        True
+
+        >>> signals.reset("USR1")
+        >>> signals["USR1"] == signals.default
+        True
+
+        >>> signals.update(INT=exit_handler,
+        ...                TERM=exit_handler,
+        ...                HUP=hup_handler)
+
+    """
+
     ignored = _signal.SIG_IGN
     ignored = _signal.SIG_IGN
     default = _signal.SIG_DFL
     default = _signal.SIG_DFL
 
 

+ 10 - 1
celery/registry.py

@@ -1,10 +1,19 @@
-"""celery.registry"""
+"""
+
+celery.registry
+===============
+
+Registry of available tasks.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import inspect
 import inspect
 
 
 from .exceptions import NotRegistered
 from .exceptions import NotRegistered
 
 
+__all__ = ["TaskRegistry", "tasks"]
+
 
 
 class TaskRegistry(dict):
 class TaskRegistry(dict):
 
 

+ 10 - 0
celery/result.py

@@ -1,3 +1,10 @@
+"""
+celery.result
+=============
+
+Task results/state, and result groups.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -13,6 +20,9 @@ from .exceptions import TimeoutError
 from .registry import _unpickle_task
 from .registry import _unpickle_task
 from .utils.compat import OrderedDict
 from .utils.compat import OrderedDict
 
 
+__all__ = ["BaseAsyncResult", "AsyncResult", "ResultSet",
+           "TaskSetResult", "EagerResult"]
+
 
 
 def _unpickle_result(task_id, task_name):
 def _unpickle_result(task_id, task_name):
     return _unpickle_task(task_name).AsyncResult(task_id)
     return _unpickle_task(task_name).AsyncResult(task_id)
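
A quick sketch of how these result types are used from client code (assumes a registered tasks.add task and a configured result backend):

    from celery.execute import send_task

    result = send_task("tasks.add", args=(2, 2))   # returns an AsyncResult
    print(result.task_id)
    print(result.ready())             # False until a worker has finished the task
    print(result.get(timeout=10))     # 4, or raises celery.exceptions.TimeoutError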

+ 10 - 0
celery/routes.py

@@ -1,8 +1,18 @@
+"""
+celery.routes
+=============
+
+Contains utilities for working with task routes (e.g.
+:setting:`CELERY_ROUTES`).
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .exceptions import QueueNotFound
 from .exceptions import QueueNotFound
 from .utils import firstmethod, instantiate, lpmerge, mpromise
 from .utils import firstmethod, instantiate, lpmerge, mpromise
 
 
+__all__ = ["MapRoute", "Router", "prepare"]
+
 _first_route = firstmethod("route_for_task")
 _first_route = firstmethod("route_for_task")
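
For reference, these utilities consume the :setting:`CELERY_ROUTES` setting, which can mix plain dicts and router classes; a small illustrative configuration (queue and task names are made up):

    # celeryconfig.py -- illustrative only
    class FeedRouter(object):

        def route_for_task(self, task, args=None, kwargs=None):
            if task == "feeds.tasks.import_feed":
                return {"queue": "feeds"}
            return None   # fall through to the next router

    CELERY_ROUTES = (
        FeedRouter(),
        {"images.tasks.generate_thumbnail": {"queue": "images"}},
    )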
 
 
 
 

+ 10 - 0
celery/schedules.py

@@ -1,3 +1,11 @@
+"""
+celery.schedules
+================
+
+Schedules define when periodic tasks should be run.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import re
 import re
@@ -9,6 +17,8 @@ from .utils import is_iterable
 from .utils.timeutils import (timedelta_seconds, weekday,
 from .utils.timeutils import (timedelta_seconds, weekday,
                               remaining, humanize_seconds)
                               remaining, humanize_seconds)
 
 
+__all__ = ["ParseException", "schedule", "crontab", "maybe_schedule"]
+
 
 
 class ParseException(Exception):
 class ParseException(Exception):
     """Raised by crontab_parser when the input can't be parsed."""
     """Raised by crontab_parser when the input can't be parsed."""

+ 0 - 3
celery/signals.py

@@ -7,9 +7,6 @@ celery.signals
 Signals allows decoupled applications to receive notifications when
 Signals allows decoupled applications to receive notifications when
 certain actions occur elsewhere in the application.
 certain actions occur elsewhere in the application.
 
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
 .. contents::
 .. contents::
     :local:
     :local:
 
 

+ 2 - 4
celery/states.py

@@ -4,10 +4,6 @@ celery.states
 
 
 Built-in Task States.
 Built-in Task States.
 
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-
 .. _states:
 .. _states:
 
 
 States
 States
@@ -61,6 +57,8 @@ Misc.
 """
 """
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
+__all__ = ["precedence", "state"]
+
 #: State precedence.
 #: State precedence.
 #: None represents the precedence of an unknown state.
 #: None represents the precedence of an unknown state.
 #: Lower index means higher precedence.
 #: Lower index means higher precedence.

+ 8 - 0
celery/task/__init__.py

@@ -1,4 +1,12 @@
 # -*- coding: utf-8 -*-
 # -*- coding: utf-8 -*-
+"""
+
+celery.task
+===========
+
+Creating tasks and subtasks.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import warnings
 import warnings

+ 10 - 0
celery/task/base.py

@@ -1,3 +1,11 @@
+"""
+
+celery.task.base
+================
+
+The task implementation has been moved to :class:`celery.app.task`.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .. import current_app
 from .. import current_app
@@ -5,6 +13,8 @@ from ..app.task import Context, TaskType, BaseTask  # noqa
 from ..schedules import maybe_schedule
 from ..schedules import maybe_schedule
 from ..utils import deprecated, timeutils
 from ..utils import deprecated, timeutils
 
 
+__all__ = ["Task", "PeriodicTask"]
+
 Task = current_app.Task
 Task = current_app.Task
 
 
 
 

+ 10 - 0
celery/task/chords.py

@@ -1,3 +1,11 @@
+"""
+
+celery.task.chords
+==================
+
+Task chords (task set callbacks).
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .. import current_app
 from .. import current_app
@@ -6,6 +14,8 @@ from ..utils import uuid
 
 
 from .sets import TaskSet, subtask
 from .sets import TaskSet, subtask
 
 
+__all__ = ["Chord", "chord"]
+
 
 
 @current_app.task(name="celery.chord_unlock", max_retries=None)
 @current_app.task(name="celery.chord_unlock", max_retries=None)
 def _unlock_chord(setid, callback, interval=1, propagate=False,
 def _unlock_chord(setid, callback, interval=1, propagate=False,
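
A chord applies a callback once every task in a set has completed; a minimal sketch (the add and tsum tasks are examples, not part of this commit):

    from celery.task import task
    from celery.task.chords import chord

    @task
    def add(x, y):
        return x + y

    @task
    def tsum(numbers):
        return sum(numbers)

    # Apply ten add() tasks, then pass the list of results to tsum().
    result = chord(add.subtask((i, i)) for i in xrange(10))(tsum.subtask())
    print(result.get())   # 90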

+ 13 - 0
celery/task/control.py

@@ -1,3 +1,12 @@
+"""
+
+celery.task.control
+===================
+
+The worker remote control command client.
+For the server implementation see :mod:`celery.worker.control`.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -5,6 +14,10 @@ from kombu.pidbox import Mailbox
 
 
 from ..app import app_or_default
 from ..app import app_or_default
 
 
+__all__ = ["flatten_reply", "Inspect", "Control",
+           "broadcast", "rate_limit", "time_limit", "ping", "revoke",
+           "discard_all", "inspect"]
+
 
 
 def flatten_reply(reply):
 def flatten_reply(reply):
     nodes = {}
     nodes = {}
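
These are the client-side counterparts of the worker's remote control commands; typical usage, assuming at least one worker is online:

    from celery.task.control import inspect, rate_limit, revoke

    i = inspect()                      # talks to all online workers
    print(i.ping())                    # e.g. {"worker1.example.com": "pong"}
    print(i.active())                  # tasks currently being executed

    rate_limit("tasks.add", "10/m")    # at most ten tasks.add per minute
    revoke("hypothetical-task-id")     # tell workers to discard this task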

+ 11 - 0
celery/task/http.py

@@ -1,3 +1,11 @@
+"""
+
+celery.task.http
+================
+
+Webhook tasks.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
@@ -17,6 +25,9 @@ from .base import Task as BaseTask
 
 
 GET_METHODS = frozenset(["GET", "HEAD"])
 GET_METHODS = frozenset(["GET", "HEAD"])
 
 
+__all__ = ["InvalidResponseError", "RemoteExecuteError", "UnknownStatusError",
+           "MutableURL", "HttpDispatch", "HttpDispatchTask", "URL"]
+
 
 
 class InvalidResponseError(Exception):
 class InvalidResponseError(Exception):
     """The remote server gave an invalid response."""
     """The remote server gave an invalid response."""

+ 10 - 0
celery/task/sets.py

@@ -1,3 +1,11 @@
+"""
+
+celery.task.sets
+================
+
+Creating and applying task groups.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -10,6 +18,8 @@ from ..exceptions import CDeprecationWarning
 from ..utils import cached_property, reprcall, uuid
 from ..utils import cached_property, reprcall, uuid
 from ..utils.compat import UserList
 from ..utils.compat import UserList
 
 
+__all__ = ["subtask", "TaskSet"]
+
 TASKSET_DEPRECATION_TEXT = """\
 TASKSET_DEPRECATION_TEXT = """\
 Using this invocation of TaskSet is deprecated and will be removed
 Using this invocation of TaskSet is deprecated and will be removed
 in Celery v2.4!
 in Celery v2.4!
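
A TaskSet applies several subtasks in parallel and returns a TaskSetResult; a sketch (again with a made-up tasks.add task):

    from celery.task.sets import TaskSet, subtask

    job = TaskSet(tasks=[
        subtask("tasks.add", args=(2, 2)),
        subtask("tasks.add", args=(4, 4)),
    ])
    result = job.apply_async()
    print(result.completed_count())
    print(result.join(timeout=10))     # [4, 8]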

+ 1 - 15
celery/tests/test_utils/test_datastructures.py

@@ -5,7 +5,7 @@ import sys
 from Queue import Queue
 from Queue import Queue
 
 
 from celery.datastructures import ExceptionInfo, LRUCache
 from celery.datastructures import ExceptionInfo, LRUCache
-from celery.datastructures import LimitedSet, consume_queue
+from celery.datastructures import LimitedSet
 from celery.datastructures import AttributeDict, DictAttribute
 from celery.datastructures import AttributeDict, DictAttribute
 from celery.datastructures import ConfigurationView
 from celery.datastructures import ConfigurationView
 from celery.tests.utils import unittest
 from celery.tests.utils import unittest
@@ -95,20 +95,6 @@ class test_ExceptionInfo(unittest.TestCase):
         self.assertTrue(r)
         self.assertTrue(r)
 
 
 
 
-class test_utilities(unittest.TestCase):
-
-    def test_consume_queue(self):
-        x = Queue()
-        it = consume_queue(x)
-        with self.assertRaises(StopIteration):
-            it.next()
-        x.put("foo")
-        it = consume_queue(x)
-        self.assertEqual(it.next(), "foo")
-        with self.assertRaises(StopIteration):
-            it.next()
-
-
 class test_LimitedSet(unittest.TestCase):
 class test_LimitedSet(unittest.TestCase):
 
 
     def test_add(self):
     def test_add(self):

+ 17 - 0
celery/utils/__init__.py

@@ -1,3 +1,11 @@
+"""
+
+celery.utils
+============
+
+Utility functions that have not found a home in a generic module.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -40,6 +48,15 @@ DEPRECATION_FMT = """
     version %(removal)s. %(alternative)s
     version %(removal)s. %(alternative)s
 """
 """
 
 
+__all__ = ["uuid", "warn_deprecated", "deprecated", "lpmerge",
+           "promise", "mpromise", "maybe_promise", "noop",
+           "kwdict", "first", "firstmethod", "chunks",
+           "padlist", "is_iterable", "mattrgetter", "get_full_cls_name",
+           "fun_takes_kwargs", "get_cls_by_name", "instantiate",
+           "truncate_text", "abbr", "abbrtask", "isatty",
+           "textindent", "cwd_in_path", "find_module", "import_from_cwd",
+           "cry", "reprkwargs", "reprcall"]
+
 
 
 def warn_deprecated(description=None, deprecation=None, removal=None,
 def warn_deprecated(description=None, deprecation=None, removal=None,
         alternative=None):
         alternative=None):

+ 8 - 0
celery/utils/compat.py

@@ -1,3 +1,11 @@
+"""
+celery.utils.compat
+===================
+
+Backward compatible implementations of features
+only available in later Python versions.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 ############## py3k #########################################################
 ############## py3k #########################################################

+ 12 - 1
celery/utils/encoding.py

@@ -1,9 +1,20 @@
+"""
+
+celery.utils.encoding
+=====================
+
+Utilities to encode text, and to safely emit text from running
+applications without crashing with the infamous :exc:`UnicodeDecodeError`
+exception.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
 import traceback
 import traceback
 
 
-__all__ = ["default_encoding", "safe_str", "safe_repr"]
+__all__ = ["str_to_bytes", "bytes_to_str", "from_utf8",
+           "default_encoding", "safe_str", "safe_repr"]
 is_py3k = sys.version_info >= (3, 0)
 is_py3k = sys.version_info >= (3, 0)
 
 
 
 

+ 10 - 0
celery/utils/functional.py

@@ -1,3 +1,11 @@
+"""
+
+celery.utils.functional
+=======================
+
+Functional utilities.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -12,6 +20,8 @@ except ImportError:
 
 
 from celery.datastructures import LRUCache
 from celery.datastructures import LRUCache
 
 
+__all__ = ["maybe_list", "memoize"]
+
 KEYWORD_MARK = object()
 KEYWORD_MARK = object()
 
 
 
 

+ 11 - 0
celery/utils/mail.py

@@ -1,3 +1,12 @@
+"""
+
+celery.utils.mail
+=================
+
+How task error emails are formatted and sent.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
@@ -12,6 +21,8 @@ from celery.utils import get_symbol_by_name
 
 
 supports_timeout = sys.version_info >= (2, 6)
 supports_timeout = sys.version_info >= (2, 6)
 
 
+__all__ = ["SendmailWarning", "Message", "Mailer", "ErrorMail"]
+
 
 
 class SendmailWarning(UserWarning):
 class SendmailWarning(UserWarning):
     """Problem happened while sending the email message."""
     """Problem happened while sending the email message."""

+ 10 - 0
celery/utils/patch.py

@@ -1,9 +1,19 @@
+"""
+
+celery.utils.patch
+==================
+
+Monkey patch to ensure process aware loggers.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import logging
 import logging
 
 
 _process_aware = False
 _process_aware = False
 
 
+__all__ = ["ensure_process_aware_logger"]
+
 
 
 def _patch_logger_class():
 def _patch_logger_class():
     """Make sure process name is recorded when loggers are used."""
     """Make sure process name is recorded when loggers are used."""

+ 12 - 1
celery/utils/serialization.py

@@ -1,3 +1,11 @@
+"""
+
+celery.utils.serialization
+==========================
+
+Serialization utilities for safely pickling exceptions.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import inspect
 import inspect
@@ -27,10 +35,13 @@ if sys.version_info < (2, 6):  # pragma: no cover
 else:
 else:
     pickle = cpickle or pypickle
     pickle = cpickle or pypickle
 
 
-
 #: List of base classes we probably don't want to reduce to.
 #: List of base classes we probably don't want to reduce to.
 unwanted_base_classes = (StandardError, Exception, BaseException, object)
 unwanted_base_classes = (StandardError, Exception, BaseException, object)
 
 
+__all__ = ["subclass_exception", "find_nearest_unpickleable_exception",
+           "create_exception_cls", "UnpickleableExceptionWrapper",
+           "get_pickleable_exception", "get_pickled_exception"]
+
 
 
 if sys.version_info < (2, 5):  # pragma: no cover
 if sys.version_info < (2, 5):  # pragma: no cover
 
 

+ 17 - 6
celery/utils/term.py

@@ -1,12 +1,11 @@
 """
 """
 
 
-term utils.
+celery.utils.term
+=================
+
+Utilities for terminals and terminal colors.
+
 
 
->>> c = colored(enabled=True)
->>> print(str(c.red("the quick "), c.blue("brown ", c.bold("fox ")),
-              c.magenta(c.underline("jumps over")),
-              c.yellow(" the lazy "),
-              c.green("dog ")))
 
 
 """
 """
 from __future__ import absolute_import
 from __future__ import absolute_import
@@ -15,6 +14,8 @@ import platform
 
 
 from .encoding import safe_str
 from .encoding import safe_str
 
 
+__all__ = ["colored"]
+
 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
 OP_SEQ = "\033[%dm"
 OP_SEQ = "\033[%dm"
 RESET_SEQ = "\033[0m"
 RESET_SEQ = "\033[0m"
@@ -26,6 +27,16 @@ IS_WINDOWS = SYSTEM == "Windows"
 
 
 
 
 class colored(object):
 class colored(object):
+    """Terminal colored text.
+
+    Example::
+        >>> c = colored(enabled=True)
+        >>> print(str(c.red("the quick "), c.blue("brown ", c.bold("fox ")),
+        ...       c.magenta(c.underline("jumps over")),
+        ...       c.yellow(" the lazy "),
+        ...       c.green("dog ")))
+
+    """
 
 
     def __init__(self, *s, **kwargs):
     def __init__(self, *s, **kwargs):
         self.s = s
         self.s = s

+ 5 - 0
celery/utils/timer2.py

@@ -25,6 +25,11 @@ __docformat__ = "restructuredtext"
 
 
 DEFAULT_MAX_INTERVAL = 2
 DEFAULT_MAX_INTERVAL = 2
 
 
+__all__ = ["TimedFunctionFailed", "to_timestamp",
+           "Entry", "Schedule", "Timer",
+           "default_timer", "apply_after", "apply_at",
+           "apply_interval", "enter_after", "enter",
+           "exit_after", "cancel", "clear"]
 
 
 class TimedFunctionFailed(UserWarning):
 class TimedFunctionFailed(UserWarning):
     pass
     pass

+ 27 - 7
celery/utils/timeutils.py

@@ -1,3 +1,11 @@
+"""
+
+celery.utils.timeutils
+======================
+
+This module contains various utilities relating to time and date.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import math
 import math
@@ -5,6 +13,10 @@ import math
 from datetime import datetime, timedelta
 from datetime import datetime, timedelta
 from dateutil.parser import parse as parse_iso8601
 from dateutil.parser import parse as parse_iso8601
 
 
+__all__ = ["maybe_timedelta", "timedelta_seconds", "delta_resolution",
+           "remaining", "rate", "weekday", "humanize_seconds",
+           "maybe_iso8601"]
+
 DAYNAMES = "sun", "mon", "tue", "wed", "thu", "fri", "sat"
 DAYNAMES = "sun", "mon", "tue", "wed", "thu", "fri", "sat"
 WEEKDAYS = dict((name, dow) for name, dow in zip(DAYNAMES, range(7)))
 WEEKDAYS = dict((name, dow) for name, dow in zip(DAYNAMES, range(7)))
 
 
@@ -27,18 +39,26 @@ def maybe_timedelta(delta):
     return delta
     return delta
 
 
 
 
-def timedelta_seconds(delta):  # pragma: no cover
+if HAVE_TIMEDELTA_TOTAL_SECONDS:   # pragma: no cover
+
+    def timedelta_seconds(delta):
     """Convert :class:`datetime.timedelta` to seconds.
     """Convert :class:`datetime.timedelta` to seconds.
 
 
     Doesn't account for negative values.
     Doesn't account for negative values.
 
 
     """
     """
-    if HAVE_TIMEDELTA_TOTAL_SECONDS:
-        # Should return 0 for negative seconds
-        return max(delta.total_seconds(), 0)
-    if delta.days < 0:
-        return 0
-    return delta.days * 86400 + delta.seconds + (delta.microseconds / 10e5)
+    return max(delta.total_seconds(), 0)
+else:  # pragma: no cover
+
+    def timedelta_seconds(delta):  # noqa
+        """Convert :class:`datetime.timedelta` to seconds.
+
+        Doesn't account for negative values.
+
+        """
+        if delta.days < 0:
+            return 0
+        return delta.days * 86400 + delta.seconds + (delta.microseconds / 10e5)
 
 
 
 
 def delta_resolution(dt, delta):
 def delta_resolution(dt, delta):
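
For reference, the exported helpers behave roughly like this:

    from datetime import timedelta

    from celery.utils.timeutils import maybe_timedelta, rate, timedelta_seconds

    assert timedelta_seconds(timedelta(minutes=2)) == 120.0
    assert timedelta_seconds(timedelta(seconds=-10)) == 0    # negative deltas clamp to zero
    assert maybe_timedelta(30) == timedelta(seconds=30)      # plain seconds are converted
    print(rate("100/m"))    # rate strings become "per second" floats, here ~1.67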

+ 10 - 0
celery/worker/__init__.py

@@ -1,3 +1,11 @@
+"""
+
+celery.worker
+=============
+
+This is the Celery worker process.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import logging
 import logging
@@ -19,6 +27,8 @@ from ..utils import noop, instantiate
 from . import state
 from . import state
 from .buckets import TaskBucket, FastQueue
 from .buckets import TaskBucket, FastQueue
 
 
+__all__ = ["WorkController"]
+
 RUN = 0x1
 RUN = 0x1
 CLOSE = 0x2
 CLOSE = 0x2
 TERMINATE = 0x3
 TERMINATE = 0x3

+ 15 - 0
celery/worker/autoscale.py

@@ -1,3 +1,16 @@
+"""
+
+celery.worker.autoscale
+=======================
+
+This module implements the internal thread responsible
+for growing and shrinking the pool according to the
+current autoscale settings.
+
+The autoscale thread is only enabled if autoscale
+has been enabled on the command line.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -10,6 +23,8 @@ from time import sleep, time
 
 
 from . import state
 from . import state
 
 
+__all__ = ["Autoscaler"]
+
 
 
 class Autoscaler(threading.Thread):
 class Autoscaler(threading.Thread):
 
 

+ 17 - 0
celery/worker/buckets.py

@@ -1,3 +1,17 @@
+"""
+
+celery.worker.buckets
+=====================
+
+This module implements the rate limiting of tasks,
+by having a token bucket queue for each task type.
+When a task is allowed to be processed it's moved
+over to the ``ready_queue``.
+
+The :mod:`celery.worker.mediator` is then responsible
+for moving tasks from the ready_queue to the worker pool.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 from __future__ import with_statement
 from __future__ import with_statement
 
 
@@ -11,6 +25,9 @@ from ..datastructures import TokenBucket
 from ..utils import timeutils
 from ..utils import timeutils
 from ..utils.compat import izip_longest, chain_from_iterable
 from ..utils.compat import izip_longest, chain_from_iterable
 
 
+__all__ = ["RateLimitExceeded", "TaskBucket",
+           "FastQueue", "TokenBucketQueue"]
+
 
 
 class RateLimitExceeded(Exception):
 class RateLimitExceeded(Exception):
     """The token buckets rate limit has been exceeded."""
     """The token buckets rate limit has been exceeded."""

+ 8 - 3
celery/worker/consumer.py

@@ -1,8 +1,8 @@
-from __future__ import absolute_import
-from __future__ import with_statement
-
 """
 """
 
 
+celery.worker.consumer
+======================
+
 This module contains the component responsible for consuming messages
 This module contains the component responsible for consuming messages
 from the broker, processing the messages and keeping the broker connections
 from the broker, processing the messages and keeping the broker connections
 up and running.
 up and running.
@@ -70,6 +70,9 @@ up and running.
   early, *then* close the connection.
   early, *then* close the connection.
 
 
 """
 """
+from __future__ import absolute_import
+from __future__ import with_statement
+
 import socket
 import socket
 import sys
 import sys
 import threading
 import threading
@@ -87,6 +90,8 @@ from .job import TaskRequest, InvalidTaskError
 from .control.registry import Panel
 from .control.registry import Panel
 from .heartbeat import Heart
 from .heartbeat import Heart
 
 
+__all__ = ["QoS", "Consumer"]
+
 RUN = 0x1
 RUN = 0x1
 CLOSE = 0x2
 CLOSE = 0x2
 
 

+ 9 - 0
celery/worker/control/__init__.py

@@ -1,3 +1,12 @@
+"""
+
+celery.worker.control
+=====================
+
+Remote control commands.
+See :mod:`celery.worker.control.builtins`.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from . import registry
 from . import registry

+ 16 - 0
celery/worker/control/builtins.py

@@ -1,3 +1,11 @@
+"""
+
+celery.worker.control.builtins
+==============================
+
+This module contains the built-in remote control commands.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import sys
 import sys
@@ -13,6 +21,14 @@ from ..state import revoked
 
 
 from .registry import Panel
 from .registry import Panel
 
 
+__all__ = ["revoke", "enable_events", "disable_events",
+           "heartbeat", "rate_limit", "time_limit", "stats",
+           "dump_scheduled", "dump_reserved", "dump_active",
+           "dump_revoked", "dump_tasks", "ping",
+           "pool_grow", "pool_shrink", "autoscale",
+           "shutdown", "add_consumer", "cancel_consumer",
+           "acvtive_queues"]
+
 TASK_INFO_FIELDS = ("exchange", "routing_key", "rate_limit")
 TASK_INFO_FIELDS = ("exchange", "routing_key", "rate_limit")
 
 
 
 

+ 11 - 0
celery/worker/control/registry.py

@@ -1,7 +1,18 @@
+"""
+
+celery.worker.control.registry
+==============================
+
+The registry keeps track of available remote control commands,
+and can be used to register new commands.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from ...utils.compat import UserDict
 from ...utils.compat import UserDict
 
 
+__all__ = ["Panel"]
+
 
 
 class Panel(UserDict):
 class Panel(UserDict):
     data = dict()                               # Global registry.
     data = dict()                               # Global registry.

+ 11 - 0
celery/worker/heartbeat.py

@@ -1,7 +1,18 @@
+"""
+
+celery.worker.heartbeat
+=======================
+
+This is an internal thread that sends heartbeat events
+at recurring intervals.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 from .state import SOFTWARE_INFO
 from .state import SOFTWARE_INFO
 
 
+__all__ = ["Heart"]
+
 
 
 class Heart(object):
 class Heart(object):
     """Timer sending heartbeats at regular intervals.
     """Timer sending heartbeats at regular intervals.

+ 14 - 0
celery/worker/job.py

@@ -1,3 +1,13 @@
+"""
+
+celery.worker.job
+=================
+
+This module defines the :class:`TaskRequest` class,
+which specifies how tasks are executed and task state is
+published.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import os
 import os
@@ -22,6 +32,9 @@ from ..utils.timeutils import maybe_iso8601
 
 
 from . import state
 from . import state
 
 
+__all__ = ["InvalidTaskError", "WorkerTaskTrace",
+           "execute_and_trace", "TaskRequest"]
+
 #: Keys to keep from the message delivery info.  The values
 #: Keys to keep from the message delivery info.  The values
 #: of these keys must be pickleable.
 #: of these keys must be pickleable.
 WANTED_DELIVERY_INFO = ("exchange", "routing_key", "consumer_tag", )
 WANTED_DELIVERY_INFO = ("exchange", "routing_key", "consumer_tag", )
@@ -29,6 +42,7 @@ WANTED_DELIVERY_INFO = ("exchange", "routing_key", "consumer_tag", )
 
 
 class InvalidTaskError(Exception):
 class InvalidTaskError(Exception):
     """The task has invalid data or is not properly constructed."""
     """The task has invalid data or is not properly constructed."""
+    pass
 
 
 
 
 if sys.version_info >= (3, 0):
 if sys.version_info >= (3, 0):

+ 17 - 0
celery/worker/mediator.py

@@ -1,3 +1,18 @@
+"""
+
+celery.worker.mediator
+======================
+
+The mediator is an internal thread that moves tasks
+from an internal :class:`Queue` to the worker pool.
+
+This is only used if rate limits are enabled, as it moves
+messages from the rate limited queue (which holds tasks
+that are allowed to be processed) to the pool. Disabling
+rate limits will also disable this machinery,
+and can improve performance.
+
+"""
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import os
 import os
@@ -9,6 +24,8 @@ from Queue import Empty
 
 
 from ..app import app_or_default
 from ..app import app_or_default
 
 
+__all__ = ["Mediator"]
+
 
 
 class Mediator(threading.Thread):
 class Mediator(threading.Thread):
     """Thread continuously moving tasks from the ready queue onto the pool."""
     """Thread continuously moving tasks from the ready queue onto the pool."""

+ 17 - 0
celery/worker/state.py

@@ -1,3 +1,15 @@
+"""
+
+celery.worker.state
+===================
+
+Internal worker state (global)
+
+This includes the currently active and reserved tasks, statistics,
+and revoked tasks.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import absolute_import
 
 
 import os
 import os
@@ -10,6 +22,11 @@ from .. import __version__
 from ..datastructures import LimitedSet
 from ..datastructures import LimitedSet
 from ..utils import cached_property
 from ..utils import cached_property
 
 
+__all__ = ["SOFTWARE_INFO", "REVOKES_MAX", "REVOKE_EXPIRES",
+           "reserved_requests", "active_requests", "total_count", "revoked",
+           "task_reserved", "task_accepted", "task_ready",
+           "Persistent"]
+
 #: Worker software/platform information.
 #: Worker software/platform information.
 SOFTWARE_INFO = {"sw_ident": "celeryd",
 SOFTWARE_INFO = {"sw_ident": "celeryd",
                  "sw_ver": __version__,
                  "sw_ver": __version__,