Browse Source

Big celery.conf clean-up

Ask Solem 15 years ago
parent
commit
5e40be713e
5 changed files with 252 additions and 404 deletions
  1. 1 1
      celery/bin/celeryd.py
  2. 77 376
      celery/conf.py
  3. 14 23
      celery/tests/test_conf.py
  4. 1 1
      celery/worker/__init__.py
  5. 159 3
      docs/reference/celery.conf.rst

+ 1 - 1
celery/bin/celeryd.py

@@ -111,7 +111,7 @@ OPTION_LIST = (
             action="store_true", dest="run_clockservice",
             help="Also run the celerybeat periodic task scheduler. \
                   Please note that only one instance must be running."),
-    optparse.make_option('-E', '--events', default=conf.CELERY_SEND_EVENTS,
+    optparse.make_option('-E', '--events', default=conf.SEND_EVENTS,
             action="store_true", dest="events",
             help="Send events so celery can be monitored by e.g. celerymon."),
     optparse.make_option('-d', '--detach', '--daemon', default=False,

+ 77 - 376
celery/conf.py

@@ -4,42 +4,8 @@ from datetime import timedelta
 from celery.registry import tasks
 from celery.loaders import settings
 
-DEFAULT_AMQP_EXCHANGE = "celery"
-DEFAULT_AMQP_PUBLISHER_ROUTING_KEY = "celery"
-DEFAULT_AMQP_CONSUMER_ROUTING_KEY = "celery"
-DEFAULT_AMQP_CONSUMER_QUEUE = "celery"
-DEFAULT_AMQP_EXCHANGE_TYPE = "direct"
-DEFAULT_DAEMON_CONCURRENCY = 0 # defaults to cpu count
-DEFAULT_DAEMON_PID_FILE = "celeryd.pid"
 DEFAULT_LOG_FMT = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'
-DEFAULT_DAEMON_LOG_LEVEL = "WARN"
-DEFAULT_DAEMON_LOG_FILE = "celeryd.log"
-DEFAULT_AMQP_CONNECTION_TIMEOUT = 4
-DEFAULT_ALWAYS_EAGER = False
-DEFAULT_TASK_RESULT_EXPIRES = timedelta(days=5)
-DEFAULT_AMQP_CONNECTION_RETRY = True
-DEFAULT_AMQP_CONNECTION_MAX_RETRIES = 100
-DEFAULT_TASK_SERIALIZER = "pickle"
-DEFAULT_BACKEND = "database"
-DEFAULT_DISABLE_RATE_LIMITS = False
-DEFAULT_CELERYBEAT_PID_FILE = "celerybeat.pid"
-DEFAULT_CELERYBEAT_LOG_LEVEL = "INFO"
-DEFAULT_CELERYBEAT_LOG_FILE = "celerybeat.log"
-DEFAULT_CELERYBEAT_SCHEDULE_FILENAME = "celerybeat-schedule"
-DEFAULT_CELERYBEAT_MAX_LOOP_INTERVAL = 5 * 60 # five minutes.
-DEFAULT_CELERYMON_PID_FILE = "celerymon.pid"
-DEFAULT_CELERYMON_LOG_LEVEL = "INFO"
-DEFAULT_CELERYMON_LOG_FILE = "celerymon.log"
-DEFAULT_SEND_EVENTS = False
-DEFAULT_STORE_ERRORS_EVEN_IF_IGNORED = False
 
-
-"""
-.. data:: LOG_LEVELS
-
-    Mapping of log level names to :mod:`logging` module constants.
-
-"""
 LOG_LEVELS = {
     "DEBUG": logging.DEBUG,
     "INFO": logging.INFO,
@@ -50,352 +16,87 @@ LOG_LEVELS = {
     "FATAL": logging.FATAL,
 }
 
-"""
-.. data:: LOG_FORMAT
-
-    The format to use for log messages.
-
-"""
-LOG_FORMAT = getattr(settings, "CELERYD_DAEMON_LOG_FORMAT",
-                     DEFAULT_LOG_FMT)
-
-"""
-.. data:: DAEMON_LOG_FILE
-
-    Filename of the daemon log file.
-
-"""
-DAEMON_LOG_FILE = getattr(settings, "CELERYD_LOG_FILE",
-                          DEFAULT_DAEMON_LOG_FILE)
-
-"""
-.. data:: DAEMON_LOG_LEVEL
-
-
-"""
-DAEMON_LOG_LEVEL = LOG_LEVELS[getattr(settings, "CELERYD_DAEMON_LOG_LEVEL",
-                                      DEFAULT_DAEMON_LOG_LEVEL).upper()]
-
-"""
-.. data:: DAEMON_PID_FILE
-
-    Full path to the daemon pidfile.
-
-"""
-DAEMON_PID_FILE = getattr(settings, "CELERYD_PID_FILE",
-                          DEFAULT_DAEMON_PID_FILE)
-
-"""
-.. data:: DAEMON_CONCURRENCY
-
-    The number of concurrent worker processes.
-
-"""
-DAEMON_CONCURRENCY = getattr(settings, "CELERYD_CONCURRENCY",
-                             DEFAULT_DAEMON_CONCURRENCY)
-
-"""
-.. data:: AMQP_EXCHANGE
-
-    Name of the AMQP exchange.
-
-"""
-AMQP_EXCHANGE = getattr(settings, "CELERY_AMQP_EXCHANGE",
-                        DEFAULT_AMQP_EXCHANGE)
-
-
-"""
-.. data:: AMQP_EXCHANGE_TYPE
-
-The exchange type.
-
-"""
-AMQP_EXCHANGE_TYPE = getattr(settings, "CELERY_AMQP_EXCHANGE_TYPE",
-                        DEFAULT_AMQP_EXCHANGE_TYPE)
-
-"""
-.. data:: AMQP_PUBLISHER_ROUTING_KEY
-
-    The default AMQP routing key used when publishing tasks.
-
-"""
-AMQP_PUBLISHER_ROUTING_KEY = getattr(settings,
-                                "CELERY_AMQP_PUBLISHER_ROUTING_KEY",
-                                DEFAULT_AMQP_PUBLISHER_ROUTING_KEY)
-
-"""
-.. data:: AMQP_CONSUMER_ROUTING_KEY
-
-    The AMQP routing key used when consuming tasks.
-
-"""
-AMQP_CONSUMER_ROUTING_KEY = getattr(settings,
-                                "CELERY_AMQP_CONSUMER_ROUTING_KEY",
-                                DEFAULT_AMQP_CONSUMER_ROUTING_KEY)
-
-"""
-.. data:: AMQP_CONSUMER_QUEUE
-
-    The name of the AMQP queue.
-
-"""
-AMQP_CONSUMER_QUEUE = getattr(settings, "CELERY_AMQP_CONSUMER_QUEUE",
-                              DEFAULT_AMQP_CONSUMER_QUEUE)
-
-
-"""
-.. data:: AMQP_CONSUMER_QUEUES
-
-    Dictionary defining multiple AMQP queues.
+_DEFAULTS = {
+    "CELERY_AMQP_EXCHANGE": "celery",
+    "CELERY_AMQP_PUBLISHER_ROUTING_KEY": "celery",
+    "CELERY_AMQP_CONSUMER_ROUTING_KEY": "celery",
+    "CELERY_AMQP_CONSUMER_QUEUE": "celery",
+    "CELERY_AMQP_EXCHANGE_TYPE": "direct",
+    "CELERYD_CONCURRENCY": 0, # defaults to cpu count
+    "CELERYD_PID_FILE": "celeryd.pid",
+    "CELERYD_DAEMON_LOG_FORMAT": DEFAULT_LOG_FMT,
+    "CELERYD_DAEMON_LOG_LEVEL": "WARN",
+    "CELERYD_LOG_FILE": "celeryd.log",
+    "CELERY_ALWAYS_EAGER": False,
+    "CELERY_TASK_RESULT_EXPIRES": timedelta(days=5),
+    "CELERY_AMQP_CONNECTION_TIMEOUT": 4,
+    "CELERY_AMQP_CONNECTION_RETRY": True,
+    "CELERY_AMQP_CONNECTION_MAX_RETRIES": 100,
+    "CELERY_TASK_SERIALIZER": "pickle",
+    "CELERY_BACKEND": "database",
+    "CELERY_DISABLE_RATE_LIMITS": False,
+    "CELERYBEAT_PID_FILE": "celerybeat.pid",
+    "CELERYBEAT_LOG_LEVEL": "INFO",
+    "CELERYBEAT_LOG_FILE": "celerybeat.log",
+    "CELERYBEAT_SCHEDULE_FILENAME": "celerybeat-schedule",
+    "CELERYBEAT_MAX_LOOP_INTERVAL": 5 * 60, # five minutes.
+    "CELERYMON_PID_FILE": "celerymon.pid",
+    "CELERYMON_LOG_LEVEL": "INFO",
+    "CELERYMON_LOG_FILE": "celerymon.log",
+    "CELERY_SEND_EVENTS": False,
+    "CELERY_STORE_ERRORS_EVEN_IF_IGNORED": False,
+}
 
-"""
+def _get(name, default=None):
+    if default is None:
+        default = _DEFAULTS.get(name)
+    return getattr(settings, name, default)
+
+SEND_EVENTS = _get("CELERY_SEND_EVENTS")
+ALWAYS_EAGER = _get("CELERY_ALWAYS_EAGER")
+CELERY_BACKEND = _get("CELERY_BACKEND")
+CELERY_CACHE_BACKEND = _get("CELERY_CACHE_BACKEND")
+DEFAULT_RATE_LIMIT = _get("CELERY_DEFAULT_RATE_LIMIT")
+DISABLE_RATE_LIMITS = _get("CELERY_DISABLE_RATE_LIMITS")
+STORE_ERRORS_EVEN_IF_IGNORED = _get("CELERY_STORE_ERRORS_EVEN_IF_IGNORED")
+TASK_SERIALIZER = _get("CELERY_TASK_SERIALIZER")
+TASK_RESULT_EXPIRES = _get("CELERY_TASK_RESULT_EXPIRES")
+# Make sure TASK_RESULT_EXPIRES is a timedelta.
+if isinstance(TASK_RESULT_EXPIRES, int):
+    TASK_RESULT_EXPIRES = timedelta(seconds=TASK_RESULT_EXPIRES)
+SEND_CELERY_TASK_ERROR_EMAILS = _get("SEND_CELERY_TASK_ERROR_EMAILS",
+                                     not settings.DEBUG)
+
+AMQP_EXCHANGE = _get("CELERY_AMQP_EXCHANGE")
+AMQP_EXCHANGE_TYPE = _get("CELERY_AMQP_EXCHANGE_TYPE")
+AMQP_PUBLISHER_ROUTING_KEY = _get("CELERY_AMQP_PUBLISHER_ROUTING_KEY")
+AMQP_CONSUMER_ROUTING_KEY = _get("CELERY_AMQP_CONSUMER_ROUTING_KEY")
+AMQP_CONSUMER_QUEUE = _get("CELERY_AMQP_CONSUMER_QUEUE")
 DEFAULT_AMQP_CONSUMER_QUEUES = {
         AMQP_CONSUMER_QUEUE: {
             "exchange": AMQP_EXCHANGE,
             "routing_key": AMQP_CONSUMER_ROUTING_KEY,
             "exchange_type": AMQP_EXCHANGE_TYPE,
         }
-}
-
-AMQP_CONSUMER_QUEUES = getattr(settings, "CELERY_AMQP_CONSUMER_QUEUES",
-                              DEFAULT_AMQP_CONSUMER_QUEUES)
-
-"""
-.. data:: AMQP_CONNECTION_TIMEOUT
-
-    The timeout in seconds before we give up establishing a connection
-    to the AMQP server.
-
-"""
-AMQP_CONNECTION_TIMEOUT = getattr(settings, "CELERY_AMQP_CONNECTION_TIMEOUT",
-                                  DEFAULT_AMQP_CONNECTION_TIMEOUT)
-
-"""
-.. data:: SEND_CELERY_TASK_ERROR_EMAILS
-
-    If set to ``True``, errors in tasks will be sent to admins by e-mail.
-    If unset, it will send the e-mails if ``settings.DEBUG`` is False.
-
-"""
-SEND_CELERY_TASK_ERROR_EMAILS = getattr(settings,
-                                        "SEND_CELERY_TASK_ERROR_EMAILS",
-                                        not settings.DEBUG)
-
-"""
-.. data:: ALWAYS_EAGER
-
-    Always execute tasks locally, don't send to the queue.
-
-"""
-ALWAYS_EAGER = getattr(settings, "CELERY_ALWAYS_EAGER",
-                       DEFAULT_ALWAYS_EAGER)
-
-"""
-.. data: TASK_RESULT_EXPIRES
-
-    Task tombstone expire time in seconds.
-
-"""
-TASK_RESULT_EXPIRES = getattr(settings, "CELERY_TASK_RESULT_EXPIRES",
-                              DEFAULT_TASK_RESULT_EXPIRES)
-
-# Make sure TASK_RESULT_EXPIRES is a timedelta.
-if isinstance(TASK_RESULT_EXPIRES, int):
-    TASK_RESULT_EXPIRES = timedelta(seconds=TASK_RESULT_EXPIRES)
-
-"""
-.. data:: AMQP_CONNECTION_RETRY
-
-Automatically try to re-establish the connection to the AMQP broker if
-it's lost.
-
-"""
-AMQP_CONNECTION_RETRY = getattr(settings, "CELERY_AMQP_CONNECTION_RETRY",
-                                DEFAULT_AMQP_CONNECTION_RETRY)
-
-"""
-.. data:: AMQP_CONNECTION_MAX_RETRIES
-
-Maximum number of retries before we give up re-establishing a connection
-to the AMQP broker.
-
-If this is set to ``0`` or ``None``, we will retry forever.
-
-Default is ``100`` retries.
-
-"""
-AMQP_CONNECTION_MAX_RETRIES = getattr(settings,
-                                      "CELERY_AMQP_CONNECTION_MAX_RETRIES",
-                                      DEFAULT_AMQP_CONNECTION_MAX_RETRIES)
-
-"""
-.. data:: TASK_SERIALIZER
-
-A string identifying the default serialization
-method to use. Can be ``pickle`` (default),
-``json``, ``yaml``, or any custom serialization methods that have
-been registered with :mod:`carrot.serialization.registry`.
-
-Default is ``pickle``.
-
-"""
-TASK_SERIALIZER = getattr(settings, "CELERY_TASK_SERIALIZER",
-                          DEFAULT_TASK_SERIALIZER)
-
-
-"""
-
-.. data:: CELERY_BACKEND
-
-The backend used to store task results (tombstones).
-
-"""
-CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
-
-
-"""
-
-.. data:: CELERY_CACHE_BACKEND
-
-Use a custom cache backend for celery. If not set the django-global
-cache backend in ``CACHE_BACKEND`` will be used.
-
-"""
-CELERY_CACHE_BACKEND = getattr(settings, "CELERY_CACHE_BACKEND", None)
-
-
-"""
-
-.. data:: DEFAULT_RATE_LIMIT
-
-The default rate limit applied to all tasks which doesn't have a custom
-rate limit defined. (Default: None)
-
-"""
-DEFAULT_RATE_LIMIT = getattr(settings, "CELERY_DEFAULT_RATE_LIMIT", None)
-
-"""
-
-.. data:: DISABLE_RATE_LIMITS
-
-If ``True`` all rate limits will be disabled and all tasks will be executed
-as soon as possible.
-
-"""
-DISABLE_RATE_LIMITS = getattr(settings, "CELERY_DISABLE_RATE_LIMITS",
-                              DEFAULT_DISABLE_RATE_LIMITS)
-
-"""
-
-.. data:: CELERYBEAT_PID_FILE
-
-Name of celerybeats pid file.
-Default is: ``celerybeat.pid``.
-
-"""
-CELERYBEAT_PID_FILE = getattr(settings, "CELERYBEAT_PID_FILE",
-                              DEFAULT_CELERYBEAT_PID_FILE)
-
-
-"""
-
-.. data:: CELERYBEAT_LOG_LEVEL
-
-Default log level for celerybeat.
-Default is: ``INFO``.
-
-"""
-CELERYBEAT_LOG_LEVEL = getattr(settings, "CELERYBEAT_LOG_LEVEL",
-                               DEFAULT_CELERYBEAT_LOG_LEVEL)
-
-"""
-
-.. data:: CELERYBEAT_LOG_FILE
-
-Default log file for celerybeat.
-Default is: ``celerybeat.log``.
-
-"""
-CELERYBEAT_LOG_FILE = getattr(settings, "CELERYBEAT_LOG_FILE",
-                              DEFAULT_CELERYBEAT_LOG_FILE)
-
-"""
-
-.. data:: CELERYBEAT_SCHEDULE_FILENAME
-
-Name of the persistent schedule database file.
-Default is: ``celerybeat-schedule``.
-
-"""
-CELERYBEAT_SCHEDULE_FILENAME = getattr(settings,
-                                       "CELERYBEAT_SCHEDULE_FILENAME",
-                                       DEFAULT_CELERYBEAT_SCHEDULE_FILENAME)
-
-
-"""
-
-.. data:: CELERYBEAT_MAX_LOOP_INTERVAL
-
-The maximum number of seconds celerybeat is allowed to sleep between
-checking the schedule. The default is 5 minutes, which means celerybeat can
-only sleep a maximum of 5 minutes after checking the schedule run-times for a
-periodic task to apply. If you change the run_times of periodic tasks at
-run-time, you may consider lowering this value for changes to take effect
-faster (A value of 5 minutes, means the changes will take effect in 5 minutes
-at maximum).
-
-"""
-
-CELERYBEAT_MAX_LOOP_INTERVAL = getattr(settings,
-                                       "CELERYBEAT_MAX_LOOP_INTERVAL",
-                                       DEFAULT_CELERYBEAT_MAX_LOOP_INTERVAL)
-
-"""
-.. data:: CELERYMON_PID_FILE
-
-Name of celerymons pid file.
-Default is: ``celerymon.pid``.
-
-"""
-CELERYMON_PID_FILE = getattr(settings, "CELERYMON_PID_FILE",
-                              DEFAULT_CELERYMON_PID_FILE)
-
-
-"""
-
-.. data:: CELERYMON_LOG_LEVEL
-
-Default log level for celerymon.
-Default is: ``INFO``.
-
-"""
-CELERYMON_LOG_LEVEL = getattr(settings, "CELERYMON_LOG_LEVEL",
-                               DEFAULT_CELERYMON_LOG_LEVEL)
-
-"""
-
-.. data:: CELERYMON_LOG_FILE
-
-Default log file for celerymon.
-Default is: ``celerymon.log``.
-
-"""
-CELERYMON_LOG_FILE = getattr(settings, "CELERYMON_LOG_FILE",
-                              DEFAULT_CELERYMON_LOG_FILE)
-
-
-"""
-
-.. data:: CELERY_SEND_EVENTS
-
-If set, celery will send events that can be captured by monitors like
-``celerymon``.
-Default is: ``False``.
-
-"""
-CELERY_SEND_EVENTS = getattr(settings, "CELERY_SEND_EVENTS",
-                             DEFAULT_SEND_EVENTS)
-
-STORE_ERRORS_EVEN_IF_IGNORED = getattr(settings,
-                                       "CELERY_STORE_ERRORS_EVEN_IF_IGNORED",
-                                       DEFAULT_STORE_ERRORS_EVEN_IF_IGNORED)
+AMQP_CONSUMER_QUEUES = _get("CELERY_AMQP_CONSUMER_QUEUES",
+                            DEFAULT_AMQP_CONSUMER_QUEUES)
+AMQP_CONNECTION_TIMEOUT = _get("CELERY_AMQP_CONNECTION_TIMEOUT")
+AMQP_CONNECTION_RETRY = _get("CELERY_AMQP_CONNECTION_RETRY")
+AMQP_CONNECTION_MAX_RETRIES = _get("CELERY_AMQP_CONNECTION_MAX_RETRIES")
+
+LOG_FORMAT = _get("CELERYD_DAEMON_LOG_FORMAT")
+DAEMON_LOG_FILE = _get("CELERYD_LOG_FILE")
+DAEMON_LOG_LEVEL = _get("CELERYD_DAEMON_LOG_LEVEL")
+DAEMON_LOG_LEVEL = LOG_LEVELS[DAEMON_LOG_LEVEL.upper()]
+DAEMON_PID_FILE = _get("CELERYD_PID_FILE")
+DAEMON_CONCURRENCY = _get("CELERYD_CONCURRENCY")
+
+CELERYBEAT_PID_FILE = _get("CELERYBEAT_PID_FILE")
+CELERYBEAT_LOG_LEVEL = _get("CELERYBEAT_LOG_LEVEL")
+CELERYBEAT_LOG_FILE = _get("CELERYBEAT_LOG_FILE")
+CELERYBEAT_SCHEDULE_FILENAME = _get("CELERYBEAT_SCHEDULE_FILENAME")
+CELERYBEAT_MAX_LOOP_INTERVAL = _get("CELERYBEAT_MAX_LOOP_INTERVAL")
+
+CELERYMON_PID_FILE = _get("CELERYMON_PID_FILE")
+CELERYMON_LOG_LEVEL = _get("CELERYMON_LOG_LEVEL")
+CELERYMON_LOG_FILE = _get("CELERYMON_LOG_FILE")

+ 14 - 23
celery/tests/test_conf.py

@@ -4,42 +4,33 @@ from django.conf import settings
 
 
 SETTING_VARS = (
-    ("CELERY_AMQP_CONSUMER_QUEUE", "AMQP_CONSUMER_QUEUE",
-        "DEFAULT_AMQP_CONSUMER_QUEUE"),
-    ("CELERY_AMQP_PUBLISHER_ROUTING_KEY", "AMQP_PUBLISHER_ROUTING_KEY",
-        "DEFAULT_AMQP_PUBLISHER_ROUTING_KEY"),
-    ("CELERY_AMQP_CONSUMER_ROUTING_KEY", "AMQP_CONSUMER_ROUTING_KEY",
-        "DEFAULT_AMQP_CONSUMER_ROUTING_KEY"),
-    ("CELERY_AMQP_EXCHANGE_TYPE", "AMQP_EXCHANGE_TYPE",
-        "DEFAULT_AMQP_EXCHANGE_TYPE"),
-    ("CELERY_AMQP_EXCHANGE", "AMQP_EXCHANGE",
-        "DEFAULT_AMQP_EXCHANGE"),
-    ("CELERYD_CONCURRENCY", "DAEMON_CONCURRENCY",
-        "DEFAULT_DAEMON_CONCURRENCY"),
-    ("CELERYD_PID_FILE", "DAEMON_PID_FILE",
-        "DEFAULT_DAEMON_PID_FILE"),
-    ("CELERYD_LOG_FILE", "DAEMON_LOG_FILE",
-        "DEFAULT_DAEMON_LOG_FILE"),
-    ("CELERYD_DAEMON_LOG_FORMAT", "LOG_FORMAT",
-        "DEFAULT_LOG_FMT"),
+    ("CELERY_AMQP_CONSUMER_QUEUE", "AMQP_CONSUMER_QUEUE"),
+    ("CELERY_AMQP_PUBLISHER_ROUTING_KEY", "AMQP_PUBLISHER_ROUTING_KEY"),
+    ("CELERY_AMQP_CONSUMER_ROUTING_KEY", "AMQP_CONSUMER_ROUTING_KEY"),
+    ("CELERY_AMQP_EXCHANGE_TYPE", "AMQP_EXCHANGE_TYPE"),
+    ("CELERY_AMQP_EXCHANGE", "AMQP_EXCHANGE"),
+    ("CELERYD_CONCURRENCY", "DAEMON_CONCURRENCY"),
+    ("CELERYD_PID_FILE", "DAEMON_PID_FILE"),
+    ("CELERYD_LOG_FILE", "DAEMON_LOG_FILE"),
+    ("CELERYD_DAEMON_LOG_FORMAT", "LOG_FORMAT"),
 )
 
 
 class TestConf(unittest.TestCase):
 
-    def assertDefaultSetting(self, setting_name, result_var, default_var):
+    def assertDefaultSetting(self, setting_name, result_var):
         if hasattr(settings, setting_name):
             self.assertEquals(getattr(conf, result_var),
                               getattr(settings, setting_name),
                               "Overwritten setting %s is written to %s" % (
                                   setting_name, result_var))
         else:
-            self.assertEqual(getattr(conf, default_var),
+            self.assertEqual(conf._DEFAULTS.get(setting_name),
                              getattr(conf, result_var),
                              "Default setting %s is written to %s" % (
-                                 default_var, result_var))
+                                 setting_name, result_var))
 
     def test_configuration_cls(self):
-        for setting_name, result_var, default_var in SETTING_VARS:
-            self.assertDefaultSetting(setting_name, result_var, default_var)
+        for setting_name, result_var in SETTING_VARS:
+            self.assertDefaultSetting(setting_name, result_var)
         self.assertTrue(isinstance(conf.DAEMON_LOG_LEVEL, int))

+ 1 - 1
celery/worker/__init__.py

@@ -100,7 +100,7 @@ class WorkController(object):
     _state = None
 
     def __init__(self, concurrency=None, logfile=None, loglevel=None,
-            send_events=conf.CELERY_SEND_EVENTS,
+            send_events=conf.SEND_EVENTS,
             is_detached=False, embed_clockservice=False):
 
         # Options

+ 159 - 3
docs/reference/celery.conf.rst

@@ -2,7 +2,163 @@
 Configuration - celery.conf
 ============================
 
-.. currentmodule:: celery.conf
+.. data:: AMQP_EXCHANGE
+
+    Name of the AMQP exchange.
+
+.. data:: AMQP_EXCHANGE_TYPE
+
+    The exchange type.
+
+.. data:: AMQP_PUBLISHER_ROUTING_KEY
+
+    The default AMQP routing key used when publishing tasks.
+
+.. data:: AMQP_CONSUMER_ROUTING_KEY
+
+    The AMQP routing key used when consuming tasks.
+
+.. data:: AMQP_CONSUMER_QUEUE
+
+    The name of the AMQP queue.
+
+.. data:: AMQP_CONSUMER_QUEUES
+
+    Dictionary defining multiple AMQP queues.
+
+.. data:: AMQP_CONNECTION_TIMEOUT
+
+    The timeout in seconds before we give up establishing a connection
+    to the AMQP server.
+
+.. data:: SEND_CELERY_TASK_ERROR_EMAILS
+
+    If set to ``True``, errors in tasks will be sent to admins by e-mail.
+    If unset, it will send the e-mails if ``settings.DEBUG`` is False.
+
+.. data:: ALWAYS_EAGER
+
+    Always execute tasks locally, don't send to the queue.
+
+.. data:: TASK_RESULT_EXPIRES
+
+    Task tombstone expire time in seconds.
+
+.. data:: AMQP_CONNECTION_RETRY
+
+    Automatically try to re-establish the connection to the AMQP broker if
+    it's lost.
+
+.. data:: AMQP_CONNECTION_MAX_RETRIES
+
+    Maximum number of retries before we give up re-establishing a connection
+    to the broker.
+
+    If this is set to ``0`` or ``None``, we will retry forever.
+
+    Default is ``100`` retries.
+
+.. data:: TASK_SERIALIZER
+
+    A string identifying the default serialization
+    method to use. Can be ``pickle`` (default),
+    ``json``, ``yaml``, or any custom serialization methods that have
+    been registered with :mod:`carrot.serialization.registry`.
+
+    Default is ``pickle``.
+
+.. data:: CELERY_BACKEND
+
+    The backend used to store task results (tombstones).
+
+.. data:: CELERY_CACHE_BACKEND
+
+    Use a custom cache backend for celery. If not set the django-global
+    cache backend in ``CACHE_BACKEND`` will be used.
+
+.. data:: CELERY_SEND_EVENTS
+
+    If set, celery will send events that can be captured by monitors like
+    ``celerymon``.
+    Default is: ``False``.
+
+.. data:: DEFAULT_RATE_LIMIT
+
+    The default rate limit applied to all tasks that don't have a custom
+    rate limit defined. (Default: None)
+
+.. data:: DISABLE_RATE_LIMITS
+
+    If ``True`` all rate limits will be disabled and all tasks will be executed
+    as soon as possible.
+
+.. data:: CELERYBEAT_PID_FILE
+
+    Name of celerybeat's pid file.
+    Default is: ``celerybeat.pid``.
+
+.. data:: CELERYBEAT_LOG_LEVEL
+
+    Default log level for celerybeat.
+    Default is: ``INFO``.
+
+.. data:: CELERYBEAT_LOG_FILE
+
+    Default log file for celerybeat.
+    Default is: ``celerybeat.log``.
+
+.. data:: CELERYBEAT_SCHEDULE_FILENAME
+
+    Name of the persistent schedule database file.
+    Default is: ``celerybeat-schedule``.
+
+.. data:: CELERYBEAT_MAX_LOOP_INTERVAL
+
+    The maximum number of seconds celerybeat is allowed to sleep between
+    checking the schedule. The default is 5 minutes, which means celerybeat can
+    only sleep a maximum of 5 minutes after checking the schedule run-times for a
+    periodic task to apply. If you change the run_times of periodic tasks at
+    run-time, you may consider lowering this value for changes to take effect
+    faster (A value of 5 minutes, means the changes will take effect in 5 minutes
+    at maximum).
+
+.. data:: CELERYMON_PID_FILE
+
+    Name of celerymon's pid file.
+    Default is: ``celerymon.pid``.
+
+.. data:: CELERYMON_LOG_LEVEL
+
+    Default log level for celerymon.
+    Default is: ``INFO``.
+
+.. data:: CELERYMON_LOG_FILE
+
+    Default log file for celerymon.
+    Default is: ``celerymon.log``.
+
+.. data:: LOG_LEVELS
+
+    Mapping of log level names to :mod:`logging` module constants.
+
+.. data:: LOG_FORMAT
+
+    The format to use for log messages.
+
+.. data:: DAEMON_LOG_FILE
+
+    Filename of the daemon log file.
+
+.. data:: DAEMON_LOG_LEVEL
+
+    Default log level for daemons. (``WARN``)
+
+.. data:: DAEMON_PID_FILE
+
+    Full path to the daemon pidfile.
+
+.. data:: DAEMON_CONCURRENCY
+
+    The number of concurrent worker processes.
+    If set to ``0``, the total number of available CPUs/cores will be used.
 
-.. automodule:: celery.conf
-    :members: