Cosmetics

Ask Solem 13 years ago
parent commit 9977e99388

+ 0 - 7
celery/bin/base.py

@@ -1,12 +1,5 @@
 # -*- coding: utf-8 -*-
 """
-    celery.bin.base
-    ~~~~~~~~~~~~~~~
-
-    Base class for command line programs.
-
-    Implements tools for command line arguments parsing,
-    and more.

 .. _preload-options:


+ 1 - 1
docs/django/first-steps-with-django.rst

@@ -38,7 +38,7 @@ to your Django project's ``settings.py`` module. For example
 we can configure the :setting:`BROKER_URL` setting to specify
 what broker to use::

-    BROKER_URL = "amqp://guest:guest@localhost:5672/"
+    BROKER_URL = 'amqp://guest:guest@localhost:5672/'

 That's it.


+ 1 - 1
docs/getting-started/brokers/beanstalk.rst

@@ -29,7 +29,7 @@ Configuration
 Configuration is easy, set the transport, and configure the location of
 your Beanstalk server::

-    BROKER_URL = "beanstalk://localhost:11300"
+    BROKER_URL = 'beanstalk://localhost:11300'

 Where the URL is in the format of::


+ 1 - 1
docs/getting-started/brokers/couchdb.rst

@@ -28,7 +28,7 @@ Configuration
 Configuration is easy, set the transport, and configure the location of
 your CouchDB database::

-    BROKER_URL = "couchdb://localhost:5984/database_name"
+    BROKER_URL = 'couchdb://localhost:5984/database_name'

 Where the URL is in the format of::


+ 2 - 2
docs/getting-started/brokers/django.rst

@@ -19,11 +19,11 @@ configuration values.

 #. Set your broker transport::

-    BROKER_URL = "django://"
+    BROKER_URL = 'django://'

 #. Add :mod:`djcelery.transport` to `INSTALLED_APPS`::

-    INSTALLED_APPS = ("djcelery.transport", )
+    INSTALLED_APPS = ('djcelery.transport', )

 #. Sync your database schema::


+ 1 - 1
docs/getting-started/brokers/mongodb.rst

@@ -28,7 +28,7 @@ Configuration
 Configuration is easy, set the transport, and configure the location of
 your MongoDB database::

-    BROKER_URL = "mongodb://localhost:27017/database_name"
+    BROKER_URL = 'mongodb://localhost:27017/database_name'

 Where the URL is in the format of::


+ 1 - 1
docs/getting-started/brokers/rabbitmq.rst

@@ -14,7 +14,7 @@ RabbitMQ is the default broker so it does not require any additional
 dependencies or initial configuration, other than the URL location of
 the broker instance you want to use::

-    >>> BROKER_URL = "amqp://guest:guest@localhost:5672//"
+    >>> BROKER_URL = 'amqp://guest:guest@localhost:5672//'

 For a description of broker URLs and a full list of the
 various broker configuration options available to Celery,

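All of the broker sections here set :setting:`BROKER_URL`, and the URLs share one
general shape (a sketch; apart from the transport scheme, every field is
optional and falls back to a transport-specific default)::

    transport://userid:password@hostname:port/virtual_host

    # e.g. the RabbitMQ default shown above:
    BROKER_URL = 'amqp://guest:guest@localhost:5672//'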
+ 2 - 2
docs/getting-started/brokers/redis.rst

@@ -28,7 +28,7 @@ Configuration
 Configuration is easy, just configure the location of
 your Redis database::

-    BROKER_URL = "redis://localhost:6379/0"
+    BROKER_URL = 'redis://localhost:6379/0'

 Where the URL is in the format of::

@@ -42,7 +42,7 @@ Results
 If you also want to store the state and return values of tasks in Redis,
 you should configure these settings::

-    CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
+    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

 For a complete list of options supported by the Redis result backend see
 :ref:`conf-redis-result-backend`

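As a minimal sketch of how the two Redis settings above combine in application
code (using the module-level app style from the first-steps guide)::

    from celery import Celery

    # the broker carries task messages, the backend stores results
    celery = Celery('tasks',
                    broker='redis://localhost:6379/0',
                    backend='redis://localhost:6379/0')

    @celery.task()
    def add(x, y):
        return x + y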
+ 5 - 5
docs/getting-started/brokers/sqlalchemy.rst

@@ -17,7 +17,7 @@ Configuration
 Celery needs to know the location of your database, which should be the usual
 SQLAlchemy connection string, but with 'sqla+' prepended to it::

-    BROKER_URL = "sqla+sqlite:///celerydb.sqlite"
+    BROKER_URL = 'sqla+sqlite:///celerydb.sqlite'

 This transport uses only the :setting:`BROKER_URL` setting, which has to be
 an SQLAlchemy database URI.
@@ -30,16 +30,16 @@ Here's a list of examples using a selection of other `SQLAlchemy Connection Stri
 .. code-block:: python

     # sqlite (filename)
-    BROKER_URL = "sqla+sqlite:///celerydb.sqlite"
+    BROKER_URL = 'sqla+sqlite:///celerydb.sqlite'

     # mysql
-    BROKER_URL = "sqla+mysql://scott:tiger@localhost/foo"
+    BROKER_URL = 'sqla+mysql://scott:tiger@localhost/foo'

     # postgresql
-    BROKER_URL = "sqla+postgresql://scott:tiger@localhost/mydatabase"
+    BROKER_URL = 'sqla+postgresql://scott:tiger@localhost/mydatabase'

     # oracle
-    BROKER_URL = "sqla+oracle://scott:tiger@127.0.0.1:1521/sidname"
+    BROKER_URL = 'sqla+oracle://scott:tiger@127.0.0.1:1521/sidname'

 .. _`SQLAlchemy: Supported Databases`:
     http://www.sqlalchemy.org/docs/core/engines.html#supported-databases

+ 16 - 16
docs/getting-started/first-steps-with-celery.rst

@@ -109,13 +109,13 @@ Let's create the file :file:`tasks.py`:

     from celery import Celery

-    celery = Celery("tasks", broker="amqp://guest@localhost//")
+    celery = Celery('tasks', broker='amqp://guest@localhost//')

     @celery.task()
     def add(x, y):
         return x + y

-    if __name__ == "__main__":
+    if __name__ == '__main__':
         celery.start()

 The first argument to :class:`~celery.app.Celery` is the name of the current module,
@@ -199,12 +199,12 @@ as messages.  The backend is specified via the ``backend`` argument to
 :class:`@Celery`, (or via the :setting:`CELERY_RESULT_BACKEND` setting if
 you choose to use a configuration module)::

-    celery = Celery("tasks", backend="amqp", broker="amqp://")
+    celery = Celery('tasks', backend='amqp', broker='amqp://')

 or if you want to use Redis as the result backend, but still use RabbitMQ as
 the message broker (a popular combination)::

-    celery = Celery("tasks", backend="redis://localhost", broker="amqp://")
+    celery = Celery('tasks', backend='redis://localhost', broker='amqp://')

 To read more about result backends please see :ref:`task-result-backends`.

@@ -263,16 +263,16 @@ task payloads by changing the :setting:`CELERY_TASK_SERIALIZER` setting:

 .. code-block:: python

-    celery.conf.CELERY_TASK_SERIALIZER = "json"
+    celery.conf.CELERY_TASK_SERIALIZER = 'json'

 If you are configuring many settings at once you can use ``update``:

 .. code-block:: python

     celery.conf.update(
-        CELERY_TASK_SERIALIZER="json",
-        CELERY_RESULT_SERIALIZER="json",
-        CELERY_TIMEZONE="Europe/Oslo",
+        CELERY_TASK_SERIALIZER='json',
+        CELERY_RESULT_SERIALIZER='json',
+        CELERY_TIMEZONE='Europe/Oslo',
         CELERY_ENABLE_UTC=True,
     )

@@ -289,7 +289,7 @@ by calling the :meth:`~@Celery.config_from_object` method:

 .. code-block:: python

-    celery.config_from_object("celeryconfig")
+    celery.config_from_object('celeryconfig')

 This module is often called "``celeryconfig``", but you can use any
 module name.
@@ -301,12 +301,12 @@ current directory or on the Python path, it could look like this:

 .. code-block:: python

-    BROKER_URL = "amqp://"
-    CELERY_RESULT_BACKEND = "amqp://"
+    BROKER_URL = 'amqp://'
+    CELERY_RESULT_BACKEND = 'amqp://'

-    CELERY_TASK_SERIALIZER = "json"
-    CELERY_RESULT_SERIALIZER = "json"
-    CELERY_TIMEZONE = "Europe/Oslo"
+    CELERY_TASK_SERIALIZER = 'json'
+    CELERY_RESULT_SERIALIZER = 'json'
+    CELERY_TIMEZONE = 'Europe/Oslo'
     CELERY_ENABLE_UTC = True

 To verify that your configuration file works properly, and doesn't
@@ -324,7 +324,7 @@ route a misbehaving task to a dedicated queue:
 .. code-block:: python

     CELERY_ROUTES = {
-        "tasks.add": "low-priority",
+        'tasks.add': 'low-priority',
     }

 Or instead of routing it you could rate limit the task
@@ -336,7 +336,7 @@ instead, so that only 10 tasks of this type can be processed in a minute
 .. code-block:: python

     CELERY_ANNOTATIONS = {
-        "tasks.add": {"rate_limit": "10/m"}
+        'tasks.add': {'rate_limit': '10/m'}
     }

 If you are using RabbitMQ, Redis or MongoDB as the

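Tying the pieces of this file together, a minimal sketch of an app that loads
the ``celeryconfig`` module shown above, plus a quick round trip (assumes a
worker is running and a result backend is configured)::

    from celery import Celery

    celery = Celery('tasks', broker='amqp://guest@localhost//')
    celery.config_from_object('celeryconfig')

    @celery.task()
    def add(x, y):
        return x + y

and then, from a Python shell::

    >>> from tasks import add
    >>> result = add.delay(4, 4)
    >>> result.get(timeout=1)
    8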
+ 4 - 4
docs/getting-started/intro.rst

@@ -50,11 +50,11 @@ Celery is…

             from celery import Celery

-            celery = Celery("hello", broker="amqp://guest@localhost//")
+            celery = Celery('hello', broker='amqp://guest@localhost//')

             @celery.task()
             def hello():
-                return "hello world"
+                return 'hello world'

     - **Highly Available**

@@ -175,8 +175,8 @@ Celery is…
         - :ref:`use logging from my task <task-logging>`
         - :ref:`learn about best practices <task-best-practices>`
         - :ref:`create a custom task base class <task-custom-classes>`
-        - :ref:`add a callback to a group of tasks <chords-ov>`
-        - :ref:`split a task into several chunks <chunking-ov>`
+        - :ref:`add a callback to a group of tasks <chords>`
+        - :ref:`split a task into several chunks <chunking>`
         - :ref:`optimize the worker <guide-optimizing>`
         - :ref:`see a list of built-in task states <task-builtin-states>`
         - :ref:`create custom task states <custom-states>`

+ 0 - 1
docs/index.rst

@@ -25,7 +25,6 @@ Contents
     :maxdepth: 1

     copyright
-    conventions

 .. toctree::
     :maxdepth: 2

+ 1 - 1
docs/tutorials/debugging.rst

@@ -68,7 +68,7 @@ change it and continue execution of the task::

     (Pdb) result
     4
-    (Pdb) result = "hello from rdb"
+    (Pdb) result = 'hello from rdb'
     (Pdb) continue
     Connection closed by foreign host.


+ 16 - 16
docs/userguide/application.rst

@@ -55,7 +55,7 @@ Whenever you define a task, that task will also be added to the local registry:
     >>> add.name
     __main__.add

-    >>> celery.tasks["__main__.add"]
+    >>> celery.tasks['__main__.add']
     <@task: __main__.add>

 and there we see that ``__main__`` again; whenever Celery is not able
@@ -94,9 +94,9 @@ You can specify another name for the main module:

 .. code-block:: python

-    >>> celery = Celery("tasks")
+    >>> celery = Celery('tasks')
     >>> celery.main
-    "tasks"
+    'tasks'

     >>> @celery.task()
     ... def add(x, y):
@@ -117,7 +117,7 @@ or you can use a dedicated configuration module.
 The configuration is available as :attr:`@Celery.conf`::

     >>> celery.conf.CELERY_TIMEZONE
-    "Europe/London"
+    'Europe/London'

 where you can set configuration values directly::

@@ -127,7 +127,7 @@ or you can update several keys at once by using the ``update`` method::

     >>> celery.conf.update(
     ...     CELERY_ENABLE_UTC=True,
-    ...     CELERY_TIMEZONE="Europe/London",
+    ...     CELERY_TIMEZONE='Europe/London',
     ...)

 The configuration object consists of multiple dictionaries
@@ -172,7 +172,7 @@ Example 1: Using the name of a module
     from celery import Celery

     celery = Celery()
-    celery.config_from_object("celeryconfig")
+    celery.config_from_object('celeryconfig')


 The ``celeryconfig`` module may then look like this:
@@ -182,7 +182,7 @@ The ``celeryconfig`` module may then look like this:
 .. code-block:: python

     CELERY_ENABLE_UTC = True
-    CELERY_TIMEZONE = "Europe/London"
+    CELERY_TIMEZONE = 'Europe/London'

 Example 2: Using a configuration module
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -206,7 +206,7 @@ Example 3:  Using a configuration class/object

     class Config:
         CELERY_ENABLE_UTC = True
-        CELERY_TIMEZONE = "Europe/London"
+        CELERY_TIMEZONE = 'Europe/London'

     celery.config_from_object(Config)

@@ -225,10 +225,10 @@ environment variable named :envvar:`CELERY_CONFIG_MODULE`:
     from celery import Celery

     #: Set default configuration module name
-    os.environ.setdefault("CELERY_CONFIG_MODULE", "celeryconfig")
+    os.environ.setdefault('CELERY_CONFIG_MODULE', 'celeryconfig')

     celery = Celery()
-    celery.config_from_envvar("CELERY_CONFIG_MODULE")
+    celery.config_from_envvar('CELERY_CONFIG_MODULE')

 You can then specify the configuration module to use via the environment::

@@ -381,11 +381,11 @@ chain breaks::
     .. code-block:: python

         def hello(to):
-            return "hello %s" % to
+            return 'hello %s' % to

         >>> from celery.execute import apply_async

-        >>> apply_async(hello, ("world!", ))
+        >>> apply_async(hello, ('world!', ))

     or you could also create a ``Task`` class to set
     certain options, or override other behavior
@@ -399,10 +399,10 @@ chain breaks::
             send_error_emails = True

             def run(self, to):
-                return "hello %s" % to
+                return 'hello %s' % to
         tasks.register(Hello)

-        >>> Hello.delay("world!")
+        >>> Hello.delay('world!')

     Later, it was decided that passing arbitrary callables
     was an anti-pattern, since it makes it very hard to use
@@ -415,7 +415,7 @@ chain breaks::

         @task(send_error_emails=True)
         def hello(to):
-            return "hello %s" % to
+            return 'hello %s' % to

 Abstract Tasks
 ==============
@@ -442,7 +442,7 @@ class: :class:`celery.Task`.
         abstract = True

         def __call__(self, *args, **kwargs):
-            print("TASK STARTING: %s[%s]" % (self.name, self.request.id))
+            print('TASK STARTING: %s[%s]' % (self.name, self.request.id))
             return self.run(*args, **kwargs)



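To make the abstract-task pattern at the end of this file concrete, a small
sketch (the ``DebugTask`` name is borrowed from the tasks guide, and the import
path assumes the 3.x ``celery.Task`` documented above)::

    from celery import Celery, Task

    celery = Celery('myapp', broker='amqp://')

    class DebugTask(Task):
        abstract = True  # not registered itself, only used as a base

        def __call__(self, *args, **kwargs):
            print('TASK STARTING: %s[%s]' % (self.name, self.request.id))
            return self.run(*args, **kwargs)

    @celery.task(base=DebugTask)
    def add(x, y):
        return x + y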
+ 12 - 12
docs/userguide/calling.rst

@@ -39,7 +39,7 @@ The API defines a standard set of execution options, as well as three methods:
     - ``T.delay(arg, kwarg=value)``
         always a shortcut to ``.apply_async``.

-    - ``T.apply_async((arg, ), {"kwarg": value})``
+    - ``T.apply_async((arg, ), {'kwarg': value})``

     - ``T.apply_async(countdown=10)``
         executes 10 seconds from now.
@@ -62,13 +62,13 @@ function:

 .. code-block:: python

-    task.delay(arg1, arg2, kwarg1="x", kwarg2="y")
+    task.delay(arg1, arg2, kwarg1='x', kwarg2='y')

 Using :meth:`~@Task.apply_async` instead we have to write:

 .. code-block:: python

-    task.apply_async(args=[arg1, arg2], kwargs={"kwarg1": "x", "kwarg2": "y"})
+    task.apply_async(args=[arg1, arg2], kwargs={'kwarg1': 'x', 'kwarg2': 'y'})

 .. sidebar:: Tip

@@ -99,7 +99,7 @@ called `add`, returning the sum of two arguments:

     .. code-block:: python

-        task.s(arg1, arg2, kwarg1="x", kwargs2="y").apply_async()
+        task.s(arg1, arg2, kwarg1='x', kwarg2='y').apply_async()

 .. _calling-links:

@@ -146,7 +146,7 @@ This is an example error callback:
     def error_handler(uuid):
         result = AsyncResult(uuid)
         exc = result.get(propagate=False)
-        print("Task %r raised exception: %r\n%r" % (
+        print('Task %r raised exception: %r\n%r' % (
              uuid, exc, result.traceback))

 it can be added to the task using the ``link_error`` execution
@@ -282,10 +282,10 @@ For example, the default policy correlates to:
 .. code-block:: python

     add.apply_async((2, 2), retry=True, retry_policy={
-        "max_retries": 3,
-        "interval_start": 0,
-        "interval_step": 0.2,
-        "interval_max": 0.2,
+        'max_retries': 3,
+        'interval_start': 0,
+        'interval_step': 0.2,
+        'interval_max': 0.2,
     })

 the maximum time spent retrying will be 0.4 seconds.  It is set relatively
@@ -383,7 +383,7 @@ Example setting a custom serializer for a single task invocation:

 .. code-block:: python

-    >>> add.apply_async((10, 10), serializer="json")
+    >>> add.apply_async((10, 10), serializer='json')

 .. _calling-compression:

@@ -403,7 +403,7 @@ to use when sending a task:

 Example specifying the compression used when calling a task::

-    >>> add.apply_async((2, 2), compression="zlib")
+    >>> add.apply_async((2, 2), compression='zlib')

 .. _calling-connections:

@@ -457,7 +457,7 @@ Celery can route tasks to different queues.

 Simple routing (name <-> name) is accomplished using the ``queue`` option::

-    add.apply_async(queue="priority.high")
+    add.apply_async(queue='priority.high')

 You can then assign workers to the ``priority.high`` queue by using
 the workers' :option:`-Q` argument::

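A sketch of both sides of that routing handshake: the caller picks the queue,
and a worker subscribes to it (the command line assumes the 3.x
``celery worker`` CLI; flags abbreviated)::

    >>> add.apply_async((2, 2), queue='priority.high')

with a worker consuming from that queue::

    $ celery worker -l info -Q priority.high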
+ 6 - 6
docs/userguide/canvas.rst

@@ -76,7 +76,7 @@ creates partials:

     >>> s = add.s(2, 2)
     >>> s.delay(debug=True)                    # -> add(2, 2, debug=True)
-    >>> s.apply_async(kwargs={"debug": True})  # same
+    >>> s.apply_async(kwargs={'debug': True})  # same

 - Any options added will be merged with the options in the signature,
   with the new options taking precedence::
@@ -89,7 +89,7 @@ You can also clone subtasks to augment these::
     >>> s = add.s(2)
     proj.tasks.add(2)

-    >>> s.clone(args=(4, ), kwargs={"debug": True})
+    >>> s.clone(args=(4, ), kwargs={'debug': True})
     proj.tasks.add(2, 4, debug=True)

 Partials are meant to be used with callbacks, any tasks linked or chord
@@ -219,8 +219,8 @@ the error callbacks take the id of the parent task as argument instead:
     def log_error(task_id):
         result = celery.AsyncResult(task_id)
         result.get(propagate=False)  # make sure result written.
-        with open("/var/errors/%s" % (task_id, )) as fh:
-            fh.write("--\n\n%s %s %s" % (
+        with open('/var/errors/%s' % (task_id, ), 'a') as fh:
+            fh.write('--\n\n%s %s %s' % (
                 task_id, result.result, result.traceback))

 To make it even easier to link tasks together there is
@@ -288,7 +288,7 @@ In addition you can work with the result graph as a

 You can even convert these graphs to *dot* format::

-    >>> with open("graph.dot", "w") as fh:
+    >>> with open('graph.dot', 'w') as fh:
     ...     res.parent.parent.graph.to_dot(fh)


@@ -562,7 +562,7 @@ to apply the starmap after 10 seconds::

     >>> add.starmap(zip(range(10), range(10))).apply_async(countdown=10)

-.. _chunking-ov:
+.. _chunking:

 Chunking
 ========

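As a sketch of what the chunking section introduces: splitting one hundred
``add`` calls into ten chunks of ten (API as in the 3.x canvas guide;
``proj.tasks`` is illustrative)::

    >>> from proj.tasks import add

    >>> res = add.chunks(zip(range(100), range(100)), 10)()
    >>> res.get()  # -> ten lists of ten results each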
+ 5 - 5
docs/userguide/monitoring.rst

@@ -502,9 +502,9 @@ Here is an example camera, dumping the snapshot to screen:
             if not state.event_count:
                 # No new events since last snapshot.
                 return
-            print("Workers: %s" % (pformat(state.workers, indent=4), ))
-            print("Tasks: %s" % (pformat(state.tasks, indent=4), ))
-            print("Total: %s events, %s tasks" % (
+            print('Workers: %s' % (pformat(state.workers, indent=4), ))
+            print('Tasks: %s' % (pformat(state.tasks, indent=4), ))
+            print('Total: %s events, %s tasks' % (
                 state.event_count, state.task_count))

 See the API reference for :mod:`celery.events.state` to read more
@@ -525,11 +525,11 @@ Or you can use it programmatically like this::
     def main():
         state = State()
         with establish_connection() as connection:
-            recv = EventReceiver(connection, handlers={"*": state.event})
+            recv = EventReceiver(connection, handlers={'*': state.event})
             with DumpCam(state, freq=1.0):
                 recv.capture(limit=None, timeout=None)

-    if __name__ == "__main__":
+    if __name__ == '__main__':
         main()



+ 27 - 27
docs/userguide/periodic-tasks.rst

@@ -37,10 +37,10 @@ Example: Run the `tasks.add` task every 30 seconds.
     from datetime import timedelta

     CELERYBEAT_SCHEDULE = {
-        "runs-every-30-seconds": {
-            "task": "tasks.add",
-            "schedule": timedelta(seconds=30),
-            "args": (16, 16)
+        'runs-every-30-seconds': {
+            'task': 'tasks.add',
+            'schedule': timedelta(seconds=30),
+            'args': (16, 16)
         },
     }

@@ -108,10 +108,10 @@ the :class:`~celery.schedules.crontab` schedule type:

     CELERYBEAT_SCHEDULE = {
         # Executes every Monday morning at 7:30 A.M.
-        "every-monday-morning": {
-            "task": "tasks.add",
-            "schedule": crontab(hour=7, minute=30, day_of_week=1),
-            "args": (16, 16),
+        'every-monday-morning': {
+            'task': 'tasks.add',
+            'schedule': crontab(hour=7, minute=30, day_of_week=1),
+            'args': (16, 16),
         },
     }

@@ -124,51 +124,51 @@ The syntax of these crontab expressions are very flexible.  Some examples:
 +-----------------------------------------+--------------------------------------------+
 | ``crontab(minute=0, hour=0)``           | Execute daily at midnight.                 |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute=0, hour="*/3")``       | Execute every three hours:                 |
+| ``crontab(minute=0, hour='*/3')``       | Execute every three hours:                 |
 |                                         | 3am, 6am, 9am, noon, 3pm, 6pm, 9pm.        |
 +-----------------------------------------+--------------------------------------------+
 | ``crontab(minute=0,``                   | Same as previous.                          |
 |         ``hour=[0,3,6,9,12,15,18,21])`` |                                            |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute="*/15")``              | Execute every 15 minutes.                  |
+| ``crontab(minute='*/15')``              | Execute every 15 minutes.                  |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(day_of_week="sunday")``       | Execute every minute (!) on Sundays.       |
+| ``crontab(day_of_week='sunday')``       | Execute every minute (!) on Sundays.       |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute="*",``                 | Same as previous.                          |
-|         ``hour="*",``                   |                                            |
-|         ``day_of_week="sun")``          |                                            |
+| ``crontab(minute='*',``                 | Same as previous.                          |
+|         ``hour='*',``                   |                                            |
+|         ``day_of_week='sun')``          |                                            |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute="*/10",``              | Execute every ten minutes, but only        |
-|         ``hour="3,17,22",``             | between 3-4 am, 5-6 pm and 10-11 pm on     |
-|         ``day_of_week="thu,fri")``      | Thursdays or Fridays.                      |
+| ``crontab(minute='*/10',``              | Execute every ten minutes, but only        |
+|         ``hour='3,17,22',``             | between 3-4 am, 5-6 pm and 10-11 pm on     |
+|         ``day_of_week='thu,fri')``      | Thursdays or Fridays.                      |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute=0, hour="*/2,*/3")``   | Execute every even hour, and every hour    |
+| ``crontab(minute=0, hour='*/2,*/3')``   | Execute every even hour, and every hour    |
 |                                         | divisible by three. This means:            |
 |                                         | at every hour *except*: 1am,               |
 |                                         | 5am, 7am, 11am, 1pm, 5pm, 7pm,             |
 |                                         | 11pm                                       |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute=0, hour="*/5")``       | Execute every hour divisible by 5. This    |
+| ``crontab(minute=0, hour='*/5')``       | Execute every hour divisible by 5. This    |
 |                                         | means that it is triggered at 3pm, not     |
 |                                         | 5pm (since 3pm equals the 24-hour clock    |
 |                                         | value of "15", which is divisible by 5).   |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(minute=0, hour="*/3,8-17")``  | Execute every hour divisible by 3, and     |
+| ``crontab(minute=0, hour='*/3,8-17')``  | Execute every hour divisible by 3, and     |
 |                                         | every hour during office hours (8am-5pm).  |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(day_of_month="2")``           | Execute on the second day of every month.  |
+| ``crontab(day_of_month='2')``           | Execute on the second day of every month.  |
 |                                         |                                            |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(day_of_month="2-30/3")``      | Execute on every even numbered day.        |
+| ``crontab(day_of_month='2-30/3')``      | Execute on every even numbered day.        |
 |                                         |                                            |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(day_of_month="1-7,15-21")``   | Execute on the first and third weeks of    |
+| ``crontab(day_of_month='1-7,15-21')``   | Execute on the first and third weeks of    |
 |                                         | the month.                                 |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(day_of_month="11",``          | Execute on the 11th of May every year.     |
-|         ``month_of_year="5")``          |                                            |
+| ``crontab(day_of_month='11',``          | Execute on the 11th of May every year.     |
+|         ``month_of_year='5')``          |                                            |
 +-----------------------------------------+--------------------------------------------+
-| ``crontab(month_of_year="*/3")``        | Execute on the first month of every        |
+| ``crontab(month_of_year='*/3')``        | Execute on the first month of every        |
 |                                         | quarter.                                   |
 +-----------------------------------------+--------------------------------------------+

@@ -186,7 +186,7 @@ the :setting:`CELERY_TIMEZONE` setting:
 .. code-block:: python

     CELERY_ENABLE_UTC = True
-    CELERY_TIMEZONE = "Europe/London"
+    CELERY_TIMEZONE = 'Europe/London'

 .. admonition:: Django Users


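A minimal sketch combining the two schedule styles from this file in a single
:setting:`CELERYBEAT_SCHEDULE` (entry names are illustrative)::

    from datetime import timedelta
    from celery.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        'add-every-30-seconds': {
            'task': 'tasks.add',
            'schedule': timedelta(seconds=30),
            'args': (16, 16),
        },
        'add-every-monday-morning': {
            'task': 'tasks.add',
            'schedule': crontab(hour=7, minute=30, day_of_week=1),
            'args': (16, 16),
        },
    }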
+ 11 - 9
docs/userguide/remote-tasks.rst

@@ -35,11 +35,11 @@ Whether to use GET or POST is up to you and your requirements.
 The web page should then return a response in the following format
 if the execution was successful::

-    {"status": "success", "retval": ....}
+    {'status': 'success', 'retval': ....}

 or if there was an error::

-    {"status": "failure": "reason": "Invalid moon alignment."}
+    {'status': 'failure', 'reason': 'Invalid moon alignment.'}

 Enabling the HTTP task
 ----------------------
@@ -63,11 +63,11 @@ With this information you could define a simple task in Django:


     def multiply(request):
-        x = int(request.GET["x"])
-        y = int(request.GET["y"])
+        x = int(request.GET['x'])
+        y = int(request.GET['y'])
         result = x * y
-        response = {"status": "success", "retval": result}
-        return HttpResponse(serialize(response), mimetype="application/json")
+        response = {'status': 'success', 'retval': result}
+        return HttpResponse(serialize(response), mimetype='application/json')

 .. _webhook-rails-example:

@@ -82,7 +82,7 @@ or in Ruby on Rails:
         @x = params[:x].to_i
         @y = params[:y].to_i

-        @status = {:status => "success", :retval => @x * @y}
+        @status = {:status => 'success', :retval => @x * @y}

         render :json => @status
     end
@@ -98,7 +98,7 @@ Calling webhook tasks
 To call a task you can use the :class:`~celery.task.http.URL` class:

     >>> from celery.task.http import URL
-    >>> res = URL("http://example.com/multiply").get_async(x=10, y=10)
+    >>> res = URL('http://example.com/multiply').get_async(x=10, y=10)


 :class:`~celery.task.http.URL` is a shortcut to the :class:`HttpDispatchTask`.
@@ -106,7 +106,9 @@ You can subclass this to extend the
 functionality.

     >>> from celery.task.http import HttpDispatchTask
-    >>> res = HttpDispatchTask.delay(url="http://example.com/multiply", method="GET", x=10, y=10)
+    >>> res = HttpDispatchTask.delay(
+    ...     url='http://example.com/multiply',
+    ...     method='GET', x=10, y=10)
     >>> res.get()
     100


+ 59 - 67
docs/userguide/routing.rst

@@ -38,7 +38,7 @@ Say you have two servers, `x`, and `y` that handles regular tasks,
 and one server `z`, that only handles feed related tasks.  You can use this
 configuration::

-    CELERY_ROUTES = {"feed.tasks.import_feed": {"queue": "feeds"}}
+    CELERY_ROUTES = {'feed.tasks.import_feed': {'queue': 'feeds'}}

 With this route enabled import feed tasks will be routed to the
 `"feeds"` queue, while all other tasks will be routed to the default queue
@@ -65,9 +65,9 @@ configuration:

     from kombu import Exchange, Queue

-    CELERY_DEFAULT_QUEUE = "default"
+    CELERY_DEFAULT_QUEUE = 'default'
     CELERY_QUEUES = (
-        Queue("default", Exchange("default"), routing_key="default"),
+        Queue('default', Exchange('default'), routing_key='default'),
     )

 .. _routing-autoqueue-details:
@@ -83,9 +83,9 @@ A queue named `"video"` will be created with the following settings:

 .. code-block:: python

-    {"exchange": "video",
-     "exchange_type": "direct",
-     "routing_key": "video"}
+    {'exchange': 'video',
+     'exchange_type': 'direct',
+     'routing_key': 'video'}

 The non-AMQP backends like `ghettoq` do not support exchanges, so they
 require the exchange to have the same name as the queue. Using this design
@@ -104,14 +104,14 @@ configuration:

     from kombu import Queue

-    CELERY_DEFAULT_QUEUE = "default"
+    CELERY_DEFAULT_QUEUE = 'default'
     CELERY_QUEUES = (
-        Queue("default",    routing_key="task.#"),
-        Queue("feed_tasks", routing_key="feed.#"),
+        Queue('default',    routing_key='task.#'),
+        Queue('feed_tasks', routing_key='feed.#'),
     )
-    CELERY_DEFAULT_EXCHANGE = "tasks"
-    CELERY_DEFAULT_EXCHANGE_TYPE = "topic"
-    CELERY_DEFAULT_ROUTING_KEY = "task.default"
+    CELERY_DEFAULT_EXCHANGE = 'tasks'
+    CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
+    CELERY_DEFAULT_ROUTING_KEY = 'task.default'

 :setting:`CELERY_QUEUES` is a list of :class:`~kombu.entity.Queue`
 instances.
@@ -125,9 +125,9 @@ To route a task to the `feed_tasks` queue, you can add an entry in the
 .. code-block:: python

     CELERY_ROUTES = {
-            "feeds.tasks.import_feed": {
-                "queue": "feed_tasks",
-                "routing_key": "feed.import",
+            'feeds.tasks.import_feed': {
+                'queue': 'feed_tasks',
+                'routing_key': 'feed.import',
             },
     }

@@ -136,9 +136,9 @@ You can also override this using the `routing_key` argument to
 :meth:`Task.apply_async`, or :func:`~celery.execute.send_task`:

     >>> from feeds.tasks import import_feed
-    >>> import_feed.apply_async(args=["http://cnn.com/rss"],
-    ...                         queue="feed_tasks",
-    ...                         routing_key="feed.import")
+    >>> import_feed.apply_async(args=['http://cnn.com/rss'],
+    ...                         queue='feed_tasks',
+    ...                         routing_key='feed.import')


 To make server `z` consume from the feed queue exclusively you can
@@ -164,10 +164,10 @@ just specify a custom exchange and exchange type:
     from kombu import Exchange, Queue

     CELERY_QUEUES = (
-        Queue("feed_tasks",    routing_key="feed.#"),
-        Queue("regular_tasks", routing_key="task.#"),
-        Queue("image_tasks",   exchange=Exchange("mediatasks", type="direct"),
-                               routing_key="image.compress"),
+        Queue('feed_tasks',    routing_key='feed.#'),
+        Queue('regular_tasks', routing_key='task.#'),
+        Queue('image_tasks',   exchange=Exchange('mediatasks', type='direct'),
+                               routing_key='image.compress'),
     )

 If you're confused about these terms, you should read up on AMQP.
@@ -204,10 +204,10 @@ This is an example task message represented as a Python dictionary:

 .. code-block:: python

-    {"task": "myapp.tasks.add",
-     "id": "54086c5e-6193-4575-8308-dbab76798756",
-     "args": [4, 4],
-     "kwargs": {}}
+    {'task': 'myapp.tasks.add',
+     'id': '54086c5e-6193-4575-8308-dbab76798756',
+     'args': [4, 4],
+     'kwargs': {}}

 .. _amqp-producers-consumers-brokers:

@@ -253,13 +253,13 @@ One for video, one for images and one default queue for everything else:
     from kombu import Exchange, Queue

     CELERY_QUEUES = (
-        Queue("default", Exchange("default"), routing_key="default"),
-        Queue("videos",  Exchange("media"),   routing_key="media.video"),
-        Queue("images",  Exchange("media"),   routing_key="media.image"),
+        Queue('default', Exchange('default'), routing_key='default'),
+        Queue('videos',  Exchange('media'),   routing_key='media.video'),
+        Queue('images',  Exchange('media'),   routing_key='media.image'),
     )
-    CELERY_DEFAULT_QUEUE = "default"
-    CELERY_DEFAULT_EXCHANGE_TYPE = "direct"
-    CELERY_DEFAULT_ROUTING_KEY = "default"
+    CELERY_DEFAULT_QUEUE = 'default'
+    CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
+    CELERY_DEFAULT_ROUTING_KEY = 'default'

 .. _amqp-exchange-types:

@@ -385,7 +385,7 @@ From now on all messages sent to the exchange ``testexchange`` with routing
 key ``testkey`` will be moved to this queue.  We can send a message by
 using the ``basic.publish`` command::

-    4> basic.publish "This is a message!" testexchange testkey
+    4> basic.publish 'This is a message!' testexchange testkey
     ok.

 Now that the message is sent we can retrieve it again.  We use the
@@ -444,25 +444,17 @@ One for video, one for images and one default queue for everything else:

 .. code-block:: python

-    CELERY_QUEUES = {
-        "default": {
-            "exchange": "default",
-            "routing_key": "default"},
-        "videos": {
-            "exchange": "media",
-            "exchange_type": "topic",
-            "routing_key": "media.video",
-        },
-        "images": {
-            "exchange": "media",
-            "exchange_type": "topic",
-            "routing_key": "media.image",
-        }
-    }
-    CELERY_DEFAULT_QUEUE = "default"
-    CELERY_DEFAULT_EXCHANGE = "default"
-    CELERY_DEFAULT_EXCHANGE_TYPE = "direct"
-    CELERY_DEFAULT_ROUTING_KEY = "default"
+    default_exchange = Exchange('default', type='direct')
+    media_exchange = Exchange('media', type='direct')
+
+    CELERY_QUEUES = (
+        Queue('default', default_exchange, routing_key='default'),
+        Queue('videos', media_exchange, routing_key='media.video'),
+        Queue('images', media_exchange, routing_key='media.image'),
+    )
+    CELERY_DEFAULT_QUEUE = 'default'
+    CELERY_DEFAULT_EXCHANGE = 'default'
+    CELERY_DEFAULT_ROUTING_KEY = 'default'

 Here, the :setting:`CELERY_DEFAULT_QUEUE` will be used to route tasks that
 don't have an explicit route.
@@ -503,23 +495,23 @@ All you need to define a new router is to create a class with a
     class MyRouter(object):

         def route_for_task(self, task, args=None, kwargs=None):
-            if task == "myapp.tasks.compress_video":
-                return {"exchange": "video",
-                        "exchange_type": "topic",
-                        "routing_key": "video.compress"}
+            if task == 'myapp.tasks.compress_video':
+                return {'exchange': 'video',
+                        'exchange_type': 'topic',
+                        'routing_key': 'video.compress'}
             return None

 If you return the ``queue`` key, it will expand with the defined settings of
 that queue in :setting:`CELERY_QUEUES`::

-    {"queue": "video", "routing_key": "video.compress"}
+    {'queue': 'video', 'routing_key': 'video.compress'}

     becomes -->

-        {"queue": "video",
-         "exchange": "video",
-         "exchange_type": "topic",
-         "routing_key": "video.compress"}
+        {'queue': 'video',
+         'exchange': 'video',
+         'exchange_type': 'topic',
+         'routing_key': 'video.compress'}


 You install router classes by adding them to the :setting:`CELERY_ROUTES`
@@ -529,7 +521,7 @@ setting::

 Router classes can also be added by name::

-    CELERY_ROUTES = ("myapp.routers.MyRouter", )
+    CELERY_ROUTES = ('myapp.routers.MyRouter', )


 For simple task name -> route mappings like the router example above,
@@ -538,9 +530,9 @@ same behavior:

 .. code-block:: python

-    CELERY_ROUTES = ({"myapp.tasks.compress_video": {
-                            "queue": "video",
-                            "routing_key": "video.compress"
+    CELERY_ROUTES = ({'myapp.tasks.compress_video': {
+                            'queue': 'video',
+                            'routing_key': 'video.compress'
                      }}, )

 The routers will then be traversed in order, stopping at the first router
@@ -556,9 +548,9 @@ Here is an example exchange ``bcast`` that uses this:

     from kombu.common import Broadcast

-    CELERY_QUEUES = (Broadcast("broadcast_tasks"), )
+    CELERY_QUEUES = (Broadcast('broadcast_tasks'), )

-    CELERY_ROUTES = {"tasks.reload_cache": "broadcast_tasks"}
+    CELERY_ROUTES = {'tasks.reload_cache': 'broadcast_tasks'}


 Now the ``tasks.reload_cache`` task will be sent to every

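A short sketch of how a custom router combines with a plain mapping in
:setting:`CELERY_ROUTES` (assuming the ``MyRouter`` class above lives in
``myapp/routers.py``); the routers are consulted in order::

    from myapp.routers import MyRouter

    CELERY_ROUTES = (
        MyRouter(),
        {'tasks.reload_cache': 'broadcast_tasks'},
    )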
+ 3 - 3
docs/userguide/security.rst

@@ -141,9 +141,9 @@ with the private key and certificate files located in :`/etc/ssl`.

 .. code-block:: python

-    CELERY_SECURITY_KEY = "/etc/ssl/private/worker.key"
-    CELERY_SECURITY_CERTIFICATE = "/etc/ssl/certs/worker.pem"
-    CELERY_SECURITY_CERT_STORE = "/etc/ssl/certs/\*.pem"
+    CELERY_SECURITY_KEY = '/etc/ssl/private/worker.key'
+    CELERY_SECURITY_CERTIFICATE = '/etc/ssl/certs/worker.pem'
+    CELERY_SECURITY_CERT_STORE = '/etc/ssl/certs/\*.pem'
     from celery.security import setup_security
     setup_security()


+ 8 - 8
docs/userguide/signals.rst

@@ -30,7 +30,7 @@ Example connecting to the :signal:`task_sent` signal:
     @task_sent.connect
     def task_sent_handler(sender=None, task_id=None, task=None, args=None,
                           kwargs=None, \*\*kwds):
-        print("Got signal task_sent for task id %s" % (task_id, ))
+        print('Got signal task_sent for task id %s' % (task_id, ))


 Some signals also have a sender which you can filter by. For example the
@@ -41,10 +41,10 @@ has been sent by providing the `sender` argument to

 .. code-block:: python

-    @task_sent.connect(task_sent_handler, sender="tasks.add")
+    @task_sent.connect(sender='tasks.add')
     def task_sent_handler(sender=None, task_id=None, task=None, args=None,
                           kwargs=None, \*\*kwds):
-        print("Got signal task_sent for task id %s" % (task_id, ))
+        print('Got signal task_sent for task id %s' % (task_id, ))

 .. _signal-ref:

@@ -193,9 +193,9 @@ to setup worker specific configuration:

     from celery.signals import celeryd_init

-    @celeryd_init.connect(sender="worker12.example.com")
+    @celeryd_init.connect(sender='worker12.example.com')
     def configure_worker12(conf=None, **kwargs):
-        conf.CELERY_DEFAULT_RATE_LIMIT = "10/m"
+        conf.CELERY_DEFAULT_RATE_LIMIT = '10/m'

 or to set up configuration for multiple workers you can omit specifying a
 sender when you connect:
@@ -206,9 +206,9 @@ sender when you connect:

     @celeryd_init.connect
     def configure_workers(sender=None, conf=None, **kwargs):
-        if sender in ("worker1.example.com", "worker2.example.com"):
-            conf.CELERY_DEFAULT_RATE_LIMIT = "10/m"
-        if sender == "worker3.example.com":
+        if sender in ('worker1.example.com', 'worker2.example.com'):
+            conf.CELERY_DEFAULT_RATE_LIMIT = '10/m'
+        if sender == 'worker3.example.com':
             conf.CELERYD_PREFETCH_MULTIPLIER = 0

 Provides arguments:

+ 29 - 29
docs/userguide/tasks.rst

@@ -63,7 +63,7 @@ these can be specified as arguments to the decorator:
 
 
 .. code-block:: python
 .. code-block:: python
 
 
-    @celery.task(serializer="json")
+    @celery.task(serializer='json')
     def create_user(username, password):
     def create_user(username, password):
         User.objects.create(username=username, password=password)
         User.objects.create(username=username, password=password)
 
 
@@ -110,7 +110,7 @@ For example:
 
 
 .. code-block:: python
 .. code-block:: python
 
 
-    >>> @celery.task(name="sum-of-two-numbers")
+    >>> @celery.task(name='sum-of-two-numbers')
     >>> def add(x, y):
     >>> def add(x, y):
     ...     return x + y
     ...     return x + y
 
 
@@ -123,7 +123,7 @@ defined in another module.
 
 
 .. code-block:: python
 .. code-block:: python
 
 
-    >>> @celery.task(name="tasks.add")
+    >>> @celery.task(name='tasks.add')
     >>> def add(x, y):
     >>> def add(x, y):
     ...     return x + y
     ...     return x + y
 
 
@@ -161,7 +161,7 @@ and an :exc:`~@NotRegistered` error will be raised by the worker.
 
 
 This is also the case if using Django and using `project.myapp`::
 This is also the case if using Django and using `project.myapp`::
 
 
-    INSTALLED_APPS = ("project.myapp", )
+    INSTALLED_APPS = ('project.myapp', )
 
 
 The worker will have the tasks registered as "project.myapp.tasks.*",
 The worker will have the tasks registered as "project.myapp.tasks.*",
 while this is what happens in the client if the module is imported as
 while this is what happens in the client if the module is imported as
@@ -180,7 +180,7 @@ add the project directory to the Python path::
     import sys
     sys.path.append(os.getcwd())
 
-    INSTALLED_APPS = ("myapp", )
+    INSTALLED_APPS = ('myapp', )
 
 This makes more sense from the reusable app perspective anyway.
 
@@ -227,7 +227,7 @@ An example task accessing information in the context is:
 
     @celery.task()
     def dump_context(x, y):
-        print("Executing task id %r, args: %r kwargs: %r" % (
+        print('Executing task id %r, args: %r kwargs: %r' % (
             dump_context.request.id, dump_context.request.args,
             dump_context.request.kwargs))
 
 .. _task-logging:
@@ -253,7 +253,7 @@ for all of your tasks at the top of your module:
 
     @celery.task()
     def add(x, y):
-        logger.info("Adding %s + %s" % (x, y))
+        logger.info('Adding %s + %s' % (x, y))
         return x + y
 
 Celery uses the standard Python logger library,
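 
 Since these are ordinary :mod:`logging` loggers, handlers and levels can
 be adjusted with the standard machinery. A minimal sketch, assuming the
 ``celery.task`` logger name referenced elsewhere in these docs and a
 hypothetical log file path:
 
 .. code-block:: python
 
     import logging
 
     # Route task log records to a file as well; 'tasks.log' is only an
     # illustrative path.
     logging.getLogger('celery.task').addHandler(
         logging.FileHandler('tasks.log'))
 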
@@ -479,7 +479,7 @@ General
     task is currently running.
 
     The host name and process id of the worker executing the task
-    will be available in the state metadata (e.g. `result.info["pid"]`)
+    will be available in the state metadata (e.g. `result.info['pid']`)
 
     The global default can be overridden by the
     :setting:`CELERY_TRACK_STARTED` setting.
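 
     With the setting enabled a client can detect the started state and
     read the metadata back. A minimal sketch, assuming the ``add`` task
     from the earlier examples (the ``'pid'`` key is the one documented
     above):
 
     .. code-block:: python
 
         result = add.delay(2, 2)
         if result.state == 'STARTED':
             # result.info holds the state metadata while the task runs.
             print('task running in pid %s' % result.info['pid'])
 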
@@ -671,8 +671,8 @@ Use :meth:`~@Task.update_state` to update a task's state::
     @celery.task()
     def upload_files(filenames):
         for i, file in enumerate(filenames):
-            current_task.update_state(state="PROGRESS",
-                meta={"current": i, "total": len(filenames)})
+            current_task.update_state(state='PROGRESS',
+                meta={'current': i, 'total': len(filenames)})
 
 
 Here we created the state `"PROGRESS"`, which tells any application
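 
 A client that knows about the custom state can read the metadata back
 while the task runs. A minimal sketch, assuming a configured result
 backend and a ``task_id`` captured when ``upload_files`` was sent:
 
 .. code-block:: python
 
     from celery.result import AsyncResult
 
     result = AsyncResult(task_id)
     if result.state == 'PROGRESS':
         # result.info exposes the meta dict given to update_state().
         print('%(current)s of %(total)s files uploaded' % result.info)
 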
@@ -786,7 +786,7 @@ If you have a task,
     class NaiveAuthenticateServer(Task):
 
         def __init__(self):
-            self.users = {"george": "password"}
+            self.users = {'george': 'password'}
 
         def run(self, username, password):
             try:
@@ -822,7 +822,7 @@ base class for new task types.
         abstract = True
 
         def after_return(self, *args, **kwargs):
-            print("Task returned: %r" % (self.request, ))
+            print('Task returned: %r' % (self.request, ))
 
 
     @celery.task(base=DebugTask)
@@ -1132,7 +1132,7 @@ that automatically expands some abbreviations in it:
 
     @celery.task()
     def expand_abbreviations(article):
-        article.body.replace("MyCorp", "My Corporation")
+        article.body = article.body.replace('MyCorp', 'My Corporation')
         article.save()
 
 First, an author creates an article and saves it, then the author
@@ -1154,7 +1154,7 @@ re-fetch the article in the task body:
     @celery.task()
     def expand_abbreviations(article_id):
         article = Article.objects.get(id=article_id)
-        article.body.replace("MyCorp", "My Corporation")
+        article.body = article.body.replace('MyCorp', 'My Corporation')
         article.save()
 
     >>> expand_abbreviations(article_id)
@@ -1228,19 +1228,19 @@ The comment model looks like this:
 
 
     class Comment(models.Model):
-        name = models.CharField(_("name"), max_length=64)
-        email_address = models.EmailField(_("email address"))
-        homepage = models.URLField(_("home page"),
+        name = models.CharField(_('name'), max_length=64)
+        email_address = models.EmailField(_('email address'))
+        homepage = models.URLField(_('home page'),
                                    blank=True, verify_exists=False)
-        comment = models.TextField(_("comment"))
-        pub_date = models.DateTimeField(_("Published date"),
+        comment = models.TextField(_('comment'))
+        pub_date = models.DateTimeField(_('Published date'),
                                         editable=False, auto_now_add=True)
-        is_spam = models.BooleanField(_("spam?"),
+        is_spam = models.BooleanField(_('spam?'),
                                       default=False, editable=False)
 
         class Meta:
-            verbose_name = _("comment")
-            verbose_name_plural = _("comments")
+            verbose_name = _('comment')
+            verbose_name_plural = _('comments')
 
 
 In the view where the comment is posted, we first write the comment
@@ -1268,11 +1268,11 @@ blog/views.py
             model = Comment
 
 
-    def add_comment(request, slug, template_name="comments/create.html"):
+    def add_comment(request, slug, template_name='comments/create.html'):
         post = get_object_or_404(Entry, slug=slug)
-        remote_addr = request.META.get("REMOTE_ADDR")
+        remote_addr = request.META.get('REMOTE_ADDR')
 
-        if request.method == "post":
+        if request.method == 'POST':
             form = CommentForm(request.POST, request.FILES)
             if form.is_valid():
                 comment = form.save()
@@ -1283,7 +1283,7 @@ blog/views.py
         else:
             form = CommentForm()
 
-        context = RequestContext(request, {"form": form})
+        context = RequestContext(request, {'form': form})
         return render_to_response(template_name, context_instance=context)
 
 
@@ -1315,13 +1315,13 @@ blog/tasks.py
     @celery.task()
     def spam_filter(comment_id, remote_addr=None):
         logger = spam_filter.get_logger()
-        logger.info("Running spam filter for comment %s" % comment_id)
+        logger.info('Running spam filter for comment %s' % comment_id)
 
         comment = Comment.objects.get(pk=comment_id)
         current_domain = Site.objects.get_current().domain
-        akismet = Akismet(settings.AKISMET_KEY, "http://%s" % domain)
+        akismet = Akismet(settings.AKISMET_KEY, 'http://%s' % current_domain)
         if not akismet.verify_key():
-            raise ImproperlyConfigured("Invalid AKISMET_KEY")
+            raise ImproperlyConfigured('Invalid AKISMET_KEY')
 
 
         is_spam = akismet.comment_check(user_ip=remote_addr,

+ 50 - 50
docs/userguide/workers.rst

@@ -173,15 +173,15 @@ Some remote control commands also have higher-level interfaces using
 
 Sending the :control:`rate_limit` command and keyword arguments::
 
-    >>> celery.control.broadcast("rate_limit",
-    ...                          arguments={"task_name": "myapp.mytask",
-    ...                                     "rate_limit": "200/m"})
+    >>> celery.control.broadcast('rate_limit',
+    ...                          arguments={'task_name': 'myapp.mytask',
+    ...                                     'rate_limit': '200/m'})
 
 This will send the command asynchronously, without waiting for a reply.
 To request a reply you have to use the `reply` argument::
 
-    >>> celery.control.broadcast("rate_limit", {
-    ...     "task_name": "myapp.mytask", "rate_limit": "200/m"}, reply=True)
+    >>> celery.control.broadcast('rate_limit', {
+    ...     'task_name': 'myapp.mytask', 'rate_limit': '200/m'}, reply=True)
     [{'worker1.example.com': 'New rate limit set successfully'},
      {'worker2.example.com': 'New rate limit set successfully'},
      {'worker3.example.com': 'New rate limit set successfully'}]
@@ -189,10 +189,10 @@ To request a reply you have to use the `reply` argument::
 Using the `destination` argument you can specify a list of workers
 to receive the command::
 
-    >>> celery.control.broadcast("rate_limit", {
-    ...     "task_name": "myapp.mytask",
-    ...     "rate_limit": "200/m"}, reply=True,
-    ...                             destination=["worker1.example.com"])
+    >>> celery.control.broadcast('rate_limit', {
+    ...     'task_name': 'myapp.mytask',
+    ...     'rate_limit': '200/m'}, reply=True,
+    ...                             destination=['worker1.example.com'])
     [{'worker1.example.com': 'New rate limit set successfully'}]
 
 
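 Replies arrive as a list with one single-key mapping per responding
 worker, so it is easy to check that every expected node answered. A
 minimal sketch, reusing the hypothetical worker names above:
 
 .. code-block:: python
 
     replies = celery.control.broadcast('rate_limit', {
         'task_name': 'myapp.mytask', 'rate_limit': '200/m'}, reply=True)
 
     # Collect the node names that replied and report any that did not.
     responded = set()
     for reply in replies:
         responded.update(reply)
     missing = set(['worker1.example.com', 'worker2.example.com']) - responded
     if missing:
         print('No reply from: %s' % ', '.join(sorted(missing)))
 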
@@ -226,13 +226,13 @@ Terminating a task also revokes it.
 
 ::
 
-    >>> celery.control.revoke("d9078da5-9915-40a0-bfa1-392c7bde42ed")
+    >>> celery.control.revoke('d9078da5-9915-40a0-bfa1-392c7bde42ed')
 
-    >>> celery.control.revoke("d9078da5-9915-40a0-bfa1-392c7bde42ed",
+    >>> celery.control.revoke('d9078da5-9915-40a0-bfa1-392c7bde42ed',
     ...                       terminate=True)
 
-    >>> celery.control.revoke("d9078da5-9915-40a0-bfa1-392c7bde42ed",
-    ...                       terminate=True, signal="SIGKILL")
+    >>> celery.control.revoke('d9078da5-9915-40a0-bfa1-392c7bde42ed',
+    ...                       terminate=True, signal='SIGKILL')
 
 .. _worker-persistent-revokes:
 
@@ -312,7 +312,7 @@ Example changing the time limit for the ``tasks.crawl_the_web`` task
 to have a soft time limit of one minute, and a hard time limit of
 two minutes::
 
-    >>> celery.control.time_limit("tasks.crawl_the_web",
+    >>> celery.control.time_limit('tasks.crawl_the_web',
                                   soft=60, hard=120, reply=True)
     [{'worker1.example.com': {'ok': 'time limits set successfully'}}]
 
@@ -331,13 +331,13 @@ Changing rate-limits at runtime
 Example changing the rate limit for the `myapp.mytask` task to accept
 200 tasks a minute on all servers::
 
-    >>> celery.control.rate_limit("myapp.mytask", "200/m")
+    >>> celery.control.rate_limit('myapp.mytask', '200/m')
 
 Example changing the rate limit on a single host by specifying the
 destination host name::
 
-    >>> celery.control.rate_limit("myapp.mytask", "200/m",
-    ...            destination=["worker1.example.com"])
+    >>> celery.control.rate_limit('myapp.mytask', '200/m',
+    ...            destination=['worker1.example.com'])
 
 .. warning::
 
@@ -464,23 +464,23 @@ being imported by the worker processes:
 
 .. code-block:: python
 
-    >>> celery.control.broadcast("pool_restart",
-    ...                          arguments={"modules": ["foo", "bar"]})
+    >>> celery.control.broadcast('pool_restart',
+    ...                          arguments={'modules': ['foo', 'bar']})
 
 Use the ``reload`` argument to reload modules it has already imported:
 
 .. code-block:: python
 
-    >>> celery.control.broadcast("pool_restart",
-    ...                          arguments={"modules": ["foo"],
-    ...                                     "reload": True})
+    >>> celery.control.broadcast('pool_restart',
+    ...                          arguments={'modules': ['foo'],
+    ...                                     'reload': True})
 
 If you don't specify any modules then all known tasks modules will
 be imported/reloaded:
 
 .. code-block:: python
 
-    >>> celery.control.broadcast("pool_restart", arguments={"reload": True})
+    >>> celery.control.broadcast('pool_restart', arguments={'reload': True})
 
 The ``modules`` argument is a list of modules to modify. ``reload``
 specifies whether to reload modules if they have previously been imported.
@@ -518,11 +518,11 @@ and it supports the same commands as the :class:`@Celery.control` interface.
     >>> i = celery.control.inspect()
 
     # Specify multiple nodes to inspect.
-    >>> i = celery.control.inspect(["worker1.example.com",
-                                    "worker2.example.com"])
+    >>> i = celery.control.inspect(['worker1.example.com',
+                                    'worker2.example.com'])
 
     # Specify a single node to inspect.
-    >>> i = celery.control.inspect("worker1.example.com")
+    >>> i = celery.control.inspect('worker1.example.com')
 
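 The returned inspect object can issue any of the inspect commands shown
 below. A minimal liveness-check sketch, assuming a falsy return value
 means that no worker replied within the timeout:
 
 .. code-block:: python
 
     i = celery.control.inspect(['worker1.example.com'])
     if not i.ping():
         # No reply in time; the node may be down or simply busy.
         print('worker1.example.com did not respond')
 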
 .. _worker-inspect-registered-tasks:
 
@@ -546,10 +546,10 @@ You can get a list of active tasks using
 
     >>> i.active()
     [{'worker1.example.com':
-        [{"name": "tasks.sleeptask",
-          "id": "32666e9b-809c-41fa-8e93-5ae0c80afbbf",
-          "args": "(8,)",
-          "kwargs": "{}"}]}]
+        [{'name': 'tasks.sleeptask',
+          'id': '32666e9b-809c-41fa-8e93-5ae0c80afbbf',
+          'args': '(8,)',
+          'kwargs': '{}'}]}]
 
 .. _worker-inspect-eta-schedule:
 
@@ -561,18 +561,18 @@ You can get a list of tasks waiting to be scheduled by using
 
     >>> i.scheduled()
     [{'worker1.example.com':
-        [{"eta": "2010-06-07 09:07:52", "priority": 0,
-          "request": {
-            "name": "tasks.sleeptask",
-            "id": "1a7980ea-8b19-413e-91d2-0b74f3844c4d",
-            "args": "[1]",
-            "kwargs": "{}"}},
-         {"eta": "2010-06-07 09:07:53", "priority": 0,
-          "request": {
-            "name": "tasks.sleeptask",
-            "id": "49661b9a-aa22-4120-94b7-9ee8031d219d",
-            "args": "[2]",
-            "kwargs": "{}"}}]}]
+        [{'eta': '2010-06-07 09:07:52', 'priority': 0,
+          'request': {
+            'name': 'tasks.sleeptask',
+            'id': '1a7980ea-8b19-413e-91d2-0b74f3844c4d',
+            'args': '[1]',
+            'kwargs': '{}'}},
+         {'eta': '2010-06-07 09:07:53', 'priority': 0,
+          'request': {
+            'name': 'tasks.sleeptask',
+            'id': '49661b9a-aa22-4120-94b7-9ee8031d219d',
+            'args': '[2]',
+            'kwargs': '{}'}}]}]
 
 .. note::
 
@@ -591,10 +591,10 @@ You can get a list of these using
 
     >>> i.reserved()
     [{'worker1.example.com':
-        [{"name": "tasks.sleeptask",
-          "id": "32666e9b-809c-41fa-8e93-5ae0c80afbbf",
-          "args": "(8,)",
-          "kwargs": "{}"}]}]
+        [{'name': 'tasks.sleeptask',
+          'id': '32666e9b-809c-41fa-8e93-5ae0c80afbbf',
+          'args': '(8,)',
+          'kwargs': '{}'}]}]
 
 
 Additional Commands
@@ -607,8 +607,8 @@ Remote shutdown
 
 This command will gracefully shut down the worker remotely::
 
-    >>> celery.control.broadcast("shutdown") # shutdown all workers
-    >>> celery.control.broadcast("shutdown, destination="worker1.example.com")
+    >>> celery.control.broadcast('shutdown') # shutdown all workers
+    >>> celery.control.broadcast('shutdown',
+    ...                          destination=['worker1.example.com'])
 
 .. control:: ping
 
@@ -668,6 +668,6 @@ Here's an example control command that restarts the broker connection:
 
     @Panel.register
     def reset_connection(panel):
-        panel.logger.critical("Connection reset by remote control.")
+        panel.logger.critical('Connection reset by remote control.')
         panel.consumer.reset_connection()
-        return {"ok": "connection reset"}
+        return {'ok': 'connection reset'}

+ 7 - 7
docs/whatsnew-2.5.rst

@@ -69,7 +69,7 @@ that could result in a race condition leading to an annoying warning.
     As an alternative to deleting the old exchange you can
     configure a new name for the exchange::
 
-        CELERY_RESULT_EXCHANGE = "celeryresults2"
+        CELERY_RESULT_EXCHANGE = 'celeryresults2'
 
     But you have to make sure that all clients and workers
     use this new setting, so they are updated to use the same
@@ -269,22 +269,22 @@ for the ``tasks.add`` task:
 
 .. code-block:: python
 
-    CELERY_ANNOTATIONS = {"tasks.add": {"rate_limit": "10/s"}}
+    CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
 
 or change the same for all tasks:
 
 .. code-block:: python
 
-   CELERY_ANNOTATIONS = {"*": {"rate_limit": "10/s"}}
+   CELERY_ANNOTATIONS = {'*': {'rate_limit': '10/s'}}
 
 You can change methods too, for example the ``on_failure`` handler:
 
 .. code-block:: python
 
     def my_on_failure(self, exc, task_id, args, kwargs, einfo):
-        print("Oh no! Task failed: %r" % (exc, ))
+        print('Oh no! Task failed: %r' % (exc, ))
 
-    CELERY_ANNOTATIONS = {"*": {"on_failure": my_on_failure}}
+    CELERY_ANNOTATIONS = {'*': {'on_failure': my_on_failure}}
 
 If you need more flexibility then you can also create objects
 that filter for tasks to annotate:
@@ -294,8 +294,8 @@ that filter for tasks to annotate:
     class MyAnnotate(object):
 
         def annotate(self, task):
-            if task.name.startswith("tasks."):
-                return {"rate_limit": "10/s"}
+            if task.name.startswith('tasks.'):
+                return {'rate_limit': '10/s'}
 
     CELERY_ANNOTATIONS = (MyAnnotate(), {...})
 

+ 16 - 16
docs/whatsnew-2.6.rst

@@ -160,7 +160,7 @@ Tasks can now have callbacks and errbacks, and dependencies are recorded
 
             .. code-block:: python
 
-                with open("graph.dot") as fh:
+                with open('graph.dot') as fh:
                     result.graph.to_dot(fh)
 
             which can then be used to produce an image::
@@ -192,7 +192,7 @@ Tasks can now have callbacks and errbacks, and dependencies are recorded
 
 - Adds ``subtask.link(subtask)`` + ``subtask.link_error(subtask)``
 
-    Shortcut to ``s.options.setdefault("link", []).append(subtask)``
+    Shortcut to ``s.options.setdefault('link', []).append(subtask)``
 
 - Adds ``subtask.flatten_links()``
 
@@ -214,7 +214,7 @@ Tasks can now have callbacks and errbacks, and dependencies are recorded
 
     as a shortcut to::
 
-        >>> task.subtask((arg1, arg2), {"kw": 1})
+        >>> task.subtask((arg1, arg2), {'kw': 1})
 
 - Tasks can be chained by using the ``|`` operator::
 
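     A minimal sketch of what the ``|`` operator enables, assuming
     hypothetical ``add`` and ``mul`` tasks and the ``s()`` shortcut
     described above:
 
     .. code-block:: python
 
         # mul receives add's result as its first argument.
         (add.s(2, 2) | mul.s(8)).apply_async()
 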
@@ -280,9 +280,9 @@ Additional control commands made public
     .. code-block:: python
 
         >>> celery.control.add_consumer(queue_name,
-        ...     destination=["w1.example.com"])
+        ...     destination=['w1.example.com'])
         >>> celery.control.cancel_consumer(queue_name,
-        ...     destination=["w1.example.com"])
+        ...     destination=['w1.example.com'])
 
     or using the :program:`celery control` command::
 
@@ -304,7 +304,7 @@ Additional control commands made public
     .. code-block:: python
 
         >>> celery.control.autoscale(max=10, min=5,
-        ...     destination=["w1.example.com"])
+        ...     destination=['w1.example.com'])
 
     or using the :program:`celery control` command::
 
@@ -319,8 +319,8 @@ Additional control commands made public
 
     .. code-block:: python
 
-        >>> celery.control.pool_grow(2, destination=["w1.example.com"])
-        >>> celery.contorl.pool_shrink(2, destination=["w1.example.com"])
+        >>> celery.control.pool_grow(2, destination=['w1.example.com'])
+        >>> celery.control.pool_shrink(2, destination=['w1.example.com'])
 
     or using the :program:`celery control` command::
 
@@ -388,7 +388,7 @@ Logging support now conforms better with best practices.
 
         @celery.task()
         def add(x, y):
-            logger.debug("Adding %r + %r" % (x, y))
+            logger.debug('Adding %r + %r' % (x, y))
             return x + y
 
     The resulting logger will then inherit from the ``"celery.task"`` logger
@@ -437,7 +437,7 @@ without also initializing the default app environment::
         abstract = True
 
         def __call__(self, *args, **kwargs):
-            print("CALLING %r" % (self, ))
+            print('CALLING %r' % (self, ))
             return self.run(*args, **kwargs)
 
     >>> DebugTask
@@ -513,13 +513,13 @@ In Other News
 
     .. code-block:: python
 
-        celery = Celery(broker="redis://")
+        celery = Celery(broker='redis://')
 
 - Result backends can now be set using a URL
 
     Currently only supported by redis.  Example use::
 
-        CELERY_RESULT_BACKEND = "redis://localhost/1"
+        CELERY_RESULT_BACKEND = 'redis://localhost/1'
 
 - Heartbeat frequency now every 5s, and frequency sent with event
 
@@ -544,11 +544,11 @@ In Other News
 
             @wraps(fun)
             def _inner(*args, **kwargs):
-                print("ARGS: %r" % (args, ))
+                print('ARGS: %r' % (args, ))
             return _inner
 
         CELERY_ANNOTATIONS = {
-            "tasks.add": {"@__call__": debug_args},
+            'tasks.add': {'@__call__': debug_args},
         }
 
     Also tasks are now always bound by class so that
@@ -616,7 +616,7 @@ In Other News
 
     .. code-block:: python
 
-        i = celery.control.inspect(connection=BrokerConnection("redis://"))
+        i = celery.control.inspect(connection=BrokerConnection('redis://'))
         i.active_queues()
 
 * Module :mod:`celery.app.task` is now a module instead of a package.
@@ -702,7 +702,7 @@ to create tasks out of methods::
         def __init__(self):
             self.value = 1
 
-        @celery.task(name="Counter.increment", filter=task_method)
+        @celery.task(name='Counter.increment', filter=task_method)
         def increment(self, n=1):
             self.value += 1
             return self.value
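 
     Such method tasks are then called like any other task. A minimal
     usage sketch, assuming the ``task_method`` filter imported from
     :mod:`celery.contrib.methods`:
 
     .. code-block:: python
 
         >>> counter = Counter()
         >>> counter.increment.delay(n=1)  # runs the 'Counter.increment' task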