Always use app = Celery...

Ask Solem, 11 years ago
Commit 6dad23d0a9
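In short, the convention this commit standardizes across the docs and examples,
shown as a minimal sketch (the ``tasks`` module name and the ``add`` task are
illustrative only)::

    from celery import Celery

    # Old docs style, now discouraged:
    #   celery = Celery('tasks', broker='amqp://guest@localhost//')
    # New style: always name the app instance ``app``.
    app = Celery('tasks', broker='amqp://guest@localhost//')


    @app.task
    def add(x, y):
        return x + y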

+ 8 - 2
celery/contrib/methods.py

@@ -46,9 +46,9 @@ or with any task decorator:
         from celery.task import task  # ALSO BAD

         # GOOD:
-        celery = Celery(...)
+        app = Celery(...)

-        @celery.task(filter=task_method)
+        @app.task(filter=task_method)
         def foo(self): pass

         # ALSO GOOD:
@@ -57,6 +57,12 @@ or with any task decorator:
         @current_app.task(filter=task_method)
         def foo(self): pass

+        # ALSO GOOD:
+        from celery import shared_task
+
+        @shared_task(filter=task_method)
+        def foo(self): pass
+
 Caveats
 -------


+ 6 - 6
docs/django/first-steps-with-django.rst

@@ -28,9 +28,9 @@ that defines the Celery instance:
     from celery import Celery
     from django.conf import settings

-    celery = Celery('proj.celery')
-    celery.config_from_object(settings)
-    celery.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')
+    app = Celery('proj.celery')
+    app.config_from_object(settings)
+    app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')

     @celery.task(bind=True)
     def debug_task(self):
@@ -41,7 +41,7 @@ First we create the Celery app instance:

 .. code-block:: python

-    celery = Celery('proj')
+    app = Celery('proj')

 Then we add the Django settings module as a configuration source
 for Celery.  This means that you don't have to use multiple
@@ -50,7 +50,7 @@ from the Django settings.

 .. code-block:: python

-    celery.config_from_object(settings)
+    app.config_from_object(settings)

 Next, a common practice for reusable apps is to define all tasks
 in a separate ``tasks.py`` module, and Celery does have a way to
@@ -58,7 +58,7 @@ autodiscover these modules:

 .. code-block:: python

-    celery.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')
+    app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')

 With the line above Celery will automatically discover tasks in reusable
 apps if you follow the ``tasks.py`` convention::
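Putting the hunks above together, the updated module from this page would read
roughly as follows (a sketch only: the ``debug_task`` body is not part of the
diff and is shown as a placeholder, and the decorator uses the new ``app`` name
even though the unchanged context line above still spells it ``celery``)::

    from celery import Celery
    from django.conf import settings

    app = Celery('proj.celery')
    app.config_from_object(settings)
    app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')


    @app.task(bind=True)
    def debug_task(self):
        # Placeholder body, not taken from the diff above.
        print('Request: {0!r}'.format(self.request))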

+ 1 - 1
docs/getting-started/brokers/ironmq.rst

@@ -27,7 +27,7 @@ First, you'll need to import the iron_celery library right after you import Cele
     from celery import Celery
     import iron_celery

-    celery = Celery('mytasks', broker='ironmq://', backend='ironcache://')
+    app = Celery('mytasks', broker='ironmq://', backend='ironcache://')

 You have to specify IronMQ in the broker URL::


+ 2 - 2
docs/getting-started/introduction.rst

@@ -85,9 +85,9 @@ Celery is…

             from celery import Celery

-            celery = Celery('hello', broker='amqp://guest@localhost//')
+            app = Celery('hello', broker='amqp://guest@localhost//')

-            @celery.task
+            @app.task
             def hello():
                 return 'hello world'


+ 4 - 4
docs/history/changelog-3.0.rst

@@ -1352,8 +1352,8 @@ If you're looking for versions prior to 3.0.x you should go to :ref:`history`.
     As an example, Celery can lazily use the configuration of a Flask app::

         flask_app = Flask()
-        celery = Celery()
-        celery.add_defaults(lambda: flask_app.config)
+        celery_app = Celery()
+        celery_app.add_defaults(lambda: flask_app.config)

 - Revoked tasks were not marked as revoked in the result backend (Issue #871).

@@ -2564,8 +2564,8 @@ See :ref:`whatsnew-3.0`.
     As an example, Celery can lazily use the configuration of a Flask app::

         flask_app = Flask()
-        celery = Celery()
-        celery.add_defaults(lambda: flask_app.config)
+        celery_app = Celery()
+        celery_app.add_defaults(lambda: flask_app.config)

 - Revoked tasks were not marked as revoked in the result backend (Issue #871).


+ 3 - 3
docs/internals/app-overview.rst

@@ -16,9 +16,9 @@ Examples
 Creating a Celery instance::

     >>> from celery import Celery
-    >>> celery = Celery()
-    >>> celery.config_from_object("celeryconfig")
-    >>> celery.config_from_envvar("CELERY_CONFIG_MODULE")
+    >>> app = Celery()
+    >>> app.config_from_object("celeryconfig")
+    >>> #app.config_from_envvar("CELERY_CONFIG_MODULE")


 Creating tasks:

+ 1 - 2
docs/internals/guide.rst

@@ -219,8 +219,7 @@ from a module in the project, this module could look something like this:

     from celery import Celery

-    celery = Celery()
-    celery.config_from_object(BROKER_URL='amqp://')
+    app = Celery(broker='amqp://')


 Module Overview

+ 2 - 2
docs/userguide/extending.rst

@@ -552,9 +552,9 @@ Example adding a custom option to the :program:`celery worker` command:
     from celery import Celery
     from optparse import make_option as Option

-    celery = Celery(broker='amqp://')
+    app = Celery(broker='amqp://')

-    celery.user_options['worker'].add(
+    app.user_options['worker'].add(
         Option('--enable-my-option', action='store_true', default=False,
                help='Enable custom option.'),
     )

+ 3 - 3
docs/whatsnew-3.0.rst

@@ -696,10 +696,10 @@ In Other News
 - App instance factory methods have been converted to be cached
   descriptors that creates a new subclass on access.

-    This means that e.g. ``celery.Worker`` is an actual class
+    This means that e.g. ``app.Worker`` is an actual class
     and will work as expected when::

-        class Worker(celery.Worker):
+        class Worker(app.Worker):
             ...

 - New signal: :signal:`task_success`.
@@ -711,7 +711,7 @@ In Other News

     .. code-block:: python

-        celery = Celery(broker='redis://')
+        app = Celery(broker='redis://')

 - Result backends can now be set using an URL


+ 3 - 3
docs/whatsnew-3.1.rst

@@ -232,8 +232,8 @@ In Other News
         from celery import Celery
         from optparse import make_option as Option

-        celery = Celery()
-        celery.user_options['worker'].add(
+        app = Celery()
+        app.user_options['worker'].add(
             Option('--my-argument'),
         )

@@ -247,7 +247,7 @@ In Other News

     Celery event messages have included a logical clock value for some time,
     but starting with this version that field is also used to order them
-    (if the monitor is using ``celery.events.state``).
+    (that is if the monitor is using :mod:`celery.events.state`).

     The logical clock is currently implemented using Lamport timestamps,
     which does not have a high degree of accuracy, but should be good
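
As background for the Lamport timestamp mention above, here is a minimal
standalone sketch of the idea (an illustration of the algorithm only, not
Celery's or Kombu's actual implementation)::

    import threading


    class LamportClock(object):
        """Minimal Lamport logical clock: increment on local events,
        and on receive jump past the sender's timestamp."""

        def __init__(self):
            self.value = 0
            self._lock = threading.Lock()

        def forward(self):
            # Local event (e.g. sending an event message).
            with self._lock:
                self.value += 1
                return self.value

        def adjust(self, other):
            # Received a message stamped with logical time ``other``.
            with self._lock:
                self.value = max(self.value, other) + 1
                return self.value


    clock = LamportClock()
    assert clock.forward() == 1    # stamp an outgoing event
    assert clock.adjust(41) == 42  # order past a received stamp of 41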

+ 9 - 3
examples/app/myapp.py

@@ -15,15 +15,21 @@ using the `-A` / `--app` option::

    $ celery -A myapp worker -l info

+With the `-A myproj` argument the program will search for an app
+instance in the module ``myproj``.  You can also specify an explicit
+name using the fully qualified form::
+
+    $ celery -A myapp:app worker -l info
+
 """
 from celery import Celery

-celery = Celery('myapp', broker='amqp://guest@localhost//')
+app = Celery('myapp', broker='amqp://guest@localhost//')


-@celery.task()
+@app.task()
 def add(x, y):
     return x + y

 if __name__ == '__main__':
-    celery.start()
+    app.start()

+ 3 - 3
examples/tutorial/tasks.py

@@ -2,12 +2,12 @@ from __future__ import absolute_import

 from celery import Celery

-celery = Celery('tasks', broker='amqp://')
+app = Celery('tasks', broker='amqp://')


-@celery.task()
+@app.task()
 def add(x, y):
     return x + y

 if __name__ == '__main__':
-    celery.start()
+    app.start()

+ 5 - 5
funtests/benchmarks/bench_worker.py

@@ -25,8 +25,8 @@ BROKER_TRANSPORT = os.environ.get('BROKER', 'librabbitmq')
 if hasattr(sys, 'pypy_version_info'):
     BROKER_TRANSPORT = 'pyamqp'

-celery = Celery('bench_worker')
-celery.conf.update(
+app = Celery('bench_worker')
+app.conf.update(
     BROKER_TRANSPORT=BROKER_TRANSPORT,
     BROKER_POOL_LIMIT=10,
     CELERYD_POOL='solo',
@@ -53,7 +53,7 @@ def tdiff(then):
     return time.time() - then


-@celery.task(cur=0, time_start=None, queue='bench.worker', bare=True)
+@app.task(cur=0, time_start=None, queue='bench.worker', bare=True)
 def it(_, n):
     i = it.cur  # use internal counter, as ordering can be skewed
                 # by previous runs, or the broker.
@@ -81,8 +81,8 @@ def bench_apply(n=DEFAULT_ITS):
 def bench_work(n=DEFAULT_ITS, loglevel='CRITICAL'):
     loglevel = os.environ.get('BENCH_LOGLEVEL') or loglevel
     if loglevel:
-        celery.log.setup_logging_subsystem(loglevel=loglevel)
-    worker = celery.WorkController(concurrency=15,
+        app.log.setup_logging_subsystem(loglevel=loglevel)
+    worker = app.WorkController(concurrency=15,
                                    queues=['bench.worker'])

     try: