
Merge branch '2.5'

Conflicts:
	Changelog
	README.rst
	celery/__init__.py
	celery/app/__init__.py
	celery/app/task/__init__.py
	celery/apps/worker.py
	celery/bin/celeryctl.py
	celery/task/chords.py
	celery/task/trace.py
	celery/utils/dispatch/signal.py
	celery/worker/autoreload.py
	docs/includes/introduction.txt
	pavement.py
Ask Solem 13 years ago
parent
commit
786bc7276f

+ 1 - 0
AUTHORS

@@ -55,6 +55,7 @@ Honza Kral <honza.kral@gmail.com>
 Ignas Mikalajūnas <ignas.mikalajunas@gmail.com>
 Ionel Maries Cristian <contact@ionelmc.ro>
 Ionut Turturica <jonozzz@yahoo.com>
+Iurii Kriachko <iurii.kriachko@gmail.com>
 Ivan Metzlar <metzlar@gmail.com>
 Jannis Leidel <jannis@leidel.info>
 Jason Baker <amnorvend@gmail.com>

+ 118 - 0
Changelog

@@ -14,6 +14,124 @@
 
 See :ref:`whatsnew-2.6`.
 
+.. _version-2.5.2:
+
+2.5.2
+=====
+:release-date: 2012-04-13 04:30 P.M GMT
+
+.. _v252-news:
+
+News
+----
+
+- Now depends on Kombu 2.1.5.
+
+- Django documentation has been moved to the main Celery docs.
+
+    See :ref:`django`.
+
+- New :signal:`celeryd_init` signal can be used to configure workers
+  by hostname.
+
+- Signal.connect can now be used as a decorator.
+
+    Example:
+
+    .. code-block:: python
+
+        from celery.signals import task_sent
+
+        @task_sent.connect
+        def on_task_sent(**kwargs):
+            print("sent task: %r" % (kwargs, ))
+
+- Invalid task messages are now rejected instead of acked.
+
+    This means that they will be moved to the dead-letter queue
+    introduced in the latest RabbitMQ version (but must be enabled
+    manually, consult the RabbitMQ documentation).
+
+- Internal logging calls have been cleaned up to work
+  better with tools like Sentry.
+
+    Contributed by David Cramer.
+
+- New method ``subtask.clone()`` can be used to clone an existing
+  subtask with augmented arguments/options.
+
+    Example:
+
+    .. code-block:: python
+
+        >>> s = add.subtask((5, ))
+        >>> new = s.clone(args=(10, ), countdown=5)
+        >>> new.args
+        (10, 5)
+
+        >>> new.options
+        {"countdown": 5}
+
+- Chord callbacks are now triggered in eager mode.
+
+.. _v252-fixes:
+
+Fixes
+-----
+
+- Programs now verify that the pidfile is actually written correctly
+  (Issue #641).
+
+    Hopefully this will crash the worker immediately if the system
+    is out of space to store the complete pidfile.
+
+    In addition, we now verify that existing pidfiles contain
+    a new line so that a partially written pidfile is detected as broken,
+    as before doing:
+
+        echo -n "1" > celeryd.pid
+
+    would cause celeryd to think that an existing instance was already
+    running (init has pid 1 after all).
+
+- Fixed 2.5 compatibility issue with use of print_exception.
+
+    Fix contributed by Martin Melin.
+
+- Fixed 2.5 compatibility issue with imports.
+
+    Fix contributed by Iurii Kriachko.
+
+- All programs now fix up ``__package__`` when called as main.
+
+    This fixes compatibility with Python 2.5.
+
+    Fix contributed by Martin Melin.
+
+- celeryctl can now be configured on the command line.
+
+    Like with celeryd it is now possible to configure celery settings
+    on the command line for celeryctl::
+
+        $ celeryctl -- broker.pool_limit=30
+
+- Version dependency for python-dateutil fixed to be strict.
+
+    Fix contributed by Thomas Meson.
+
+- ``Task.__call__`` is now optimized away in the task tracer
+  rather than when the task class is created.
+
+    This fixes a bug where a custom ``__call__`` may mysteriously disappear.
+
+- Autoreload's inotify support has been improved.
+
+    Contributed by Mher Movsisyan.
+
+- The Django broker documentation has been improved.
+
+- Removed confusing warning at top of routing user guide.
+
 .. _version-2.5.1:
 
 2.5.1

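The :signal:`celeryd_init` entry above is terse, so here is a minimal sketch of
per-hostname configuration. It assumes the signal passes the worker hostname as
``sender`` and the app configuration as ``conf``; the hostname and setting used
are purely illustrative.

.. code-block:: python

    from celery.signals import celeryd_init

    @celeryd_init.connect
    def configure_worker(sender=None, conf=None, **kwargs):
        # sender is the worker's hostname (assumption); tweak settings per host
        if sender == "worker1.example.com":
            conf.CELERYD_PREFETCH_MULTIPLIER = 1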
+ 4 - 0
celery/__init__.py

@@ -5,7 +5,11 @@
 
 from __future__ import absolute_import
 
+<<<<<<< HEAD
 VERSION = (2, 6, 0, "a1")
+=======
+VERSION = (2, 5, 2)
+>>>>>>> 2.5
 __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
 __author__ = "Ask Solem"
 __contact__ = "ask@celeryproject.org"

+ 0 - 1
celery/app/__init__.py

@@ -79,5 +79,4 @@ if os.environ.get("CELERY_TRACE_APP"):  # pragma: no cover
 else:
     disable_trace()
 
-
 App = Celery  # XXX Compat

+ 7 - 4
celery/app/base.py

@@ -30,7 +30,10 @@ from celery.utils import cached_property, register_after_fork
 from celery.utils.functional import first
 from celery.utils.imports import instantiate, symbol_by_name
 
-from . import annotations
+from .annotations import (
+    _first_match, _first_match_any,
+    prepare as prepare_annotations,
+)
 from .builtins import load_builtin_tasks
 from .defaults import DEFAULTS, find_deprecated_settings
 from .state import _tls
@@ -221,10 +224,10 @@ class Celery(object):
 
     def annotate_task(self, task):
         if self.annotations:
-            match = annotations._first_match(self.annotations, task)
+            match = _first_match(self.annotations, task)
             for attr, value in (match or {}).iteritems():
                 setattr(task, attr, value)
-            match_any = annotations._first_match_any(self.annotations)
+            match_any = _first_match_any(self.annotations)
             for attr, value in (match_any or {}).iteritems():
                 setattr(task, attr, value)
 
@@ -235,7 +238,7 @@ class Celery(object):
 
     @cached_property
     def annotations(self):
-        return annotations.prepare(self.conf.CELERY_ANNOTATIONS)
+        return prepare_annotations(self.conf.CELERY_ANNOTATIONS)
 
     def __repr__(self):
         return "<Celery: %s:0x%x>" % (self.main or "__main__", id(self), )

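For context on the ``annotate_task`` changes above: ``_first_match`` looks up an
annotation entry by task name and ``_first_match_any`` handles the wildcard entry.
A typical configuration exercising both paths might look like this (the task name
and values are illustrative):

.. code-block:: python

    # celeryconfig.py / settings.py
    CELERY_ANNOTATIONS = {
        "tasks.add": {"rate_limit": "10/s"},  # resolved by _first_match
        "*": {"acks_late": True},             # resolved by _first_match_any
    }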
+ 5 - 2
celery/task/chords.py

@@ -28,6 +28,9 @@ class chord(object):
 
     def __call__(self, body, **options):
         tid = body.options.setdefault("task_id", uuid())
-        self.Chord.apply_async((list(self.tasks), body), self.options,
-                                **options)
+        result = self.Chord.apply_async((list(self.tasks), body),
+                                        self.options, **options)
+
+        if self.Chord.app.conf.CELERY_ALWAYS_EAGER:
+            return subtask(body).apply(args=(result.result.join(),))
         return body.type.AsyncResult(tid)

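The chords change above backs the "Chord callbacks are now triggered in eager mode"
changelog entry. A rough usage sketch, assuming ``CELERY_ALWAYS_EAGER`` is enabled
in the configuration (the tasks themselves are just examples):

.. code-block:: python

    from celery.task import task
    from celery.task.chords import chord

    @task
    def add(x, y):
        return x + y

    @task
    def tsum(numbers):
        return sum(numbers)

    # with CELERY_ALWAYS_EAGER the body callback now runs inline,
    # so the result is available immediately
    result = chord(add.subtask((i, i)) for i in range(10))(tsum.subtask())
    print(result.get())  # 90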
+ 3 - 1
celery/task/trace.py

@@ -25,6 +25,8 @@ import traceback
 
 from warnings import warn
 
+from kombu.utils import kwdict
+
 from celery import current_app
 from celery import states, signals
 from celery.app.state import _tls
@@ -133,7 +135,6 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
     # saving the extra method call and a line less in the stack trace.
     fun = task if defines_custom_call(task) else task.run
 
-    task = task or current_app.tasks[name]
     loader = loader or current_app.loader
     backend = task.backend
     ignore_result = task.ignore_result
@@ -163,6 +164,7 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
 
     def trace_task(uuid, args, kwargs, request=None):
         R = I = None
+        kwargs = kwdict(kwargs)
         try:
             _tls.current_task = task
             update_request(request or {}, args=args,

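The new ``kwdict(kwargs)`` call above exists because, on Python 2, keyword-argument
names must be native ``str`` while JSON-decoded message bodies carry unicode keys.
A rough, illustrative equivalent of what ``kombu.utils.kwdict`` does:

.. code-block:: python

    def kwdict(kwargs):
        # re-key the mapping so it can safely be splatted as **kwargs on Python 2
        return dict((key.encode("utf-8"), value)
                    for key, value in kwargs.items())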
+ 2 - 1
celery/worker/autoreload.py

@@ -157,8 +157,9 @@ class InotifyMonitor(_ProcessEvent):
             self._wm = pyinotify.WatchManager()
             self._notifier = pyinotify.Notifier(self._wm, self)
             add_watch = self._wm.add_watch
+            flags = pyinotify.IN_MODIFY | pyinotify.IN_ATTRIB
             for m in self._modules:
-                add_watch(m, pyinotify.IN_MODIFY | pyinotify.IN_ATTRIB)
+                add_watch(m, flags)
             self._notifier.loop()
         finally:
             if self._wm:

+ 162 - 0
docs/django/djdo/first-steps-with-django.rst

@@ -0,0 +1,162 @@
+=========================
+ First steps with Django
+=========================
+
+Configuring your Django project to use Celery
+=============================================
+
+You need four simple steps to use celery with your Django project.
+
+    1. Install the ``django-celery`` library::
+
+        $ pip install django-celery
+
+    2. Add the following lines to ``settings.py``::
+
+        import djcelery
+        djcelery.setup_loader()
+
+    3. Add ``djcelery`` to ``INSTALLED_APPS``.
+
+    4. Create the celery database tables.
+
+        If you are using south_ for schema migrations, you'll want to::
+
+            $ python manage.py migrate djcelery
+
+        For those who are not using south, a normal ``syncdb`` will work::
+
+            $ python manage.py syncdb
+
+.. _south: http://pypi.python.org/pypi/South/
+
+By default Celery uses `RabbitMQ`_ as the broker, but there are several
+alternatives to choose from, see :ref:`celerytut-broker`.
+
+All settings mentioned in the Celery documentation should be added
+to your Django project's ``settings.py`` module. For example
+we can configure the :setting:`BROKER_URL` setting to specify
+what broker to use::
+
+    BROKER_URL = "amqp://guest:guest@localhost:5672/"
+
+That's it.
+
+.. _`RabbitMQ`: http://www.rabbitmq.com/
+
+Special note for mod_wsgi users
+-------------------------------
+
+If you're using ``mod_wsgi`` to deploy your Django application you need to
+include the following in your ``.wsgi`` module::
+
+    import djcelery
+    djcelery.setup_loader()
+
+Defining and executing tasks
+============================
+
+Tasks are defined by wrapping functions in the ``@task`` decorator.
+It is a common practice to put these in their own module named ``tasks.py``,
+and the worker will automatically go through the apps in ``INSTALLED_APPS``
+to import these modules.
+
+For a simple demonstration we can create a new Django app called
+``celerytest``.  To create this app you need to be in the directory
+of your Django project where ``manage.py`` is located and execute::
+
+    $ python manage.py startapp celerytest
+
+After our new app has been created we can define our task by editing
+a new file called ``celerytest/tasks.py``:
+
+.. code-block:: python
+
+    from celery import task
+
+    @task
+    def add(x, y):
+        return x + y
+
+Our example task is pretty pointless, it just returns the sum of two
+arguments, but it will do for demonstration, and it is referenced in many
+parts of the Celery documentation.
+
+Starting the worker process
+===========================
+
+You can start a worker instance by using the ``celeryd`` manage command::
+
+    $ python manage.py celeryd --loglevel=info
+
+In production you probably want to run the worker in the
+background as a daemon, see `Running Celery as a daemon`_.
+For a complete listing of the command line options available, use the help command::
+
+    $ python manage.py help celeryd
+
+.. _`Running Celery as a Daemon`:
+    http://docs.celeryq.org/en/latest/cookbook/daemonizing.html
+
+Executing our task
+==================
+
+Now that the worker is running we can open up a new terminal to actually
+execute our task::
+
+    >>> from celerytest.tasks import add
+
+    >>> add.delay(2, 2)
+
+
+The ``delay`` method is a handy shortcut to the ``apply_async`` method which
+enables you to have greater control of the task execution.
+To read more about executing tasks, including specifying the time at which
+the task should execute see :ref:`guide-executing`.
+
+.. note::
+
+    Tasks need to be stored in a real module, they can't
+    be defined in the python shell or ipython/bpython. This is because the
+    worker server must be able to import the task function so that it can
+    execute it.
+
+The task should now be executed by the worker you started earlier,
+and you can verify that by looking at the worker's console output.
+
+Applying a task returns an :class:`~celery.result.AsyncResult` instance,
+which can be used to check the state of the task, wait for the task to finish
+or get its return value (or if the task failed, the exception and traceback).
+
+By default django-celery stores this state in the Django database.
+You may consider choosing an alternate result backend or disabling
+states altogether (see :ref:`task-result-backends`).
+
+To demonstrate how the results work we can execute the task again,
+but this time keep the result instance returned::
+
+    >>> result = add.delay(4, 4)
+    >>> result.ready() # returns True if the task has finished processing.
+    False
+    >>> result.result # task is not ready, so no return value yet.
+    None
+    >>> result.get()   # Waits until the task is done and returns the retval.
+    8
+    >>> result.result # direct access to result, doesn't re-raise errors.
+    8
+    >>> result.successful() # returns True if the task didn't end in failure.
+    True
+
+If the task raises an exception, the return value of ``result.successful()``
+will be ``False``, and ``result.result`` will contain the exception instance
+raised by the task.
+
+Where to go from here
+=====================
+
+To learn more you should read the `Celery User Guide`_, and the
+`Celery Documentation`_ in general.
+
+
+.. _`Celery User Guide`: http://docs.celeryproject.org/en/latest/userguide/
+.. _`Celery Documentation`: http://docs.celeryproject.org/

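The tutorial above only shows ``delay``; for the "greater control" that
``apply_async`` offers, a small illustration (the ten-second countdown is just an
example):

.. code-block:: python

    from celerytest.tasks import add

    # same call as add.delay(2, 2), but executed no earlier than 10 seconds from now
    result = add.apply_async(args=(2, 2), countdown=10)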
+ 15 - 0
docs/django/djdo/index.rst

@@ -0,0 +1,15 @@
+.. _django:
+
+=========
+ Django
+=========
+
+:Release: |version|
+:Date: |today|
+
+.. toctree::
+    :maxdepth: 2
+
+    first-steps-with-django
+    unit-testing
+

+ 56 - 0
docs/django/djdo/unit-testing.rst

@@ -0,0 +1,56 @@
+================
+ Unit Testing
+================
+
+Testing with Django
+-------------------
+
+The first problem you'll run into when trying to write a test that runs a
+task is that Django's test runner doesn't use the same database as your celery
+daemon is using. If you're using the database backend, this means that your
+tombstones won't show up in your test database and you won't be able to
+get the return value or check the status of your tasks.
+
+There are two ways to get around this. You can either take advantage of
+``CELERY_ALWAYS_EAGER = True`` to skip the daemon, or you can avoid testing
+anything that needs to check the status or result of a task.
+
+Using a custom test runner to test with celery
+----------------------------------------------
+
+If you're going the ``CELERY_ALWAYS_EAGER`` route, which is probably better than
+just never testing some parts of your app, a custom Django test runner does the
+trick. Celery provides a simple test runner, but it's easy enough to roll your
+own if you have other things that need to be done.
+http://docs.djangoproject.com/en/dev/topics/testing/#defining-a-test-runner
+
+For this example, we'll use the ``djcelery.contrib.test_runner`` to test the
+``add`` task from the :ref:`guide-tasks` examples in the Celery
+documentation.
+
+To enable the test runner, set the following settings:
+
+.. code-block:: python
+
+    TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner'
+
+Then we can put the tests in a ``tests.py`` somewhere:
+
+.. code-block:: python
+
+    from django.test import TestCase
+    from myapp.tasks import add
+
+    class AddTestCase(TestCase):
+
+        def testNoError(self):
+            """Test that the ``add`` task runs with no errors,
+            and returns the correct result."""
+            result = add.delay(8, 8)
+
+            self.assertEquals(result.get(), 16)
+            self.assertTrue(result.successful())
+
+
+This test assumes that you put your example ``add`` task in ``myapp.tasks``,
+so adjust the import for wherever you actually put it.

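As an alternative to the custom test runner shown above, the same effect can be had
by setting the eager options directly in a test settings module (assumption: these
are the settings the ``CeleryTestSuiteRunner`` relies on):

.. code-block:: python

    # test_settings.py -- run tasks inline so no worker or tombstone table is needed
    CELERY_ALWAYS_EAGER = True
    # re-raise task exceptions in the test process instead of recording them
    CELERY_EAGER_PROPAGATES_EXCEPTIONS = True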
+ 1 - 12
docs/getting-started/brokers/django.rst

@@ -21,21 +21,10 @@ configuration values.
 
     BROKER_URL = "django://"
 
-#. Add :mod:`django.transport.kombu` to `INSTALLED_APPS`::
+#. Add :mod:`kombu.transport.django` to `INSTALLED_APPS`::
 
     INSTALLED_APPS = ("kombu.transport.django", )
 
-#. Verify your database settings::
-
-    DATABASE_ENGINE = "mysql"
-    DATABASE_NAME = "mydb"
-    DATABASE_USER = "myuser"
-    DATABASE_PASSWORD = "secret"
-
-  The above is just an example, if you haven't configured your database before
-  you should read the Django database settings reference:
-  http://docs.djangoproject.com/en/1.1/ref/settings/#database-engine
-
 #. Sync your database schema::
 
     $ python manage.py syncdb

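Pulled together, the Django-database broker setup described above amounts to a
``settings.py`` fragment like this (sketch only; add your own apps to
``INSTALLED_APPS``):

.. code-block:: python

    # settings.py -- use the Django database as the message broker
    BROKER_URL = "django://"

    INSTALLED_APPS = (
        "kombu.transport.django",
    )

    # afterwards create the transport's tables with:  python manage.py syncdb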
+ 1 - 0
docs/index.rst

@@ -14,6 +14,7 @@ Contents:
     :maxdepth: 1
 
     configuration
+    django
     cookbook/index
     django/index
     contributing

+ 1 - 2
pavement.py

@@ -97,8 +97,7 @@ def flake8(options):
 ])
 def flakeplus(options):
     noerror = getattr(options, "noerror", False)
-    sh("flakeplus celery",
-       ignore_error=noerror)
+    sh("flakeplus celery", ignore_error=noerror)
 
 
 @task