
Fix stupid mistakes

Ask Solem, 16 years ago
commit 62229191be

6 changed files with 37 additions and 20 deletions
  1. README.rst (+1, -1)
  2. celery/__init__.py (+1, -1)
  3. celery/conf.py (+17, -5)
  4. celery/messaging.py (+11, -8)
  5. celery/task.py (+7, -3)
  6. celery/worker.py (+0, -2)

README.rst (+1, -1)

@@ -2,7 +2,7 @@
 celery - Distributed Task Queue for Django.
 ============================================
 
-:Version: 0.2.9
+:Version: 0.2.10
 
 Introduction
 ============

celery/__init__.py (+1, -1)

@@ -1,5 +1,5 @@
 """Distributed Task Queue for Django"""
-VERSION = (0, 2, 9)
+VERSION = (0, 2, 10)
 __version__ = ".".join(map(str, VERSION))
 __author__ = "Ask Solem"
 __contact__ = "askh@opera.com"

celery/conf.py (+17, -5)

@@ -3,7 +3,8 @@ from django.conf import settings
 import logging
 
 DEFAULT_AMQP_EXCHANGE = "celery"
-DEFAULT_AMQP_ROUTING_KEY = "celery"
+DEFAULT_AMQP_PUBLISHER_ROUTING_KEY = "celery"
+DEFAULT_AMQP_CONSUMER_ROUTING_KEY = "celery"
 DEFAULT_AMQP_CONSUMER_QUEUE = "celery"
 DEFAULT_AMQP_EXCHANGE_TYPE = "direct"
 DEFAULT_DAEMON_CONCURRENCY = 10
@@ -126,13 +127,24 @@ AMQP_EXCHANGE_TYPE = getattr(settings, "CELERY_AMQP_EXCHANGE_TYPE",
                         DEFAULT_AMQP_EXCHANGE_TYPE)
 
 """
-.. data:: AMQP_ROUTING_KEY
+.. data:: AMQP_PUBLISHER_ROUTING_KEY
    
-    The AMQP routing key.
+    The default AMQP routing key used when publishing tasks.
 
 """
-AMQP_ROUTING_KEY = getattr(settings, "CELERY_AMQP_ROUTING_KEY",
-                           DEFAULT_AMQP_ROUTING_KEY)
+AMQP_PUBLISHER_ROUTING_KEY = getattr(settings,
+                                "CELERY_AMQP_PUBLISHER_ROUTING_KEY",
+                                DEFAULT_AMQP_PUBLISHER_ROUTING_KEY)
+
+"""
+.. data:: AMQP_CONSUMER_ROUTING_KEY
+   
+    The AMQP routing key used when consuming tasks.
+
+"""
+AMQP_CONSUMER_ROUTING_KEY = getattr(settings,
+                                "CELERY_AMQP_CONSUMER_ROUTING_KEY",
+                                DEFAULT_AMQP_CONSUMER_ROUTING_KEY)
 
 """
 .. data:: AMQP_CONSUMER_QUEUE
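
With the routing key split in two, a Django project can now tune the publisher and consumer sides independently. A minimal, illustrative settings.py sketch: the CELERY_AMQP_EXCHANGE_TYPE, CELERY_AMQP_PUBLISHER_ROUTING_KEY and CELERY_AMQP_CONSUMER_ROUTING_KEY names come from the getattr() lookups above, CELERY_AMQP_CONSUMER_QUEUE is assumed to follow the same pattern, and the values shown simply repeat the defaults:

    # settings.py -- illustrative overrides; values shown are just the defaults
    CELERY_AMQP_EXCHANGE_TYPE = "direct"
    CELERY_AMQP_PUBLISHER_ROUTING_KEY = "celery"   # key used when publishing tasks
    CELERY_AMQP_CONSUMER_ROUTING_KEY = "celery"    # key the consumer queue is bound with
    CELERY_AMQP_CONSUMER_QUEUE = "celery"          # assumed setting name, same getattr pattern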

celery/messaging.py (+11, -8)

@@ -20,25 +20,26 @@ class NoProcessConsumer(Consumer):
 class TaskPublisher(Publisher):
     """The AMQP Task Publisher class."""
     exchange = conf.AMQP_EXCHANGE
-    routing_key = conf.AMQP_ROUTING_KEY
+    routing_key = conf.AMQP_PUBLISHER_ROUTING_KEY
 
     def delay_task(self, task_name, task_args, task_kwargs, **kwargs):
         """Delay task for execution by the celery nodes."""
-        return self._delay_task(task_name=task_name, args=task_args,
-                                kwargs=task_kwargs, **kwargs)
+        return self._delay_task(task_name=task_name, task_args=task_args,
+                                task_kwargs=task_kwargs, **kwargs)
 
     def delay_task_in_set(self, task_name, taskset_id, task_args,
             task_kwargs, **kwargs):
         """Delay a task which part of a task set."""
         return self._delay_task(task_name=task_name, part_of_set=taskset_id,
-                                args=task_args, kwargs=task_kwargs, **kwargs)
+                                task_args=task_args, task_kwargs=task_kwargs,
+                                **kwargs)
 
     def requeue_task(self, task_name, task_id, task_args, task_kwargs,
             part_of_set=None, **kwargs):
         """Requeue a failed task."""
         return self._delay_task(task_name=task_name, part_of_set=part_of_set,
-                                task_id=task_id, args=task_args,
-                                kwargs=task_kwargs, **kwargs)
+                                task_id=task_id, task_args=task_args,
+                                task_kwargs=task_kwargs, **kwargs)
 
     def _delay_task(self, task_name, task_id=None, part_of_set=None,
             task_args=None, task_kwargs=None, **kwargs):
@@ -58,7 +59,9 @@ class TaskPublisher(Publisher):
         }
         if part_of_set:
             message_data["taskset"] = part_of_set
-        self.send(message_data)
+        self.send(message_data,
+                routing_key=routing_key, priority=priority,
+                immediate=immediate, mandatory=mandatory)
         return task_id
 
 
@@ -66,5 +69,5 @@ class TaskConsumer(NoProcessConsumer):
     """The AMQP Task Consumer class."""
     queue = conf.AMQP_CONSUMER_QUEUE
     exchange = conf.AMQP_EXCHANGE
-    routing_key = conf.AMQP_ROUTING_KEY
+    routing_key = conf.AMQP_CONSUMER_ROUTING_KEY
     exchange_type = conf.AMQP_EXCHANGE_TYPE
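
The keyword rename matters because _delay_task() accepts **kwargs: the old args=/kwargs= keywords were swallowed silently instead of raising an error, so task_args/task_kwargs stayed None in the published message. A standalone sketch of the mismatch (not celery code, just a function mirroring the signature shown above):

    # Standalone illustration of the silent keyword mismatch fixed above.
    def _delay_task(task_name, task_id=None, part_of_set=None,
                    task_args=None, task_kwargs=None, **extra):
        return task_args, task_kwargs

    print(_delay_task("t", args=[2, 2], kwargs={"x": 1}))
    # -> (None, None): the old keywords vanished into **extra
    print(_delay_task("t", task_args=[2, 2], task_kwargs={"x": 1}))
    # -> ([2, 2], {'x': 1}): the corrected call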

celery/task.py (+7, -3)

@@ -18,8 +18,8 @@ import uuid
 import pickle
 
 
-def apply_async(task, args, kwargs, routing_key=None, immediate=None,
-        mandatory=None, connect_timeout=None, priority=None):
+def apply_async(task, args=None, kwargs=None, routing_key=None,
+        immediate=None, mandatory=None, connect_timeout=None, priority=None):
     """Run a task asynchronously by the celery daemon(s).
 
     :param task: The task to run (a callable object, or a :class:`Task`
@@ -45,6 +45,10 @@ def apply_async(task, args, kwargs, routing_key=None, immediate=None,
     :keyword priority: The task priority, a number between ``0`` and ``9``.
 
     """
+    if not args:
+        args = []
+    if not kwargs:
+        kwargs = {}
     message_opts = {"routing_key": routing_key,
                     "immediate": immediate,
                     "mandatory": mandatory,
@@ -262,7 +266,7 @@ class Task(object):
         return apply_async(cls, args, kwargs)
 
     @classmethod
-    def apply_async(cls, args, kwargs, **options):
+    def apply_async(cls, args=None, kwargs=None, **options):
         """Delay this task for execution by the ``celery`` daemon(s).
 
         :param args: positional arguments passed on to the task.
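
Since args and kwargs now default to None, a task can be dispatched without any arguments at all. A hedged usage sketch (MyTask and its run() signature are hypothetical examples, not part of this commit):

    from celery.task import Task

    class MyTask(Task):                 # hypothetical example task
        name = "myapp.mytask"

        def run(self, x=0, **kwargs):
            return x * 2

    MyTask.apply_async()                                 # no args/kwargs required any more
    MyTask.apply_async(args=[10], routing_key="celery")  # options still pass through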

celery/worker.py (+0, -2)

@@ -350,10 +350,8 @@ class WorkController(object):
                 time.sleep(1)
 
         while True:
-            print("!!!!! Running tick...")
             [event.tick() for event in events]
             try:
-                print("Trying to execute task.")
                 result, task_name, task_id = self.execute_next_task()
             except ValueError:
                 # execute_next_task didn't return a r/name/id tuple,