
Merge branch 'paltman/scheduled-periodic-task' into crontab

Ask Solem 15 years ago
Parent commit: b1e70fe1e7

+ 22 - 22
Changelog

@@ -186,28 +186,28 @@ Remote control commands
 
 	Dumps the worker's currently registered ETA schedule.
 
-		>>> from celery.task.control import broadcast
-		>>> broadcast("dump_schedule", reply=True)
-		[{'w1': []},
-		{'w3': []},
-		{'w2': ['0. 2010-05-12 11:06:00 pri0 <TaskWrapper:
-					{name:"opalfeeds.tasks.refresh_feed_slice",
-					 id:"95b45760-4e73-4ce8-8eac-f100aa80273a",
-					 args:"(<Feeds freq_max:3600 freq_min:60
-								   start:2184.0 stop:3276.0>,)",
-					 kwargs:"{'page': 2}"}>']},
-		{'w4': ['0. 2010-05-12 11:00:00 pri0 <TaskWrapper:
-					{name:"opalfeeds.tasks.refresh_feed_slice",
-					 id:"c053480b-58fb-422f-ae68-8d30a464edfe",
-					 args:"(<Feeds freq_max:3600 freq_min:60
-								   start:1092.0 stop:2184.0>,)",
-					 kwargs:"{\'page\': 1}"}>',
-				'1. 2010-05-12 11:12:00 pri0 <TaskWrapper:
-					{name:"opalfeeds.tasks.refresh_feed_slice",
-					 id:"ab8bc59e-6cf8-44b8-88d0-f1af57789758",
-					 args:"(<Feeds freq_max:3600 freq_min:60
-								   start:3276.0 stop:4365>,)",
-					 kwargs:"{\'page\': 3}"}>']}]
+        >>> from celery.task.control import broadcast
+        >>> broadcast("dump_schedule", reply=True)
+        [{'w1': []},
+         {'w3': []},
+         {'w2': ['0. 2010-05-12 11:06:00 pri0 <TaskWrapper:
+                    {name:"opalfeeds.tasks.refresh_feed_slice",
+                     id:"95b45760-4e73-4ce8-8eac-f100aa80273a",
+                     args:"(<Feeds freq_max:3600 freq_min:60
+                                   start:2184.0 stop:3276.0>,)",
+                     kwargs:"{'page': 2}"}>']},
+         {'w4': ['0. 2010-05-12 11:00:00 pri0 <TaskWrapper:
+                    {name:"opalfeeds.tasks.refresh_feed_slice",
+                     id:"c053480b-58fb-422f-ae68-8d30a464edfe",
+                     args:"(<Feeds freq_max:3600 freq_min:60
+                                   start:1092.0 stop:2184.0>,)",
+                     kwargs:"{\'page\': 1}"}>',
+                '1. 2010-05-12 11:12:00 pri0 <TaskWrapper:
+                    {name:"opalfeeds.tasks.refresh_feed_slice",
+                     id:"ab8bc59e-6cf8-44b8-88d0-f1af57789758",
+                     args:"(<Feeds freq_max:3600 freq_min:60
+                                   start:3276.0 stop:4365>,)",
+                     kwargs:"{\'page\': 3}"}>']}]
 
 1.0.2 [2010-03-31 12:50 P.M. CET]
 ================================

+ 8 - 0
FAQ

@@ -274,6 +274,14 @@ Windows: The ``-B`` / ``--beat`` option to celeryd doesn't work?
 **Answer**: That's right. Run ``celerybeat`` and ``celeryd`` as separate
 services instead.
 
+Tasks
+=====
+
+How can I reuse the same connection when applying tasks?
+--------------------------------------------------------
+
+**Answer**: See :doc:`userguide/executing`.
+
 Results
 =======
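
The referenced :doc:`userguide/executing` covers reusing one broker
connection across several ``apply_async`` calls. A rough sketch against the
1.0-era API (``establish_connection`` and the ``connection`` argument to
``apply_async`` are assumed from that era's documentation;
``myapp.tasks.add`` is a hypothetical task)::

    # Sketch only: assumes the celery 1.0-era API; names may differ
    # in other versions.
    from celery.messaging import establish_connection
    from myapp.tasks import add  # hypothetical task module

    numbers = [(2, 2), (4, 4), (8, 8), (16, 16)]
    results = []
    connection = establish_connection()  # one connection for all calls
    try:
        for args in numbers:
            # Reuse the open connection instead of creating one per task.
            results.append(add.apply_async(args=args, connection=connection))
    finally:
        connection.close()  # close once, after all tasks are published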
 

+ 2 - 0
celery/conf.py

@@ -61,6 +61,7 @@ _DEFAULTS = {
     "CELERY_EVENT_EXCHANGE": "celeryevent",
     "CELERY_EVENT_EXCHANGE_TYPE": "direct",
     "CELERY_EVENT_ROUTING_KEY": "celeryevent",
+    "CELERY_EVENT_SERIALIZER": "pickle",
     "CELERY_RESULT_EXCHANGE": "celeryresults",
     "CELERY_MAX_CACHED_RESULTS": 5000,
     "CELERY_TRACK_STARTED": False,
@@ -200,6 +201,7 @@ EVENT_QUEUE = _get("CELERY_EVENT_QUEUE")
 EVENT_EXCHANGE = _get("CELERY_EVENT_EXCHANGE")
 EVENT_EXCHANGE_TYPE = _get("CELERY_EVENT_EXCHANGE_TYPE")
 EVENT_ROUTING_KEY = _get("CELERY_EVENT_ROUTING_KEY")
+EVENT_SERIALIZER = _get("CELERY_EVENT_SERIALIZER")
 
 # :--- Broker connections                           <-   --   --- - ----- -- #
 BROKER_CONNECTION_TIMEOUT = _get("CELERY_BROKER_CONNECTION_TIMEOUT",

+ 1 - 0
celery/messaging.py

@@ -125,6 +125,7 @@ class EventPublisher(Publisher):
     exchange = conf.EVENT_EXCHANGE
     exchange_type = conf.EVENT_EXCHANGE_TYPE
     routing_key = conf.EVENT_ROUTING_KEY
+    serializer = conf.EVENT_SERIALIZER
 
 
 class EventConsumer(Consumer):
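
Together with the ``conf.py`` hunk above, this makes the event serializer
configurable: ``EventPublisher`` now encodes event messages with whatever
``CELERY_EVENT_SERIALIZER`` names, defaulting to ``pickle``. A minimal
configuration sketch (choosing ``json`` here is an assumption; any
serializer the messaging layer supports should work)::

    # celeryconfig.py -- sketch only
    CELERY_EVENT_SERIALIZER = "json"  # default is "pickle"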

+ 12 - 0
celery/task/base.py

@@ -740,6 +740,9 @@ class crontab(schedule):
         represents the day of week that execution should occur.
 
     """
+    daynames = "sun", "mon", "tue", "wed", "thu", "fri", "sat"
+    weekdays = dict((name, dow) for name, dow in zip(daynames, range(7)))
+
     def __init__(self, minute=None, hour=None, day_of_week=None,
             nowfun=datetime.now):
         self.hour = hour                  # (0 - 23)
@@ -747,6 +750,15 @@ class crontab(schedule):
         self.day_of_week = day_of_week    # (0 - 6) (Sunday=0)
         self.nowfun = nowfun
 
+        if isinstance(self.day_of_week, basestring):
+            abbreviation = self.day_of_week[0:3].lower()
+            try:
+                self.day_of_week = self.weekdays[abbreviation]
+            except KeyError:
+                # Show original day name in exception, instead of abbr.
+                raise KeyError(self.day_of_week)
+
+
     def remaining_estimate(self, last_run_at):
         # remaining_estimate controls the frequency of scheduler
         # ticks. The scheduler needs to wake up every second in this case.
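
With this change ``crontab`` accepts a weekday name as well as an integer
0-6 (Sunday=0). Only the first three letters of the name are significant,
and an unknown name raises ``KeyError`` carrying the original string. A
short sketch of equivalent schedules (``RefreshFeeds`` is a hypothetical
task)::

    from celery.task.base import PeriodicTask, crontab

    class RefreshFeeds(PeriodicTask):
        # All three spellings mean Thursday (day 4, Sunday=0):
        run_every = crontab(hour=7, minute=30, day_of_week="thursday")
        # equivalent: crontab(hour=7, minute=30, day_of_week="thu")
        # equivalent: crontab(hour=7, minute=30, day_of_week=4)

        def run(self, **kwargs):
            pass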

+ 1 - 1
celery/tests/test_task.py

@@ -483,7 +483,7 @@ class DailyPeriodic(task.PeriodicTask):
 
 
 class WeeklyPeriodic(task.PeriodicTask):
-    run_every = task.crontab(hour=7, minute=30, day_of_week=4)
+    run_every = task.crontab(hour=7, minute=30, day_of_week="thursday")
 
 
 def patch_crontab_nowfun(cls, retval):

+ 4 - 0
contrib/supervisord/celeryd.conf

@@ -20,6 +20,10 @@ autostart=true
 autorestart=true
 startsecs=10
 
+; Need to wait for currently executing tasks to finish at shutdown.
+; Increase this if you have very long running tasks.
+stopwaitsecs = 600
+
 ; if rabbitmq is supervised, set its priority higher
 ; so it starts first
 priority=998

+ 4 - 0
contrib/supervisord/django/celeryd.conf

@@ -13,6 +13,10 @@ autostart=true
 autorestart=true
 startsecs=10
 
+; Need to wait for currently executing tasks to finish at shutdown.
+; Increase this if you have very long running tasks.
+stopwaitsecs = 600
+
 ; if rabbitmq is supervised, set its priority higher
 ; so it starts first
 priority=998

+ 5 - 0
docs/internals/moduleindex.rst

@@ -8,6 +8,11 @@ Worker
 celery.worker
 -------------
 
+* :class:`celery.worker.WorkController`
+
+This is the worker's main process. It starts and stops all the components
+required by the worker: Pool, Mediator, Scheduler, ClockService, and Listener.
+
 celery.worker.job
 -----------------
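
Returning to the ``WorkController`` entry added above: a hedged sketch of
embedding the worker's main process directly (the constructor arguments and
``start()`` call are assumed from how the ``celeryd`` program wires it up in
this era; they may differ between versions)::

    # Sketch only: assumes the 1.0-era WorkController interface.
    import logging

    from celery.worker import WorkController

    worker = WorkController(concurrency=4, loglevel=logging.INFO)
    worker.start()  # blocks: runs the Pool, Mediator, Scheduler,
                    # ClockService and broker Listener until stopped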
 

+ 10 - 1
docs/tutorials/external.rst

@@ -41,7 +41,8 @@ http://robertpogorzelski.com/blog/2009/09/10/rabbitmq-celery-and-django/
 Message Queues, Django and Celery Quick Start
 =============================================
 
-Celery tutorial by `Rich Leland`_, the installation section is Mac OS X specific:
+Celery tutorial by `Rich Leland`_, the installation section is Mac OS X
+specific:
 http://mathematism.com/2010/feb/16/message-queues-django-and-celery-quick-start/
 
 .. _`Rich Leland`: http://twitter.com/richleland
@@ -55,6 +56,14 @@ Background task processing and deferred execution in Django
 
 .. _`Alon Swartz`: http://twitter.com/alonswartz
 
+Build a processing queue [...] in less than a day using RabbitMQ and Celery
+===========================================================================
+
+Tutorial in 2 parts written by `Tim Bull`_:
+http://timbull.com/build-a-processing-queue-with-multi-threading
+
+.. _`Tim Bull`: http://twitter.com/timbull
+
 How to get celeryd to work on FreeBSD
 =====================================