瀏覽代碼

Project changes name to celery

Ask Solem 16 年之前
父節點
當前提交
71face9ab0
共有 18 個文件被更改,包括 100 次插入和 102 次刪除
  1. 1 1
      MANIFEST.in
  2. 18 18
      README.rst
  3. 1 1
      celery/__init__.py
  4. 0 0
      celery/bin/__init__.py
  5. 11 11
      celery/bin/celeryd
  6. 9 9
      celery/conf.py
  7. 0 0
      celery/discovery.py
  8. 1 1
      celery/log.py
  9. 1 1
      celery/managers.py
  10. 7 7
      celery/messaging.py
  11. 2 2
      celery/models.py
  12. 1 1
      celery/platform.py
  13. 0 0
      celery/process.py
  14. 1 1
      celery/registry.py
  15. 6 6
      celery/task.py
  16. 9 9
      celery/worker.py
  17. 24 26
      contrib/debian/init.d/celeryd
  18. 8 8
      setup.py

+ 1 - 1
MANIFEST.in

@@ -3,4 +3,4 @@ include README.rst
 include MANIFEST.in
 include LICENSE
 include Changelog
-recursive-include crunchy *
+recursive-include celery *

+ 18 - 18
README.rst

@@ -1,5 +1,5 @@
 ============================================
-crunchy - Distributed Task Queue for Django.
+celery - Distributed Task Queue for Django.
 ============================================
 
 :Authors:
@@ -9,22 +9,22 @@ crunchy - Distributed Task Queue for Django.
 Introduction
 ------------
 
-``crunchy`` is a distributed task queue framework for Django.
+``celery`` is a distributed task queue framework for Django.
 More information will follow.
 
 Installation
 =============
 
-You can install ``crunchy`` either via the Python Package Index (PyPI)
+You can install ``celery`` either via the Python Package Index (PyPI)
 or from source.
 
 To install using ``pip``,::
 
-    $ pip install crunchy
+    $ pip install celery
 
 To install using ``easy_install``,::
 
-    $ easy_install crunchy
+    $ easy_install celery
 
 If you have downloaded a source tarball you can install it
 by doing the following,::
@@ -41,7 +41,7 @@ Have to write a cool tutorial, but here is some simple usage info.
 and you need to have the amqp server setup in your settings file, as described
 in the `carrot distribution README`_.
 
-*Note* If you're running ``SQLite`` as the database backend, ``crunchd`` will
+*Note* If you're running ``SQLite`` as the database backend, ``celeryd`` will
 only be able to process one message at a time, this because ``SQLite`` doesn't
 allow concurrent writes.
 
@@ -52,8 +52,8 @@ allow concurrent writes.
 Defining tasks
 --------------
 
-    >>> from crunchy.task import tasks
-    >>> from crunchy.log import setup_logger
+    >>> from celery.task import tasks
+    >>> from celery.log import setup_logger
     >>> def do_something(some_arg, **kwargs):
     ...     logger = setup_logger(**kwargs)
     ...     logger.info("Did something: %s" % some_arg)
@@ -61,20 +61,20 @@ Defining tasks
 
 *Note* Task functions only supports keyword arguments.
 
-Tell the crunch daemon to run a task
+Tell the celery daemon to run a task
 -------------------------------------
 
-    >>> from crunchy.task import delay_task
+    >>> from celery.task import delay_task
     >>> delay_task("do_something", some_arg="foo bar baz")
 
 
-Running the crunch daemon
+Running the celery daemon
 --------------------------
 
 ::
 
     $ cd mydjangoproject
-    $ env DJANGO_SETTINGS_MODULE=settings crunchd
+    $ env DJANGO_SETTINGS_MODULE=settings celeryd
     [....]
     [2009-04-23 17:44:05,115: INFO/Process-1] Did something: foo bar baz
     [2009-04-23 17:44:05,118: INFO/MainProcess] Waiting for queue.
@@ -85,14 +85,14 @@ Running the crunch daemon
 Autodiscovery of tasks
 -----------------------
 
-``crunchy`` has an autodiscovery feature like the Django Admin, that
+``celery`` has an autodiscovery feature like the Django Admin, that
 automatically loads any ``tasks.py`` module in the applications listed
 in ``settings.INSTALLED_APPS``.
 
 A good place to add this command could be in your ``urls.py``,
 ::
 
-    from crunchy.task import tasks
+    from celery.task import tasks
     tasks.autodiscover()
 
 
@@ -100,8 +100,8 @@ A good place to add this command could be in your ``urls.py``,
 Then you can add new tasks in your applications ``tasks.py`` module,
 ::
 
-    from crunchy.task import tasks
-    from crunchy.log import setup_logger
+    from celery.task import tasks
+    from celery.log import setup_logger
     from clickcounter.models import ClickCount
 
     def increment_click(for_url, **kwargs):
@@ -121,7 +121,7 @@ Periodic tasks are tasks that are run every ``n`` seconds. They don't
 support extra arguments. Here's an example of a periodic task:
 
 
-    >>> from crunchy.task import tasks, PeriodicTask
+    >>> from celery.task import tasks, PeriodicTask
     >>> class MyPeriodicTask(PeriodicTask):
     ...     name = "foo.my-periodic-task"
     ...     run_every = 30 # seconds
@@ -133,7 +133,7 @@ support extra arguments. Here's an example of a periodic task:
     >>> tasks.register(MyPeriodicTask)
 
 
-For periodic tasks to work you need to add crunchy to ``INSTALLED_APPS``,
+For periodic tasks to work you need to add ``celery`` to ``INSTALLED_APPS``,
 and issue a ``syncdb``.
 
 License

+ 1 - 1
crunchy/__init__.py → celery/__init__.py

@@ -3,5 +3,5 @@ VERSION = (0, 1, 1)
 __version__ = ".".join(map(str, VERSION))
 __author__ = "Ask Solem"
 __contact__ = "askh@opera.com"
-__homepage__ = "http://github.com/ask/crunchy/"
+__homepage__ = "http://github.com/ask/celery/"
 __docformat__ = "restructuredtext"

+ 0 - 0
crunchy/bin/__init__.py → celery/bin/__init__.py


+ 11 - 11
crunchy/bin/crunchd → celery/bin/celeryd

@@ -3,13 +3,13 @@ import os
 import sys
 sys.path.append(os.getcwd())
 from django.conf import settings
-from crunchy.platform import PIDFile, daemonize, remove_pidfile
-from crunchy.log import setup_logger
-from crunchy.conf import LOG_LEVELS, DAEMON_LOG_FILE, DAEMON_LOG_LEVEL
-from crunchy.conf import DAEMON_CONCURRENCY, DAEMON_PID_FILE
-from crunchy.conf import QUEUE_WAKEUP_AFTER
-from crunchy import discovery
-from crunchy.worker import TaskDaemon
+from celery.platform import PIDFile, daemonize, remove_pidfile
+from celery.log import setup_logger
+from celery.conf import LOG_LEVELS, DAEMON_LOG_FILE, DAEMON_LOG_LEVEL
+from celery.conf import DAEMON_CONCURRENCY, DAEMON_PID_FILE
+from celery.conf import QUEUE_WAKEUP_AFTER
+from celery import discovery
+from celery.worker import TaskDaemon
 import traceback
 import optparse
 import atexit
@@ -25,7 +25,7 @@ def main(concurrency=DAEMON_CONCURRENCY, daemon=False,
                 UserWarning)
         concurrency = 1
     if daemon:
-        sys.stderr.write("Launching crunchd in the background...\n")
+        sys.stderr.write("Launching celeryd in the background...\n")
         pidfile_handler = PIDFile(pidfile)
         pidfile_handler.check()
         daemonize(pidfile=pidfile_handler)
@@ -34,15 +34,15 @@ def main(concurrency=DAEMON_CONCURRENCY, daemon=False,
         logfile = None # log to stderr when not running as daemon.
 
     discovery.autodiscover()
-    crunchd = TaskDaemon(concurrency=concurrency,
+    celeryd = TaskDaemon(concurrency=concurrency,
                                loglevel=loglevel,
                                logfile=logfile,
                                queue_wakeup_after=queue_wakeup_after)
     try:
-        crunchd.run()
+        celeryd.run()
     except Exception, e:
         raise
-        emergency_error(logfile, "crunchd raised exception %s: %s\n%s" % (
+        emergency_error(logfile, "celeryd raised exception %s: %s\n%s" % (
                             e.__class__, e, traceback.format_exc()))
 
 

+ 9 - 9
crunchy/conf.py → celery/conf.py

@@ -12,7 +12,7 @@ DEFAULT_QUEUE_WAKEUP_AFTER = 0.3
 # every ``EMPTY_MSG_EMIT_EVERY`` *seconds*.
 DEFAULT_EMPTY_MSG_EMIT_EVERY = 5 
 
-DEFAULT_DAEMON_PID_FILE = "crunchd.pid"
+DEFAULT_DAEMON_PID_FILE = "celeryd.pid"
 
 # The format we log messages in.
 DEFAULT_LOG_FMT = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'
@@ -21,7 +21,7 @@ DEFAULT_LOG_FMT = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'
 DEFAULT_DAEMON_LOG_LEVEL = "INFO"
 
 # Default log file
-DEFAULT_DAEMON_LOG_FILE = "crunchd.log"
+DEFAULT_DAEMON_LOG_FILE = "celeryd.log"
 
 # Table of loglevels to constants for use in settings.py.
 LOG_LEVELS = {
@@ -34,18 +34,18 @@ LOG_LEVELS = {
     "FATAL": logging.FATAL,
 }
 
-LOG_FORMAT = getattr(settings, "CRUNCHD_DAEMON_LOG_FORMAT",
+LOG_FORMAT = getattr(settings, "CELERYD_DAEMON_LOG_FORMAT",
                             DEFAULT_LOG_FMT)
-DAEMON_LOG_FILE = getattr(settings, "CRUNCHD_LOG_FILE",
+DAEMON_LOG_FILE = getattr(settings, "CELERYD_LOG_FILE",
                             DEFAULT_DAEMON_LOG_FILE)
-DAEMON_LOG_LEVEL = LOG_LEVELS[getattr(settings, "CRUNCHD_DAEMON_LOG_LEVEL",
+DAEMON_LOG_LEVEL = LOG_LEVELS[getattr(settings, "CELERYD_DAEMON_LOG_LEVEL",
                                DEFAULT_DAEMON_LOG_LEVEL).upper()]
 
-QUEUE_WAKEUP_AFTER = getattr(settings, "CRUNCHD_QUEUE_WAKEUP_AFTER",
+QUEUE_WAKEUP_AFTER = getattr(settings, "CELERYD_QUEUE_WAKEUP_AFTER",
                                 DEFAULT_QUEUE_WAKEUP_AFTER)
-EMPTY_MSG_EMIT_EVERY = getattr(settings, "CRUNCHD_EMPTY_MSG_EMIT_EVERY",
+EMPTY_MSG_EMIT_EVERY = getattr(settings, "CELERYD_EMPTY_MSG_EMIT_EVERY",
                                 DEFAULT_EMPTY_MSG_EMIT_EVERY)
-DAEMON_PID_FILE = getattr("settings", "CRUNCHD_PID_FILE",
+DAEMON_PID_FILE = getattr("settings", "CELERYD_PID_FILE",
                             DEFAULT_DAEMON_PID_FILE)
-DAEMON_CONCURRENCY = getattr("settings", "CRUNCHD_CONCURRENCY",
+DAEMON_CONCURRENCY = getattr("settings", "CELERYD_CONCURRENCY",
                                 DEFAULT_DAEMON_CONCURRENCY)

+ 0 - 0
crunchy/discovery.py → celery/discovery.py


+ 1 - 1
crunchy/log.py → celery/log.py

@@ -2,7 +2,7 @@ import multiprocessing
 import os
 import time
 import logging
-from crunchy.conf import LOG_FORMAT, DAEMON_LOG_LEVEL
+from celery.conf import LOG_FORMAT, DAEMON_LOG_LEVEL
 
 
 def setup_logger(loglevel=DAEMON_LOG_LEVEL, logfile=None, format=LOG_FORMAT):

+ 1 - 1
crunchy/managers.py → celery/managers.py

@@ -1,5 +1,5 @@
 from django.db import models
-from crunchy.registry import tasks
+from celery.registry import tasks
 from datetime import datetime, timedelta
 
 

+ 7 - 7
crunchy/messaging.py → celery/messaging.py

@@ -10,19 +10,19 @@ class NoProcessConsumer(Consumer):
 
 
 class TaskPublisher(Publisher):
-    exchange = "crunchy"
-    routing_key = "crunchy"
+    exchange = "celery"
+    routing_key = "celery"
 
     def delay_task(self, task_name, **kwargs):
         task_id = uuid.uuid4()
         message_data = dict(kwargs)
-        message_data["crunchTASK"] = task_name
-        message_data["crunchID"] = str(task_id)
+        message_data["celeryTASK"] = task_name
+        message_data["celeryID"] = str(task_id)
         self.send(message_data)
         return task_id
 
 
 class TaskConsumer(NoProcessConsumer):
-    queue = "crunchy"
-    exchange = "crunchy"
-    routing_key = "crunchy"
+    queue = "celery"
+    exchange = "celery"
+    routing_key = "celery"

+ 2 - 2
crunchy/models.py → celery/models.py

@@ -1,6 +1,6 @@
 from django.db import models
-from crunchy.registry import tasks
-from crunchy.managers import PeriodicTaskManager
+from celery.registry import tasks
+from celery.managers import PeriodicTaskManager
 from django.utils.translation import ugettext_lazy as _
 
 

+ 1 - 1
crunchy/platform.py → celery/platform.py

@@ -40,7 +40,7 @@ class PIDFile(object):
                    sys.stderr.write("Stale pidfile exists. removing it.\n")
                    self.remove()
             else:
-                raise SystemExit("crunchd is already running.")
+                raise SystemExit("celeryd is already running.")
 
     def remove(self):
         os.unlink(self.pidfile)

+ 0 - 0
crunchy/process.py → celery/process.py


+ 1 - 1
crunchy/registry.py → celery/registry.py

@@ -1,4 +1,4 @@
-from crunchy import discovery
+from celery import discovery
 from UserDict import UserDict
 
 

+ 6 - 6
crunchy/task.py → celery/task.py

@@ -1,7 +1,7 @@
 from carrot.connection import DjangoAMQPConnection
-from crunchy.log import setup_logger
-from crunchy.registry import tasks
-from crunchy.messaging import TaskPublisher, TaskConsumer
+from celery.log import setup_logger
+from celery.registry import tasks
+from celery.messaging import TaskPublisher, TaskConsumer
 
 
 def delay_task(task_name, **kwargs):
@@ -41,11 +41,11 @@ class Task(object):
         return setup_logger(**kwargs)
 
     def get_publisher(self):
-        """Get a crunchy task message publisher."""
+        """Get a celery task message publisher."""
         return TaskPublisher(connection=DjangoAMQPConnection)
 
     def get_consumer(self):
-        """Get a crunchy task message consumer."""
+        """Get a celery task message consumer."""
         return TaskConsumer(connection=DjangoAMQPConnection)
 
     @classmethod
@@ -65,7 +65,7 @@ class PeriodicTask(Task):
 
 
 class TestTask(Task):
-    name = "crunchy-test-task"
+    name = "celery-test-task"
 
     def run(self, some_arg, **kwargs):
         logger = self.get_logger(**kwargs)

+ 9 - 9
crunchy/worker.py → celery/worker.py

@@ -1,11 +1,11 @@
 from carrot.connection import DjangoAMQPConnection
-from crunchy.messaging import TaskConsumer
-from crunchy.conf import DAEMON_CONCURRENCY, DAEMON_LOG_FILE
-from crunchy.conf import QUEUE_WAKEUP_AFTER, EMPTY_MSG_EMIT_EVERY
-from crunchy.log import setup_logger
-from crunchy.registry import tasks
-from crunchy.process import ProcessQueue
-from crunchy.models import PeriodicTaskMeta
+from celery.messaging import TaskConsumer
+from celery.conf import DAEMON_CONCURRENCY, DAEMON_LOG_FILE
+from celery.conf import QUEUE_WAKEUP_AFTER, EMPTY_MSG_EMIT_EVERY
+from celery.log import setup_logger
+from celery.registry import tasks
+from celery.process import ProcessQueue
+from celery.models import PeriodicTaskMeta
 import multiprocessing
 import simplejson
 import traceback
@@ -50,8 +50,8 @@ class TaskDaemon(object):
             raise EmptyQueue()
 
         message_data = simplejson.loads(message.body)
-        task_name = message_data.pop("crunchTASK")
-        task_id = message_data.pop("crunchID")
+        task_name = message_data.pop("celeryTASK")
+        task_id = message_data.pop("celeryID")
         self.logger.info("Got task from broker: %s[%s]" % (
                             task_name, task_id))
         if task_name not in self.task_registry:

+ 24 - 26
contrib/debian/init.d/crunchd → contrib/debian/init.d/celeryd

@@ -1,12 +1,12 @@
 #! /bin/sh
 
 ### BEGIN INIT INFO
-# Provides:		crunchd
+# Provides:		celeryd
 # Required-Start:	
 # Required-Stop:	
 # Default-Start:	2 3 4 5
 # Default-Stop:		1
-# Short-Description:	crunchy task worker daemon
+# Short-Description:	celery task worker daemon
 ### END INIT INFO
 
 set -e
@@ -14,19 +14,17 @@ set -e
 VIRTUALENV=/opt/Opal/current
 DJANGO_PROJECT_DIR=/opt/Opal/release/opal
 DJANGO_SETTINGS_MODULE=settings
-CRUNCHD_PID_FILE="/var/run/crunchd.pid"
-CRUNCHD_LOG_FILE="/var/log/crunchd.log"
-CRUNCHD_LOG_LEVEL="INFO"
-CRUNCHD="crunchd"
+CELERYD_PID_FILE="/var/run/celeryd.pid"
+CELERYD_LOG_FILE="/var/log/celeryd.log"
+CELERYD_LOG_LEVEL="INFO"
+CELERYD="celeryd"
 
 export DJANGO_SETTINGS_MODULE
 
-# /etc/init.d/ssh: start and stop the crunchy task worker daemon.
+# /etc/init.d/ssh: start and stop the celery task worker daemon.
 
-test -x "$CRUNCHD" || exit 0
-
-if test -f /etc/default/crunchd; then
-    . /etc/default/crunchd
+if test -f /etc/default/celeryd; then
+    . /etc/default/celeryd
 fi
 
 
@@ -34,11 +32,11 @@ fi
 
 chdir $DJANGO_PROJECT_DIR
 
-CRUNCHD_OPTS="-f $CRUNCHD_LOG_FILE -l $CRUNCHD_LOG_LEVEL -p \
-                    $CRUNCHD_PID_FILE -d"
+CELERYD_OPTS="-f $CELERYD_LOG_FILE -l $CELERYD_LOG_LEVEL -p \
+                    $CELERYD_PID_FILE -d"
 
 if [ -n "$2" ]; then
-    CRUNCHD_OPTS="$CRUNCHD_OPTS $2"
+    CELERYD_OPTS="$CELERYD_OPTS $2"
 fi
 
 # Are we running from init?
@@ -70,16 +68,16 @@ fi
 case "$1" in
   start)
 	check_dev_null
-	log_daemon_msg "Starting crunchy task worker server" "crunchd"
-	if start-stop-daemon --start --quiet --oknodo --pidfile $CRUNCHD_PID_FILE --exec $CRUNCHD -- $CRUNCHD_OPTS; then
+	log_daemon_msg "Starting celery task worker server" "celeryd"
+	if start-stop-daemon --start --quiet --oknodo --pidfile $CELERYD_PID_FILE --exec $CELERYD -- $CELERYD_OPTS; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
 	fi
 	;;
   stop)
-	log_daemon_msg "Stopping crunchy task worker server" "crunchd"
-	if start-stop-daemon --stop --quiet --oknodo --pidfile $CRUNCHD_PID_FILE; then log_end_msg 0
+	log_daemon_msg "Stopping celery task worker server" "celeryd"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile $CELERYD_PID_FILE; then log_end_msg 0
 	else
 	    log_end_msg 1
 	fi
@@ -90,11 +88,11 @@ case "$1" in
 	;;
 
   restart)
-	log_daemon_msg "Restarting crunchgy task worker server" "crunchd"
-	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $CRUNCHD_PID_FILE
+	log_daemon_msg "Restarting celery task worker server" "celeryd"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $CELERYD_PID_FILE
 	check_for_no_start log_end_msg
 	check_dev_null log_end_msg
-	if start-stop-daemon --start --quiet --oknodo --pidfile $CRUNCHD_PID_FILE --exec $CRUNCHD -- $CRUNCHD_OPTS; then
+	if start-stop-daemon --start --quiet --oknodo --pidfile $CELERYD_PID_FILE --exec $CELERYD -- $CELERYD_OPTS; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -102,16 +100,16 @@ case "$1" in
 	;;
 
   try-restart)
-	log_daemon_msg "Restarting crunchy task worker server" "crunchd"
+	log_daemon_msg "Restarting celery task worker server" "celeryd"
 	set +e
-	start-stop-daemon --stop --quiet --retry 30 --pidfile $CRUNCHD_PID_FILE
+	start-stop-daemon --stop --quiet --retry 30 --pidfile $CELERYD_PID_FILE
 	RET="$?"
 	set -e
 	case $RET in
 	    0)
 		# old daemon stopped
 		check_dev_null log_end_msg
-		if start-stop-daemon --start --quiet --oknodo --pidfile $CRUNCHD_PID_FILE --exec $CRUNCHD -- $CRUNCHD_OPTS; then
+		if start-stop-daemon --start --quiet --oknodo --pidfile $CELERYD_PID_FILE --exec $CELERYD -- $CELERYD_OPTS; then
 		    log_end_msg 0
 		else
 		    log_end_msg 1
@@ -131,11 +129,11 @@ case "$1" in
 	;;
 
   status)
-	status_of_proc -p $CRUNCHD_PID_FILE $CRUNCHD crunchd && exit 0 || exit $?
+	status_of_proc -p $CELERYD_PID_FILE $CELERYD celeryd && exit 0 || exit $?
 	;;
 
   *)
-	log_action_msg "Usage: /etc/init.d/crunchd {start|stop|force-reload|restart|try-restart|status}"
+	log_action_msg "Usage: /etc/init.d/celeryd {start|stop|force-reload|restart|try-restart|status}"
 	exit 1
 esac
 

+ 8 - 8
setup.py

@@ -10,7 +10,7 @@ except ImportError:
     use_setuptools()
     from setuptools import setup, find_packages
 
-import crunchy
+import celery
 
 install_requires = ["carrot", "django"]
 py_version_info = sys.version_info
@@ -21,15 +21,15 @@ if (py_major_version == 2 and py_minor_version <=5) or py_major_version < 2:
     install_requires.append("multiprocessing")    
 
 setup(
-    name='crunchy',
-    version=crunchy.__version__,
-    description=crunchy.__doc__,
-    author=crunchy.__author__,
-    author_email=crunchy.__contact__,
-    url=crunchy.__homepage__,
+    name='celery',
+    version=celery.__version__,
+    description=celery.__doc__,
+    author=celery.__author__,
+    author_email=celery.__contact__,
+    url=celery.__homepage__,
     platforms=["any"],
     packages=find_packages(exclude=['ez_setup']),
-    scripts=["crunchy/bin/crunchd"],
+    scripts=["celery/bin/celeryd"],
     install_requires=[
         'simplejson',
         'carrot',