Przeglądaj źródła

Django specific modules moved to the django-celery package.

http://github.com/ask/django-celery
Ask Solem 15 lat temu
rodzic
commit
159e4415c7

+ 4 - 3
celery/backends/__init__.py

@@ -5,12 +5,12 @@ from celery.utils import get_cls_by_name
 
 BACKEND_ALIASES = {
     "amqp": "celery.backends.amqp.AMQPBackend",
-    "database": "celery.backends.database.DatabaseBackend",
-    "db": "celery.backends.database.DatabaseBackend",
     "redis": "celery.backends.pyredis.RedisBackend",
-    "cache": "celery.backends.cache.CacheBackend",
     "mongodb": "celery.backends.mongodb.MongoBackend",
     "tyrant": "celery.backends.tyrant.TyrantBackend",
+    "db": "djcelery.backends.database.DatabaseBackend",
+    "database": "djcelery.backends.database.DatabaseBackend",
+    "cache": "djcelery.backends.cache.CacheBackend",
 }
 
 _backend_cache = {}
@@ -19,6 +19,7 @@ _backend_cache = {}
 def get_backend_cls(backend):
     """Get backend class by name/alias"""
     if backend not in _backend_cache:
+        print("GET_CLS_BY_NAME: %s" % backend)
         _backend_cache[backend] = get_cls_by_name(backend, BACKEND_ALIASES)
     return _backend_cache[backend]
 

+ 0 - 62
celery/backends/cache.py

@@ -1,62 +0,0 @@
-"""celery.backends.cache"""
-from datetime import timedelta
-
-from django.utils.encoding import smart_str
-from django.core.cache import cache, get_cache
-from django.core.cache.backends.base import InvalidCacheBackendError
-
-from celery import conf
-from celery.utils.timeutils import timedelta_seconds
-from celery.backends.base import KeyValueStoreBackend
-
-# CELERY_CACHE_BACKEND overrides the django-global(tm) backend settings.
-if conf.CELERY_CACHE_BACKEND:
-    cache = get_cache(conf.CELERY_CACHE_BACKEND)
-
-
-class DjangoMemcacheWrapper(object):
-    """Wrapper class to django's memcache backend class, that overrides the
-    :meth:`get` method in order to remove the forcing of unicode strings
-    since it may cause binary or pickled data to break."""
-
-    def __init__(self, cache):
-        self.cache = cache
-
-    def get(self, key, default=None):
-        val = self.cache._cache.get(smart_str(key))
-        if val is None:
-            return default
-        else:
-            return val
-
-    def set(self, key, value, timeout=0):
-        self.cache.set(key, value, timeout)
-
-# Check if django is using memcache as the cache backend. If so, wrap the
-# cache object in a DjangoMemcacheWrapper that fixes a bug with retrieving
-# pickled data
-from django.core.cache.backends.base import InvalidCacheBackendError
-try:
-    from django.core.cache.backends.memcached import CacheClass
-except InvalidCacheBackendError:
-    pass
-else:
-    if isinstance(cache, CacheClass):
-        cache = DjangoMemcacheWrapper(cache)
-
-
-class CacheBackend(KeyValueStoreBackend):
-    """Backend using the Django cache framework to store task metadata."""
-
-    def __init__(self, *args, **kwargs):
-        super(CacheBackend, self).__init__(self, *args, **kwargs)
-        expires = conf.TASK_RESULT_EXPIRES
-        if isinstance(expires, timedelta):
-            expires = timedelta_seconds(conf.TASK_RESULT_EXPIRES)
-        self.expires = expires
-
-    def get(self, key):
-        return cache.get(key)
-
-    def set(self, key, value):
-        cache.set(key, value, self.expires)

+ 0 - 34
celery/backends/database.py

@@ -1,34 +0,0 @@
-from celery.models import TaskMeta, TaskSetMeta
-from celery.backends.base import BaseDictBackend
-
-
-class DatabaseBackend(BaseDictBackend):
-    """The database backends. Using Django models to store task metadata."""
-
-    def _store_result(self, task_id, result, status, traceback=None):
-        """Store return value and status of an executed task."""
-        TaskMeta.objects.store_result(task_id, result, status,
-                                      traceback=traceback)
-        return result
-
-    def _save_taskset(self, taskset_id, result):
-        """Store the result of an executed taskset."""
-        TaskSetMeta.objects.store_result(taskset_id, result)
-        return result
-
-    def _get_task_meta_for(self, task_id):
-        """Get task metadata for a task by id."""
-        meta = TaskMeta.objects.get_task(task_id)
-        if meta:
-            return meta.to_dict()
-
-    def _restore_taskset(self, taskset_id):
-        """Get taskset metadata for a taskset by id."""
-        meta = TaskSetMeta.objects.restore_taskset(taskset_id)
-        if meta:
-            return meta.to_dict()
-
-    def cleanup(self):
-        """Delete expired metadata."""
-        TaskMeta.objects.delete_expired()
-        TaskSetMeta.objects.delete_expired()

+ 4 - 3
celery/loaders/__init__.py

@@ -6,11 +6,12 @@ from carrot.utils import rpartition
 
 from celery.utils import get_full_cls_name, first
 from celery.loaders.default import Loader as DefaultLoader
-from celery.loaders.djangoapp import Loader as DjangoLoader
+
+from djcelery.loaders.djangoapp import Loader as DjangoLoader
 
 _DEFAULT_LOADER_CLASS_NAME = "Loader"
-LOADER_ALIASES = {"django": "celery.loaders.djangoapp.Loader",
-                  "default": "celery.loaders.default.Loader"}
+LOADER_ALIASES = {"default": "celery.loaders.default.Loader",
+                  "django": "djcelery.loaders.djangoapp.Loader"}
 _loader_cache = {}
 _loader = None
 _settings = None

+ 13 - 7
celery/loaders/default.py

@@ -16,6 +16,18 @@ def wanted_module_item(item):
     return not item.startswith("_")
 
 
+class Settings(dict):
+
+    def __getattr__(self, key):
+        try:
+            return self[key]
+        except KeyError:
+            raise AttributeError(key)
+
+    def __setattr__(self, key, value):
+        self[key] = value
+
+
 class Loader(BaseLoader):
     """The default loader.
 
@@ -24,13 +36,7 @@ class Loader(BaseLoader):
     """
 
     def setup_django_env(self, settingsdict):
-        config = dict(DEFAULT_SETTINGS, **settingsdict)
-
-        from django.conf import settings
-        if not settings.configured:
-            settings.configure()
-        for config_key, config_value in config.items():
-            setattr(settings, config_key, config_value)
+        settings = Settings(DEFAULT_SETTINGS, **settingsdict)
         installed_apps = set(list(DEFAULT_SETTINGS["INSTALLED_APPS"]) + \
                              list(settings.INSTALLED_APPS))
         settings.INSTALLED_APPS = tuple(installed_apps)

+ 0 - 100
celery/loaders/djangoapp.py

@@ -1,100 +0,0 @@
-import imp
-import importlib
-
-from celery.loaders.base import BaseLoader
-
-_RACE_PROTECTION = False
-
-
-class Loader(BaseLoader):
-    """The Django loader."""
-    _db_reuse = 0
-
-    def read_configuration(self):
-        """Load configuration from Django settings."""
-        from django.conf import settings
-        return settings
-
-    def close_database(self):
-        from django.db import connection
-        db_reuse_max = getattr(self.conf, "CELERY_DB_REUSE_MAX", None)
-        if not db_reuse_max:
-            return connection.close()
-        if self._db_reuse >= db_reuse_max:
-            self._db_reuse = 0
-            return connection.close()
-        self._db_reuse += 1
-
-    def on_task_init(self, task_id, task):
-        """This method is called before a task is executed.
-
-        Does everything necessary for Django to work in a long-living,
-        multiprocessing environment.
-
-        """
-        # See http://groups.google.com/group/django-users/
-        #            browse_thread/thread/78200863d0c07c6d/
-        self.close_database()
-
-        # ## Reset cache connection only if using memcached/libmemcached
-        from django.core import cache
-        # XXX At Opera we use a custom memcached backend that uses
-        # libmemcached instead of libmemcache (cmemcache). Should find a
-        # better solution for this, but for now "memcached" should probably
-        # be unique enough of a string to not make problems.
-        cache_backend = cache.settings.CACHE_BACKEND
-        try:
-            parse_backend = cache.parse_backend_uri
-        except AttributeError:
-            parse_backend = lambda backend: backend.split(":", 1)
-        cache_scheme = parse_backend(cache_backend)[0]
-
-        if "memcached" in cache_scheme:
-            cache.cache.close()
-
-    def on_worker_init(self):
-        """Called when the worker starts.
-
-        Automatically discovers any ``tasks.py`` files in the applications
-        listed in ``INSTALLED_APPS``.
-
-        """
-        self.import_default_modules()
-        autodiscover()
-
-
-def autodiscover():
-    """Include tasks for all applications in :setting:`INSTALLED_APPS`."""
-    from django.conf import settings
-    global _RACE_PROTECTION
-
-    if _RACE_PROTECTION:
-        return
-    _RACE_PROTECTION = True
-    try:
-        return filter(None, [find_related_module(app, "tasks")
-                                for app in settings.INSTALLED_APPS])
-    finally:
-        _RACE_PROTECTION = False
-
-
-def find_related_module(app, related_name):
-    """Given an application name and a module name, tries to find that
-    module in the application."""
-
-    try:
-        app_path = importlib.import_module(app).__path__
-    except AttributeError:
-        return
-
-    try:
-        imp.find_module(related_name, app_path)
-    except ImportError:
-        return
-
-    module = importlib.import_module("%s.%s" % (app, related_name))
-
-    try:
-        return getattr(module, related_name)
-    except AttributeError:
-        return

+ 0 - 0
celery/management/__init__.py


+ 0 - 0
celery/management/commands/__init__.py


+ 0 - 18
celery/management/commands/camqadm.py

@@ -1,18 +0,0 @@
-"""
-
-Celery AMQP Administration Tool using the AMQP API.
-
-"""
-from django.core.management.base import BaseCommand
-
-from celery.bin.camqadm import camqadm, OPTION_LIST
-
-
-class Command(BaseCommand):
-    """Run the celery daemon."""
-    option_list = BaseCommand.option_list + OPTION_LIST
-    help = 'Celery AMQP Administration Tool using the AMQP API.'
-
-    def handle(self, *args, **options):
-        """Handle the management command."""
-        camqadm(*args, **options)

+ 0 - 18
celery/management/commands/celerybeat.py

@@ -1,18 +0,0 @@
-"""
-
-Start the celery clock service from the Django management command.
-
-"""
-from django.core.management.base import BaseCommand
-
-from celery.bin.celerybeat import run_clockservice, OPTION_LIST
-
-
-class Command(BaseCommand):
-    """Run the celery periodic task scheduler."""
-    option_list = BaseCommand.option_list + OPTION_LIST
-    help = 'Run the celery periodic task scheduler'
-
-    def handle(self, *args, **options):
-        """Handle the management command."""
-        run_clockservice(**options)

+ 0 - 18
celery/management/commands/celeryd.py

@@ -1,18 +0,0 @@
-"""
-
-Start the celery daemon from the Django management command.
-
-"""
-from django.core.management.base import BaseCommand
-
-from celery.bin.celeryd import run_worker, OPTION_LIST
-
-
-class Command(BaseCommand):
-    """Run the celery daemon."""
-    option_list = BaseCommand.option_list + OPTION_LIST
-    help = 'Run the celery daemon'
-
-    def handle(self, *args, **options):
-        """Handle the management command."""
-        run_worker(**options)

+ 0 - 37
celery/management/commands/celerymon.py

@@ -1,37 +0,0 @@
-"""
-
-Start the celery clock service from the Django management command.
-
-"""
-import sys
-from django.core.management.base import BaseCommand
-
-#try:
-from celerymonitor.bin.celerymond import run_monitor, OPTION_LIST
-#except ImportError:
-#    OPTION_LIST = ()
-#    run_monitor = None
-
-MISSING = """
-You don't have celerymon installed, please install it by running the following
-command:
-
-    $ easy_install celerymon
-
-or if you're using pip (like you should be):
-
-    $ pip install celerymon
-"""
-
-
-class Command(BaseCommand):
-    """Run the celery monitor."""
-    option_list = BaseCommand.option_list + OPTION_LIST
-    help = 'Run the celery monitor'
-
-    def handle(self, *args, **options):
-        """Handle the management command."""
-        if run_monitor is None:
-            sys.stderr.write(MISSING)
-        else:
-            run_monitor(**options)

+ 0 - 149
celery/managers.py

@@ -1,149 +0,0 @@
-from datetime import datetime
-from itertools import count
-
-from billiard.utils.functional import wraps
-
-from django.db import models
-from django.db import transaction
-from django.db.models.query import QuerySet
-
-
-def transaction_retry(max_retries=1):
-    """Decorator for methods doing database operations.
-
-    If the database operation fails, it will retry the operation
-    at most ``max_retries`` times.
-
-    """
-    def _outer(fun):
-
-        @wraps(fun)
-        def _inner(*args, **kwargs):
-            _max_retries = kwargs.pop("exception_retry_count", max_retries)
-            for retries in count(0):
-                try:
-                    return fun(*args, **kwargs)
-                except Exception: # pragma: no cover
-                    # Depending on the database backend used we can experience
-                    # various exceptions. E.g. psycopg2 raises an exception
-                    # if some operation breaks the transaction, so saving
-                    # the task result won't be possible until we rollback
-                    # the transaction.
-                    if retries >= _max_retries:
-                        raise
-                    transaction.rollback_unless_managed()
-
-        return _inner
-
-    return _outer
-
-
-def update_model_with_dict(obj, fields):
-    [setattr(obj, attr_name, attr_value)
-        for attr_name, attr_value in fields.items()]
-    obj.save()
-    return obj
-
-
-class ExtendedQuerySet(QuerySet):
-
-    def update_or_create(self, **kwargs):
-        obj, created = self.get_or_create(**kwargs)
-
-        if not created:
-            fields = dict(kwargs.pop("defaults", {}))
-            fields.update(kwargs)
-            update_model_with_dict(obj, fields)
-
-        return obj
-
-
-class ExtendedManager(models.Manager):
-
-    def get_query_set(self):
-        return ExtendedQuerySet(self.model)
-
-    def update_or_create(self, **kwargs):
-        return self.get_query_set().update_or_create(**kwargs)
-
-
-class ResultManager(ExtendedManager):
-
-    def get_all_expired(self):
-        """Get all expired task results."""
-        from celery import conf
-        expires = conf.TASK_RESULT_EXPIRES
-        return self.filter(date_done__lt=datetime.now() - expires)
-
-    def delete_expired(self):
-        """Delete all expired taskset results."""
-        self.get_all_expired().delete()
-
-
-class TaskManager(ResultManager):
-    """Manager for :class:`celery.models.Task` models."""
-
-    @transaction_retry(max_retries=1)
-    def get_task(self, task_id):
-        """Get task meta for task by ``task_id``.
-
-        :keyword exception_retry_count: How many times to retry by
-            transaction rollback on exception. This could theoretically
-            happen in a race condition if another worker is trying to
-            create the same task. The default is to retry once.
-
-        """
-        task, created = self.get_or_create(task_id=task_id)
-        return task
-
-    @transaction_retry(max_retries=2)
-    def store_result(self, task_id, result, status, traceback=None):
-        """Store the result and status of a task.
-
-        :param task_id: task id
-
-        :param result: The return value of the task, or an exception
-            instance raised by the task.
-
-        :param status: Task status. See
-            :meth:`celery.result.AsyncResult.get_status` for a list of
-            possible status values.
-
-        :keyword traceback: The traceback at the point of exception (if the
-            task failed).
-
-        :keyword exception_retry_count: How many times to retry by
-            transaction rollback on exception. This could theoretically
-            happen in a race condition if another worker is trying to
-            create the same task. The default is to retry twice.
-
-        """
-        return self.update_or_create(task_id=task_id, defaults={
-                                        "status": status,
-                                        "result": result,
-                                        "traceback": traceback})
-
-
-class TaskSetManager(ResultManager):
-    """Manager for :class:`celery.models.TaskSet` models."""
-
-
-    @transaction_retry(max_retries=1)
-    def restore_taskset(self, taskset_id):
-        """Get taskset meta for task by ``taskset_id``."""
-        try:
-            return self.get(taskset_id=taskset_id)
-        except self.model.DoesNotExist:
-            return None
-
-    @transaction_retry(max_retries=2)
-    def store_result(self, taskset_id, result):
-        """Store the result of a taskset.
-
-        :param taskset_id: task set id
-
-        :param result: The return value of the taskset
-
-        """
-        return self.update_or_create(taskset_id=taskset_id,
-                                     defaults={"result": result})

+ 3 - 1
celery/messaging.py

@@ -14,6 +14,7 @@ from billiard.utils.functional import wraps
 from celery import conf
 from celery import signals
 from celery.utils import gen_unique_id, mitemgetter, noop
+from celery.loaders import load_settings
 
 
 MSG_OPTIONS = ("mandatory", "priority",
@@ -209,7 +210,8 @@ class BroadcastConsumer(Consumer):
 
 def establish_connection(connect_timeout=conf.BROKER_CONNECTION_TIMEOUT):
     """Establish a connection to the message broker."""
-    return DjangoBrokerConnection(connect_timeout=connect_timeout)
+    return DjangoBrokerConnection(connect_timeout=connect_timeout,
+                                  settings=load_settings())
 
 
 def with_connection(fun):

+ 0 - 67
celery/models.py

@@ -1,67 +0,0 @@
-import django
-from django.db import models
-from django.utils.translation import ugettext_lazy as _
-
-from picklefield.fields import PickledObjectField
-
-from celery import conf
-from celery import states
-from celery.managers import TaskManager, TaskSetManager
-
-TASK_STATUSES_CHOICES = zip(states.ALL_STATES, states.ALL_STATES)
-
-
-class TaskMeta(models.Model):
-    """Task result/status."""
-    task_id = models.CharField(_(u"task id"), max_length=255, unique=True)
-    status = models.CharField(_(u"task status"), max_length=50,
-            default=states.PENDING, choices=TASK_STATUSES_CHOICES)
-    result = PickledObjectField(null=True, default=None)
-    date_done = models.DateTimeField(_(u"done at"), auto_now=True)
-    traceback = models.TextField(_(u"traceback"), blank=True, null=True)
-
-    objects = TaskManager()
-
-    class Meta:
-        """Model meta-data."""
-        verbose_name = _(u"task meta")
-        verbose_name_plural = _(u"task meta")
-
-    def to_dict(self):
-        return {"task_id": self.task_id,
-                "status": self.status,
-                "result": self.result,
-                "date_done": self.date_done,
-                "traceback": self.traceback}
-
-    def __unicode__(self):
-        return u"<Task: %s state->%s>" % (self.task_id, self.status)
-
-
-class TaskSetMeta(models.Model):
-    """TaskSet result"""
-    taskset_id = models.CharField(_(u"task id"), max_length=255, unique=True)
-    result = PickledObjectField()
-    date_done = models.DateTimeField(_(u"done at"), auto_now=True)
-
-    objects = TaskSetManager()
-
-    class Meta:
-        """Model meta-data."""
-        verbose_name = _(u"taskset meta")
-        verbose_name_plural = _(u"taskset meta")
-
-    def to_dict(self):
-        return {"taskset_id": self.taskset_id,
-                "result": self.result,
-                "date_done": self.date_done}
-
-    def __unicode__(self):
-        return u"<TaskSet: %s>" % (self.taskset_id)
-
-if (django.VERSION[0], django.VERSION[1]) >= (1, 1):
-    # keep models away from syncdb/reset if database backend is not
-    # being used.
-    if conf.RESULT_BACKEND != 'database':
-        TaskMeta._meta.managed = False
-        TaskSetMeta._meta.managed = False

+ 2 - 1
celery/tests/test_backends/__init__.py

@@ -1,10 +1,11 @@
 import unittest2 as unittest
 
 from celery import backends
-from celery.backends.database import DatabaseBackend
 from celery.backends.amqp import AMQPBackend
 from celery.backends.pyredis import RedisBackend
 
+from djcelery.backends.database import DatabaseBackend
+
 
 class TestBackends(unittest.TestCase):
 

+ 8 - 8
celery/tests/test_backends/test_cache.py

@@ -7,9 +7,9 @@ from django.core.cache.backends.base import InvalidCacheBackendError
 from celery import result
 from celery import states
 from celery.utils import gen_unique_id
-from celery.backends.cache import CacheBackend
 from celery.datastructures import ExceptionInfo
 
+from djcelery.backends.cache import CacheBackend
 
 class SomeClass(object):
 
@@ -82,18 +82,18 @@ class TestCustomCacheBackend(unittest.TestCase):
     def test_custom_cache_backend(self):
         from celery import conf
         prev_backend = conf.CELERY_CACHE_BACKEND
-        prev_module = sys.modules["celery.backends.cache"]
+        prev_module = sys.modules["djcelery.backends.cache"]
         conf.CELERY_CACHE_BACKEND = "dummy://"
-        sys.modules.pop("celery.backends.cache")
+        sys.modules.pop("djcelery.backends.cache")
         try:
-            from celery.backends.cache import cache
+            from djcelery.backends.cache import cache
             from django.core.cache import cache as django_cache
             self.assertEqual(cache.__class__.__module__,
                               "django.core.cache.backends.dummy")
             self.assertIsNot(cache, django_cache)
         finally:
             conf.CELERY_CACHE_BACKEND = prev_backend
-            sys.modules["celery.backends.cache"] = prev_module
+            sys.modules["djcelery.backends.cache"] = prev_module
 
 
 class TestMemcacheWrapper(unittest.TestCase):
@@ -109,9 +109,9 @@ class TestMemcacheWrapper(unittest.TestCase):
             return
         prev_cache_cls = memcached.CacheClass
         memcached.CacheClass = locmem.CacheClass
-        prev_backend_module = sys.modules.pop("celery.backends.cache")
+        prev_backend_module = sys.modules.pop("djcelery.backends.cache")
         try:
-            from celery.backends.cache import cache, DjangoMemcacheWrapper
+            from djcelery.backends.cache import cache, DjangoMemcacheWrapper
             self.assertIsInstance(cache, DjangoMemcacheWrapper)
 
             key = "cu.test_memcache_wrapper"
@@ -124,4 +124,4 @@ class TestMemcacheWrapper(unittest.TestCase):
                               val)
         finally:
             memcached.CacheClass = prev_cache_cls
-            sys.modules["celery.backends.cache"] = prev_backend_module
+            sys.modules["djcelery.backends.cache"] = prev_backend_module

+ 2 - 1
celery/tests/test_backends/test_database.py

@@ -4,7 +4,8 @@ from datetime import timedelta
 from celery import states
 from celery.task import PeriodicTask
 from celery.utils import gen_unique_id
-from celery.backends.database import DatabaseBackend
+
+from djcelery.backends.database import DatabaseBackend
 
 
 class SomeClass(object):

+ 2 - 1
celery/tests/test_discovery.py

@@ -2,9 +2,10 @@ import unittest2 as unittest
 
 from django.conf import settings
 
-from celery.loaders.djangoapp import autodiscover
 from celery.task import tasks
 
+from djcelery.loaders.djangoapp import autodiscover
+
 
 class TestDiscovery(unittest.TestCase):
 

+ 3 - 1
celery/tests/test_loaders.py

@@ -5,12 +5,14 @@ import unittest2 as unittest
 from celery import task
 from celery import loaders
 from celery.loaders import base
-from celery.loaders import djangoapp
 from celery.loaders import default
 
+from djcelery.loaders import djangoapp
+
 from celery.tests.utils import with_environ
 
 
+
 class TestLoaders(unittest.TestCase):
 
     def test_get_loader_cls(self):

+ 2 - 1
celery/tests/test_models.py

@@ -3,7 +3,8 @@ from datetime import datetime, timedelta
 
 from celery import states
 from celery.utils import gen_unique_id
-from celery.models import TaskMeta, TaskSetMeta
+
+from djcelery.models import TaskMeta, TaskSetMeta
 
 
 class TestModels(unittest.TestCase):

+ 2 - 1
celery/tests/test_worker_job.py

@@ -12,7 +12,6 @@ from celery import states
 from celery.log import setup_logger
 from celery.task.base import Task
 from celery.utils import gen_unique_id
-from celery.models import TaskMeta
 from celery.result import AsyncResult
 from celery.worker.job import WorkerTaskTrace, TaskWrapper
 from celery.worker.pool import TaskPool
@@ -23,6 +22,8 @@ from celery.datastructures import ExceptionInfo
 from celery.tests.utils import execute_context
 from celery.tests.compat import catch_warnings
 
+from djcelery.models import TaskMeta
+
 scratch = {"ACK": False}
 some_kwargs_scratchpad = {}
 

+ 0 - 16
celery/urls.py

@@ -1,16 +0,0 @@
-"""
-
-URLs defined for celery.
-
-"""
-from django.conf.urls.defaults import patterns, url
-
-from celery import views
-
-
-urlpatterns = patterns("",
-    url(r'^(?P<task_id>[\w\d\-]+)/done/?$', views.is_task_successful,
-        name="celery-is_task_successful"),
-    url(r'^(?P<task_id>[\w\d\-]+)/status/?$', views.task_status,
-        name="celery-task_status"),
-)

+ 3 - 0
celery/utils/__init__.py

@@ -225,7 +225,10 @@ def get_cls_by_name(name, aliases={}):
         return name # already a class
 
     name = aliases.get(name) or name
+    print("NAME: %s" % name)
+    print("ALIASES: %s" % aliases)
     module_name, _, cls_name = rpartition(name, ".")
+    print("MODULE_NAME=%s CLS_NAME=%s" % (module_name, cls_name))
     module = importlib.import_module(module_name)
     return getattr(module, cls_name)
 

+ 0 - 106
celery/views.py

@@ -1,106 +0,0 @@
-from django.http import HttpResponse, Http404
-
-from anyjson import serialize as JSON_dump
-from billiard.utils.functional import wraps
-
-from celery.utils import get_full_cls_name
-from celery.result import AsyncResult
-from celery.registry import tasks
-from celery.backends import default_backend
-
-
-def task_view(task):
-    """Decorator turning any task into a view that applies the task
-    asynchronously.
-
-    Returns a JSON dictionary containing the keys ``ok``, and
-        ``task_id``.
-
-    """
-
-    def _applier(request, **options):
-        kwargs = request.method == "POST" and \
-            request.POST.copy() or request.GET.copy()
-        kwargs = dict((key.encode("utf-8"), value)
-                    for key, value in kwargs.items())
-
-        result = task.apply_async(kwargs=kwargs)
-        response_data = {"ok": "true", "task_id": result.task_id}
-        return HttpResponse(JSON_dump(response_data),
-                            mimetype="application/json")
-
-    return _applier
-
-
-def apply(request, task_name):
-    """View applying a task.
-
-    **Note:** Please use this with caution. Preferably you shouldn't make this
-        publicly accessible without ensuring your code is safe!
-
-    """
-    try:
-        task = tasks[task_name]
-    except KeyError:
-        raise Http404("apply: no such task")
-    return task_view(task)(request)
-
-
-def is_task_successful(request, task_id):
-    """Returns task execute status in JSON format."""
-    response_data = {"task": {"id": task_id,
-                              "executed": AsyncResult(task_id).successful()}}
-    return HttpResponse(JSON_dump(response_data), mimetype="application/json")
-
-
-def task_status(request, task_id):
-    """Returns task status and result in JSON format."""
-    status = default_backend.get_status(task_id)
-    res = default_backend.get_result(task_id)
-    response_data = dict(id=task_id, status=status, result=res)
-    if status in default_backend.EXCEPTION_STATES:
-        traceback = default_backend.get_traceback(task_id)
-        response_data.update({"result": str(res.args[0]),
-                              "exc": get_full_cls_name(res.__class__),
-                              "traceback": traceback})
-
-    return HttpResponse(JSON_dump({"task": response_data}),
-            mimetype="application/json")
-
-
-def task_webhook(fun):
-    """Decorator turning a function into a task webhook.
-
-    If an exception is raised within the function, the decorated
-    function catches this and returns an error JSON response, otherwise
-    it returns the result as a JSON response.
-
-
-    Example:
-
-    .. code-block:: python
-
-        @task_webhook
-        def add(request):
-            x = int(request.GET["x"])
-            y = int(request.GET["y"])
-            return x + y
-
-        >>> response = add(request)
-        >>> response.content
-        '{"status": "success", "retval": 100}'
-
-    """
-
-    @wraps(fun)
-    def _inner(*args, **kwargs):
-        try:
-            retval = fun(*args, **kwargs)
-        except Exception, exc:
-            response = {"status": "failure", "reason": str(exc)}
-        else:
-            response = {"status": "success", "retval": retval}
-
-        return HttpResponse(JSON_dump(response), mimetype="application/json")
-
-    return _inner

+ 2 - 2
tests/settings.py

@@ -11,7 +11,7 @@ SITE_ID = 300
 DEBUG = True
 TEMPLATE_DEBUG = DEBUG
 
-ROOT_URLCONF = "urls"
+ROOT_URLCONF = "tests.urls"
 
 ADMINS = (
     # ('Your Name', 'your_email@domain.com'),
@@ -70,7 +70,7 @@ INSTALLED_APPS = (
     'django.contrib.sessions',
     'django.contrib.sites',
     'django_nose',
-    'celery',
+    'djcelery',
     'someapp',
     'someappwotask',
 )

+ 2 - 2
tests/urls.py

@@ -1,6 +1,6 @@
 from django.conf.urls.defaults import (patterns, url, include,
                                        handler500, handler404)
-from celery.views import apply
+from djcelery.views import apply
 
 # Uncomment the next two lines to enable the admin:
 # from django.contrib import admin
@@ -17,6 +17,6 @@ urlpatterns = patterns('',
     # Uncomment the next line to enable the admin:
     # (r'^admin/(.*)', admin.site.root),
     url(r"^apply/(?P<task_name>.+?)/", apply, name="celery-apply"),
-    url(r"^celery/", include("celery.urls")),
+    url(r"^celery/", include("djcelery.urls")),
 
 )