Ask Solem, 13 years ago
parent
revision
fa2d784f82

+ 1 - 0
celery/__init__.py

@@ -22,6 +22,7 @@ old_module, new_module = recreate_module(__name__,  # pragma: no cover
         "celery.app":       ["Celery", "bugreport"],
         "celery.app.state": ["current_app", "current_task"],
         "celery.canvas":    ["chain", "chord", "group", "subtask"],
+        "celery.utils":     ["uuid"],
     },
     direct={"task": "celery.task"},
     __package__="celery",

+ 9 - 1
celery/app/builtins.py

@@ -125,6 +125,7 @@ def add_group_task(app):
 @builtin_task
 def add_chain_task(app):
     from celery.canvas import maybe_subtask
+    from celery.result import EagerResult
 
     class Chain(app.Task):
         name = "celery.chain"
@@ -133,7 +134,6 @@ def add_chain_task(app):
         def apply_async(self, args=(), kwargs={}, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
                 return self.apply(args, kwargs, **options)
-            tasks = kwargs["tasks"]
             tasks = [maybe_subtask(task).clone(task_id=uuid(), **kwargs)
                         for task in kwargs["tasks"]]
             reduce(lambda a, b: a.link(b), tasks)
@@ -143,6 +143,14 @@ def add_chain_task(app):
             reduce(lambda a, b: a.set_parent(b), reversed(results))
             return results[-1]
 
+        def apply(self, args=(), kwargs={}, **options):
+            tasks = [maybe_subtask(task).clone() for task in kwargs["tasks"]]
+            res = prev = None
+            for task in tasks:
+                res = task.apply((prev.get(), ) if prev else ())
+                res.parent, prev = prev, res
+            return res
+
     return Chain
 
 

+ 2 - 1
celery/app/control.py

@@ -104,7 +104,7 @@ class Control(object):
         return Inspect(self, destination=destination, timeout=timeout,
                              callback=callback)
 
-    def discard_all(self, connection=None):
+    def purge(self, connection=None):
         """Discard all waiting tasks.
 
         This will ignore all tasks waiting for execution, and they will
@@ -116,6 +116,7 @@ class Control(object):
         with self.app.default_connection(connection) as conn:
             return self.app.amqp.get_task_consumer(connection=conn)\
                                 .discard_all()
+    discard_all = purge
 
     def revoke(self, task_id, destination=None, terminate=False,
             signal="SIGTERM", **kwargs):

+ 3 - 0
celery/app/utils.py

@@ -5,8 +5,11 @@ import kombu
 import os
 import platform as _platform
 
+from operator import add
+
 from celery import datastructures
 from celery import platforms
+from celery.utils.functional import maybe_list
 from celery.utils.text import pretty
 from celery.utils.imports import qualname
 

+ 1 - 2
celery/apps/beat.py

@@ -68,8 +68,7 @@ class Beat(configurated):
     def start_scheduler(self):
         c = self.colored
         if self.pidfile:
-            pidlock = platforms.create_pidlock(self.pidfile).acquire()
-            atexit.register(pidlock.release)
+            platforms.create_pidlock(self.pidfile)
         beat = self.Service(app=self.app,
                             max_interval=self.max_interval,
                             scheduler_cls=self.scheduler_cls,

+ 35 - 68
celery/apps/worker.py

@@ -51,7 +51,7 @@ EXTRA_INFO_FMT = """
 %(tasks)s
 """
 
-UNKNOWN_QUEUE_ERROR = """\
+UNKNOWN_QUEUE = """\
 Trying to select queue subset of %r, but queue %s is not
 defined in the CELERY_QUEUES setting.
 
@@ -69,7 +69,7 @@ class Worker(configurated):
     redirect_stdouts = from_config()
     redirect_stdouts_level = from_config()
 
-    def __init__(self, hostname=None, discard=False, embed_clockservice=False,
+    def __init__(self, hostname=None, purge=False, beat=False,
             queues=None, include=None, app=None, pidfile=None,
             autoscale=None, autoreload=False, no_execv=False, **kwargs):
         self.app = app = app_or_default(app or self.app)
@@ -86,14 +86,11 @@ class Worker(configurated):
                 self.concurrency = cpu_count()
             except NotImplementedError:
                 self.concurrency = 2
-        self.discard = discard
-        self.embed_clockservice = embed_clockservice
-        if self.app.IS_WINDOWS and self.embed_clockservice:
-            self.die("-B option does not work on Windows.  "
-                     "Please run celerybeat as a separate service.")
+        self.purge = purge
+        self.beat = beat
         self.use_queues = [] if queues is None else queues
         self.queues = None
-        self.include = [] if include is None else include
+        self.include = include
         self.pidfile = pidfile
         self.autoscale = None
         self.autoreload = autoreload
@@ -107,34 +104,29 @@ class Worker(configurated):
 
         if isinstance(self.use_queues, basestring):
             self.use_queues = self.use_queues.split(",")
-        if isinstance(self.include, basestring):
-            self.include = self.include.split(",")
-
-        try:
-            self.loglevel = mlevel(self.loglevel)
-        except KeyError:
-            self.die("Unknown level %r. Please use one of %s." % (
-                        self.loglevel,
-                        "|".join(l for l in LOG_LEVELS.keys()
-                                    if isinstance(l, basestring))))
+        if self.include:
+            if isinstance(self.include, basestring):
+                self.include = self.include.split(",")
+            app.conf.CELERY_IMPORTS = tuple(
+                    self.include) + tuple(app.conf.CELERY_IMPORTS)
+        self.loglevel = mlevel(self.loglevel)
 
     def run(self):
-        self.init_loader()
         self.init_queues()
-        self.worker_init()
+        self.app.loader.init_worker()
         self.redirect_stdouts_to_logger()
 
         if getattr(os, "getuid", None) and os.getuid() == 0:
             warnings.warn(RuntimeWarning(
                 "Running celeryd with superuser privileges is discouraged!"))
 
-        if self.discard:
+        if self.purge:
             self.purge_messages()
 
         # Dump configuration to screen so we have some basic information
         # for when users sends bug reports.
         print(str(self.colored.cyan(" \n", self.startup_info())) +
-              str(self.colored.reset(self.extra_info())))
+              str(self.colored.reset(self.extra_info() or "")))
         self.set_process_status("-active-")
 
         try:
@@ -150,50 +142,34 @@ class Worker(configurated):
         try:
             self.app.select_queues(self.use_queues)
         except KeyError, exc:
-            raise ImproperlyConfigured(
-                        UNKNOWN_QUEUE_ERROR % (self.use_queues, exc))
-
-    def init_loader(self):
-        self.loader = self.app.loader
-        self.settings = self.app.conf
-        for module in self.include:
-            self.loader.import_task_module(module)
+            raise ImproperlyConfigured(UNKNOWN_QUEUE % (self.use_queues, exc))
 
     def redirect_stdouts_to_logger(self):
         self.app.log.setup(self.loglevel, self.logfile,
-                           self.redirect_stdouts,
-                           self.redirect_stdouts_level)
+                           self.redirect_stdouts, self.redirect_stdouts_level)
 
     def purge_messages(self):
-        count = self.app.control.discard_all()
-        print("discard: Erased %d %s from the queue.\n" % (
+        count = self.app.control.purge()
+        print("purge: Erased %d %s from the queue.\n" % (
                 count, pluralize(count, "message")))
 
-    def worker_init(self):
-        # Run the worker init handler.
-        # (Usually imports task modules and such.)
-        self.loader.init_worker()
-
     def tasklist(self, include_builtins=True):
-        tasklist = self.app.tasks.keys()
+        tasks = self.app.tasks.keys()
         if not include_builtins:
-            tasklist = filter(lambda s: not s.startswith("celery."),
-                              tasklist)
-        return "\n".join("  . %s" % task for task in sorted(tasklist))
+            tasks = filter(lambda s: not s.startswith("celery."), tasks)
+        return "\n".join("  . %s" % task for task in sorted(tasks))
 
     def extra_info(self):
         if self.loglevel <= logging.INFO:
             include_builtins = self.loglevel <= logging.DEBUG
             tasklist = self.tasklist(include_builtins=include_builtins)
             return EXTRA_INFO_FMT % {"tasks": tasklist}
-        return ""
 
     def startup_info(self):
         app = self.app
         concurrency = self.concurrency
         if self.autoscale:
-            cmax, cmin = self.autoscale
-            concurrency = "{min=%s, max=%s}" % (cmin, cmax)
+            concurrency = "{min=%s, max=%s}" % self.autoscale
         return BANNER % {
             "hostname": self.hostname,
             "version": __version__,
@@ -201,24 +177,21 @@ class Worker(configurated):
             "concurrency": concurrency,
             "loglevel": LOG_LEVELS[self.loglevel],
             "logfile": self.logfile or "[stderr]",
-            "celerybeat": "ON" if self.embed_clockservice else "OFF",
+            "celerybeat": "ON" if self.beat else "OFF",
             "events": "ON" if self.send_events else "OFF",
-            "loader": qualname(self.loader),
+            "loader": qualname(self.app.loader),
             "queues": app.amqp.queues.format(indent=18, indent_first=False),
         }
 
     def run_worker(self):
         if self.pidfile:
-            pidlock = platforms.create_pidlock(self.pidfile).acquire()
-            atexit.register(pidlock.release)
+            platforms.create_pidlock(self.pidfile)
         worker = self.WorkController(app=self.app,
-                                    hostname=self.hostname,
-                                    ready_callback=self.on_consumer_ready,
-                                    embed_clockservice=self.embed_clockservice,
-                                    autoscale=self.autoscale,
-                                    autoreload=self.autoreload,
-                                    no_execv=self.no_execv,
-                                    **self.confopts_as_dict())
+                    hostname=self.hostname,
+                    ready_callback=self.on_consumer_ready, beat=self.beat,
+                    autoscale=self.autoscale, autoreload=self.autoreload,
+                    no_execv=self.no_execv,
+                    **self.confopts_as_dict())
         self.install_platform_tweaks(worker)
         signals.worker_init.send(sender=worker)
         worker.start()
@@ -250,26 +223,20 @@ class Worker(configurated):
         os.environ.setdefault("celery_dummy_proxy", "set_by_celeryd")
 
     def set_process_status(self, info):
-        info = "%s (%s)" % (info, platforms.strargv(sys.argv))
         return platforms.set_mp_process_title("celeryd",
-                                              info=info,
-                                              hostname=self.hostname)
+                info="%s (%s)" % (info, platforms.strargv(sys.argv)),
+                hostname=self.hostname)
 
-    def die(self, msg, exitcode=1):
-        sys.stderr.write("Error: %s\n" % (msg, ))
-        sys.exit(exitcode)
 
 
 def _shutdown_handler(worker, sig="TERM", how="stop", exc=SystemExit,
-        callback=None):
-    types = {"terminate": "Cold", "stop": "Warm"}
+        callback=None, types={"terminate": "Cold", "stop": "Warm"}):
 
     def _handle_request(signum, frame):
-        process_name = current_process()._name
-        if not process_name or process_name == "MainProcess":
+        if current_process()._name == "MainProcess":
             if callback:
                 callback(worker)
-            print("celeryd: %s shutdown (%s)" % (types[how], process_name, ))
+            print("celeryd: %s shutdown (MainProcess)" % types[how])
             getattr(worker, how)(in_sighandler=True)
         raise exc()
     _handle_request.__name__ = "worker_" + how

+ 1 - 1
celery/bin/celery.py

@@ -242,7 +242,7 @@ class purge(Command):
 
     def run(self, *args, **kwargs):
         queues = len(self.app.amqp.queues.keys())
-        messages_removed = self.app.control.discard_all()
+        messages_removed = self.app.control.purge()
         if messages_removed:
             self.out("Purged %s %s from %s known task %s." % (
                 messages_removed, pluralize(messages_removed, "message"),

+ 15 - 5
celery/bin/celeryd.py

@@ -69,7 +69,7 @@
 
 .. cmdoption:: --purge
 
-    Discard all waiting tasks before the daemon is started.
+    Purges all waiting tasks before the daemon is started.
     **WARNING**: This is unrecoverable, and the tasks will be
     deleted from the messaging server.
 
@@ -118,6 +118,7 @@ import sys
 from billiard import freeze_support
 
 from celery.bin.base import Command, Option
+from celery.utils.log import LOG_LEVELS, mlevel
 
 
 class WorkerCommand(Command):
@@ -133,6 +134,17 @@ class WorkerCommand(Command):
         from celery import concurrency
         kwargs["pool_cls"] = concurrency.get_implementation(
                     kwargs.get("pool_cls") or self.app.conf.CELERYD_POOL)
+        if self.app.IS_WINDOWS and kwargs.get("beat"):
+            self.die("-B option does not work on Windows.  "
+                     "Please run celerybeat as a separate service.")
+        loglevel = kwargs.get("loglevel")
+        if loglevel:
+            try:
+                kwargs["loglevel"] = mlevel(loglevel)
+            except KeyError:
+                self.die("Unknown level %r. Please use one of %s." % (
+                    loglevel, "|".join(l for l in LOG_LEVELS.keys()
+                      if isinstance(l, basestring))))
         return self.app.Worker(**kwargs).run()
 
     def get_options(self):
@@ -141,13 +153,11 @@ class WorkerCommand(Command):
             Option('-c', '--concurrency',
                 default=conf.CELERYD_CONCURRENCY, type="int"),
             Option('-P', '--pool', default=conf.CELERYD_POOL, dest="pool_cls"),
-            Option('--purge', '--discard', default=False,
-                action="store_true", dest="discard"),
+            Option('--purge', '--discard', default=False, action="store_true"),
             Option('-f', '--logfile', default=conf.CELERYD_LOG_FILE),
             Option('-l', '--loglevel', default=conf.CELERYD_LOG_LEVEL),
             Option('-n', '--hostname'),
-            Option('-B', '--beat',
-                action="store_true", dest="embed_clockservice"),
+            Option('-B', '--beat', action="store_true"),
             Option('-s', '--schedule', dest="schedule_filename",
                 default=conf.CELERYBEAT_SCHEDULE_FILENAME),
             Option('--scheduler', dest="scheduler_cls"),

+ 3 - 7
celery/canvas.py

@@ -132,7 +132,6 @@ class Signature(dict):
         return self
 
     def apply_async(self, args=(), kwargs={}, **options):
-        """Apply this task asynchronously."""
         # For callbacks: extra args are prepended to the stored args.
         args, kwargs, options = self._merge(args, kwargs, options)
         return self.type.apply_async(args, kwargs, **options)
@@ -144,18 +143,12 @@ class Signature(dict):
         return value
 
     def link(self, callback):
-        """Add a callback task to be applied if this task
-        executes successfully."""
         return self.append_to_list_option("link", callback)
 
     def link_error(self, errback):
-        """Add a callback task to be applied if an error occurs
-        while executing this task."""
         return self.append_to_list_option("link_error", errback)
 
     def flatten_links(self):
-        """Gives a recursive list of dependencies (unchain if you will,
-        but with links intact)."""
         return list(chain_from_iterable(_chain([[self]],
                 (link.flatten_links()
                     for link in maybe_list(self.options.get("link")) or []))))
@@ -201,6 +194,9 @@ class chain(Signature):
         self.tasks = tasks
         self.subtask_type = "chain"
 
+    def __call__(self, *args, **kwargs):
+        return self.apply_async(*args, **kwargs)
+
     @classmethod
     def from_dict(self, d):
         return chain(*d["kwargs"]["tasks"], **kwdict(d["options"]))

+ 4 - 4
celery/datastructures.py

@@ -211,13 +211,13 @@ class AttributeDictMixin(object):
 
     """
 
-    def __getattr__(self, key):
+    def __getattr__(self, k):
         """`d.key -> d[key]`"""
         try:
-            return self[key]
+            return self[k]
         except KeyError:
-            raise AttributeError("'%s' object has no attribute '%s'" % (
-                    self.__class__.__name__, key))
+            raise AttributeError(
+                "'%s' object has no attribute '%s'" % (type(self).__name__, k))
 
     def __setattr__(self, key, value):
         """`d[key] = value -> d.key = value`"""

+ 1 - 2
celery/events/snapshot.py

@@ -95,8 +95,7 @@ def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
     app = app_or_default(app)
 
     if pidfile:
-        pidlock = platforms.create_pidlock(pidfile).acquire()
-        atexit.register(pidlock.release)
+        platforms.create_pidlock(pidfile)
 
     app.log.setup_logging_subsystem(loglevel, logfile)
 

+ 3 - 3
celery/loaders/base.py

@@ -99,9 +99,9 @@ class BaseLoader(object):
                 package=package)
 
     def import_default_modules(self):
-        imports = set(maybe_list(self.conf.get("CELERY_IMPORTS") or ()))
-        return [self.import_task_module(module)
-                    for module in imports | self.builtin_modules]
+        return [self.import_task_module(m)
+            for m in set(maybe_list(self.app.conf.CELERY_IMPORTS))
+                        | self.builtin_modules]
 
     def init_worker(self):
         if not self.worker_initialized:

+ 5 - 3
celery/loaders/default.py

@@ -59,9 +59,11 @@ class Loader(BaseLoader):
                     CONFIG_INVALID_NAME % {
                         "module": configname}), sys.exc_info()[2]
         except ImportError:
-            warnings.warn(NotConfigured(
-                "No %r module found! Please make sure it exists and "
-                "is available to Python." % (configname, )))
+            # billiard sets this if forked using execv
+            if not os.environ.get("FORKED_BY_MULTIPROCESSING"):
+                warnings.warn(NotConfigured(
+                    "No %r module found! Please make sure it exists and "
+                    "is available to Python." % (configname, )))
             return self.setup_settings({})
         else:
             celeryconfig = self.import_from_cwd(configname)

+ 8 - 8
celery/platforms.py

@@ -13,6 +13,7 @@
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import atexit
 import errno
 import os
 import platform as _platform
@@ -51,6 +52,9 @@ PIDFILE_MODE = ((os.R_OK | os.W_OK) << 6) | ((os.R_OK) << 3) | ((os.R_OK))
 
 _setps_bucket = TokenBucket(0.5)  # 30/m, every 2 seconds
 
+PIDLOCKED = """ERROR: Pidfile (%s) already exists.
+Seems we're already running? (PID: %s)"""
+
 
 def pyimplementation():
     if hasattr(_platform, "python_implementation"):
@@ -214,18 +218,14 @@ def create_pidlock(pidfile):
 
     .. code-block:: python
 
-        import atexit
-        pidlock = create_pidlock("/var/run/app.pid").acquire()
-        atexit.register(pidlock.release)
+        pidlock = create_pidlock("/var/run/app.pid")
 
     """
-
     pidlock = PIDFile(pidfile)
     if pidlock.is_locked() and not pidlock.remove_if_stale():
-        raise SystemExit(
-                "ERROR: Pidfile (%s) already exists.\n"
-                "Seems we're already running? (PID: %s)" % (
-                    pidfile, pidlock.read_pid()))
+        raise SystemExit(PIDLOCKED % (pidfile, pidlock.read_pid()))
+    pidlock.acquire()
+    atexit.register(pidlock.release)
     return pidlock
 
 

+ 17 - 7
celery/tests/app/test_loaders.py

@@ -75,6 +75,7 @@ class test_LoaderBase(Case):
 
     def setUp(self):
         self.loader = DummyLoader()
+        self.app = app_or_default()
 
     def test_handlers_pass(self):
         self.loader.on_task_init("foo.task", "feedface-cafebabe")
@@ -101,9 +102,14 @@ class test_LoaderBase(Case):
 
     def test_import_default_modules(self):
         modnames = lambda l: [m.__name__ for m in l]
-        self.assertEqual(sorted(modnames(
-                            self.loader.import_default_modules())),
-                         sorted(modnames([os, sys])))
+        prev, self.app.conf.CELERY_IMPORTS = \
+                self.app.conf.CELERY_IMPORTS, ("os", "sys")
+        try:
+            self.assertEqual(sorted(modnames(
+                                self.loader.import_default_modules())),
+                            sorted(modnames([os, sys])))
+        finally:
+            self.app.conf.CELERY_IMPORTS = prev
 
     def test_import_from_cwd_custom_imp(self):
 
@@ -264,7 +270,11 @@ class test_AppLoader(Case):
         self.assertEqual(self.loader.conf["BAR"], 20)
 
     def test_on_worker_init(self):
-        self.loader.conf["CELERY_IMPORTS"] = ("subprocess", )
-        sys.modules.pop("subprocess", None)
-        self.loader.init_worker()
-        self.assertIn("subprocess", sys.modules)
+        prev, self.app.conf.CELERY_IMPORTS = \
+                self.app.conf.CELERY_IMPORTS, ("subprocess", )
+        try:
+            sys.modules.pop("subprocess", None)
+            self.loader.init_worker()
+            self.assertIn("subprocess", sys.modules)
+        finally:
+            self.app.conf.CELERY_IMPORTS = prev

+ 5 - 5
celery/tests/bin/test_celery.py

@@ -96,7 +96,7 @@ class test_Command(AppCase):
 class test_Delegate(AppCase):
 
     def test_get_options(self):
-        self.assertTrue(worker().get_options())
+        self.assertTrue(worker(app=self.app).get_options())
 
     def test_run(self):
         w = worker()
@@ -153,15 +153,15 @@ class test_apply(AppCase):
 
 class test_purge(AppCase):
 
-    @patch("celery.app.control.Control.discard_all")
-    def test_run(self, discard_all):
+    @patch("celery.app.control.Control.purge")
+    def test_run(self, purge_):
         out = WhateverIO()
         a = purge(app=self.app, stdout=out)
-        discard_all.return_value = 0
+        purge_.return_value = 0
         a.run()
         self.assertIn("No messages purged", out.getvalue())
 
-        discard_all.return_value = 100
+        purge_.return_value = 100
         a.run()
         self.assertIn("100 messages", out.getvalue())
 

+ 6 - 12
celery/tests/bin/test_celerybeat.py

@@ -15,7 +15,7 @@ from celery.app import app_or_default
 from celery.bin import celerybeat as celerybeat_bin
 from celery.apps import beat as beatapp
 
-from celery.tests.utils import AppCase, create_pidlock
+from celery.tests.utils import AppCase
 
 
 class MockedShelveModule(object):
@@ -127,17 +127,11 @@ class test_Beat(AppCase):
         self.assertTrue(logger.critical.called)
 
     @redirect_stdouts
-    def test_use_pidfile(self, stdout, stderr):
-        from celery import platforms
-
-        prev, platforms.create_pidlock = platforms.create_pidlock, \
-                                         create_pidlock
-        try:
-            b = MockBeat2(pidfile="pidfilelockfilepid", socket_timeout=None)
-            b.start_scheduler()
-            self.assertTrue(create_pidlock.instance[0].acquired)
-        finally:
-            platforms.create_pidlock = prev
+    @patch("celery.platforms.create_pidlock")
+    def test_use_pidfile(self, create_pidlock, stdout, stderr):
+        b = MockBeat2(pidfile="pidfilelockfilepid", socket_timeout=None)
+        b.start_scheduler()
+        self.assertTrue(create_pidlock.called)
 
 
 class MockDaemonContext(object):

+ 12 - 20
celery/tests/bin/test_celeryd.py

@@ -21,7 +21,7 @@ from celery.bin.celeryd import WorkerCommand, main as celeryd_main
 from celery.exceptions import ImproperlyConfigured, SystemTerminate
 from celery.utils.log import ensure_process_aware_logger
 
-from celery.tests.utils import AppCase, WhateverIO, create_pidlock
+from celery.tests.utils import AppCase, WhateverIO
 
 ensure_process_aware_logger()
 
@@ -79,7 +79,7 @@ class test_Worker(AppCase):
         celery = Celery(set_as_current=False)
         celery.IS_WINDOWS = True
         with self.assertRaises(SystemExit):
-            celery.Worker(embed_clockservice=True)
+            WorkerCommand(app=celery).run(beat=True)
 
     def test_tasklist(self):
         celery = Celery(set_as_current=False)
@@ -144,9 +144,8 @@ class test_Worker(AppCase):
     @disable_stdouts
     def test_run(self):
         self.Worker().run()
-        self.Worker(discard=True).run()
+        self.Worker(purge=True).run()
         worker = self.Worker()
-        worker.init_loader()
         worker.run()
 
         prev, cd.IGNORE_ERRORS = cd.IGNORE_ERRORS, (KeyError, )
@@ -204,15 +203,14 @@ class test_Worker(AppCase):
         worker1 = self.Worker(include="some.module")
         self.assertListEqual(worker1.include, ["some.module"])
         worker2 = self.Worker(include="some.module,another.package")
-        self.assertListEqual(worker2.include, ["some.module",
-                                               "another.package"])
-        worker3 = self.Worker(include="os,sys")
-        worker3.init_loader()
+        self.assertListEqual(worker2.include,
+                ["some.module", "another.package"])
+        worker3 = self.Worker(include=["os", "sys"])
 
     @disable_stdouts
     def test_unknown_loglevel(self):
         with self.assertRaises(SystemExit):
-            self.Worker(loglevel="ALIEN")
+            WorkerCommand(app=self.app).run(loglevel="ALIEN")
         worker1 = self.Worker(loglevel=0xFFFF)
         self.assertEqual(worker1.loglevel, 0xFFFF)
 
@@ -234,17 +232,11 @@ class test_Worker(AppCase):
             os.getuid = prev
 
     @disable_stdouts
-    def test_use_pidfile(self):
-        from celery import platforms
-
-        prev, platforms.create_pidlock = platforms.create_pidlock, \
-                                         create_pidlock
-        try:
-            worker = self.Worker(pidfile="pidfilelockfilepid")
-            worker.run_worker()
-            self.assertTrue(create_pidlock.instance[0].acquired)
-        finally:
-            platforms.create_pidlock = prev
+    @patch("celery.platforms.create_pidlock")
+    def test_use_pidfile(self, create_pidlock):
+        worker = self.Worker(pidfile="pidfilelockfilepid")
+        worker.run_worker()
+        self.assertTrue(create_pidlock.called)
 
     @disable_stdouts
     def test_redirect_stdouts(self):

+ 1 - 3
celery/tests/events/test_snapshot.py

@@ -131,9 +131,7 @@ class test_evcam(Case):
         finally:
             self.MockReceiver.raise_keyboard_interrupt = False
 
-    @patch("atexit.register")
     @patch("celery.platforms.create_pidlock")
-    def test_evcam_pidfile(self, create_pidlock, atexit):
+    def test_evcam_pidfile(self, create_pidlock):
         evcam(Polaroid, timer=timer, pidfile="/var/pid")
-        self.assertTrue(atexit.called)
         create_pidlock.assert_called_with("/var/pid")

+ 19 - 1
celery/tests/tasks/test_canvas.py

@@ -3,8 +3,9 @@ from __future__ import with_statement
 
 from mock import Mock
 
-from celery import task
+from celery import current_app, task
 from celery.canvas import Signature, chain, group, chord, subtask
+from celery.result import EagerResult
 
 from celery.tests.utils import Case
 
@@ -110,6 +111,23 @@ class test_chain(Case):
         self.assertIsInstance(subtask(x), chain)
         self.assertIsInstance(subtask(dict(x)), chain)
 
+    def test_always_eager(self):
+        current_app.conf.CELERY_ALWAYS_EAGER = True
+        try:
+            self.assertEqual(~(add.s(4, 4) | add.s(8)), 16)
+        finally:
+            current_app.conf.CELERY_ALWAYS_EAGER = False
+
+    def test_apply(self):
+        x = chain(add.s(4, 4), add.s(8), add.s(10))
+        res = x.apply()
+        self.assertIsInstance(res, EagerResult)
+        self.assertEqual(res.get(), 26)
+
+        self.assertEqual(res.parent.get(), 16)
+        self.assertEqual(res.parent.parent.get(), 8)
+        self.assertIsNone(res.parent.parent.parent)
+
 
 class test_group(Case):
 

+ 0 - 17
celery/tests/utils.py

@@ -464,23 +464,6 @@ def patch_modules(*modules):
             sys.modules[name] = mod
 
 
-class create_pidlock(object):
-    instance = [None]
-
-    def __init__(self, file):
-        self.file = file
-        self.instance[0] = self
-
-    def acquire(self):
-        self.acquired = True
-
-        class Object(object):
-            def release(self):
-                pass
-
-        return Object()
-
-
 @contextmanager
 def mock_module(*names):
     prev = {}

+ 1 - 2
celery/tests/worker/test_worker.py

@@ -831,8 +831,7 @@ class test_WorkController(AppCase):
         self.assertTrue(worker.components)
 
     def test_with_embedded_celerybeat(self):
-        worker = WorkController(concurrency=1, loglevel=0,
-                                embed_clockservice=True)
+        worker = WorkController(concurrency=1, loglevel=0, beat=True)
         self.assertTrue(worker.beat)
         self.assertIn(worker.beat, worker.components)
 

+ 3 - 3
celery/worker/__init__.py

@@ -102,14 +102,14 @@ class Pool(abstract.StartStopComponent):
 class Beat(abstract.StartStopComponent):
     """Component used to embed a celerybeat process.
 
-    This will only be enabled if the ``embed_clockservice``
+    This will only be enabled if the ``beat``
     argument is set.
 
     """
     name = "worker.beat"
 
-    def __init__(self, w, embed_clockservice=False, **kwargs):
-        self.enabled = w.embed_clockservice = embed_clockservice
+    def __init__(self, w, beat=False, **kwargs):
+        self.enabled = w.beat = beat
         w.beat = None
 
     def create(self, w):

+ 4 - 6
celery/worker/job.py

@@ -19,7 +19,7 @@ import sys
 
 from datetime import datetime
 
-from kombu.utils import kwdict
+from kombu.utils import kwdict, reprcall
 from kombu.utils.encoding import safe_repr, safe_str
 
 from celery import current_app
@@ -158,8 +158,7 @@ class Request(object):
     def from_message(cls, message, body, **kwargs):
         # should be deprecated
         return Request(body,
-                   delivery_info=getattr(message, "delivery_info", None),
-                   **kwargs)
+            delivery_info=getattr(message, "delivery_info", None), **kwargs)
 
     def extend_with_default_kwargs(self, loglevel, logfile):
         """Extend the tasks keyword arguments with standard task arguments.
@@ -428,9 +427,8 @@ class Request(object):
     __str__ = shortinfo
 
     def __repr__(self):
-        return '<%s: {name:"%s", id:"%s", args:"%s", kwargs:"%s"}>' % (
-                self.__class__.__name__,
-                self.name, self.id, self.args, self.kwargs)
+        return '<%s %s: %s>' % (type(self).__name__, self.id,
+            reprcall(self.name, self.args, self.kwargs))
 
     @property
     def tzlocal(self):

+ 1 - 1
funtests/suite/test_leak.py

@@ -82,7 +82,7 @@ class LeakFunCase(unittest.TestCase):
                     base, sizes.average(), sizes, ))
                 raise
         finally:
-            self.app.control.discard_all()
+            self.app.control.purge()
 
 
 class test_leaks(LeakFunCase):

+ 8 - 0
setup.py

@@ -22,6 +22,14 @@ NAME = "celery"
 entrypoints = {}
 extra = {}
 
+print("CALLING SETUP.PY")
+try:
+    from celery.app import task
+    if "__init__.py" in task.__file__:
+        os.unlink(os.path.abspath(task.__file__))
+except ImportError:
+    pass
+
 # -*- Classifiers -*-
 
 classes = """