
Run CI on integration test suite

Ask Solem 8 years ago
parent
commit
665c8842fc

+ 8 - 5
.travis.yml

@@ -9,11 +9,14 @@ env:
   global:
     PYTHONUNBUFFERED=yes
   matrix:
-    - TOXENV=2.7
-    - TOXENV=3.4
-    - TOXENV=3.5
-    - TOXENV=pypy PYPY_VERSION="5.3"
-    - TOXENV=pypy3
+    - TOXENV=2.7-unit
+    - TOXENV=2.7-integration
+    - TOXENV=3.4-unit
+    - TOXENV=3.4-integration
+    - TOXENV=3.5-unit
+    - TOXENV=3.5-integration
+    - TOXENV=pypy-unit PYPY_VERSION="5.3"
+    - TOXENV=pypy3-unit
     - TOXENV=flake8
     - TOXENV=flakeplus
     - TOXENV=apicheck

+ 1 - 0
celery/bootsteps.py

@@ -30,6 +30,7 @@ TERMINATE = 0x3
 
 logger = get_logger(__name__)
 
+
 def _pre(ns, fmt):
     return '| {0}: {1}'.format(ns.alias, fmt)
 

+ 21 - 0
celery/contrib/pytest.py

@@ -40,8 +40,15 @@ def _create_app(request, enable_logging=False, use_trap=False, **config):
             test_app.set_current()
         yield test_app
 
+
 @pytest.fixture(scope='session')
 def use_celery_app_trap():
+    # type: () -> bool
+    """You can override this fixture to enable the app trap.
+
+    The app trap raises an exception whenever something attempts
+    to use the current or default apps.
+    """
     return False
 
 
@@ -79,16 +86,30 @@ def celery_session_worker(request, celery_session_app,
 
 @pytest.fixture(scope='session')
 def celery_enable_logging():
+    # type: () -> bool
+    """You can override this fixture to enable logging."""
     return False
 
 
 @pytest.fixture(scope='session')
 def celery_includes():
+    # type: () -> Sequence[str]
+    """You can override this fixture to include modules when a worker starts.
+
+    You can have this return a list of module names to import;
+    these can be task modules, modules registering signals, and so on.
+    """
     return ()
 
 
 @pytest.fixture(scope='session')
 def celery_worker_pool():
+    # type: () -> Union[str, Any]
+    """You can override this fixture to set the worker pool.
+
+    The "solo" pool is used by default, but you can set this to
+    return e.g. "prefork".
+    """
     return 'solo'
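
As a companion to the fixture docstrings above, here is a minimal sketch of a project-level conftest.py that overrides them; the module name myproj.tasks and the choice of the prefork pool are illustrative assumptions, not part of this commit:

    # conftest.py (hypothetical project using celery.contrib.pytest)
    import pytest

    @pytest.fixture(scope='session')
    def celery_includes():
        # Modules imported when the embedded worker starts
        # (myproj.tasks is a placeholder name).
        return ['myproj.tasks']

    @pytest.fixture(scope='session')
    def celery_worker_pool():
        # Override the default 'solo' pool.
        return 'prefork'

    @pytest.fixture(scope='session')
    def use_celery_app_trap():
        # Raise if anything uses the current/default app during tests.
        return True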
 
 

+ 11 - 0
celery/contrib/testing/app.py

@@ -1,3 +1,4 @@
+"""Create Celery app instances used for testing."""
 from __future__ import absolute_import, unicode_literals
 
 import weakref
@@ -10,6 +11,7 @@ from kombu.utils.imports import symbol_by_name
 from celery import Celery
 from celery import _state
 
+#: Contains the default configuration values for the test app.
 DEFAULT_TEST_CONFIG = {
     'worker_hijack_root_logger': False,
     'worker_log_color': False,
@@ -63,6 +65,11 @@ def TestApp(name=None, config=None, enable_logging=False, set_as_current=False,
 
 @contextmanager
 def set_trap(app):
+    """Context manager that installs the trap app.
+
+    The trap means that anything trying to use the current or default app
+    will raise an exception.
+    """
     trap = Trap()
     prev_tls = _state._tls
     _state.set_default_app(trap)
@@ -77,6 +84,10 @@ def set_trap(app):
 
 @contextmanager
 def setup_default_app(app, use_trap=False):
+    """Set up the default app for testing.
+
+    Ensures state is clean after the test returns.
+    """
     prev_current_app = _state.get_current_app()
     prev_default_app = _state.default_app
     prev_finalizers = set(_state._on_app_finalizers)
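
For illustration, a rough sketch of how setup_default_app and TestApp might be combined in a test; this is a hedged example, not code from this commit:

    from celery.contrib.testing.app import TestApp, setup_default_app

    def test_with_isolated_app():
        app = TestApp(set_as_current=False)
        # use_trap=True installs the Trap app, so anything that implicitly
        # relies on the current/default app inside this block raises;
        # the previous app state is restored when the block exits.
        with setup_default_app(app, use_trap=True):
            app.conf.task_always_eager = True
            assert app.conf.task_always_eager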

+ 18 - 4
celery/contrib/testing/manager.py

@@ -1,3 +1,4 @@
+"""Integration testing utilities."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import socket
@@ -18,10 +19,12 @@ E_STILL_WAITING = 'Still waiting for {0}.  Trying again {when}: {exc!r}'
 
 
 class Sentinel(Exception):
-    pass
+    """Signifies the end of something."""
 
 
 def humanize_seconds(secs, prefix='', sep='', now='now', **kwargs):
+    # type: (float, str, str, str, **Any) -> str
+    """Represent seconds in a human readable way."""
     s = _humanize_seconds(secs, prefix, sep, now, **kwargs)
     if s == now and secs > 0:
         return '{prefix}{sep}{0:.2f} seconds'.format(
@@ -30,10 +33,12 @@ def humanize_seconds(secs, prefix='', sep='', now='now', **kwargs):
 
 
 class ManagerMixin(object):
+    """Mixin that adds :class:`Manager` capabilities."""
 
     def _init_manager(self,
-                      block_timeout=30 * 60, no_join=False,
+                      block_timeout=30 * 60.0, no_join=False,
                       stdout=None, stderr=None):
+        # type: (float, bool, TextIO, TextIO) -> None
         self.stdout = sys.stdout if stdout is None else stdout
         self.stderr = sys.stderr if stderr is None else stderr
         self.connerrors = self.app.connection().recoverable_connection_errors
@@ -41,15 +46,24 @@ class ManagerMixin(object):
         self.no_join = no_join
 
     def remark(self, s, sep='-'):
+        # type: (str, str) -> None
         print('{0}{1}'.format(sep, s), file=self.stdout)
 
     def missing_results(self, r):
+        # type: (Sequence[AsyncResult]) -> Sequence[str]
         return [res.id for res in r if res.id not in res.backend._cache]
 
     def wait_for(self, fun, catch,
                  desc='thing', args=(), kwargs={}, errback=None,
                  max_retries=10, interval_start=0.1, interval_step=0.5,
                  interval_max=5.0, emit_warning=False, **options):
+        # type: (Callable, Sequence[Any], str, Tuple, Dict, Callable,
+        #        int, float, float, float, bool, **Any) -> Any
+        """Wait for an event to happen.
+
+        The `catch` argument specifies the exception that means the event
+        has not happened yet.
+        """
         def on_error(exc, intervals, retries):
             interval = next(intervals)
             if emit_warning:
@@ -73,6 +87,7 @@ class ManagerMixin(object):
                                interval_start=0.1, interval_step=0.02,
                                interval_max=1.0, emit_warning=False,
                                **options):
+        """Make sure something does not happen (at least for a while)."""
         try:
             return self.wait_for(
                 fun, catch, desc=desc, max_retries=max_retries,
@@ -114,9 +129,7 @@ class ManagerMixin(object):
         return self.app.control.inspect(timeout=timeout)
 
     def query_tasks(self, ids, timeout=0.5):
-        print('BROKER: %r' % (self.app.connection().as_uri(),))
         for reply in items(self.inspect(timeout).query_task(*ids) or {}):
-            print('REPLY: %r' %( reply,))
             yield reply
 
     def query_task_states(self, ids, timeout=0.5):
@@ -166,6 +179,7 @@ class ManagerMixin(object):
 
 
 class Manager(ManagerMixin):
+    """Test helpers for task integration tests."""
 
     def __init__(self, app, **kwargs):
         self.app = app
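
A sketch of how the Manager helpers might be used from an integration test, assuming the celery_app and celery_worker fixtures from celery.contrib.pytest and the celery.ping task shipped in celery.contrib.testing.tasks:

    from celery.contrib.testing.manager import Manager
    from celery.contrib.testing.tasks import ping
    from celery.exceptions import TimeoutError

    def test_ping_eventually_replies(celery_app, celery_worker):
        manager = Manager(celery_app)
        result = ping.delay()
        # wait_for() keeps calling the function while it raises one of
        # the exceptions in `catch`, backing off between retries.
        reply = manager.wait_for(
            result.get, catch=(TimeoutError,),
            desc='celery.ping reply', kwargs={'timeout': 1},
        )
        assert reply == 'pong'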

+ 13 - 1
celery/contrib/testing/mocks.py

@@ -1,3 +1,4 @@
+"""Useful mocks for unit testing."""
 from __future__ import absolute_import, unicode_literals
 
 import numbers
@@ -15,6 +16,9 @@ except ImportError:
 
 def TaskMessage(name, id=None, args=(), kwargs={}, callbacks=None,
                 errbacks=None, chain=None, shadow=None, utc=None, **options):
+    # type: (str, str, Sequence, Mapping, Sequence[Signature],
+    #        Sequence[Signature], Sequence[Signature],
+    #        str, bool, **Any) -> Any
     """Create task message in protocol 2 format."""
     from celery import uuid
     from kombu.serialization import dumps
@@ -36,6 +40,8 @@ def TaskMessage(name, id=None, args=(), kwargs={}, callbacks=None,
 
 def TaskMessage1(name, id=None, args=(), kwargs={}, callbacks=None,
                  errbacks=None, chain=None, **options):
+    # type: (str, str, Sequence, Mapping, Sequence[Signature],
+    #        Sequence[Signature], Sequence[Signature]) -> Any
     """Create task message in protocol 1 format."""
     from celery import uuid
     from kombu.serialization import dumps
@@ -58,7 +64,13 @@ def TaskMessage1(name, id=None, args=(), kwargs={}, callbacks=None,
 
 
 def task_message_from_sig(app, sig, utc=True, TaskMessage=TaskMessage):
-    """Create task message from :class:`celery.Signature`."""
+    # type: (Celery, Signature, bool, Any) -> Any
+    """Create task message from :class:`celery.Signature`.
+
+    Example:
+        >>> m = task_message_from_sig(app, add.s(2, 2))
+        >>> amqp_client.basic_publish(m, exchange='ex', routing_key='rkey')
+    """
     sig.freeze()
     callbacks = sig.options.pop('link', None)
     errbacks = sig.options.pop('link_error', None)
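
A short, hedged sketch of using TaskMessage in a unit test; the task name is a placeholder, and the headers/payload attributes on the returned mock are assumptions based on how this module builds the message:

    from celery.contrib.testing.mocks import TaskMessage

    # Build a protocol-2 style message without a broker.
    msg = TaskMessage('proj.tasks.add', args=(2, 2))
    assert msg.headers['task'] == 'proj.tasks.add'
    assert msg.payload[0] == (2, 2)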

+ 3 - 0
celery/contrib/testing/tasks.py

@@ -1,3 +1,4 @@
+"""Helper tasks for integration tests."""
 from __future__ import absolute_import, unicode_literals
 
 from celery import shared_task
@@ -5,4 +6,6 @@ from celery import shared_task
 
 @shared_task(name='celery.ping')
 def ping():
+    # type: () -> str
+    """Simple task that just returns 'pong'."""
     return 'pong'

+ 72 - 43
celery/contrib/testing/worker.py

@@ -1,3 +1,4 @@
+"""Embedded workers for integration tests."""
 from __future__ import absolute_import, unicode_literals
 
 import os
@@ -18,30 +19,79 @@ WORKER_LOGLEVEL = os.environ.get('WORKER_LOGLEVEL', 'error')
 
 
 class TestWorkController(worker.WorkController):
+    """Worker that can synchronize on being fully started."""
 
     def __init__(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
         self._on_started = threading.Event()
         super(TestWorkController, self).__init__(*args, **kwargs)
 
     def on_consumer_ready(self, consumer):
+        # type: (celery.worker.consumer.Consumer) -> None
+        """Callback called when the Consumer blueprint is fully started."""
         self._on_started.set()
         test_worker_started.send(
             sender=self.app, worker=self, consumer=consumer)
 
     def ensure_started(self):
+        # type: () -> None
+        """Wait for worker to be fully up and running.
+
+        Warning:
+            Worker must be started within a thread for this to work,
+            or it will block forever.
+        """
         self._on_started.wait()
 
 
 @contextmanager
-def start_worker_thread(app,
-                        concurrency=1,
-                        pool='solo',
-                        loglevel=WORKER_LOGLEVEL,
-                        logfile=None,
-                        WorkController=TestWorkController,
-                        **kwargs):
+def start_worker(app,
+                 concurrency=1,
+                 pool='solo',
+                 loglevel=WORKER_LOGLEVEL,
+                 logfile=None,
+                 perform_ping_check=True,
+                 ping_task_timeout=10.0,
+                 **kwargs):
+    # type: (Celery, int, str, Union[str, int],
+    #        str, bool, float, **Any) -> Iterable
+    """Start embedded worker.
+
+    Yields:
+        celery.app.worker.Worker: worker instance.
+    """
+    test_worker_starting.send(sender=app)
+
+    with _start_worker_thread(app,
+                              concurrency=concurrency,
+                              pool=pool,
+                              loglevel=loglevel,
+                              logfile=logfile,
+                              **kwargs) as worker:
+        if perform_ping_check:
+            from .tasks import ping
+            with allow_join_result():
+                assert ping.delay().get(timeout=ping_task_timeout) == 'pong'
+
+        yield worker
+    test_worker_stopped.send(sender=app, worker=worker)
+
+
+@contextmanager
+def _start_worker_thread(app,
+                         concurrency=1,
+                         pool='solo',
+                         loglevel=WORKER_LOGLEVEL,
+                         logfile=None,
+                         WorkController=TestWorkController,
+                         **kwargs):
+    # type: (Celery, int, str, Union[str, int], str, Any, **Any) -> Iterable
+    """Start Celery worker in a thread.
+
+    Yields:
+        celery.worker.Worker: worker instance.
+    """
     setup_app_for_worker(app, loglevel, logfile)
-    print('BROKER: %r' % (app.conf.broker_url,))
     assert 'celery.ping' in app.tasks
     worker = WorkController(
         app=app,
@@ -60,26 +110,29 @@ def start_worker_thread(app,
     t = threading.Thread(target=worker.start)
     t.start()
     worker.ensure_started()
-    print('WORKER STARTED')
     _set_task_join_will_block(False)
 
     yield worker
 
-    print('STOPPING WORKER')
     from celery.worker import state
     state.should_terminate = 0
-    print('JOINING WORKER THREAD')
     t.join(10)
     state.should_terminate = None
 
 
 @contextmanager
-def start_worker_process(app,
-                         concurrency=1,
-                         pool='solo',
-                         loglevel=WORKER_LOGLEVEL,
-                         logfile=None,
-                         **kwargs):
+def _start_worker_process(app,
+                          concurrency=1,
+                          pool='solo',
+                          loglevel=WORKER_LOGLEVEL,
+                          logfile=None,
+                          **kwargs):
+    # type: (Celery, int, str, Union[int, str], str, **Any) -> Iterable
+    """Start worker in separate process.
+
+    Yields:
+        celery.app.worker.Worker: worker instance.
+    """
     from celery.apps.multi import Cluster, Node
 
     app.set_current()
@@ -89,33 +142,9 @@ def start_worker_process(app,
     cluster.stopwait()
 
 
-@contextmanager
-def start_worker(app,
-                 concurrency=1,
-                 pool='solo',
-                 loglevel=WORKER_LOGLEVEL,
-                 logfile=None,
-                 perform_ping_check=True,
-                 ping_task_timeout=10.0,
-                 **kwargs):
-    test_worker_starting.send(sender=app)
-
-    with start_worker_thread(app,
-                             concurrency=concurrency,
-                             pool=pool,
-                             loglevel=loglevel,
-                             logfile=logfile,
-                             **kwargs) as worker:
-        if perform_ping_check:
-            from .tasks import ping
-            with allow_join_result():
-                assert ping.delay().get(timeout=ping_task_timeout) == 'pong'
-
-        yield worker
-    test_worker_stopped.send(sender=app, worker=worker)
-
-
 def setup_app_for_worker(app, loglevel, logfile):
+    # type: (Celery, Union[str, int], str) -> None
+    """Set up the app to be used for starting an embedded worker."""
     app.finalize()
     app.set_current()
     app.set_default()
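
The refactor above makes start_worker the public entry point; a minimal sketch of using it directly, assuming `app` is a configured Celery app with a result backend and with celery.contrib.testing.tasks imported so that 'celery.ping' is registered:

    from celery.contrib.testing.worker import start_worker

    with start_worker(app, perform_ping_check=True) as worker:
        # The embedded worker runs in a background thread; tasks sent
        # through `app` here are consumed by it.
        res = app.send_task('celery.ping')
        assert res.get(timeout=10) == 'pong'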

+ 0 - 1
celery/worker/control.py

@@ -110,7 +110,6 @@ def _wanted_config_key(key):
 )
 def query_task(state, ids, **kwargs):
     """Query for task information by id."""
-    print('GET IDS: %r' % (ids,))
     return {
         req.id: (_state_of_task(req), req.info())
         for req in _find_requests_by_id(maybe_list(ids))

+ 2 - 0
docs/conf.py

@@ -30,6 +30,8 @@ globals().update(conf.build_config(
         'celery.five',
         'celery.__main__',
         'celery.task',
+        'celery.contrib.testing',
+        'celery.contrib.testing.tasks',
         'celery.task.base',
         'celery.bin',
         'celery.bin.celeryd_detach',

+ 1 - 1
docs/reference/celery.contrib.pytest.rst

@@ -1,5 +1,5 @@
 ====================================
- ``celery.utils.pytest``
+ ``celery.contrib.pytest``
 ====================================
 
 .. contents::

+ 16 - 0
docs/reference/celery.contrib.testing.app.rst

@@ -0,0 +1,16 @@
+====================================
+ ``celery.contrib.testing.app``
+====================================
+
+.. contents::
+    :local:
+
+API Reference
+=============
+
+.. currentmodule:: celery.contrib.testing.app
+
+.. automodule:: celery.contrib.testing.app
+    :members:
+    :undoc-members:
+

+ 16 - 0
docs/reference/celery.contrib.testing.manager.rst

@@ -0,0 +1,16 @@
+====================================
+ ``celery.contrib.testing.manager``
+====================================
+
+.. contents::
+    :local:
+
+API Reference
+=============
+
+.. currentmodule:: celery.contrib.testing.manager
+
+.. automodule:: celery.contrib.testing.manager
+    :members:
+    :undoc-members:
+

+ 16 - 0
docs/reference/celery.contrib.testing.mocks.rst

@@ -0,0 +1,16 @@
+====================================
+ ``celery.contrib.testing.mocks``
+====================================
+
+.. contents::
+    :local:
+
+API Reference
+=============
+
+.. currentmodule:: celery.contrib.testing.mocks
+
+.. automodule:: celery.contrib.testing.mocks
+    :members:
+    :undoc-members:
+

+ 16 - 0
docs/reference/celery.contrib.testing.worker.rst

@@ -0,0 +1,16 @@
+====================================
+ ``celery.contrib.testing.worker``
+====================================
+
+.. contents::
+    :local:
+
+API Reference
+=============
+
+.. currentmodule:: celery.contrib.testing.worker
+
+.. automodule:: celery.contrib.testing.worker
+    :members:
+    :undoc-members:
+

+ 6 - 0
docs/reference/index.rst

@@ -38,6 +38,10 @@
     celery.contrib.migrate
     celery.contrib.pytest
     celery.contrib.sphinx
+    celery.contrib.testing.worker
+    celery.contrib.testing.app
+    celery.contrib.testing.manager
+    celery.contrib.testing.mocks
     celery.contrib.rdb
     celery.events
     celery.events.receiver
@@ -69,3 +73,5 @@
     celery.bin.events
     celery.bin.logtool
     celery.bin.amqp
+    celery.bin.graph
+    celery.bin.multi

+ 4 - 0
requirements/README.rst

@@ -31,6 +31,10 @@ Index
 
     Extra test requirements required for Python 2.7 by the CI suite (Tox).
 
+* :file:`requirements/test-integration.txt`
+
+    Extra requirements needed when running the integration test suite.
+
 * :file:`requirements/doc.txt`
 
     Extra requirements required to build the Sphinx documentation.

+ 3 - 0
requirements/test-integration.txt

@@ -0,0 +1,3 @@
+simplejson
+cyanide>=1.3
+-r extras/redis.txt

+ 1 - 0
t/integration/conftest.py

@@ -4,6 +4,7 @@ import pytest
 
 from celery.contrib.testing.manager import Manager
 
+
 @pytest.fixture(scope='session')
 def celery_config():
     return {

+ 2 - 0
tox.ini

@@ -17,6 +17,8 @@ deps=
     3.4,3.5: -r{toxinidir}/requirements/test-ci-default.txt
     pypy,pypy3: -r{toxinidir}/requirements/test-ci-base.txt
 
+    integration: -r{toxinidir}/requirements/test-integration.txt
+
     linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt
     flake8,flakeplus,pydocstyle: -r{toxinidir}/requirements/pkgutils.txt
 sitepackages = False