from __future__ import absolute_import, unicode_literals

import logging
import os
import sys
import threading
import warnings
from importlib import import_module

import pytest
from case import Mock
from case.utils import decorator
from kombu import Queue

from celery.backends.cache import CacheBackend, DummyClient
# We have to import the pytest plugin fixtures here,
# in case the user did not run `python setup.py develop` yet,
# which installs the pytest plugin into the setuptools registry.
from celery.contrib.pytest import (celery_app, celery_enable_logging,
                                   celery_parameters, depends_on_current_app)
from celery.contrib.testing.app import TestApp, Trap
from celery.contrib.testing.mocks import (TaskMessage, TaskMessage1,
                                          task_message_from_sig)

# Tricks flake8 into silencing redefining fixtures warnings.
__all__ = (
    'celery_app', 'celery_enable_logging', 'depends_on_current_app',
    'celery_parameters'
)

try:
    WindowsError = WindowsError  # noqa
except NameError:

    class WindowsError(Exception):
        pass


PYPY3 = getattr(sys, 'pypy_version_info', None) and sys.version_info[0] > 3

CASE_LOG_REDIRECT_EFFECT = 'Test {0} didn\'t disable LoggingProxy for {1}'
CASE_LOG_LEVEL_EFFECT = 'Test {0} modified the level of the root logger'
CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'


@pytest.fixture(scope='session')
def celery_config():
    return {
        'broker_url': 'memory://',
        'broker_transport_options': {
            'polling_interval': 0.1
        },
        'result_backend': 'cache+memory://',
        'task_default_queue': 'testcelery',
        'task_default_exchange': 'testcelery',
        'task_default_routing_key': 'testcelery',
        'task_queues': (
            Queue('testcelery', routing_key='testcelery'),
        ),
        'accept_content': ('json', 'pickle'),

        # Mongo results tests (only executed if installed and running)
        'mongodb_backend_settings': {
            'host': os.environ.get('MONGO_HOST') or 'localhost',
            'port': os.environ.get('MONGO_PORT') or 27017,
            'database': os.environ.get('MONGO_DB') or 'celery_unittests',
            'taskmeta_collection': (
                os.environ.get('MONGO_TASKMETA_COLLECTION') or
                'taskmeta_collection'
            ),
            'user': os.environ.get('MONGO_USER'),
            'password': os.environ.get('MONGO_PASSWORD'),
        }
    }
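

# Illustrative sketch (hypothetical test, not part of this suite): the
# configuration above is picked up by the ``celery_app`` fixture from
# ``celery.contrib.pytest``, so a test can read it back, e.g.:
#
#     def test_default_queue_name(celery_app):
#         assert celery_app.conf.task_default_queue == 'testcelery'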


@pytest.fixture(scope='session')
def use_celery_app_trap():
    return True


@pytest.fixture(autouse=True)
def reset_cache_backend_state(celery_app):
    """Fixture that resets the internal state of the cache result backend."""
    yield
    backend = celery_app.__dict__.get('backend')
    if backend is not None:
        if isinstance(backend, CacheBackend):
            if isinstance(backend.client, DummyClient):
                backend.client.cache.clear()
            backend._cache.clear()


@decorator
def assert_signal_called(signal, **expected):
    """Context that verifies the signal is called before exiting."""
    handler = Mock()

    def on_call(**kwargs):
        return handler(**kwargs)

    signal.connect(on_call)
    try:
        yield handler
    finally:
        signal.disconnect(on_call)
    handler.assert_called_with(signal=signal, **expected)
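

# Illustrative sketch (assumed usage, with a hypothetical ``my_signal``):
# the helper yields the mock handler and verifies the call on exit:
#
#     with assert_signal_called(my_signal, sender=None, value=1):
#         my_signal.send(sender=None, value=1)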


@pytest.fixture
def app(celery_app):
    yield celery_app


@pytest.fixture(autouse=True, scope='session')
def AAA_disable_multiprocessing():
    # pytest-cov breaks if a multiprocessing.Process is started,
    # so disable them completely to make sure it doesn't happen.
    from case import patch
    stuff = [
        'multiprocessing.Process',
        'billiard.Process',
        'billiard.context.Process',
        'billiard.process.Process',
        'billiard.process.BaseProcess',
        'multiprocessing.Process',
    ]
    ctxs = [patch(s) for s in stuff]
    [ctx.__enter__() for ctx in ctxs]
    yield
    [ctx.__exit__(*sys.exc_info()) for ctx in ctxs]
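

# Note (descriptive, assumes ``case.patch`` behaves like ``mock.patch``):
# each dotted path above is replaced with a mock for the entire session, so
# any code that tries to start a real process gets the mock instead.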


def alive_threads():
    return [thread for thread in threading.enumerate() if thread.is_alive()]


@pytest.fixture(autouse=True)
def task_join_will_not_block():
    from celery import _state
    from celery import result
    prev_res_join_block = result.task_join_will_block
    _state.orig_task_join_will_block = _state.task_join_will_block
    prev_state_join_block = _state.task_join_will_block
    result.task_join_will_block = \
        _state.task_join_will_block = lambda: False
    _state._set_task_join_will_block(False)
    yield
    result.task_join_will_block = prev_res_join_block
    _state.task_join_will_block = prev_state_join_block
    _state._set_task_join_will_block(False)
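

# Note (descriptive only): with the guard disabled above, tests can call
# ``result.get()`` inside a task without tripping the deadlock protection
# that ``task_join_will_block`` normally enforces; the original callables
# are restored after each test.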


@pytest.fixture(scope='session', autouse=True)
def record_threads_at_startup(request):
    try:
        request.session._threads_at_startup
    except AttributeError:
        request.session._threads_at_startup = alive_threads()


@pytest.fixture(autouse=True)
def threads_not_lingering(request):
    yield
    assert request.session._threads_at_startup == alive_threads()


@pytest.fixture(autouse=True)
def AAA_reset_CELERY_LOADER_env():
    yield
    assert not os.environ.get('CELERY_LOADER')


@pytest.fixture(autouse=True)
def test_cases_shortcuts(request, app, patching, celery_config):
    if request.instance:
        @app.task
        def add(x, y):
            return x + y

        # IMPORTANT: We set an .app attribute for every test case class.
        request.instance.app = app
        request.instance.Celery = TestApp
        request.instance.assert_signal_called = assert_signal_called
        request.instance.task_message_from_sig = task_message_from_sig
        request.instance.TaskMessage = TaskMessage
        request.instance.TaskMessage1 = TaskMessage1
        request.instance.CELERY_TEST_CONFIG = celery_config
        request.instance.add = add
        request.instance.patching = patching
    yield
    if request.instance:
        request.instance.app = None
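

# Illustrative sketch (hypothetical class-based case, not part of this file):
# the attributes set above let tests written as classes do, e.g.:
#
#     class test_shortcuts_example:
#         def test_add(self):
#             assert self.add(2, 2) == 4
#             assert self.app.conf.task_default_queue == 'testcelery'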


@pytest.fixture(autouse=True)
def sanity_no_shutdown_flags_set():
    yield
    # Make sure no test left the shutdown flags enabled.
    from celery.worker import state as worker_state
    # check for EX_OK
    assert worker_state.should_stop is not False
    assert worker_state.should_terminate is not False
    # check for other true values
    assert not worker_state.should_stop
    assert not worker_state.should_terminate


@pytest.fixture(autouse=True)
def sanity_stdouts(request):
    yield
    from celery.utils.log import LoggingProxy
    assert sys.stdout
    assert sys.stderr
    assert sys.__stdout__
    assert sys.__stderr__
    this = request.node.name
    if isinstance(sys.stdout, (LoggingProxy, Mock)) or \
            isinstance(sys.__stdout__, (LoggingProxy, Mock)):
        raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stdout'))
    if isinstance(sys.stderr, (LoggingProxy, Mock)) or \
            isinstance(sys.__stderr__, (LoggingProxy, Mock)):
        raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stderr'))


@pytest.fixture(autouse=True)
def sanity_logging_side_effects(request):
    from _pytest.logging import LogCaptureHandler
    root = logging.getLogger()
    rootlevel = root.level
    roothandlers = [
        x for x in root.handlers if not isinstance(x, LogCaptureHandler)]
    yield
    this = request.node.name
    root_now = logging.getLogger()
    if root_now.level != rootlevel:
        raise RuntimeError(CASE_LOG_LEVEL_EFFECT.format(this))
    newhandlers = [x for x in root_now.handlers if not isinstance(
        x, LogCaptureHandler)]
    if newhandlers != roothandlers:
        raise RuntimeError(CASE_LOG_HANDLER_EFFECT.format(this))
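

# Note (descriptive only): a hypothetical offending test that calls
# ``logging.getLogger().setLevel(...)`` or leaves an extra handler on the
# root logger would be reported here via the CASE_LOG_LEVEL_EFFECT /
# CASE_LOG_HANDLER_EFFECT messages defined at the top of this file.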


def setup_session(scope='session'):
    using_coverage = (
        os.environ.get('COVER_ALL_MODULES') or '--with-coverage' in sys.argv
    )
    os.environ.update(
        # warn if config module not found
        C_WNOCONF='yes',
        KOMBU_DISABLE_LIMIT_PROTECTION='yes',
    )

    if using_coverage and not PYPY3:
        from warnings import catch_warnings
        with catch_warnings(record=True):
            import_all_modules()
        warnings.resetwarnings()
    from celery._state import set_default_app
    set_default_app(Trap())


def teardown():
    # Don't want SUBDEBUG log messages at finalization.
    try:
        from multiprocessing.util import get_logger
    except ImportError:
        pass
    else:
        get_logger().setLevel(logging.WARNING)

    # Make sure test database is removed.
    import os
    if os.path.exists('test.db'):
        try:
            os.remove('test.db')
        except WindowsError:
            pass

    # Make sure there are no remaining threads at shutdown.
    import threading
    remaining_threads = [thread for thread in threading.enumerate()
                         if thread.getName() != 'MainThread']
    if remaining_threads:
        sys.stderr.write(
            '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % (
                remaining_threads))


def find_distribution_modules(name=__name__, file=__file__):
    current_dist_depth = len(name.split('.')) - 1
    current_dist = os.path.join(os.path.dirname(file),
                                *([os.pardir] * current_dist_depth))
    abs = os.path.abspath(current_dist)
    dist_name = os.path.basename(abs)

    for dirpath, dirnames, filenames in os.walk(abs):
        package = (dist_name + dirpath[len(abs):]).replace('/', '.')
        if '__init__.py' in filenames:
            yield package
            for filename in filenames:
                if filename.endswith('.py') and filename != '__init__.py':
                    yield '.'.join([package, filename])[:-3]
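

# Illustrative sketch (hypothetical output, exact names depend on the
# checkout): the generator yields dotted names for packages and modules
# alike, e.g. 'celery', 'celery.app', 'celery.app.base', ...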


def import_all_modules(name=__name__, file=__file__,
                       skip=('celery.decorators',
                             'celery.task')):
    for module in find_distribution_modules(name, file):
        if not module.startswith(skip):
            try:
                import_module(module)
            except ImportError:
                pass
            except OSError as exc:
                warnings.warn(UserWarning(
                    'Ignored error importing module {0}: {1!r}'.format(
                        module, exc,
                    )))
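

# Note (descriptive only): ``setup_session`` above calls
# ``import_all_modules()`` only when coverage is requested, so that modules
# the tests never import still show up in the coverage report.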