conftest.py

from __future__ import absolute_import, unicode_literals

import logging
import os
import pytest
import sys
import threading
import warnings

from functools import partial
from importlib import import_module

from case import Mock
from case.utils import decorator
from kombu import Queue

from celery.contrib.testing.app import Trap, TestApp
from celery.contrib.testing.mocks import (
    TaskMessage, TaskMessage1, task_message_from_sig,
)
from celery.contrib.pytest import reset_cache_backend_state  # noqa
from celery.contrib.pytest import depends_on_current_app  # noqa

__all__ = ['app', 'reset_cache_backend_state', 'depends_on_current_app']

try:
    WindowsError = WindowsError  # noqa
except NameError:

    class WindowsError(Exception):
        pass

PYPY3 = getattr(sys, 'pypy_version_info', None) and sys.version_info[0] > 3

CASE_LOG_REDIRECT_EFFECT = 'Test {0} didn\'t disable LoggingProxy for {1}'
CASE_LOG_LEVEL_EFFECT = 'Test {0} modified the level of the root logger'
CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'


@pytest.fixture(scope='session')
def celery_config():
    return {
        #: Don't want log output when running suite.
        'task_default_queue': 'testcelery',
        'task_default_exchange': 'testcelery',
        'task_default_routing_key': 'testcelery',
        'task_queues': (
            Queue('testcelery', routing_key='testcelery'),
        ),
        'accept_content': ('json', 'pickle'),

        # Mongo results tests (only executed if installed and running)
        'mongodb_backend_settings': {
            'host': os.environ.get('MONGO_HOST') or 'localhost',
            'port': os.environ.get('MONGO_PORT') or 27017,
            'database': os.environ.get('MONGO_DB') or 'celery_unittests',
            'taskmeta_collection': (
                os.environ.get('MONGO_TASKMETA_COLLECTION') or
                'taskmeta_collection'
            ),
            'user': os.environ.get('MONGO_USER'),
            'password': os.environ.get('MONGO_PASSWORD'),
        }
    }


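# This overrides the use_celery_app_trap fixture from the celery.contrib.pytest
# plugin: returning True asks the testing setup to install Trap() (from
# celery.contrib.testing.app) as the fallback current app, so code that
# accidentally relies on the implicit current/default app raises loudly
# instead of silently using a real app.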
@pytest.fixture(scope='session')
def use_celery_app_trap():
    return True


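# Usage sketch for the decorator below: case.utils.decorator wraps the
# generator so it can be used as a context manager; a typical call looks like
#
#     with self.assert_signal_called(some_signal, sender=some_sender):
#         ...  # code that is expected to send the signal
#
# where `some_signal` and `some_sender` are placeholder names, not values
# defined in this file.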
@decorator
def assert_signal_called(signal, **expected):
    """Context that verifies the signal is called before exiting."""
    handler = Mock()
    call_handler = partial(handler)
    signal.connect(call_handler)
    try:
        yield handler
    finally:
        signal.disconnect(call_handler)
    handler.assert_called_with(signal=signal, **expected)


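# The `celery_app` argument is provided by the celery.contrib.pytest plugin;
# this fixture simply re-exposes it under the shorter `app` name used
# throughout the unit tests.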
@pytest.fixture
def app(celery_app):
    yield celery_app


@pytest.fixture(autouse=True, scope='session')
def AAA_disable_multiprocessing():
    # pytest-cov breaks if a multiprocessing.Process is started,
    # so disable them completely to make sure it doesn't happen.
    from case import patch
    stuff = [
        'multiprocessing.Process',
        'billiard.Process',
        'billiard.context.Process',
        'billiard.process.Process',
        'billiard.process.BaseProcess',
        'multiprocessing.Process',
    ]
    ctxs = [patch(s) for s in stuff]
    [ctx.__enter__() for ctx in ctxs]
    yield
    [ctx.__exit__(*sys.exc_info()) for ctx in ctxs]


def alive_threads():
    return [thread for thread in threading.enumerate() if thread.is_alive()]


@pytest.fixture(autouse=True)
def task_join_will_not_block():
    from celery import _state
    from celery import result
    prev_res_join_block = result.task_join_will_block
    _state.orig_task_join_will_block = _state.task_join_will_block
    prev_state_join_block = _state.task_join_will_block
    result.task_join_will_block = \
        _state.task_join_will_block = lambda: False
    _state._set_task_join_will_block(False)
    yield
    result.task_join_will_block = prev_res_join_block
    _state.task_join_will_block = prev_state_join_block
    _state._set_task_join_will_block(False)


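# The next two fixtures work as a pair: the session-scoped one records the
# threads alive when the test session starts, and the autouse one asserts
# after every test that no extra threads were left running.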
@pytest.fixture(scope='session', autouse=True)
def record_threads_at_startup(request):
    try:
        request.session._threads_at_startup
    except AttributeError:
        request.session._threads_at_startup = alive_threads()


@pytest.fixture(autouse=True)
def threads_not_lingering(request):
    yield
    assert request.session._threads_at_startup == alive_threads()


@pytest.fixture(autouse=True)
def AAA_reset_CELERY_LOADER_env():
    yield
    assert not os.environ.get('CELERY_LOADER')


@pytest.fixture(autouse=True)
def test_cases_shortcuts(request, app, patching, celery_config):
    if request.instance:
        @app.task
        def add(x, y):
            return x + y

        # IMPORTANT: We set an .app attribute for every test case class.
        request.instance.app = app
        request.instance.Celery = TestApp
        request.instance.assert_signal_called = assert_signal_called
        request.instance.task_message_from_sig = task_message_from_sig
        request.instance.TaskMessage = TaskMessage
        request.instance.TaskMessage1 = TaskMessage1
        request.instance.CELERY_TEST_CONFIG = celery_config
        request.instance.add = add
        request.instance.patching = patching
    yield
    if request.instance:
        request.instance.app = None


@pytest.fixture(autouse=True)
def zzzz_test_cases_calls_setup_teardown(request):
    if request.instance:
        # Call the test class's .setup() before, and .teardown() after,
        # every test method (when they are defined).
        setup = getattr(request.instance, 'setup', None)
        setup and setup()
    yield
    if request.instance:
        teardown = getattr(request.instance, 'teardown', None)
        teardown and teardown()


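# The sanity_* fixtures below run after every test and fail it if global
# state leaked: worker shutdown flags left set, stdout/stderr still replaced
# by a LoggingProxy or Mock, or the root logger's level/handlers modified.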
@pytest.fixture(autouse=True)
def sanity_no_shutdown_flags_set():
    yield
    # Make sure no test left the shutdown flags enabled.
    from celery.worker import state as worker_state
    # check for EX_OK
    assert worker_state.should_stop is not False
    assert worker_state.should_terminate is not False
    # check for other true values
    assert not worker_state.should_stop
    assert not worker_state.should_terminate


@pytest.fixture(autouse=True)
def sanity_stdouts(request):
    yield
    from celery.utils.log import LoggingProxy
    assert sys.stdout
    assert sys.stderr
    assert sys.__stdout__
    assert sys.__stderr__
    this = request.node.name
    if isinstance(sys.stdout, (LoggingProxy, Mock)) or \
            isinstance(sys.__stdout__, (LoggingProxy, Mock)):
        raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stdout'))
    if isinstance(sys.stderr, (LoggingProxy, Mock)) or \
            isinstance(sys.__stderr__, (LoggingProxy, Mock)):
        raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stderr'))


@pytest.fixture(autouse=True)
def sanity_logging_side_effects(request):
    root = logging.getLogger()
    rootlevel = root.level
    roothandlers = root.handlers
    yield
    this = request.node.name
    root_now = logging.getLogger()
    if root_now.level != rootlevel:
        raise RuntimeError(CASE_LOG_LEVEL_EFFECT.format(this))
    if root_now.handlers != roothandlers:
        raise RuntimeError(CASE_LOG_HANDLER_EFFECT.format(this))


def setup_session(scope='session'):
    using_coverage = (
        os.environ.get('COVER_ALL_MODULES') or '--with-coverage' in sys.argv
    )
    os.environ.update(
        # warn if config module not found
        C_WNOCONF='yes',
        KOMBU_DISABLE_LIMIT_PROTECTION='yes',
    )

    if using_coverage and not PYPY3:
        from warnings import catch_warnings
        with catch_warnings(record=True):
            import_all_modules()
        warnings.resetwarnings()
    from celery._state import set_default_app
    set_default_app(Trap())


def teardown():
    # Don't want SUBDEBUG log messages at finalization.
    try:
        from multiprocessing.util import get_logger
    except ImportError:
        pass
    else:
        get_logger().setLevel(logging.WARNING)

    # Make sure test database is removed.
    import os
    if os.path.exists('test.db'):
        try:
            os.remove('test.db')
        except WindowsError:
            pass

    # Make sure there are no remaining threads at shutdown.
    import threading
    remaining_threads = [thread for thread in threading.enumerate()
                         if thread.getName() != 'MainThread']
    if remaining_threads:
        sys.stderr.write(
            '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % (
                remaining_threads))


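# Helpers used by setup_session() when coverage is enabled:
# import_all_modules() walks the distribution containing this file and
# imports every module it finds, so coverage can record them.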
def find_distribution_modules(name=__name__, file=__file__):
    current_dist_depth = len(name.split('.')) - 1
    current_dist = os.path.join(os.path.dirname(file),
                                *([os.pardir] * current_dist_depth))
    abs = os.path.abspath(current_dist)
    dist_name = os.path.basename(abs)
    for dirpath, dirnames, filenames in os.walk(abs):
        package = (dist_name + dirpath[len(abs):]).replace('/', '.')
        if '__init__.py' in filenames:
            yield package
            for filename in filenames:
                if filename.endswith('.py') and filename != '__init__.py':
                    yield '.'.join([package, filename])[:-3]


def import_all_modules(name=__name__, file=__file__,
                       skip=('celery.decorators',
                             'celery.task')):
    for module in find_distribution_modules(name, file):
        if not module.startswith(skip):
            try:
                import_module(module)
            except ImportError:
                pass
            except OSError as exc:
                warnings.warn(UserWarning(
                    'Ignored error importing module {0}: {1!r}'.format(
                        module, exc,
                    )))