conftest.py

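"""Shared pytest configuration for the Celery test suite.

Provides session-wide setup plus a set of ``autouse`` fixtures that give
test cases convenient shortcuts and verify that no test leaks state
(threads, logging configuration, redirected stdio, worker shutdown flags).
"""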
from __future__ import absolute_import, unicode_literals

import logging
import os
import pytest
import sys
import threading
import warnings

from importlib import import_module

from case import Mock

from celery.utils.pytest import (
    CELERY_TEST_CONFIG, Trap, TestApp,
    assert_signal_called, TaskMessage, TaskMessage1, task_message_from_sig,
)
from celery.utils.pytest import app  # noqa
from celery.utils.pytest import reset_cache_backend_state  # noqa
from celery.utils.pytest import depends_on_current_app  # noqa

__all__ = ['app', 'reset_cache_backend_state', 'depends_on_current_app']

try:
    WindowsError = WindowsError  # noqa
except NameError:

    class WindowsError(Exception):
        pass

# True when running under PyPy on Python 3.
PYPY3 = getattr(sys, 'pypy_version_info', None) and sys.version_info[0] >= 3

CASE_LOG_REDIRECT_EFFECT = 'Test {0} didn\'t disable LoggingProxy for {1}'
CASE_LOG_LEVEL_EFFECT = 'Test {0} modified the level of the root logger'
CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'

@pytest.fixture(autouse=True, scope='session')
def AAA_disable_multiprocessing(request):
    # pytest-cov breaks if a multiprocessing.Process is started,
    # so disable them completely to make sure it doesn't happen.
    from case import patch
    stuff = [
        'multiprocessing.Process',
        'billiard.Process',
        'billiard.context.Process',
        'billiard.process.Process',
        'billiard.process.BaseProcess',
        'multiprocessing.Process',
    ]
    ctxs = [patch(s) for s in stuff]
    [ctx.__enter__() for ctx in ctxs]

    def fin():
        [ctx.__exit__(*sys.exc_info()) for ctx in ctxs]
    request.addfinalizer(fin)
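
# Note: case.patch behaves like unittest.mock.patch, so each target listed in
# ``stuff`` above is replaced by a Mock for the entire session and restored
# again by the finalizer.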

def alive_threads():
    return [thread for thread in threading.enumerate() if thread.is_alive()]

@pytest.fixture(autouse=True)
def task_join_will_not_block(request):
    from celery import _state
    from celery import result
    prev_res_join_block = result.task_join_will_block
    _state.orig_task_join_will_block = _state.task_join_will_block
    prev_state_join_block = _state.task_join_will_block
    result.task_join_will_block = \
        _state.task_join_will_block = lambda: False
    _state._set_task_join_will_block(False)

    def fin():
        result.task_join_will_block = prev_res_join_block
        _state.task_join_will_block = prev_state_join_block
        _state._set_task_join_will_block(False)
    request.addfinalizer(fin)
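
# Rationale: celery.result normally refuses to join a result from inside a
# worker/task context (via task_join_will_block); forcing it to return False
# lets tests call result.get() without tripping that guard.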

@pytest.fixture(scope='session', autouse=True)
def record_threads_at_startup(request):
    try:
        request.session._threads_at_startup
    except AttributeError:
        request.session._threads_at_startup = alive_threads()

@pytest.fixture(autouse=True)
def threads_not_lingering(request):
    def fin():
        assert request.session._threads_at_startup == alive_threads()
    request.addfinalizer(fin)

@pytest.fixture(autouse=True)
def test_cases_shortcuts(request, app, patching):
    if request.instance:
        @app.task
        def add(x, y):
            return x + y

        # IMPORTANT: We set an .app attribute for every test case class.
        request.instance.app = app
        request.instance.Celery = TestApp
        request.instance.assert_signal_called = assert_signal_called
        request.instance.task_message_from_sig = task_message_from_sig
        request.instance.TaskMessage = TaskMessage
        request.instance.TaskMessage1 = TaskMessage1
        request.instance.CELERY_TEST_CONFIG = dict(CELERY_TEST_CONFIG)
        request.instance.add = add
        request.instance.patching = patching

        def fin():
            request.instance.app = None
        request.addfinalizer(fin)
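
# Illustration (hypothetical test class) of the shortcuts installed above;
# class-based tests can use these attributes without any per-class setup:
#
#     class test_shortcuts_example:
#         def test_uses_app_and_add(self):
#             assert self.app is not None
#             assert self.add(2, 2) == 4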

@pytest.fixture(autouse=True)
def zzzz_test_cases_calls_setup_teardown(request):
    if request.instance:
        # Support old-style test classes: call .setup() before the test
        # method runs, and register .teardown() to run after it.
        setup = getattr(request.instance, 'setup', None)
        teardown = getattr(request.instance, 'teardown', None)
        setup and setup()
        teardown and request.addfinalizer(teardown)
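
# Hypothetical example of a test class that relies on this fixture:
#
#     class test_old_style:
#         def setup(self):
#             self.items = []
#
#         def teardown(self):
#             self.items = None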

@pytest.fixture(autouse=True)
def sanity_no_shutdown_flags_set(request):
    def fin():
        # Make sure no test left the shutdown flags enabled.
        from celery.worker import state as worker_state
        # check for EX_OK
        assert worker_state.should_stop is not False
        assert worker_state.should_terminate is not False
        # check for other true values
        assert not worker_state.should_stop
        assert not worker_state.should_terminate
    request.addfinalizer(fin)

@pytest.fixture(autouse=True)
def sanity_stdouts(request):
    def fin():
        from celery.utils.log import LoggingProxy
        assert sys.stdout
        assert sys.stderr
        assert sys.__stdout__
        assert sys.__stderr__
        this = request.node.name
        if isinstance(sys.stdout, (LoggingProxy, Mock)) or \
                isinstance(sys.__stdout__, (LoggingProxy, Mock)):
            raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stdout'))
        if isinstance(sys.stderr, (LoggingProxy, Mock)) or \
                isinstance(sys.__stderr__, (LoggingProxy, Mock)):
            raise RuntimeError(CASE_LOG_REDIRECT_EFFECT.format(this, 'stderr'))
    request.addfinalizer(fin)

@pytest.fixture(autouse=True)
def sanity_logging_side_effects(request):
    root = logging.getLogger()
    rootlevel = root.level
    # Copy the handler list so in-place addHandler/removeHandler is detected.
    roothandlers = list(root.handlers)

    def fin():
        this = request.node.name
        root_now = logging.getLogger()
        if root_now.level != rootlevel:
            raise RuntimeError(CASE_LOG_LEVEL_EFFECT.format(this))
        if root_now.handlers != roothandlers:
            raise RuntimeError(CASE_LOG_HANDLER_EFFECT.format(this))
    request.addfinalizer(fin)

def setup_session(scope='session'):
    using_coverage = (
        os.environ.get('COVER_ALL_MODULES') or '--with-coverage' in sys.argv
    )
    os.environ.update(
        # warn if config module not found
        C_WNOCONF='yes',
        KOMBU_DISABLE_LIMIT_PROTECTION='yes',
    )

    if using_coverage and not PYPY3:
        from warnings import catch_warnings
        with catch_warnings(record=True):
            import_all_modules()
        warnings.resetwarnings()
    from celery._state import set_default_app
    set_default_app(Trap())
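
# When coverage is enabled, importing every module up front (see
# import_all_modules below) ensures that modules no test happens to import
# still appear in the coverage report.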

def teardown():
    # Don't want SUBDEBUG log messages at finalization.
    try:
        from multiprocessing.util import get_logger
    except ImportError:
        pass
    else:
        get_logger().setLevel(logging.WARNING)

    # Make sure test database is removed.
    import os
    if os.path.exists('test.db'):
        try:
            os.remove('test.db')
        except WindowsError:
            pass

    # Make sure there are no remaining threads at shutdown.
    import threading
    remaining_threads = [thread for thread in threading.enumerate()
                         if thread.getName() != 'MainThread']
    if remaining_threads:
        sys.stderr.write(
            '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % (
                remaining_threads))

def find_distribution_modules(name=__name__, file=__file__):
    current_dist_depth = len(name.split('.')) - 1
    current_dist = os.path.join(os.path.dirname(file),
                                *([os.pardir] * current_dist_depth))
    abs = os.path.abspath(current_dist)
    dist_name = os.path.basename(abs)

    for dirpath, dirnames, filenames in os.walk(abs):
        package = (dist_name + dirpath[len(abs):]).replace('/', '.')
        if '__init__.py' in filenames:
            yield package
            for filename in filenames:
                if filename.endswith('.py') and filename != '__init__.py':
                    yield '.'.join([package, filename])[:-3]
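
# For a distribution laid out like celery's, this generator yields dotted
# module names such as 'celery', 'celery.app', 'celery.app.base', and so on,
# for every package and .py file found under the project root.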

def import_all_modules(name=__name__, file=__file__,
                       skip=('celery.decorators',
                             'celery.task')):
    for module in find_distribution_modules(name, file):
        if not module.startswith(skip):
            try:
                import_module(module)
            except ImportError:
                pass
            except OSError as exc:
                warnings.warn(UserWarning(
                    'Ignored error importing module {0}: {1!r}'.format(
                        module, exc,
                    )))