# -*- coding: utf-8 -*-
"""
celery.app.log
~~~~~~~~~~~~~~

The Celery instance's logging section: ``Celery.log``.

Sets up logging for the worker and other programs,
redirects stdouts, colors log output, patches logging
related compatibility fixes, and so on.
"""
from __future__ import absolute_import

import logging
import os
import sys

from logging.handlers import WatchedFileHandler

from kombu.utils.encoding import set_default_encoding_file

from celery import signals
from celery._state import get_current_task
from celery.five import class_property, string_t
from celery.utils import isatty, node_format
from celery.utils.log import (
    get_logger, mlevel,
    ColorFormatter, LoggingProxy, get_multiprocessing_logger,
    reset_multiprocessing_logger,
)
from celery.utils.term import colored

__all__ = ['TaskFormatter', 'Logging']

MP_LOG = os.environ.get('MP_LOG', False)
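# MP_LOG (read from the environment above) is only consulted in
# setup_logging_subsystem() below: when it is unset, the multiprocessing
# logger is capped at ERROR instead of the requested loglevel.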


class TaskFormatter(ColorFormatter):

    def format(self, record):
        task = get_current_task()
        if task and task.request:
            record.__dict__.update(task_id=task.request.id,
                                   task_name=task.name)
        else:
            record.__dict__.setdefault('task_name', '???')
            record.__dict__.setdefault('task_id', '???')
        return ColorFormatter.format(self, record)
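
# TaskFormatter makes ``%(task_id)s`` and ``%(task_name)s`` usable in log
# format strings by copying them from the currently executing task's request
# (falling back to '???' outside of a task).  A minimal usage sketch; the
# handler and format string here are illustrative, not part of this module:
#
#     handler = logging.StreamHandler()
#     handler.setFormatter(TaskFormatter(
#         '[%(asctime)s: %(levelname)s/%(task_name)s(%(task_id)s)] %(message)s',
#         use_color=False))
#     get_logger('celery.task').addHandler(handler)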


class Logging(object):
    #: The logging subsystem is only configured once per process.
    #: setup_logging_subsystem sets this flag, and subsequent calls
    #: will do nothing.
    _setup = False

    def __init__(self, app):
        self.app = app
        self.loglevel = mlevel(logging.WARN)
        self.format = self.app.conf.worker_log_format
        self.task_format = self.app.conf.worker_task_log_format
        self.colorize = self.app.conf.worker_log_color

    def setup(self, loglevel=None, logfile=None, redirect_stdouts=False,
              redirect_level='WARNING', colorize=None, hostname=None):
        handled = self.setup_logging_subsystem(
            loglevel, logfile, colorize=colorize, hostname=hostname,
        )
        if not handled:
            if redirect_stdouts:
                self.redirect_stdouts(redirect_level)
        os.environ.update(
            CELERY_LOG_LEVEL=str(loglevel) if loglevel else '',
            CELERY_LOG_FILE=str(logfile) if logfile else '',
        )
        return handled
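
    # ``app.log.setup(...)`` is the usual entry point; a minimal sketch
    # (the loglevel/logfile values are illustrative only):
    #
    #     app.log.setup(loglevel=logging.INFO, logfile='worker.log',
    #                   redirect_stdouts=True)
    #
    # When a ``setup_logging`` signal receiver handled configuration, the
    # return value is truthy and stdout/stderr redirection is skipped.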

    def redirect_stdouts(self, loglevel=None, name='celery.redirected'):
        self.redirect_stdouts_to_logger(
            get_logger(name), loglevel=loglevel
        )
        os.environ.update(
            CELERY_LOG_REDIRECT='1',
            CELERY_LOG_REDIRECT_LEVEL=str(loglevel or ''),
        )

    def setup_logging_subsystem(self, loglevel=None, logfile=None, format=None,
                                colorize=None, hostname=None, **kwargs):
        if self.already_setup:
            return
        if logfile and hostname:
            logfile = node_format(logfile, hostname)
        self.already_setup = True
        loglevel = mlevel(loglevel or self.loglevel)
        format = format or self.format
        colorize = self.supports_color(colorize, logfile)
        reset_multiprocessing_logger()
        receivers = signals.setup_logging.send(
            sender=None, loglevel=loglevel, logfile=logfile,
            format=format, colorize=colorize,
        )

        if not receivers:
            root = logging.getLogger()

            if self.app.conf.worker_hijack_root_logger:
                root.handlers = []
                get_logger('celery').handlers = []
                get_logger('celery.task').handlers = []
                get_logger('celery.redirected').handlers = []

            # Configure root logger
            self._configure_logger(
                root, logfile, loglevel, format, colorize, **kwargs
            )

            # Configure the multiprocessing logger
            self._configure_logger(
                get_multiprocessing_logger(),
                logfile, loglevel if MP_LOG else logging.ERROR,
                format, colorize, **kwargs
            )

            signals.after_setup_logger.send(
                sender=None, logger=root,
                loglevel=loglevel, logfile=logfile,
                format=format, colorize=colorize,
            )

            # then setup the root task logger.
            self.setup_task_loggers(loglevel, logfile, colorize=colorize)

        try:
            stream = logging.getLogger().handlers[0].stream
        except (AttributeError, IndexError):
            pass
        else:
            set_default_encoding_file(stream)

        # This is a hack for multiprocessing's fork+exec, so that
        # logging before Process.run works.
        logfile_name = logfile if isinstance(logfile, string_t) else ''
        os.environ.update(_MP_FORK_LOGLEVEL_=str(loglevel),
                          _MP_FORK_LOGFILE_=logfile_name,
                          _MP_FORK_LOGFORMAT_=format)
        return receivers
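
    # Connecting a receiver to the ``setup_logging`` signal takes over the
    # whole configuration step above (everything in the ``if not receivers``
    # block is skipped).  A minimal sketch, with an illustrative receiver:
    #
    #     from celery import signals
    #
    #     @signals.setup_logging.connect
    #     def configure_logging(loglevel=None, logfile=None, **kwargs):
    #         logging.basicConfig(level=loglevel or logging.INFO)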

    def _configure_logger(self, logger, logfile, loglevel,
                          format, colorize, **kwargs):
        if logger is not None:
            self.setup_handlers(logger, logfile, format,
                                colorize, **kwargs)
            if loglevel:
                logger.setLevel(loglevel)

    def setup_task_loggers(self, loglevel=None, logfile=None, format=None,
                           colorize=None, propagate=False, **kwargs):
        """Set up the task logger.

        If `logfile` is not specified, then `sys.stderr` is used.

        Returns the base task logger object.
        """
        loglevel = mlevel(loglevel or self.loglevel)
        format = format or self.task_format
        colorize = self.supports_color(colorize, logfile)

        logger = self.setup_handlers(
            get_logger('celery.task'),
            logfile, format, colorize,
            formatter=TaskFormatter, **kwargs
        )
        logger.setLevel(loglevel)
        # this is an int for some reason, better to not question why.
        logger.propagate = int(propagate)
        signals.after_setup_task_logger.send(
            sender=None, logger=logger,
            loglevel=loglevel, logfile=logfile,
            format=format, colorize=colorize,
        )
        return logger
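
    # The ``after_setup_task_logger`` signal sent above can be used to attach
    # extra handlers to the task logger.  An illustrative sketch (the syslog
    # handler is a stand-in, not part of this module):
    #
    #     @signals.after_setup_task_logger.connect
    #     def add_syslog(logger=None, **kwargs):
    #         logger.addHandler(logging.handlers.SysLogHandler())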

    def redirect_stdouts_to_logger(self, logger, loglevel=None,
                                   stdout=True, stderr=True):
        """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
        logging instance.

        :param logger: The :class:`logging.Logger` instance to redirect to.
        :param loglevel: The loglevel redirected messages will be logged as.
        """
        proxy = LoggingProxy(logger, loglevel)
        if stdout:
            sys.stdout = proxy
        if stderr:
            sys.stderr = proxy
        return proxy
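
    # After this, anything written to ``sys.stdout``/``sys.stderr`` (e.g.
    # ``print()``) ends up as log records on ``logger`` at ``loglevel``.
    # Note that the same proxy object replaces both streams when ``stdout``
    # and ``stderr`` are enabled.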

    def supports_color(self, colorize=None, logfile=None):
        colorize = self.colorize if colorize is None else colorize
        if self.app.IS_WINDOWS:
            # Windows does not support ANSI color codes.
            return False
        if colorize or colorize is None:
            # Only use color if there is no active log file
            # and stderr is an actual terminal.
            return logfile is None and isatty(sys.stderr)
        return colorize

    def colored(self, logfile=None, enabled=None):
        return colored(enabled=self.supports_color(enabled, logfile))

    def setup_handlers(self, logger, logfile, format, colorize,
                       formatter=ColorFormatter, **kwargs):
        if self._is_configured(logger):
            return logger
        handler = self._detect_handler(logfile)
        handler.setFormatter(formatter(format, use_color=colorize))
        logger.addHandler(handler)
        return logger

    def _detect_handler(self, logfile=None):
        """Create log handler with either a filename, an open stream
        or :const:`None` (stderr)."""
        logfile = sys.__stderr__ if logfile is None else logfile
        if hasattr(logfile, 'write'):
            return logging.StreamHandler(logfile)
        return WatchedFileHandler(logfile)
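
    # WatchedFileHandler (stdlib) reopens the log file if it is renamed or
    # removed, so external log rotation (e.g. logrotate) keeps working
    # without restarting the worker.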

    def _has_handler(self, logger):
        return any(
            not isinstance(h, logging.NullHandler)
            for h in logger.handlers or []
        )

    def _is_configured(self, logger):
        return self._has_handler(logger) and not getattr(
            logger, '_rudimentary_setup', False)

    def setup_logger(self, name='celery', *args, **kwargs):
        """Deprecated: No longer used."""
        self.setup_logging_subsystem(*args, **kwargs)
        return logging.root

    def get_default_logger(self, name='celery', **kwargs):
        return get_logger(name)

    @class_property
    def already_setup(cls):
        return cls._setup

    @already_setup.setter  # noqa
    def already_setup(cls, was_setup):
        cls._setup = was_setup
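
    # ``already_setup`` is a class-level flag (via ``class_property``), so it
    # is shared by every ``Logging`` instance in the process: configuring one
    # app's logging marks the subsystem as set up for all of them.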