# -*- coding: utf-8 -*-
"""
celery.beat
~~~~~~~~~~~

The Celery periodic task scheduler.

:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.

"""
from __future__ import absolute_import

import errno
import os
import time
import shelve
import sys
import threading
import traceback

try:
    import multiprocessing
except ImportError:
    multiprocessing = None  # noqa

from datetime import datetime

from . import __version__
from . import platforms
from . import registry
from . import signals
from .app import app_or_default
from .log import SilenceRepeated
from .schedules import maybe_schedule, crontab
from .utils import cached_property, instantiate, maybe_promise
from .utils.timeutils import humanize_seconds


class SchedulingError(Exception):
    """An error occurred while scheduling a task."""


class ScheduleEntry(object):
    """An entry in the scheduler.

    :keyword name: see :attr:`name`.
    :keyword schedule: see :attr:`schedule`.
    :keyword args: see :attr:`args`.
    :keyword kwargs: see :attr:`kwargs`.
    :keyword options: see :attr:`options`.
    :keyword last_run_at: see :attr:`last_run_at`.
    :keyword total_run_count: see :attr:`total_run_count`.
    :keyword relative: Is the time relative to when the server starts?

    """

    #: The task name
    name = None

    #: The schedule (run_every/crontab)
    schedule = None

    #: Positional arguments to apply.
    args = None

    #: Keyword arguments to apply.
    kwargs = None

    #: Task execution options.
    options = None

    #: The time and date of when this task was last scheduled.
    last_run_at = None

    #: Total number of times this task has been scheduled.
    total_run_count = 0

    def __init__(self, name=None, task=None, last_run_at=None,
            total_run_count=None, schedule=None, args=(), kwargs={},
            options={}, relative=False):
        self.name = name
        self.task = task
        self.args = args
        self.kwargs = kwargs
        self.options = options
        self.schedule = maybe_schedule(schedule, relative)
        self.last_run_at = last_run_at or self._default_now()
        self.total_run_count = total_run_count or 0

    def _default_now(self):
        return datetime.utcnow()

    def _next_instance(self, last_run_at=None):
        """Returns a new instance of the same class, but with
        its date and count fields updated."""
        return self.__class__(**dict(self,
                                last_run_at=last_run_at or datetime.utcnow(),
                                total_run_count=self.total_run_count + 1))
    __next__ = next = _next_instance  # for 2to3

    def update(self, other):
        """Update values from another entry.

        Only updates the "editable" fields (task, schedule, args, kwargs,
        options).

        """
        self.__dict__.update({"task": other.task, "schedule": other.schedule,
                              "args": other.args, "kwargs": other.kwargs,
                              "options": other.options})

    def is_due(self):
        """See :meth:`celery.task.base.PeriodicTask.is_due`."""
        return self.schedule.is_due(self.last_run_at)

    def __iter__(self):
        return vars(self).iteritems()

    def __repr__(self):
        return ("<Entry: %(name)s %(task)s(*%(args)s, **%(kwargs)s) "
                "{%(schedule)s}>" % vars(self))


class Scheduler(object):
    """Scheduler for periodic tasks.

    :keyword schedule: see :attr:`schedule`.
    :keyword logger: see :attr:`logger`.
    :keyword max_interval: see :attr:`max_interval`.

    """
    Entry = ScheduleEntry

    #: The schedule dict/shelve.
    schedule = None

    #: Current logger.
    logger = None

    #: Maximum time to sleep between re-checking the schedule.
    max_interval = 1

    #: How often to sync the schedule (3 minutes by default)
    sync_every = 3 * 60

    _last_sync = None

    def __init__(self, schedule=None, logger=None, max_interval=None,
            app=None, Publisher=None, lazy=False, **kwargs):
        app = self.app = app_or_default(app)
        self.data = maybe_promise({} if schedule is None else schedule)
        self.logger = logger or app.log.get_default_logger(name="celery.beat")
        self.max_interval = max_interval or \
                                app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
        self.Publisher = Publisher or app.amqp.TaskPublisher
        if not lazy:
            self.setup_schedule()

    def install_default_entries(self, data):
        entries = {}
        if self.app.conf.CELERY_TASK_RESULT_EXPIRES:
            if "celery.backend_cleanup" not in data:
                entries["celery.backend_cleanup"] = {
                        "task": "celery.backend_cleanup",
                        "schedule": crontab("0", "4", "*"),  # 04:00 daily
                        "options": {"expires": 12 * 3600}}
        self.update_from_dict(entries)

    def maybe_due(self, entry, publisher=None):
        is_due, next_time_to_run = entry.is_due()

        if is_due:
            self.logger.debug("Scheduler: Sending due task %s", entry.task)
            try:
                result = self.apply_async(entry, publisher=publisher)
            except Exception, exc:
                self.logger.error("Message Error: %s\n%s", exc,
                                  traceback.format_stack(),
                                  exc_info=sys.exc_info())
            else:
                self.logger.debug("%s sent. id->%s", entry.task,
                                  result.task_id)
        return next_time_to_run

    def tick(self):
        """Run a tick, that is, one iteration of the scheduler.

        Executes all due tasks.

        """
        remaining_times = []
        try:
            for entry in self.schedule.itervalues():
                next_time_to_run = self.maybe_due(entry, self.publisher)
                if next_time_to_run:
                    remaining_times.append(next_time_to_run)
        except RuntimeError:
            pass

        return min(remaining_times + [self.max_interval])

    def should_sync(self):
        return (not self._last_sync or
                (time.time() - self._last_sync) > self.sync_every)

    def reserve(self, entry):
        new_entry = self.schedule[entry.name] = entry.next()
        return new_entry

    def apply_async(self, entry, publisher=None, **kwargs):
        # Update timestamps and run counts before we actually execute,
        # so we have that done if an exception is raised (doesn't schedule
        # forever.)
        entry = self.reserve(entry)

        task = registry.tasks.get(entry.task)
        try:
            if task:
                result = task.apply_async(entry.args, entry.kwargs,
                                          publisher=publisher,
                                          **entry.options)
            else:
                result = self.send_task(entry.task, entry.args, entry.kwargs,
                                        publisher=publisher,
                                        **entry.options)
        except Exception, exc:
            raise SchedulingError("Couldn't apply scheduled task %s: %s" % (
                    entry.name, exc))

        if self.should_sync():
            self._do_sync()
        return result

    def send_task(self, *args, **kwargs):           # pragma: no cover
        return self.app.send_task(*args, **kwargs)

    def setup_schedule(self):
        self.install_default_entries(self.data)

    def _do_sync(self):
        try:
            self.logger.debug("Celerybeat: Synchronizing schedule...")
            self.sync()
        finally:
            self._last_sync = time.time()

    def sync(self):
        pass

    def close(self):
        self.sync()

    def add(self, **kwargs):
        entry = self.Entry(**kwargs)
        self.schedule[entry.name] = entry
        return entry

    def _maybe_entry(self, name, entry):
        if isinstance(entry, self.Entry):
            return entry
        return self.Entry(**dict(entry, name=name))

    def update_from_dict(self, dict_):
        self.schedule.update(dict((name, self._maybe_entry(name, entry))
                                for name, entry in dict_.items()))

    def merge_inplace(self, b):
        schedule = self.schedule
        A, B = set(schedule.keys()), set(b.keys())

        # Remove items from disk not in the schedule anymore.
        for key in A ^ B:
            schedule.pop(key, None)

        # Update and add new items in the schedule
        for key in B:
            entry = self.Entry(**dict(b[key], name=key))
            if schedule.get(key):
                schedule[key].update(entry)
            else:
                schedule[key] = entry

    def get_schedule(self):
        return self.data

    def set_schedule(self, schedule):
        self.data = schedule

    def _ensure_connected(self):
        # callback called for each retry while the connection
        # can't be established.
        def _error_handler(exc, interval):
            self.logger.error("Celerybeat: Connection error: %s. "
                              "Trying again in %s seconds...", exc, interval)

        return self.connection.ensure_connection(_error_handler,
                    self.app.conf.BROKER_CONNECTION_MAX_RETRIES)

    @cached_property
    def connection(self):
        return self.app.broker_connection()

    @cached_property
    def publisher(self):
        return self.Publisher(connection=self._ensure_connected())

    @property
    def schedule(self):
        return self.get_schedule()

    @property
    def info(self):
        return ""
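

# Usage sketch (illustrative only): a Scheduler can be driven by hand, which
# is essentially what Service.start() does below.  `app` stands in for a
# Celery app instance, and passing plain seconds as the schedule assumes
# maybe_schedule() converts numbers into interval schedules:
#
#     >>> s = Scheduler(app=app)
#     >>> s.add(name="add-every-30s", task="tasks.add",
#     ...       schedule=30, args=(16, 16))
#     >>> while True:
#     ...     time.sleep(s.tick())   # tick() returns seconds until next check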


class PersistentScheduler(Scheduler):
    persistence = shelve

    _store = None

    def __init__(self, *args, **kwargs):
        self.schedule_filename = kwargs.get("schedule_filename")
        Scheduler.__init__(self, *args, **kwargs)

    def _remove_db(self):
        for suffix in "", ".db", ".dat", ".bak", ".dir":
            try:
                os.remove(self.schedule_filename + suffix)
            except OSError, exc:
                if exc.errno != errno.ENOENT:
                    raise

    def setup_schedule(self):
        try:
            self._store = self.persistence.open(self.schedule_filename,
                                                writeback=True)
            entries = self._store.setdefault("entries", {})
        except Exception, exc:
            self.logger.error("Removing corrupted schedule file %r: %r",
                              self.schedule_filename, exc, exc_info=True)
            self._remove_db()
            self._store = self.persistence.open(self.schedule_filename,
                                                writeback=True)
        else:
            if "__version__" not in self._store:
                self._store.clear()   # remove schedule at 2.2.2 upgrade.
        entries = self._store.setdefault("entries", {})
        self.merge_inplace(self.app.conf.CELERYBEAT_SCHEDULE)
        self.install_default_entries(self.schedule)
        self._store["__version__"] = __version__
        self.sync()
        self.logger.debug("Current schedule:\n" +
                          "\n".join(repr(entry)
                                    for entry in entries.itervalues()))

    def get_schedule(self):
        return self._store["entries"]

    def sync(self):
        if self._store is not None:
            self._store.sync()

    def close(self):
        self.sync()
        self._store.close()

    @property
    def info(self):
        return "    . db -> %s" % (self.schedule_filename, )
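

# The persistent scheduler merges the CELERYBEAT_SCHEDULE setting into its
# shelve database at startup (see setup_schedule() above).  A minimal
# configuration sketch -- the "tasks.add" name is a made-up example:
#
#     CELERYBEAT_SCHEDULE = {
#         "add-every-30-seconds": {
#             "task": "tasks.add",
#             "schedule": timedelta(seconds=30),
#             "args": (16, 16),
#         },
#     }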


class Service(object):
    scheduler_cls = PersistentScheduler

    def __init__(self, logger=None, max_interval=None, schedule_filename=None,
            scheduler_cls=None, app=None):
        app = self.app = app_or_default(app)
        self.max_interval = max_interval or \
                                app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
        self.scheduler_cls = scheduler_cls or self.scheduler_cls
        self.logger = logger or app.log.get_default_logger(name="celery.beat")
        self.schedule_filename = schedule_filename or \
                                    app.conf.CELERYBEAT_SCHEDULE_FILENAME

        self._is_shutdown = threading.Event()
        self._is_stopped = threading.Event()
        self.debug = SilenceRepeated(self.logger.debug,
                                     10 if self.max_interval < 60 else 1)

    def start(self, embedded_process=False):
        self.logger.info("Celerybeat: Starting...")
        self.logger.debug("Celerybeat: Ticking with max interval->%s",
                          humanize_seconds(self.scheduler.max_interval))

        signals.beat_init.send(sender=self)
        if embedded_process:
            signals.beat_embedded_init.send(sender=self)
            platforms.set_process_title("celerybeat")

        try:
            while not self._is_shutdown.isSet():
                interval = self.scheduler.tick()
                self.debug("Celerybeat: Waking up %s." % (
                        humanize_seconds(interval, prefix="in ")))
                time.sleep(interval)
        except (KeyboardInterrupt, SystemExit):
            self._is_shutdown.set()
        finally:
            self.sync()

    def sync(self):
        self.scheduler.close()
        self._is_stopped.set()

    def stop(self, wait=False):
        self.logger.info("Celerybeat: Shutting down...")
        self._is_shutdown.set()
        wait and self._is_stopped.wait()    # block until shutdown done.

    def get_scheduler(self, lazy=False):
        filename = self.schedule_filename
        scheduler = instantiate(self.scheduler_cls,
                                app=self.app,
                                schedule_filename=filename,
                                logger=self.logger,
                                max_interval=self.max_interval,
                                lazy=lazy)
        return scheduler

    @cached_property
    def scheduler(self):
        return self.get_scheduler()


class _Threaded(threading.Thread):
    """Embedded task scheduler using threading."""

    def __init__(self, *args, **kwargs):
        super(_Threaded, self).__init__()
        self.service = Service(*args, **kwargs)
        self.setDaemon(True)
        self.setName("Beat")

    def run(self):
        self.service.start()

    def stop(self):
        self.service.stop(wait=True)


if multiprocessing is not None:

    class _Process(multiprocessing.Process):
        """Embedded task scheduler using multiprocessing."""

        def __init__(self, *args, **kwargs):
            super(_Process, self).__init__()
            self.service = Service(*args, **kwargs)
            self.name = "Beat"

        def run(self):
            platforms.signals.reset("SIGTERM")
            self.service.start(embedded_process=True)

        def stop(self):
            self.service.stop()
            self.terminate()
else:
    _Process = None


def EmbeddedService(*args, **kwargs):
    """Return embedded clock service.

    :keyword thread: Run threaded instead of as a separate process.
        Default is :const:`False`.

    """
    if kwargs.pop("thread", False) or _Process is None:
        # Need short max interval to be able to stop thread
        # in reasonable time.
        kwargs.setdefault("max_interval", 1)
        return _Threaded(*args, **kwargs)

    return _Process(*args, **kwargs)
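

# Usage sketch (illustrative only): running the clock service embedded in
# the current program instead of as a standalone celerybeat process.  `app`
# stands in for a Celery app instance; both the thread and process variants
# expose start() and stop():
#
#     >>> beat = EmbeddedService(app=app, thread=True)
#     >>> beat.start()
#     >>> ...
#     >>> beat.stop()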