# beat.py

import errno
import os
import time
import shelve
import sys
import threading
import traceback

try:
    import multiprocessing
except ImportError:
    multiprocessing = None

from datetime import datetime

from kombu.utils import cached_property

from celery import __version__
from celery import platforms
from celery import registry
from celery import signals
from celery.app import app_or_default
from celery.log import SilenceRepeated
from celery.schedules import maybe_schedule, crontab
from celery.utils import instantiate, maybe_promise
from celery.utils.timeutils import humanize_seconds

class SchedulingError(Exception):
    """An error occurred while scheduling a task."""

class ScheduleEntry(object):
    """An entry in the scheduler.

    :keyword name: see :attr:`name`.
    :keyword schedule: see :attr:`schedule`.
    :keyword args: see :attr:`args`.
    :keyword kwargs: see :attr:`kwargs`.
    :keyword options: see :attr:`options`.
    :keyword last_run_at: see :attr:`last_run_at`.
    :keyword total_run_count: see :attr:`total_run_count`.
    :keyword relative: Is the time relative to when the server starts?

    """

    #: The task name
    name = None

    #: The schedule (run_every/crontab)
    schedule = None

    #: Positional arguments to apply.
    args = None

    #: Keyword arguments to apply.
    kwargs = None

    #: Task execution options.
    options = None

    #: The time and date of when this task was last scheduled.
    last_run_at = None

    #: Total number of times this task has been scheduled.
    total_run_count = 0

    def __init__(self, name=None, task=None, last_run_at=None,
                 total_run_count=None, schedule=None, args=(), kwargs={},
                 options={}, relative=False):
        self.name = name
        self.task = task
        self.schedule = maybe_schedule(schedule, relative)
        self.args = args
        self.kwargs = kwargs
        self.options = options
        self.last_run_at = last_run_at or datetime.now()
        self.total_run_count = total_run_count or 0

    def next(self, last_run_at=None):
        """Returns a new instance of the same class, but with
        its date and count fields updated."""
        last_run_at = last_run_at or datetime.now()
        total_run_count = self.total_run_count + 1
        return self.__class__(**dict(self,
                                     last_run_at=last_run_at,
                                     total_run_count=total_run_count))

    def update(self, other):
        """Update values from another entry.

        Only updates the "editable" fields (schedule, args,
        kwargs, options).

        """
        self.task = other.task
        self.schedule = other.schedule
        self.args = other.args
        self.kwargs = other.kwargs
        self.options = other.options

    def is_due(self):
        """See :meth:`celery.task.base.PeriodicTask.is_due`."""
        return self.schedule.is_due(self.last_run_at)

    def __iter__(self):
        return vars(self).iteritems()

    def __repr__(self):
        return "<Entry: %s %s(*%s, **%s) {%s}>" % (
                self.name, self.task, self.args, self.kwargs, self.schedule)

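# Illustrative sketch only: constructing and advancing an entry by hand.
# The task name "tasks.add" is a made-up example; ``schedule=30`` is passed
# through maybe_schedule(), which also accepts timedelta and crontab objects.
#
#     entry = ScheduleEntry(name="add-every-30s", task="tasks.add",
#                           schedule=30, args=(2, 2))
#     due, next_check = entry.is_due()
#     entry = entry.next()    # copy with last_run_at/total_run_count bumped
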
class Scheduler(object):
    """Scheduler for periodic tasks.

    :keyword schedule: see :attr:`schedule`.
    :keyword logger: see :attr:`logger`.
    :keyword max_interval: see :attr:`max_interval`.

    """
    Entry = ScheduleEntry

    #: The schedule dict/shelve.
    schedule = None

    #: Current logger.
    logger = None

    #: Maximum time to sleep between re-checking the schedule.
    max_interval = 1

    def __init__(self, schedule=None, logger=None, max_interval=None,
                 app=None, Publisher=None, lazy=False, **kwargs):
        if schedule is None:
            schedule = {}
        self.app = app_or_default(app)
        conf = self.app.conf
        self.data = maybe_promise(schedule)
        self.logger = logger or self.app.log.get_default_logger(
                                                name="celery.beat")
        self.max_interval = max_interval or conf.CELERYBEAT_MAX_LOOP_INTERVAL
        self.Publisher = Publisher or self.app.amqp.TaskPublisher
        if not lazy:
            self.setup_schedule()

    def install_default_entries(self, data):
        entries = {}
        if self.app.conf.CELERY_TASK_RESULT_EXPIRES:
            if "celery.backend_cleanup" not in data:
                entries["celery.backend_cleanup"] = {
                        "task": "celery.backend_cleanup",
                        "schedule": crontab("0", "4", "*"),
                        "options": {"expires": 12 * 3600}}
        self.update_from_dict(entries)

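    # Note: crontab("0", "4", "*") sends the backend_cleanup task daily at
    # 04:00, and only when CELERY_TASK_RESULT_EXPIRES is enabled and no entry
    # with that name has already been configured.
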
    def maybe_due(self, entry, publisher=None):
        is_due, next_time_to_run = entry.is_due()

        if is_due:
            self.logger.debug("Scheduler: Sending due task %s" % entry.task)
            try:
                result = self.apply_async(entry, publisher=publisher)
            except Exception, exc:
                self.logger.error("Message Error: %s\n%s" % (exc,
                    traceback.format_stack()), exc_info=sys.exc_info())
            else:
                self.logger.debug("%s sent. id->%s" % (entry.task,
                                                       result.task_id))
        return next_time_to_run

    def tick(self):
        """Run a tick, that is one iteration of the scheduler.

        Executes all due tasks.

        """
        remaining_times = []
        try:
            for entry in self.schedule.itervalues():
                next_time_to_run = self.maybe_due(entry, self.publisher)
                if next_time_to_run:
                    remaining_times.append(next_time_to_run)
        except RuntimeError:
            pass

        return min(remaining_times + [self.max_interval])

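    # The return value of tick() is how long the caller should sleep before
    # ticking again: the smallest of the per-entry countdowns reported by
    # is_due(), capped at max_interval.
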
    def reserve(self, entry):
        new_entry = self.schedule[entry.name] = entry.next()
        return new_entry

    def apply_async(self, entry, publisher=None, **kwargs):
        # Update timestamps and run counts before we actually execute,
        # so we have that done if an exception is raised (doesn't schedule
        # forever.)
        entry = self.reserve(entry)

        try:
            task = registry.tasks[entry.task]
        except KeyError:
            task = None

        try:
            if task:
                result = task.apply_async(entry.args, entry.kwargs,
                                          publisher=publisher,
                                          **entry.options)
            else:
                result = self.send_task(entry.task, entry.args, entry.kwargs,
                                        publisher=publisher,
                                        **entry.options)
        except Exception, exc:
            raise SchedulingError("Couldn't apply scheduled task %s: %s" % (
                    entry.name, exc))
        return result

    def send_task(self, *args, **kwargs):       # pragma: no cover
        return self.app.send_task(*args, **kwargs)

    def setup_schedule(self):
        self.install_default_entries(self.data)

    def sync(self):
        pass

    def close(self):
        self.sync()

    def add(self, **kwargs):
        entry = self.Entry(**kwargs)
        self.schedule[entry.name] = entry
        return entry

    def _maybe_entry(self, name, entry):
        if isinstance(entry, self.Entry):
            return entry
        return self.Entry(**dict(entry, name=name))

    def update_from_dict(self, dict_):
        self.schedule.update(dict((name, self._maybe_entry(name, entry))
                                  for name, entry in dict_.items()))

    def merge_inplace(self, b):
        schedule = self.schedule
        A, B = set(schedule.keys()), set(b.keys())

        # Remove items from disk not in the schedule anymore.
        for key in A ^ B:
            schedule.pop(key, None)

        # Update and add new items in the schedule
        for key in B:
            entry = self.Entry(**dict(b[key], name=key))
            if schedule.get(key):
                schedule[key].update(entry)
            else:
                schedule[key] = entry

    def get_schedule(self):
        return self.data

    def set_schedule(self, schedule):
        self.data = schedule

    @cached_property
    def connection(self):
        return self.app.broker_connection()

    @cached_property
    def publisher(self):
        return self.Publisher(connection=self.connection)

    @property
    def schedule(self):
        return self.get_schedule()

    @property
    def info(self):
        return ""

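# Illustrative sketch only: driving a Scheduler by hand with an in-memory
# schedule.  "tasks.cleanup" is a made-up task name, and a configured app
# with a reachable broker is assumed so publishing can actually happen.
#
#     s = Scheduler(app=app)
#     s.add(name="cleanup", task="tasks.cleanup", schedule=60)
#     sleep_for = s.tick()    # sends anything due, returns seconds to sleep
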
class PersistentScheduler(Scheduler):
    persistence = shelve

    _store = None

    def __init__(self, *args, **kwargs):
        self.schedule_filename = kwargs.get("schedule_filename")
        Scheduler.__init__(self, *args, **kwargs)

    def _remove_db(self):
        for suffix in "", ".db", ".dat", ".bak", ".dir":
            try:
                os.remove(self.schedule_filename + suffix)
            except OSError, exc:
                if exc.errno != errno.ENOENT:
                    raise

    def setup_schedule(self):
        try:
            self._store = self.persistence.open(self.schedule_filename,
                                                writeback=True)
            entries = self._store.setdefault("entries", {})
        except Exception, exc:
            self.logger.error("Removing corrupted schedule file %r: %r" % (
                self.schedule_filename, exc))
            self._remove_db()
            self._store = self.persistence.open(self.schedule_filename,
                                                writeback=True)
        else:
            if "__version__" not in self._store:
                self._store.clear()     # remove schedule at 2.2.2 upgrade.

        entries = self._store.setdefault("entries", {})
        self.merge_inplace(self.app.conf.CELERYBEAT_SCHEDULE)
        self.install_default_entries(self.schedule)
        self._store["__version__"] = __version__
        self.sync()
        self.logger.debug("Current schedule:\n" +
                          "\n".join(repr(entry)
                                    for entry in entries.itervalues()))

    def get_schedule(self):
        return self._store["entries"]

    def sync(self):
        if self._store is not None:
            self.logger.debug("CeleryBeat: Syncing schedule to disk...")
            self._store.sync()

    def close(self):
        self.sync()
        self._store.close()

    @property
    def info(self):
        return " . db -> %s" % (self.schedule_filename, )

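# Note: PersistentScheduler keeps its entries in a shelve database under the
# "entries" key; a "__version__" key written on every setup is used to throw
# away schedule files left over from pre-2.2.2 releases, and unreadable files
# are removed and recreated.
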
class Service(object):
    scheduler_cls = PersistentScheduler

    def __init__(self, logger=None, max_interval=None, schedule_filename=None,
            scheduler_cls=None, app=None):
        self.app = app_or_default(app)
        self.max_interval = max_interval or \
                            self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
        self.scheduler_cls = scheduler_cls or self.scheduler_cls
        self.logger = logger or self.app.log.get_default_logger(
                                                name="celery.beat")
        self.schedule_filename = schedule_filename or \
                                    self.app.conf.CELERYBEAT_SCHEDULE_FILENAME

        self._shutdown = threading.Event()
        self._stopped = threading.Event()
        silence = self.max_interval < 60 and 10 or 1
        self.debug = SilenceRepeated(self.logger.debug,
                                     max_iterations=silence)

    def start(self, embedded_process=False):
        self.logger.info("Celerybeat: Starting...")
        self.logger.debug("Celerybeat: "
                          "Ticking with max interval->%s" % (
                            humanize_seconds(self.scheduler.max_interval)))

        signals.beat_init.send(sender=self)
        if embedded_process:
            signals.beat_embedded_init.send(sender=self)
            platforms.set_process_title("celerybeat")

        try:
            try:
                while not self._shutdown.isSet():
                    interval = self.scheduler.tick()
                    self.debug("Celerybeat: Waking up %s." % (
                            humanize_seconds(interval, prefix="in ")))
                    time.sleep(interval)
            except (KeyboardInterrupt, SystemExit):
                self._shutdown.set()
        finally:
            self.sync()

    def sync(self):
        self.scheduler.close()
        self._stopped.set()

    def stop(self, wait=False):
        self.logger.info("Celerybeat: Shutting down...")
        self._shutdown.set()
        wait and self._stopped.wait()           # block until shutdown done.

    def get_scheduler(self, lazy=False):
        filename = self.schedule_filename
        scheduler = instantiate(self.scheduler_cls,
                                app=self.app,
                                schedule_filename=filename,
                                logger=self.logger,
                                max_interval=self.max_interval,
                                lazy=lazy)
        return scheduler

    @cached_property
    def scheduler(self):
        return self.get_scheduler()

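# Note: Service.start() loops until the _shutdown event is set, sleeping for
# whatever interval the scheduler's tick() returns; stop(wait=True) blocks on
# the _stopped event, which sync() sets once the scheduler has been closed.
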
class _Threaded(threading.Thread):
    """Embedded task scheduler using threading."""

    def __init__(self, *args, **kwargs):
        super(_Threaded, self).__init__()
        self.service = Service(*args, **kwargs)
        self.setDaemon(True)
        self.setName("Beat")

    def run(self):
        self.service.start()

    def stop(self):
        self.service.stop(wait=True)

if multiprocessing is not None:

    class _Process(multiprocessing.Process):
        """Embedded task scheduler using multiprocessing."""

        def __init__(self, *args, **kwargs):
            super(_Process, self).__init__()
            self.service = Service(*args, **kwargs)
            self.name = "Beat"

        def run(self):
            platforms.reset_signal("SIGTERM")
            self.service.start(embedded_process=True)

        def stop(self):
            self.service.stop()
            self.terminate()
else:
    _Process = None

def EmbeddedService(*args, **kwargs):
    """Return embedded clock service.

    :keyword thread: Run threaded instead of as a separate process.
        Default is :const:`False`.

    """
    if kwargs.pop("thread", False) or _Process is None:
        # Need short max interval to be able to stop thread
        # in reasonable time.
        kwargs.setdefault("max_interval", 1)
        return _Threaded(*args, **kwargs)
    return _Process(*args, **kwargs)

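# Illustrative sketch only: embedding beat inside another program.  Whether a
# daemon thread or a child process is used depends on the "thread" flag and
# on multiprocessing being importable; both objects expose start()/stop().
#
#     beat = EmbeddedService(thread=True)
#     beat.start()
#     ...                     # run the host program
#     beat.stop()             # blocks until the scheduler has shut down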