# bench_worker.py -- celery worker throughput benchmark.
import os
import sys
import time

# These environment variables must be exported BEFORE celery is imported
# below, so the import picks them up.
# NOTE(review): NOSETPS presumably disables setproctitle calls and
# USE_FAST_LOCALS enables a trace-optimization -- inferred from the names,
# confirm against the celery version being benchmarked.
os.environ.update(
    NOSETPS='yes',
    USE_FAST_LOCALS='yes',
)

import anyjson

# Optionally force a specific JSON implementation via the JSONIMP env var.
JSONIMP = os.environ.get('JSONIMP')
if JSONIMP:
    anyjson.force_implementation(JSONIMP)

print('anyjson implementation: %r' % (anyjson.implementation.name, ))

from celery import Celery, group

# Default number of tasks published/consumed per benchmark run.
DEFAULT_ITS = 40000

# Broker transport, overridable via the BROKER env var.
BROKER_TRANSPORT = os.environ.get('BROKER', 'librabbitmq')
if hasattr(sys, 'pypy_version_info'):
    # On PyPy fall back to amqplib -- presumably because librabbitmq is a
    # C extension that is unavailable there; confirm.
    BROKER_TRANSPORT = 'amqplib'

celery = Celery(__name__)
# Benchmark-oriented configuration: solo pool, unlimited prefetch, no rate
# limits, transient (delivery mode 1) messages on a non-durable,
# auto-deleted queue, and no result backend.
celery.conf.update(BROKER_TRANSPORT=BROKER_TRANSPORT,
                   BROKER_POOL_LIMIT=10,
                   CELERYD_POOL='solo',
                   CELERYD_PREFETCH_MULTIPLIER=0,
                   CELERY_DISABLE_RATE_LIMITS=True,
                   CELERY_DEFAULT_DELIVERY_MODE=1,
                   CELERY_QUEUES = {
                       'bench.worker': {
                           'exchange': 'bench.worker',
                           'routing_key': 'bench.worker',
                           'no_ack': True,
                           'exchange_durable': False,
                           'queue_durable': False,
                           'auto_delete': True,
                       }
                   },
                   CELERY_TASK_SERIALIZER='json',
                   CELERY_DEFAULT_QUEUE='bench.worker',
                   CELERY_BACKEND=None,
                   )  # CELERY_MESSAGE_COMPRESSION='zlib')
  39. def tdiff(then):
  40. return time.time() - then
  41. @celery.task(cur=0, time_start=None, queue='bench.worker', bare=True)
  42. def it(_, n):
  43. i = it.cur # use internal counter, as ordering can be skewed
  44. # by previous runs, or the broker.
  45. if i and not i % 5000:
  46. print >> sys.stderr, '(%s so far: %ss)' % (i, tdiff(it.subt))
  47. it.subt = time.time()
  48. if not i:
  49. it.subt = it.time_start = time.time()
  50. elif i == n - 1:
  51. total = tdiff(it.time_start)
  52. print >> sys.stderr, '(%s so far: %ss)' % (i, tdiff(it.subt))
  53. print('-- process %s tasks: %ss total, %s tasks/s} ' % (
  54. n, total, n / (total + .0)))
  55. sys.exit()
  56. it.cur += 1
  57. def bench_apply(n=DEFAULT_ITS):
  58. time_start = time.time()
  59. group(it.s(i, n) for i in xrange(n))()
  60. print('-- apply %s tasks: %ss' % (n, time.time() - time_start, ))
def bench_work(n=DEFAULT_ITS, loglevel='CRITICAL'):
    """Run a worker consuming the benchmark queue.

    The ``it`` task terminates the process with sys.exit() once it has
    seen *n* messages, so worker.start() is not expected to return
    normally.  BENCH_LOGLEVEL overrides the *loglevel* argument.
    """
    loglevel = os.environ.get('BENCH_LOGLEVEL') or loglevel
    if loglevel:
        celery.log.setup_logging_subsystem(loglevel=loglevel)
    worker = celery.WorkController(concurrency=15,
                                   queues=['bench.worker'])
    try:
        print('STARTING WORKER')
        # Blocks until the `it` task calls sys.exit().
        worker.start()
    except SystemExit:
        raise
    # NOTE(review): only reached if worker.start() returns without raising
    # SystemExit -- in the normal benchmark flow the process exits above,
    # so this sanity check rarely (if ever) runs; confirm intent.
    assert sum(worker.state.total_count.values()) == n + 1
  73. def bench_both(n=DEFAULT_ITS):
  74. bench_apply(n)
  75. bench_work(n)
  76. def main(argv=sys.argv):
  77. n = DEFAULT_ITS
  78. if len(argv) < 2:
  79. print('Usage: %s [apply|work|both] [n=20k]' % (
  80. os.path.basename(argv[0]), ))
  81. return sys.exit(1)
  82. try:
  83. try:
  84. n = int(argv[2])
  85. except IndexError:
  86. pass
  87. return {'apply': bench_apply,
  88. 'work': bench_work,
  89. 'both': bench_both}[argv[1]](n=n)
  90. except:
  91. raise
# Script entry point: run the benchmark CLI when executed directly.
if __name__ == '__main__':
    main()