# bench_worker.py -- Celery worker throughput benchmark.
from __future__ import print_function
import os
import sys
import time

# Benchmark tuning knobs read by celery/kombu at import time:
# NOSETPS disables per-second rate reporting, USE_FAST_LOCALS enables
# the faster locals optimization.  Must be set before celery is imported.
os.environ.update(
    NOSETPS='yes',
    USE_FAST_LOCALS='yes',
)

import anyjson
# Optionally force a specific JSON implementation (e.g. JSONIMP=simplejson).
JSONIMP = os.environ.get('JSONIMP')
if JSONIMP:
    anyjson.force_implementation(JSONIMP)

print('anyjson implementation: {0!r}'.format(anyjson.implementation.name))

from celery import Celery, group

# Default number of tasks to publish/consume per benchmark run.
DEFAULT_ITS = 40000

# Prefer the C client; PyPy cannot load librabbitmq, so fall back to amqplib.
BROKER_TRANSPORT = os.environ.get('BROKER', 'librabbitmq')
if hasattr(sys, 'pypy_version_info'):
    BROKER_TRANSPORT = 'amqplib'

celery = Celery(__name__)
# Configuration chosen to minimize broker/worker overhead so the benchmark
# measures task throughput: solo pool, no rate limits, transient
# (non-durable, auto-delete, no-ack) queue, no result backend.
celery.conf.update(BROKER_TRANSPORT=BROKER_TRANSPORT,
                   BROKER_POOL_LIMIT=10,
                   CELERYD_POOL='solo',
                   CELERYD_PREFETCH_MULTIPLIER=0,
                   CELERY_DISABLE_RATE_LIMITS=True,
                   CELERY_DEFAULT_DELIVERY_MODE=1,
                   CELERY_QUEUES={
                       'bench.worker': {
                           'exchange': 'bench.worker',
                           'routing_key': 'bench.worker',
                           'no_ack': True,
                           'exchange_durable': False,
                           'queue_durable': False,
                           'auto_delete': True,
                       }
                   },
                   CELERY_TASK_SERIALIZER='json',
                   CELERY_DEFAULT_QUEUE='bench.worker',
                   CELERY_BACKEND=None,
                   )  # CELERY_MESSAGE_COMPRESSION='zlib')
  40. def tdiff(then):
  41. return time.time() - then
  42. @celery.task(cur=0, time_start=None, queue='bench.worker', bare=True)
  43. def it(_, n):
  44. i = it.cur # use internal counter, as ordering can be skewed
  45. # by previous runs, or the broker.
  46. if i and not i % 5000:
  47. print('({0} so far: {1}s)'.format(i, tdiff(it.subt)), file=sys.stderr)
  48. it.subt = time.time()
  49. if not i:
  50. it.subt = it.time_start = time.time()
  51. elif i == n - 1:
  52. total = tdiff(it.time_start)
  53. print('({0} so far: {1}s)'.format(i, tdiff(it.subt)), file=sys.stderr)
  54. print('-- process {0} tasks: {1}s total, {2} tasks/s} '.format(
  55. n, total, n / (total + .0)))
  56. sys.exit()
  57. it.cur += 1
  58. def bench_apply(n=DEFAULT_ITS):
  59. time_start = time.time()
  60. group(it.s(i, n) for i in xrange(n))()
  61. print('-- apply {0} tasks: {1}s'.format(n, time.time() - time_start))
  62. def bench_work(n=DEFAULT_ITS, loglevel='CRITICAL'):
  63. loglevel = os.environ.get('BENCH_LOGLEVEL') or loglevel
  64. if loglevel:
  65. celery.log.setup_logging_subsystem(loglevel=loglevel)
  66. worker = celery.WorkController(concurrency=15,
  67. queues=['bench.worker'])
  68. try:
  69. print('STARTING WORKER')
  70. worker.start()
  71. except SystemExit:
  72. raise
  73. assert sum(worker.state.total_count.values()) == n + 1
  74. def bench_both(n=DEFAULT_ITS):
  75. bench_apply(n)
  76. bench_work(n)
  77. def main(argv=sys.argv):
  78. n = DEFAULT_ITS
  79. if len(argv) < 2:
  80. print('Usage: {0} [apply|work|both] [n=20k]'.format(
  81. os.path.basename(argv[0])))
  82. return sys.exit(1)
  83. try:
  84. try:
  85. n = int(argv[2])
  86. except IndexError:
  87. pass
  88. return {'apply': bench_apply,
  89. 'work': bench_work,
  90. 'both': bench_both}[argv[1]](n=n)
  91. except:
  92. raise
# Script entry point: run the benchmark selected on the command line.
if __name__ == '__main__':
    main()