Commit 8eb9e02d59 by Ask Solem (9 years ago)

40 changed files with 267 additions and 267 deletions
  1. celery/app/base.py (+4 -4)
  2. celery/app/defaults.py (+2 -2)
  3. celery/backends/database/__init__.py (+1 -1)
  4. celery/backends/riak.py (+1 -1)
  5. celery/bin/base.py (+1 -1)
  6. celery/bin/celery.py (+2 -2)
  7. celery/tests/app/test_amqp.py (+1 -1)
  8. celery/tests/backends/test_amqp.py (+4 -4)
  9. celery/tests/case.py (+1 -1)
  10. celery/tests/concurrency/test_eventlet.py (+1 -1)
  11. celery/tests/concurrency/test_prefork.py (+1 -1)
  12. celery/tests/utils/test_debug.py (+7 -7)
  13. celery/tests/utils/test_platforms.py (+2 -2)
  14. celery/tests/utils/test_saferepr.py (+3 -3)
  15. celery/tests/worker/test_request.py (+1 -1)
  16. celery/tests/worker/test_worker.py (+2 -2)
  17. celery/utils/saferepr.py (+1 -1)
  18. docs/_ext/applyxrefs.py (+4 -4)
  19. docs/_ext/literals_to_xrefs.py (+31 -31)
  20. docs/history/changelog-1.0.rst (+54 -54)
  21. docs/history/changelog-2.0.rst (+49 -49)
  22. docs/history/changelog-2.1.rst (+10 -10)
  23. docs/history/changelog-2.2.rst (+10 -10)
  24. docs/history/changelog-2.3.rst (+2 -2)
  25. docs/history/changelog-2.5.rst (+2 -2)
  26. docs/internals/app-overview.rst (+6 -6)
  27. docs/reference/celery.rst (+2 -2)
  28. docs/userguide/extending.rst (+2 -2)
  29. docs/userguide/workers.rst (+1 -1)
  30. docs/whatsnew-4.0.rst (+3 -3)
  31. examples/celery_http_gateway/settings.py (+5 -5)
  32. examples/django/manage.py (+2 -2)
  33. examples/django/proj/settings.py (+8 -8)
  34. examples/django/proj/wsgi.py (+1 -1)
  35. examples/eventlet/README.rst (+1 -1)
  36. examples/httpexample/README.rst (+3 -3)
  37. examples/httpexample/settings.py (+5 -5)
  38. extra/release/attribution.py (+4 -4)
  39. extra/release/bump_version.py (+26 -26)
  40. funtests/stress/stress/app.py (+1 -1)
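
This commit is a mechanical style sweep: double-quoted string literals are
normalized to single quotes across code, tests, docs, and examples. Double
quotes remain only where the literal itself contains an apostrophe (e.g.
`raise Exception("Can't bump alpha releases")` in bump_version.py below).
Because Python parses both quote styles into identical string objects, a
sweep like this should be behavior-neutral. A minimal sanity check (a
sketch, not part of the commit):

    import ast

    # Single- and double-quoted literals produce identical ASTs, so
    # rewriting "feeds" as 'feeds' cannot change program behavior.
    assert ast.dump(ast.parse("x = 'feeds'")) == ast.dump(ast.parse('x = "feeds"'))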

+ 4 - 4
celery/app/base.py

@@ -322,7 +322,7 @@ class Celery(object):
 
         .. code-block:: python
 
-            @app.task(exchange="feeds")
+            @app.task(exchange='feeds')
             def refresh_feed(url):
                 return …
 
@@ -472,7 +472,7 @@ class Celery(object):
 
         .. code-block:: pycon
 
-            >>> celery.config_from_object("myapp.celeryconfig")
+            >>> celery.config_from_object('myapp.celeryconfig')
 
             >>> from myapp import celeryconfig
             >>> celery.config_from_object(celeryconfig)
@@ -493,8 +493,8 @@ class Celery(object):
 
         .. code-block:: pycon
 
-            >>> os.environ["CELERY_CONFIG_MODULE"] = "myapp.celeryconfig"
-            >>> celery.config_from_envvar("CELERY_CONFIG_MODULE")
+            >>> os.environ['CELERY_CONFIG_MODULE'] = 'myapp.celeryconfig'
+            >>> celery.config_from_envvar('CELERY_CONFIG_MODULE')
 
         """
         module_name = os.environ.get(variable_name)

+ 2 - 2
celery/app/defaults.py

@@ -125,9 +125,9 @@ NAMESPACES = Namespace(
         backend_options=Option({}, type='dict'),
     ),
     cassandra=Namespace(
-        entry_ttl=Option(type="float"),
+        entry_ttl=Option(type='float'),
         keyspace=Option(type='string'),
-        port=Option(type="string"),
+        port=Option(type='string'),
         read_consistency=Option(type='string'),
         servers=Option(type='list'),
         table=Option(type='string'),

+ 1 - 1
celery/backends/database/__init__.py

@@ -57,7 +57,7 @@ def retry(fun):
                 return fun(*args, **kwargs)
             except (DatabaseError, InvalidRequestError, StaleDataError):
                 logger.warning(
-                    "Failed operation %s. Retrying %s more times.",
+                    'Failed operation %s. Retrying %s more times.',
                     fun.__name__, max_retries - retries - 1,
                     exc_info=True,
                 )

+ 1 - 1
celery/backends/riak.py

@@ -38,7 +38,7 @@ if sys.version_info[0] == 3:
 else:
 
     def str_decode(s, encoding):
-        return s.decode("ascii")
+        return s.decode('ascii')
 
 
 def is_ascii(s):

+ 1 - 1
celery/bin/base.py

@@ -671,7 +671,7 @@ class Command(object):
 
 
 def daemon_options(parser, default_pidfile=None, default_logfile=None):
-    group = OptionGroup(parser, "Daemonization Options")
+    group = OptionGroup(parser, 'Daemonization Options')
     group.add_option('-f', '--logfile', default=default_logfile),
     group.add_option('--pidfile', default=default_pidfile),
     group.add_option('--uid', default=None),

+ 2 - 2
celery/bin/celery.py

@@ -656,7 +656,7 @@ class shell(Command):  # pragma: no cover
         IPShell(argv=[], user_ns=self.locals).mainloop()
 
     def _no_ipython(self):  # pragma: no cover
-        raise ImportError("no suitable ipython found")
+        raise ImportError('no suitable ipython found')
 
     def invoke_bpython_shell(self):
         import bpython
@@ -676,7 +676,7 @@ class upgrade(Command):
     choices = {'settings'}
 
     def usage(self, command):
-        return "%prog <command> settings [filename] [options]"
+        return '%prog <command> settings [filename] [options]'
 
     def run(self, *args, **kwargs):
         try:

+ 1 - 1
celery/tests/app/test_amqp.py

@@ -225,7 +225,7 @@ class test_AMQP(AppCase):
         self.assertEqual(kwargs['exchange'], '')
 
     def test_send_event_exchange_string(self):
-        evd = Mock(name="evd")
+        evd = Mock(name='evd')
         self.app.amqp.send_task_message(
             Mock(), 'foo', self.simple_message, retry=False,
             exchange='xyz', routing_key='xyb',

+ 4 - 4
celery/tests/backends/test_amqp.py

@@ -240,15 +240,15 @@ class test_AMQPBackend(AppCase):
 
     def test_drain_events_decodes_exceptions_in_meta(self):
         tid = uuid()
-        b = self.create_backend(serializer="json")
-        b.store_result(tid, RuntimeError("aap"), states.FAILURE)
+        b = self.create_backend(serializer='json')
+        b.store_result(tid, RuntimeError('aap'), states.FAILURE)
         result = AsyncResult(tid, backend=b)
 
         with self.assertRaises(Exception) as cm:
             result.get()
 
-        self.assertEqual(cm.exception.__class__.__name__, "RuntimeError")
-        self.assertEqual(str(cm.exception), "aap")
+        self.assertEqual(cm.exception.__class__.__name__, 'RuntimeError')
+        self.assertEqual(str(cm.exception), 'aap')
 
     def test_no_expires(self):
         b = self.create_backend(expires=None)

+ 1 - 1
celery/tests/case.py

@@ -309,7 +309,7 @@ def alive_threads():
 class Case(unittest.TestCase):
 
     def patch(self, *path, **options):
-        manager = patch(".".join(path), **options)
+        manager = patch('.'.join(path), **options)
         patched = manager.start()
         self.addCleanup(manager.stop)
         return patched

+ 1 - 1
celery/tests/concurrency/test_eventlet.py

@@ -38,7 +38,7 @@ class test_aaa_eventlet_patch(EventletCase):
     @patch('eventlet.debug.hub_blocking_detection', create=True)
     @patch('eventlet.monkey_patch', create=True)
     def test_aaa_blockdetecet(self, monkey_patch, hub_blocking_detection):
-        os.environ['EVENTLET_NOBLOCK'] = "10.3"
+        os.environ['EVENTLET_NOBLOCK'] = '10.3'
         try:
             from celery import maybe_patch_concurrency
             maybe_patch_concurrency(['x', '-P', 'eventlet'])

+ 1 - 1
celery/tests/concurrency/test_prefork.py

@@ -90,7 +90,7 @@ class test_process_initializer(AppCase):
                 )
 
                 with patch('celery.app.trace.setup_worker_optimizations') as S:
-                    os.environ['FORKED_BY_MULTIPROCESSING'] = "1"
+                    os.environ['FORKED_BY_MULTIPROCESSING'] = '1'
                     try:
                         process_initializer(app, 'luke.worker.com')
                         S.assert_called_with(app, 'luke.worker.com')

+ 7 - 7
celery/tests/utils/test_debug.py

@@ -56,18 +56,18 @@ class test_sample(Case):
 class test_hfloat(Case):
 
     def test_hfloat(self):
-        self.assertEqual(str(debug.hfloat(10, 5)), "10")
-        self.assertEqual(str(debug.hfloat(10.45645234234, 5)), "10.456")
+        self.assertEqual(str(debug.hfloat(10, 5)), '10')
+        self.assertEqual(str(debug.hfloat(10.45645234234, 5)), '10.456')
 
 
 class test_humanbytes(Case):
 
     def test_humanbytes(self):
-        self.assertEqual(debug.humanbytes(2 ** 20), "1MB")
-        self.assertEqual(debug.humanbytes(4 * 2 ** 20), "4MB")
-        self.assertEqual(debug.humanbytes(2 ** 16), "64kB")
-        self.assertEqual(debug.humanbytes(2 ** 16), "64kB")
-        self.assertEqual(debug.humanbytes(2 ** 8), "256b")
+        self.assertEqual(debug.humanbytes(2 ** 20), '1MB')
+        self.assertEqual(debug.humanbytes(4 * 2 ** 20), '4MB')
+        self.assertEqual(debug.humanbytes(2 ** 16), '64kB')
+        self.assertEqual(debug.humanbytes(2 ** 16), '64kB')
+        self.assertEqual(debug.humanbytes(2 ** 8), '256b')
 
 
 class test_mem_rss(Case):

+ 2 - 2
celery/tests/utils/test_platforms.py

@@ -501,9 +501,9 @@ if not platforms.IS_WINDOWS:
                 pass
             x.after_chdir.assert_called_with()
 
-            x = DaemonContext(workdir='/opt/workdir', umask="0755")
+            x = DaemonContext(workdir='/opt/workdir', umask='0755')
             self.assertEqual(x.umask, 493)
-            x = DaemonContext(workdir='/opt/workdir', umask="493")
+            x = DaemonContext(workdir='/opt/workdir', umask='493')
             self.assertEqual(x.umask, 493)
 
             x.redirect_to_null(None)

+ 3 - 3
celery/tests/utils/test_saferepr.py

@@ -14,7 +14,7 @@ from celery.tests.case import Case
 D_NUMBERS = {
     b'integer': 1,
     b'float': 1.3,
-    b'decimal': Decimal("1.3"),
+    b'decimal': Decimal('1.3'),
     b'long': long_t(4),
     b'complex': complex(13.3),
 }
@@ -167,14 +167,14 @@ class test_saferepr(Case):
         # multiple lines.  For that reason, dicts with more than one element
         # aren't tested here.
         types = (
-            0, 0, 0+0j, 0.0, "", b"",
+            0, 0, 0+0j, 0.0, '', b'',
             (), tuple2(), tuple3(),
             [], list2(), list3(),
             set(), set2(), set3(),
             frozenset(), frozenset2(), frozenset3(),
             {}, dict2(), dict3(),
             self.assertTrue, pprint,
-            -6, -6, -6-6j, -1.5, "x", b"x", (3,), [3], {3: 6},
+            -6, -6, -6-6j, -1.5, 'x', b'x', (3,), [3], {3: 6},
             (1, 2), [3, 4], {5: 6},
             tuple2((1, 2)), tuple3((1, 2)), tuple3(range(100)),
             [3, 4], list2([3, 4]), list3([3, 4]), list3(range(100)),

+ 1 - 1
celery/tests/worker/test_request.py

@@ -985,7 +985,7 @@ class test_create_request_class(RequestCase):
             **kwargs)
 
     def test_on_success(self):
-        self.zRequest(id=uuid()).on_success((False, "hey", 3.1222))
+        self.zRequest(id=uuid()).on_success((False, 'hey', 3.1222))
 
     def test_on_success__SystemExit(self,
                                     errors=(SystemExit, KeyboardInterrupt)):

+ 2 - 2
celery/tests/worker/test_worker.py

@@ -838,10 +838,10 @@ class test_WorkController(AppCase):
         self.app.amqp.queues.select.side_effect = KeyError()
         self.app.amqp.queues.deselect.side_effect = KeyError()
         with self.assertRaises(ImproperlyConfigured):
-            self.worker.setup_queues("x,y", exclude="foo,bar")
+            self.worker.setup_queues('x,y', exclude='foo,bar')
         self.app.amqp.queues.select = Mock(name='select')
         with self.assertRaises(ImproperlyConfigured):
-            self.worker.setup_queues("x,y", exclude="foo,bar")
+            self.worker.setup_queues('x,y', exclude='foo,bar')
 
     def test_send_worker_shutdown(self):
         with patch('celery.signals.worker_shutdown') as ws:

+ 1 - 1
celery/utils/saferepr.py

@@ -190,7 +190,7 @@ def reprstream(stack, seen=None, maxlevels=3, level=0, isinstance=isinstance):
                     continue
 
                 if maxlevels and level >= maxlevels:
-                    yield "%s...%s" % (lit_start.value, lit_end.value), it
+                    yield '%s...%s' % (lit_start.value, lit_end.value), it
                     continue
 
                 objid = id(orig)

+ 4 - 4
docs/_ext/applyxrefs.py

@@ -50,7 +50,7 @@ def has_target(fn):
             return (True, None)
 
     if len(lines) < 1:
-        print("Not touching empty file %s." % fn)
+        print('Not touching empty file %s.' % fn)
         return (True, None)
     if lines[0].startswith('.. _'):
         return (True, None)
@@ -73,7 +73,7 @@ def main(argv=None):
 
     for fn in files:
         if fn in DONT_TOUCH:
-            print("Skipping blacklisted file %s." % fn)
+            print('Skipping blacklisted file %s.' % fn)
             continue
 
         target_found, lines = has_target(fn)
@@ -81,10 +81,10 @@ def main(argv=None):
             if testing:
                 print '%s: %s' % (fn, lines[0]),
             else:
-                print "Adding xref to %s" % fn
+                print 'Adding xref to %s' % fn
                 process_file(fn, lines)
         else:
-            print "Skipping %s: already has a xref" % fn
+            print 'Skipping %s: already has a xref' % fn
 
 if __name__ == '__main__':
     sys.exit(main())

+ 31 - 31
docs/_ext/literals_to_xrefs.py

@@ -17,7 +17,7 @@ refre = re.compile(r'``([^`\s]+?)``')
 ROLES = (
     'attr',
     'class',
-    "djadmin",
+    'djadmin',
     'data',
     'exc',
     'file',
@@ -25,21 +25,21 @@ ROLES = (
     'lookup',
     'meth',
     'mod',
-    "djadminopt",
-    "ref",
-    "setting",
-    "term",
-    "tfilter",
-    "ttag",
+    'djadminopt',
+    'ref',
+    'setting',
+    'term',
+    'tfilter',
+    'ttag',
 
     # special
-    "skip",
+    'skip',
 )
 
 ALWAYS_SKIP = [
-    "NULL",
-    "True",
-    "False",
+    'NULL',
+    'True',
+    'False',
 ]
 
 
@@ -48,18 +48,18 @@ def fixliterals(fname):
 
     last = 0
     new = []
-    storage = shelve.open("/tmp/literals_to_xref.shelve")
-    lastvalues = storage.get("lastvalues", {})
+    storage = shelve.open('/tmp/literals_to_xref.shelve')
+    lastvalues = storage.get('lastvalues', {})
 
     for m in refre.finditer(data):
 
         new.append(data[last:m.start()])
         last = m.end()
 
-        line_start = data.rfind("\n", 0, m.start())
-        line_end = data.find("\n", m.end())
-        prev_start = data.rfind("\n", 0, line_start)
-        next_end = data.find("\n", line_end + 1)
+        line_start = data.rfind('\n', 0, m.start())
+        line_end = data.find('\n', m.end())
+        prev_start = data.rfind('\n', 0, line_start)
+        next_end = data.find('\n', line_end + 1)
 
         # Skip always-skip stuff
         if m.group(1) in ALWAYS_SKIP:
@@ -68,50 +68,50 @@ def fixliterals(fname):
 
         # skip when the next line is a title
         next_line = data[m.end():next_end].strip()
-        if next_line[0] in "!-/:-@[-`{-~" and \
+        if next_line[0] in '!-/:-@[-`{-~' and \
                 all(c == next_line[0] for c in next_line):
             new.append(m.group(0))
             continue
 
-        sys.stdout.write("\n" + "-" * 80 + "\n")
+        sys.stdout.write('\n' + '-' * 80 + '\n')
         sys.stdout.write(data[prev_start + 1:m.start()])
-        sys.stdout.write(colorize(m.group(0), fg="red"))
+        sys.stdout.write(colorize(m.group(0), fg='red'))
         sys.stdout.write(data[m.end():next_end])
-        sys.stdout.write("\n\n")
+        sys.stdout.write('\n\n')
 
         replace_type = None
         while replace_type is None:
             replace_type = input(
-                colorize("Replace role: ", fg="yellow")).strip().lower()
+                colorize('Replace role: ', fg='yellow')).strip().lower()
             if replace_type and replace_type not in ROLES:
                 replace_type = None
 
-        if replace_type == "":
+        if replace_type == '':
             new.append(m.group(0))
             continue
 
-        if replace_type == "skip":
+        if replace_type == 'skip':
             new.append(m.group(0))
             ALWAYS_SKIP.append(m.group(1))
             continue
 
         default = lastvalues.get(m.group(1), m.group(1))
-        if default.endswith("()") and \
-                replace_type in ("class", "func", "meth"):
+        if default.endswith('()') and \
+                replace_type in ('class', 'func', 'meth'):
             default = default[:-2]
         replace_value = input(
-            colorize("Text <target> [", fg="yellow") +
-            default + colorize("]: ", fg="yellow"),
+            colorize('Text <target> [', fg='yellow') +
+            default + colorize(']: ', fg='yellow'),
         ).strip()
         if not replace_value:
             replace_value = default
-        new.append(":%s:`%s`" % (replace_type, replace_value))
+        new.append(':%s:`%s`' % (replace_type, replace_value))
         lastvalues[m.group(1)] = replace_value
 
     new.append(data[last:])
-    open(fname, "w").write("".join(new))
+    open(fname, 'w').write(''.join(new))
 
-    storage["lastvalues"] = lastvalues
+    storage['lastvalues'] = lastvalues
     storage.close()
 
 

+ 54 - 54
docs/history/changelog-1.0.rst

@@ -187,15 +187,15 @@ News
 
         @periodic_task(run_every=crontab(hour=7, minute=30))
         def every_morning():
-            print("Runs every morning at 7:30a.m")
+            print('Runs every morning at 7:30a.m')
 
-        @periodic_task(run_every=crontab(hour=7, minute=30, day_of_week="mon"))
+        @periodic_task(run_every=crontab(hour=7, minute=30, day_of_week='mon'))
         def every_monday_morning():
-            print("Run every monday morning at 7:30a.m")
+            print('Run every monday morning at 7:30a.m')
 
         @periodic_task(run_every=crontab(minutes=30))
         def every_hour():
-            print("Runs every hour on the clock. e.g. 1:30, 2:30, 3:30 etc.")
+            print('Runs every hour on the clock. e.g. 1:30, 2:30, 3:30 etc.')
 
     .. note::
         This is a late addition. While we have unittests, due to the
@@ -250,11 +250,11 @@ Remote control commands
 
     * rate_limit(task_name, destination=all, reply=False, timeout=1, limit=0)
 
-        Worker returns `{"ok": message}` on success,
-        or `{"failure": message}` on failure.
+        Worker returns `{'ok': message}` on success,
+        or `{'failure': message}` on failure.
 
             >>> from celery.task.control import rate_limit
-            >>> rate_limit("tasks.add", "10/s", reply=True)
+            >>> rate_limit('tasks.add', '10/s', reply=True)
             [{'worker1': {'ok': 'new rate limit set successfully'}},
              {'worker2': {'ok': 'new rate limit set successfully'}}]
 
@@ -272,7 +272,7 @@ Remote control commands
         Worker simply returns `True`.
 
             >>> from celery.task.control import revoke
-            >>> revoke("419e46eb-cf6a-4271-86a8-442b7124132c", reply=True)
+            >>> revoke('419e46eb-cf6a-4271-86a8-442b7124132c', reply=True)
             [{'worker1': True},
             {'worker2': True}]
 
@@ -289,20 +289,20 @@ Remote control commands
         @Panel.register
         def reset_broker_connection(state, **kwargs):
             state.consumer.reset_connection()
-            return {"ok": "connection re-established"}
+            return {'ok': 'connection re-established'}
 
     With this module imported in the worker, you can launch the command
     using `celery.task.control.broadcast`::
 
         >>> from celery.task.control import broadcast
-        >>> broadcast("reset_broker_connection", reply=True)
+        >>> broadcast('reset_broker_connection', reply=True)
         [{'worker1': {'ok': 'connection re-established'}},
          {'worker2': {'ok': 'connection re-established'}}]
 
     **TIP** You can choose the worker(s) to receive the command
     by using the `destination` argument::
 
-        >>> broadcast("reset_broker_connection", destination=["worker1"])
+        >>> broadcast('reset_broker_connection', destination=['worker1'])
         [{'worker1': {'ok': 'connection re-established'}}]
 
 * New remote control command: `dump_reserved`
@@ -310,7 +310,7 @@ Remote control commands
     Dumps tasks reserved by the worker, waiting to be executed::
 
         >>> from celery.task.control import broadcast
-        >>> broadcast("dump_reserved", reply=True)
+        >>> broadcast('dump_reserved', reply=True)
         [{'myworker1': [<TaskRequest ....>]}]
 
 * New remote control command: `dump_schedule`
@@ -320,27 +320,27 @@ Remote control commands
     waiting to be executed by the worker.
 
         >>> from celery.task.control import broadcast
-        >>> broadcast("dump_schedule", reply=True)
+        >>> broadcast('dump_schedule', reply=True)
         [{'w1': []},
          {'w3': []},
          {'w2': ['0. 2010-05-12 11:06:00 pri0 <TaskRequest
-                    {name:"opalfeeds.tasks.refresh_feed_slice",
-                     id:"95b45760-4e73-4ce8-8eac-f100aa80273a",
-                     args:"(<Feeds freq_max:3600 freq_min:60
-                                   start:2184.0 stop:3276.0>,)",
-                     kwargs:"{'page': 2}"}>']},
+                    {name:'opalfeeds.tasks.refresh_feed_slice',
+                     id:'95b45760-4e73-4ce8-8eac-f100aa80273a',
+                     args:'(<Feeds freq_max:3600 freq_min:60
+                                   start:2184.0 stop:3276.0>,)',
+                     kwargs:'{\'page\': 2}'}>']},
          {'w4': ['0. 2010-05-12 11:00:00 pri0 <TaskRequest
-                    {name:"opalfeeds.tasks.refresh_feed_slice",
-                     id:"c053480b-58fb-422f-ae68-8d30a464edfe",
-                     args:"(<Feeds freq_max:3600 freq_min:60
-                                   start:1092.0 stop:2184.0>,)",
-                     kwargs:"{\'page\': 1}"}>',
+                    {name:'opalfeeds.tasks.refresh_feed_slice',
+                     id:'c053480b-58fb-422f-ae68-8d30a464edfe',
+                     args:'(<Feeds freq_max:3600 freq_min:60
+                                   start:1092.0 stop:2184.0>,)',
+                     kwargs:'{\'page\': 1}'}>',
                 '1. 2010-05-12 11:12:00 pri0 <TaskRequest
-                    {name:"opalfeeds.tasks.refresh_feed_slice",
-                     id:"ab8bc59e-6cf8-44b8-88d0-f1af57789758",
-                     args:"(<Feeds freq_max:3600 freq_min:60
-                                   start:3276.0 stop:4365>,)",
-                     kwargs:"{\'page\': 3}"}>']}]
+                    {name:'opalfeeds.tasks.refresh_feed_slice',
+                     id:'ab8bc59e-6cf8-44b8-88d0-f1af57789758',
+                     args:'(<Feeds freq_max:3600 freq_min:60
+                                   start:3276.0 stop:4365>,)',
+                     kwargs:'{\'page\': 3}'}>']}]
 
 .. _v103-fixes:
 
@@ -410,10 +410,10 @@ Fixes
 
     .. code-block:: python
 
-        CELERYD_POOL = "celery.concurrency.processes.TaskPool"
-        CELERYD_MEDIATOR = "celery.worker.controllers.Mediator"
-        CELERYD_ETA_SCHEDULER = "celery.worker.controllers.ScheduleController"
-        CELERYD_CONSUMER = "celery.worker.consumer.Consumer"
+        CELERYD_POOL = 'celery.concurrency.processes.TaskPool'
+        CELERYD_MEDIATOR = 'celery.worker.controllers.Mediator'
+        CELERYD_ETA_SCHEDULER = 'celery.worker.controllers.ScheduleController'
+        CELERYD_CONSUMER = 'celery.worker.consumer.Consumer'
 
     The :setting:`CELERYD_POOL` setting makes it easy to swap out the
     multiprocessing pool with a threaded pool, or how about a
@@ -525,7 +525,7 @@ Fixes
     Example:
 
         >>> from celery.execute import send_task
-        >>> result = send_task("celery.ping", args=[], kwargs={})
+        >>> result = send_task('celery.ping', args=[], kwargs={})
         >>> result.get()
         'pong'
 
@@ -845,9 +845,9 @@ News
 
 * Now supports passing execute options to a TaskSet's list of args, e.g.:
 
-    >>> ts = TaskSet(add, [([2, 2], {}, {"countdown": 1}),
-    ...                   ([4, 4], {}, {"countdown": 2}),
-    ...                   ([8, 8], {}, {"countdown": 3})])
+    >>> ts = TaskSet(add, [([2, 2], {}, {'countdown': 1}),
+    ...                   ([4, 4], {}, {'countdown': 2}),
+    ...                   ([8, 8], {}, {'countdown': 3})])
     >>> ts.run()
 
 * Got a 3x performance gain by setting the prefetch count to four times the
@@ -1021,28 +1021,28 @@ Important changes
 * All AMQP_* settings have been renamed to BROKER_*, and in addition
   AMQP_SERVER has been renamed to BROKER_HOST, so before where you had::
 
-        AMQP_SERVER = "localhost"
+        AMQP_SERVER = 'localhost'
         AMQP_PORT = 5678
-        AMQP_USER = "myuser"
-        AMQP_PASSWORD = "mypassword"
-        AMQP_VHOST = "celery"
+        AMQP_USER = 'myuser'
+        AMQP_PASSWORD = 'mypassword'
+        AMQP_VHOST = 'celery'
 
   You need to change that to::
 
-        BROKER_HOST = "localhost"
+        BROKER_HOST = 'localhost'
         BROKER_PORT = 5678
-        BROKER_USER = "myuser"
-        BROKER_PASSWORD = "mypassword"
-        BROKER_VHOST = "celery"
+        BROKER_USER = 'myuser'
+        BROKER_PASSWORD = 'mypassword'
+        BROKER_VHOST = 'celery'
 
 * Custom carrot backends now need to include the backend class name, so before
   where you had::
 
-        CARROT_BACKEND = "mycustom.backend.module"
+        CARROT_BACKEND = 'mycustom.backend.module'
 
   you need to change it to::
 
-        CARROT_BACKEND = "mycustom.backend.module.Backend"
+        CARROT_BACKEND = 'mycustom.backend.module.Backend'
 
   where `Backend` is the class name. This is probably `"Backend"`, as
   that was the previously implied name.
@@ -1441,11 +1441,11 @@ News
 * **IMPORTANT** `tasks.register`: Renamed `task_name` argument to
   `name`, so
 
-        >>> tasks.register(func, task_name="mytask")
+        >>> tasks.register(func, task_name='mytask')
 
   has to be replaced with:
 
-        >>> tasks.register(func, name="mytask")
+        >>> tasks.register(func, name='mytask')
 
 * The daemon now correctly runs if the pidlock is stale.
 
@@ -1736,10 +1736,10 @@ arguments, so be sure to flush your task queue before you upgrade.
   a new backend for Tokyo Tyrant. You can set the backend in your django
   settings file. E.g.::
 
-        CELERY_RESULT_BACKEND = "database"; # Uses the database
-        CELERY_RESULT_BACKEND = "cache"; # Uses the django cache framework
-        CELERY_RESULT_BACKEND = "tyrant"; # Uses Tokyo Tyrant
-        TT_HOST = "localhost"; # Hostname for the Tokyo Tyrant server.
+        CELERY_RESULT_BACKEND = 'database'; # Uses the database
+        CELERY_RESULT_BACKEND = 'cache'; # Uses the django cache framework
+        CELERY_RESULT_BACKEND = 'tyrant'; # Uses Tokyo Tyrant
+        TT_HOST = 'localhost'; # Hostname for the Tokyo Tyrant server.
         TT_PORT = 6657; # Port of the Tokyo Tyrant server.
 
 .. _version-0.1.11:
@@ -1826,7 +1826,7 @@ arguments, so be sure to flush your task queue before you upgrade.
 
 * You can do this by including the celery `urls.py` into your project,
 
-        >>> url(r'^celery/$', include("celery.urls"))
+        >>> url(r'^celery/$', include('celery.urls'))
 
   then visiting the following url::
 
@@ -1834,7 +1834,7 @@ arguments, so be sure to flush your task queue before you upgrade.
 
   this will return a JSON dictionary, e.g.:
 
-        >>> {"task": {"id": $task_id, "executed": true}}
+        >>> {'task': {'id': $task_id, 'executed': true}}
 
 * `delay_task` now returns string id, not `uuid.UUID` instance.
 

+ 49 - 49
docs/history/changelog-2.0.rst

@@ -44,18 +44,18 @@ Fixes
 
     With the following settings::
 
-        CELERY_QUEUES = {"cpubound": {"exchange": "cpubound",
-                                      "routing_key": "cpubound"}}
+        CELERY_QUEUES = {'cpubound': {'exchange': 'cpubound',
+                                      'routing_key': 'cpubound'}}
 
-        CELERY_ROUTES = {"tasks.add": {"queue": "cpubound",
-                                       "routing_key": "tasks.add",
-                                       "serializer": "json"}}
+        CELERY_ROUTES = {'tasks.add': {'queue': 'cpubound',
+                                       'routing_key': 'tasks.add',
+                                       'serializer': 'json'}}
 
     The final routing options for `tasks.add` will become::
 
-        {"exchange": "cpubound",
-         "routing_key": "tasks.add",
-         "serializer": "json"}
+        {'exchange': 'cpubound',
+         'routing_key': 'tasks.add',
+         'serializer': 'json'}
 
     This was not the case before: the values
     in :setting:`CELERY_QUEUES` would take precedence.
@@ -63,7 +63,7 @@ Fixes
 * Worker crashed if the value of :setting:`CELERY_TASK_ERROR_WHITELIST` was
   not an iterable
 
-* :func:`~celery.execute.apply`: Make sure `kwargs["task_id"]` is
+* :func:`~celery.execute.apply`: Make sure `kwargs['task_id']` is
   always set.
 
 * `AsyncResult.traceback`: Now returns :const:`None`, instead of raising
@@ -218,10 +218,10 @@ Documentation
     Examples::
 
         # Inspect a single worker
-        >>> i = inspect("myworker.example.com")
+        >>> i = inspect('myworker.example.com')
 
         # Inspect several workers
-        >>> i = inspect(["myworker.example.com", "myworker2.example.com"])
+        >>> i = inspect(['myworker.example.com', 'myworker2.example.com'])
 
         # Inspect all workers consuming on this vhost.
         >>> i = inspect()
@@ -339,7 +339,7 @@ Documentation
 
     This example in the docs should now work again::
 
-        CELERY_ROUTES = {"feed.tasks.import_feed": "feeds"}
+        CELERY_ROUTES = {'feed.tasks.import_feed': 'feeds'}
 
 * `CREATE_MISSING_QUEUES` was not honored by apply_async.
 
@@ -367,7 +367,7 @@ Documentation
 
     Example reply::
 
-        >>> broadcast("dump_active", arguments={"safe": False}, reply=True)
+        >>> broadcast('dump_active', arguments={'safe': False}, reply=True)
         [{'worker.local': [
             {'args': '(1,)',
              'time_start': 1278580542.6300001,
@@ -428,17 +428,17 @@ Django integration has been moved to a separate package: `django-celery`_.
 
 * To upgrade you need to install the `django-celery`_ module and change::
 
-    INSTALLED_APPS = "celery"
+    INSTALLED_APPS = 'celery'
 
   to::
 
-    INSTALLED_APPS = "djcelery"
+    INSTALLED_APPS = 'djcelery'
 
 * If you use `mod_wsgi` you need to add the following line to your `.wsgi`
   file::
 
     import os
-    os.environ["CELERY_LOADER"] = "django"
+    os.environ['CELERY_LOADER'] = 'django'
 
 * The following modules have been moved to `django-celery`_:
 
@@ -485,16 +485,16 @@ The `DATABASE_*` settings has been replaced by a single setting:
 .. code-block:: python
 
     # sqlite (filename)
-    CELERY_RESULT_DBURI = "sqlite:///celerydb.sqlite"
+    CELERY_RESULT_DBURI = 'sqlite:///celerydb.sqlite'
 
     # mysql
-    CELERY_RESULT_DBURI = "mysql://scott:tiger@localhost/foo"
+    CELERY_RESULT_DBURI = 'mysql://scott:tiger@localhost/foo'
 
     # postgresql
-    CELERY_RESULT_DBURI = "postgresql://scott:tiger@localhost/mydatabase"
+    CELERY_RESULT_DBURI = 'postgresql://scott:tiger@localhost/mydatabase'
 
     # oracle
-    CELERY_RESULT_DBURI = "oracle://scott:tiger@127.0.0.1:1521/sidname"
+    CELERY_RESULT_DBURI = 'oracle://scott:tiger@127.0.0.1:1521/sidname'
 
 See `SQLAlchemy Connection Strings`_ for more information about connection
 strings.
@@ -503,7 +503,7 @@ To specify additional SQLAlchemy database engine options you can use
 the :setting:`CELERY_RESULT_ENGINE_OPTIONS` setting::
 
     # echo enables verbose logging from SQLAlchemy.
-    CELERY_RESULT_ENGINE_OPTIONS = {"echo": True}
+    CELERY_RESULT_ENGINE_OPTIONS = {'echo': True}
 
 .. _`SQLAlchemy`:
     http://www.sqlalchemy.org
@@ -522,7 +522,7 @@ Cache result backend
 The cache result backend is no longer using the Django cache framework,
 but it supports mostly the same configuration syntax::
 
-    CELERY_CACHE_BACKEND = "memcached://A.example.com:11211;B.example.com"
+    CELERY_CACHE_BACKEND = 'memcached://A.example.com:11211;B.example.com'
 
 To use the cache backend you must either have the `pylibmc`_ or
 `python-memcached`_ library installed, of which the former is regarded
@@ -551,9 +551,9 @@ Backward incompatible changes
     configured::
 
         >>> from carrot.connection import BrokerConnection
-        >>> conn = BrokerConnection("localhost", "guest", "guest", "/")
+        >>> conn = BrokerConnection('localhost', 'guest', 'guest', '/')
         >>> from celery.execute import send_task
-        >>> r = send_task("celery.ping", args=(), kwargs={}, connection=conn)
+        >>> r = send_task('celery.ping', args=(), kwargs={}, connection=conn)
         >>> from celery.backends.amqp import AMQPBackend
         >>> r.backend = AMQPBackend(connection=conn)
         >>> r.get()
@@ -581,11 +581,11 @@ Backward incompatible changes
     Assuming the implicit `Loader` class name is no longer supported,
     if you use e.g.::
 
-        CELERY_LOADER = "myapp.loaders"
+        CELERY_LOADER = 'myapp.loaders'
 
     You need to include the loader class name, like this::
 
-        CELERY_LOADER = "myapp.loaders.Loader"
+        CELERY_LOADER = 'myapp.loaders.Loader'
 
 * :setting:`CELERY_TASK_RESULT_EXPIRES` now defaults to 1 day.
 
@@ -690,11 +690,11 @@ News
 * Added support for using complex crontab-expressions in periodic tasks. For
   example, you can now use::
 
-    >>> crontab(minute="*/15")
+    >>> crontab(minute='*/15')
 
   or even::
 
-    >>> crontab(minute="*/30", hour="8-17,1-2", day_of_week="thu-fri")
+    >>> crontab(minute='*/30', hour='8-17,1-2', day_of_week='thu-fri')
 
   See :ref:`guide-beat`.
 
@@ -735,9 +735,9 @@ News
 
     The missing queues are created with the following options::
 
-        CELERY_QUEUES[name] = {"exchange": name,
-                               "exchange_type": "direct",
-                               "routing_key": "name}
+        CELERY_QUEUES[name] = {'exchange': name,
+                               'exchange_type': 'direct',
+                               'routing_key': name}
 
    This feature is added for easily setting up routing using the `-Q`
    option to the worker:
@@ -810,15 +810,15 @@ News
 
     Examples:
 
-        >>> CELERY_ROUTES = {"celery.ping": "default",
-                             "mytasks.add": "cpu-bound",
-                             "video.encode": {
-                                 "queue": "video",
-                                 "exchange": "media"
-                                 "routing_key": "media.video.encode"}}
+        >>> CELERY_ROUTES = {'celery.ping': 'default',
+                             'mytasks.add': 'cpu-bound',
+                             'video.encode': {
+                                 'queue': 'video',
+                                 'exchange': 'media',
+                                 'routing_key': 'media.video.encode'}}
 
-        >>> CELERY_ROUTES = ("myapp.tasks.Router",
-                             {"celery.ping": "default})
+        >>> CELERY_ROUTES = ('myapp.tasks.Router',
+                             {'celery.ping': 'default'})
 
     Where `myapp.tasks.Router` could be:
 
@@ -827,8 +827,8 @@ News
         class Router(object):
 
             def route_for_task(self, task, args=None, kwargs=None):
-                if task == "celery.ping":
-                    return "default"
+                if task == 'celery.ping':
+                    return 'default'
 
     route_for_task may return a string or a dict. A string then means
     it's a queue name in :setting:`CELERY_QUEUES`, a dict means it's a custom route.
@@ -840,17 +840,17 @@ News
 
     Example if :func:`~celery.execute.apply_async` has these arguments::
 
-       >>> Task.apply_async(immediate=False, exchange="video",
-       ...                  routing_key="video.compress")
+       >>> Task.apply_async(immediate=False, exchange='video',
+       ...                  routing_key='video.compress')
 
     and a router returns::
 
-        {"immediate": True,
-         "exchange": "urgent"}
+        {'immediate': True,
+         'exchange': 'urgent'}
 
     the final message options will be::
 
-        immediate=True, exchange="urgent", routing_key="video.compress"
+        immediate=True, exchange='urgent', routing_key='video.compress'
 
     (and any default message options defined in the
     :class:`~celery.task.base.Task` class)
@@ -895,7 +895,7 @@ News
 
     Now returns::
 
-        {"ok": "task $id revoked"}
+        {'ok': 'task $id revoked'}
 
     instead of `True`.
 
@@ -904,8 +904,8 @@ News
     Example usage:
 
         >>> from celery.task.control import broadcast
-        >>> broadcast("enable_events")
-        >>> broadcast("disable_events")
+        >>> broadcast('enable_events')
+        >>> broadcast('disable_events')
 
 * Removed top-level tests directory. Test config now in celery.tests.config
 

+ 10 - 10
docs/history/changelog-2.1.rst

@@ -197,7 +197,7 @@ News
 
     .. code-block:: python
 
-        CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
+        CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
 
 * Added Task.expires: Used to set default expiry time for tasks.
 
@@ -237,13 +237,13 @@ News
     .. code-block:: pycon
 
         >>> from celery.task.control import inspect
-        >>> inspect.add_consumer(queue="queue", exchange="exchange",
-        ...                      exchange_type="direct",
-        ...                      routing_key="key",
+        >>> inspect.add_consumer(queue='queue', exchange='exchange',
+        ...                      exchange_type='direct',
+        ...                      routing_key='key',
         ...                      durable=False,
         ...                      auto_delete=True)
 
-        >>> inspect.cancel_consumer("queue")
+        >>> inspect.cancel_consumer('queue')
 
 * celerybeat: Now logs the traceback if a message can't be sent.
 
@@ -455,7 +455,7 @@ News
 
         @signals.setup_logging.connect
         def setup_logging(**kwargs):
-            fileConfig("logging.conf")
+            fileConfig('logging.conf')
 
     If there are no receivers for this signal, the logging subsystem
     will be configured using the :option:`--loglevel`/:option:`--logfile`
@@ -472,8 +472,8 @@ News
 
        def setup_logging(**kwargs):
             import logging
-            fileConfig("logging.conf")
-            stdouts = logging.getLogger("mystdoutslogger")
+            fileConfig('logging.conf')
+            stdouts = logging.getLogger('mystdoutslogger')
             log.redirect_stdouts_to_logger(stdouts, loglevel=logging.WARNING)
 
 * worker Added command line option :option:`-I`/:option:`--include`:
@@ -504,11 +504,11 @@ News
 
     e.g.:
 
-        >>> s = subtask((1, 2), {"foo": "bar"}, baz=1)
+        >>> s = subtask((1, 2), {'foo': 'bar'}, baz=1)
         >>> s.args
         (1, 2)
         >>> s.kwargs
-        {"foo": "bar", "baz": 1}
+        {'foo': 'bar', 'baz': 1}
 
     See issue #182.
 

+ 10 - 10
docs/history/changelog-2.2.rst

@@ -507,7 +507,7 @@ Important Notes
 
                 @task()
                 def add(x, y, **kwargs):
-                    print("In task %s" % kwargs["task_id"])
+                    print('In task %s' % kwargs['task_id'])
                     return x + y
 
         And this will not use magic keyword arguments (new style):
@@ -518,7 +518,7 @@ Important Notes
 
                 @task()
                 def add(x, y):
-                    print("In task %s" % add.request.id)
+                    print('In task %s' % add.request.id)
                     return x + y
 
     In addition, tasks can choose not to accept magic keyword arguments by
@@ -548,12 +548,12 @@ Important Notes
     =====================================  ===================================
     **Magic Keyword Argument**             **Replace with**
     =====================================  ===================================
-    `kwargs["task_id"]`                    `self.request.id`
-    `kwargs["delivery_info"]`              `self.request.delivery_info`
-    `kwargs["task_retries"]`               `self.request.retries`
-    `kwargs["logfile"]`                    `self.request.logfile`
-    `kwargs["loglevel"]`                   `self.request.loglevel`
-    `kwargs["task_is_eager`                `self.request.is_eager`
+    `kwargs['task_id']`                    `self.request.id`
+    `kwargs['delivery_info']`              `self.request.delivery_info`
+    `kwargs['task_retries']`               `self.request.retries`
+    `kwargs['logfile']`                    `self.request.logfile`
+    `kwargs['loglevel']`                   `self.request.loglevel`
+    `kwargs['task_is_eager']`              `self.request.is_eager`
     **NEW**                                `self.request.args`
     **NEW**                                `self.request.kwargs`
     =====================================  ===================================
@@ -862,8 +862,8 @@ News
         >>> from celery.task.control import revoke
 
         >>> revoke(task_id, terminate=True)
-        >>> revoke(task_id, terminate=True, signal="KILL")
-        >>> revoke(task_id, terminate=True, signal="SIGKILL")
+        >>> revoke(task_id, terminate=True, signal='KILL')
+        >>> revoke(task_id, terminate=True, signal='SIGKILL')
 
 * `TaskSetResult.join_native`: Backend-optimized version of `join()`.
 

+ 2 - 2
docs/history/changelog-2.3.rst

@@ -172,7 +172,7 @@ Important Notes
     If you depend on the previous default which was the AMQP backend, then
     you have to set this explicitly before upgrading::
 
-        CELERY_RESULT_BACKEND = "amqp"
+        CELERY_RESULT_BACKEND = 'amqp'
 
     .. note::
 
@@ -250,7 +250,7 @@ News
     at runtime using the :func:`time_limit` remote control command::
 
         >>> from celery.task import control
-        >>> control.time_limit("tasks.sleeptask",
+        >>> control.time_limit('tasks.sleeptask',
         ...                    soft=60, hard=120, reply=True)
         [{'worker1.example.com': {'ok': 'time limits set successfully'}}]
 

+ 2 - 2
docs/history/changelog-2.5.rst

@@ -76,7 +76,7 @@ News
 
         @task_sent.connect
         def on_task_sent(**kwargs):
-            print("sent task: %r" % (kwargs,))
+            print('sent task: %r' % (kwargs,))
 
 - Invalid task messages are now rejected instead of acked.
 
@@ -102,7 +102,7 @@ News
         (10, 5)
 
         >>> new.options
-        {"countdown": 5}
+        {'countdown': 5}
 
 - Chord callbacks are now triggered in eager mode.
 

+ 6 - 6
docs/internals/app-overview.rst

@@ -17,8 +17,8 @@ Creating a Celery instance::
 
     >>> from celery import Celery
     >>> app = Celery()
-    >>> app.config_from_object("celeryconfig")
-    >>> #app.config_from_envvar("CELERY_CONFIG_MODULE")
+    >>> app.config_from_object('celeryconfig')
+    >>> #app.config_from_envvar('CELERY_CONFIG_MODULE')
 
 
 Creating tasks:
@@ -51,21 +51,21 @@ Starting a worker:
 
 .. code-block:: python
 
-    worker = celery.Worker(loglevel="INFO")
+    worker = celery.Worker(loglevel='INFO')
 
 Getting access to the configuration:
 
 .. code-block:: python
 
     celery.conf.task_always_eager = True
-    celery.conf["task_always_eager"] = True
+    celery.conf['task_always_eager'] = True
 
 
 Controlling workers::
 
     >>> celery.control.inspect().active()
-    >>> celery.control.rate_limit(add.name, "100/m")
-    >>> celery.control.broadcast("shutdown")
+    >>> celery.control.rate_limit(add.name, '100/m')
+    >>> celery.control.broadcast('shutdown')
     >>> celery.control.discard_all()
 
 Other interesting attributes::

+ 2 - 2
docs/reference/celery.rst

@@ -202,9 +202,9 @@ See :ref:`guide-canvas` for more about creating task workflows.
     arguments will be ignored and the values in the dict will be used
     instead.
 
-        >>> s = signature("tasks.add", args=(2, 2))
+        >>> s = signature('tasks.add', args=(2, 2))
         >>> signature(s)
-        {"task": "tasks.add", args=(2, 2), kwargs={}, options={}}
+        {'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
 
     .. method:: signature.__call__(*args \*\*kwargs)
 

+ 2 - 2
docs/userguide/extending.rst

@@ -258,10 +258,10 @@ An example Worker bootstep could be:
             print('Called when the worker is started.')
 
         def stop(self, worker):
-            print("Called when the worker shuts down.")
+            print('Called when the worker shuts down.')
 
         def terminate(self, worker):
-            print("Called when the worker terminates")
+            print('Called when the worker terminates')
 
 
 Every method is passed the current ``WorkController`` instance as the first

+ 1 - 1
docs/userguide/workers.rst

@@ -1151,7 +1151,7 @@ This command will gracefully shut down the worker remotely:
 .. code-block:: pycon
 
     >>> app.control.broadcast('shutdown') # shutdown all workers
-    >>> app.control.broadcast('shutdown, destination="worker1@example.com")
+    >>> app.control.broadcast('shutdown', destination='worker1@example.com')
 
 .. control:: ping
 

+ 3 - 3
docs/whatsnew-4.0.rst

@@ -295,7 +295,7 @@ e442df61b2ff1fe855881c1e2ff9acc970090f54
     Fix contributed by Ross Deane.
 
 - Creating a chord no longer results in multiple values for keyword
-  argument 'task_id'" (Issue #2225).
+  argument 'task_id' (Issue #2225).
 
     Fix contributed by Aneil Mallavarapu
 
@@ -914,7 +914,7 @@ for example::
 
     @task()
     def add(x, y, task_id=None):
-        print("My task id is %r" % (task_id,))
+        print('My task id is %r' % (task_id,))
 
 should be rewritten into::
 
@@ -922,7 +922,7 @@ should be rewritten into::
 
     @task(bind=True)
     def add(self, x, y):
-        print("My task id is {0.request.id}".format(self))
+        print('My task id is {0.request.id}'.format(self))
 
 Settings
 --------

+ 5 - 5
examples/celery_http_gateway/settings.py

@@ -51,17 +51,17 @@ SITE_ID = 1
 USE_I18N = True
 
 # Absolute path to the directory that holds media.
-# Example: "/home/media/media.lawrence.com/"
+# Example: '/home/media/media.lawrence.com/'
 MEDIA_ROOT = ''
 
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash if there is a path component (optional in other cases).
-# Examples: "http://media.lawrence.com", "http://example.com/media/"
+# Examples: 'http://media.lawrence.com', 'http://example.com/media/'
 MEDIA_URL = ''
 
 # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
 # trailing slash.
-# Examples: "http://foo.com/media/", "/media/".
+# Examples: 'http://foo.com/media/', '/media/'.
 ADMIN_MEDIA_PREFIX = '/media/'
 
 # Make this unique, and don't share it with anybody.
@@ -82,8 +82,8 @@ MIDDLEWARE_CLASSES = (
 ROOT_URLCONF = 'celery_http_gateway.urls'
 
 TEMPLATE_DIRS = (
-    # Put strings here, like "/home/html/django_templates" or
-    # "C:/www/django/templates".
+    # Put strings here, like '/home/html/django_templates' or
+    # 'C:/www/django/templates'.
     # Always use forward slashes, even on Windows.
     # Don't forget to use absolute paths, not relative paths.
 )

+ 2 - 2
examples/django/manage.py

@@ -2,8 +2,8 @@
 import os
 import sys
 
-if __name__ == "__main__":
-    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "proj.settings")
+if __name__ == '__main__':
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
 
     from django.core.management import execute_from_command_line
 

+ 8 - 8
examples/django/proj/settings.py

@@ -62,27 +62,27 @@ USE_L10N = True
 USE_TZ = True
 
 # Absolute filesystem path to the directory that will hold user-uploaded files.
-# Example: "/home/media/media.lawrence.com/media/"
+# Example: '/home/media/media.lawrence.com/media/'
 MEDIA_ROOT = ''
 
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash.
-# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
+# Examples: 'http://media.lawrence.com/media/', 'http://example.com/media/'
 MEDIA_URL = ''
 
 # Absolute path to the directory static files should be collected to.
 # Don't put anything in this directory yourself; store your static files
-# in apps' "static/" subdirectories and in STATICFILES_DIRS.
-# Example: "/home/media/media.lawrence.com/static/"
+# in apps' 'static/' subdirectories and in STATICFILES_DIRS.
+# Example: '/home/media/media.lawrence.com/static/'
 STATIC_ROOT = ''
 
 # URL prefix for static files.
-# Example: "http://media.lawrence.com/static/"
+# Example: 'http://media.lawrence.com/static/'
 STATIC_URL = '/static/'
 
 # Additional locations of static files
 STATICFILES_DIRS = (
-    # Put strings here, like "/home/html/static" or "C:/www/django/static".
+    # Put strings here, like '/home/html/static' or 'C:/www/django/static'.
     # Always use forward slashes, even on Windows.
     # Don't forget to use absolute paths, not relative paths.
 )
@@ -119,8 +119,8 @@ ROOT_URLCONF = 'proj.urls'
 WSGI_APPLICATION = 'proj.wsgi.application'
 
 TEMPLATE_DIRS = (
-    # Put strings here, like "/home/html/django_templates"
-    # or "C:/www/django/templates".
+    # Put strings here, like '/home/html/django_templates'
+    # or 'C:/www/django/templates'.
     # Always use forward slashes, even on Windows.
     # Don't forget to use absolute paths, not relative paths.
 )

+ 1 - 1
examples/django/proj/wsgi.py

@@ -15,7 +15,7 @@ framework.
 """
 import os
 
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "proj.settings")
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
 
 # This application object is used by any WSGI server configured to use this
 # file. This includes Django's development server, if the WSGI_APPLICATION

+ 1 - 1
examples/eventlet/README.rst

@@ -34,7 +34,7 @@ of the response body::
     $ cd examples/eventlet
     $ python
     >>> from tasks import urlopen
-    >>> urlopen.delay("http://www.google.com/").get()
+    >>> urlopen.delay('http://www.google.com/').get()
     9980
 
 To open several URLs at once you can do::

+ 3 - 3
examples/httpexample/README.rst

@@ -18,14 +18,14 @@ To execute the task you could use curl::
 
 which then gives the expected JSON response::
 
-    {"status": "success": "retval": 100}
+    {'status': 'success', 'retval': 100}
 
 
 To execute this http callback task asynchronously you could fire up
-a python shell with a properly configured celery and do:
+a python shell with a properly configured celery and do::
 
     >>> from celery.task.http import URL
-    >>> res = URL("http://localhost:8000/multiply").get_async(x=10, y=10)
+    >>> res = URL('http://localhost:8000/multiply').get_async(x=10, y=10)
     >>> res.wait()
     100
 

+ 5 - 5
examples/httpexample/settings.py

@@ -44,17 +44,17 @@ SITE_ID = 1
 USE_I18N = True
 
 # Absolute path to the directory that holds media.
-# Example: "/home/media/media.lawrence.com/"
+# Example: '/home/media/media.lawrence.com/'
 MEDIA_ROOT = ''
 
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash if there is a path component (optional in other cases).
-# Examples: "http://media.lawrence.com", "http://example.com/media/"
+# Examples: 'http://media.lawrence.com', 'http://example.com/media/'
 MEDIA_URL = ''
 
 # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
 # trailing slash.
-# Examples: "http://foo.com/media/", "/media/".
+# Examples: 'http://foo.com/media/', '/media/'.
 ADMIN_MEDIA_PREFIX = '/media/'
 
 # Make this unique, and don't share it with anybody.
@@ -75,8 +75,8 @@ MIDDLEWARE_CLASSES = (
 ROOT_URLCONF = 'httpexample.urls'
 
 TEMPLATE_DIRS = (
-    # Put strings here, like "/home/html/django_templates" or
-    # "C:/www/django/templates".
+    # Put strings here, like '/home/html/django_templates' or
+    # 'C:/www/django/templates'.
     # Always use forward slashes, even on Windows.
     # Don't forget to use absolute paths, not relative paths.
 )

+ 4 - 4
extra/release/attribution.py

@@ -9,18 +9,18 @@ from pprint import pprint
 def author(line):
     try:
         A, E = line.strip().rsplit(None, 1)
-        E.replace(">", "").replace("<", "")
+        E.replace('>', '').replace('<', '')
     except ValueError:
         A, E = line.strip(), None
     return A.lower() if A else A, E.lower() if E else E
 
 
 def proper_name(name):
-    return name and " " in name
+    return name and ' ' in name
 
 
 def find_missing_authors(seen):
-    with open("AUTHORS") as authors:
+    with open('AUTHORS') as authors:
         known = [author(line) for line in authors.readlines()]
 
     seen_authors = {t[0] for t in seen if proper_name(t[0])}
@@ -32,5 +32,5 @@ def find_missing_authors(seen):
     pprint(seen_authors - known_authors)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     find_missing_authors([author(line) for line in fileinput.input()])

+ 26 - 26
extra/release/bump_version.py

@@ -36,15 +36,15 @@ class StringVersion(object):
 
     def decode(self, s):
         s = rq(s)
-        text = ""
-        major, minor, release = s.split(".")
+        text = ''
+        major, minor, release = s.split('.')
         if not release.isdigit():
-            pos = release.index(re.split("\d+", release)[1][0])
+            pos = release.index(re.split('\d+', release)[1][0])
             release, text = release[:pos], release[pos:]
         return int(major), int(minor), int(release), text
 
     def encode(self, v):
-        return ".".join(map(str, v[:3])) + v[3]
+        return '.'.join(map(str, v[:3])) + v[3]
 to_str = StringVersion().encode
 from_str = StringVersion().decode
 
@@ -52,9 +52,9 @@ from_str = StringVersion().decode
 class TupleVersion(object):
 
     def decode(self, s):
-        v = list(map(rq, s.split(", ")))
+        v = list(map(rq, s.split(', ')))
         return (tuple(map(int, v[0:3])) +
-                tuple(["".join(v[3:])]))
+                tuple([''.join(v[3:])]))
 
     def encode(self, v):
         v = list(v)
@@ -66,7 +66,7 @@ class TupleVersion(object):
 
         if not v[-1]:
             v.pop()
-        return ", ".join(map(quote, v))
+        return ', '.join(map(quote, v))
 
 
 class VersionFile(object):
@@ -98,14 +98,14 @@ class VersionFile(object):
             for line in fh:
                 m = pattern.match(line)
                 if m:
-                    if "?P<keep>" in pattern.pattern:
-                        self._kept, gpos = m.groupdict()["keep"], 1
+                    if '?P<keep>' in pattern.pattern:
+                        self._kept, gpos = m.groupdict()['keep'], 1
                     return self.type.decode(m.groups()[gpos])
 
 
 class PyVersion(VersionFile):
     regex = re.compile(r'^VERSION\s*=\s*\((.+?)\)')
-    wb = "VERSION = ({version})\n"
+    wb = 'VERSION = ({version})\n'
     type = TupleVersion()
 
 
@@ -121,21 +121,21 @@ class CPPVersion(VersionFile):
     type = StringVersion()
 
 
-_filetype_to_type = {"py": PyVersion,
-                     "rst": SphinxVersion,
-                     "txt": SphinxVersion,
-                     "c": CPPVersion,
-                     "h": CPPVersion}
+_filetype_to_type = {'py': PyVersion,
+                     'rst': SphinxVersion,
+                     'txt': SphinxVersion,
+                     'c': CPPVersion,
+                     'h': CPPVersion}
 
 
 def filetype_to_type(filename):
-    _, _, suffix = filename.rpartition(".")
+    _, _, suffix = filename.rpartition('.')
     return _filetype_to_type[suffix](filename)
 
 
 def bump(*files, **kwargs):
-    version = kwargs.get("version")
-    before_commit = kwargs.get("before_commit")
+    version = kwargs.get('version')
+    before_commit = kwargs.get('before_commit')
     files = [filetype_to_type(f) for f in files]
     versions = [v.parse() for v in files]
     current = list(reversed(sorted(versions)))[0]  # find highest
@@ -149,37 +149,37 @@ def bump(*files, **kwargs):
             raise Exception("Can't bump alpha releases")
         next = (major, minor, release + 1, text)
 
-    print("Bump version from {0} -> {1}".format(to_str(current), to_str(next)))
+    print('Bump version from {0} -> {1}'.format(to_str(current), to_str(next)))
 
     for v in files:
-        print("  writing {0.filename!r}...".format(v))
+        print('  writing {0.filename!r}...'.format(v))
         v.write(next)
 
     if before_commit:
         cmd(*shlex.split(before_commit))
 
-    print(cmd("git", "commit", "-m", "Bumps version to {0}".format(
+    print(cmd('git', 'commit', '-m', 'Bumps version to {0}'.format(
         to_str(next)), *[f.filename for f in files]))
-    print(cmd("git", "tag", "v{0}".format(to_str(next))))
+    print(cmd('git', 'tag', 'v{0}'.format(to_str(next))))
 
 
 def main(argv=sys.argv, version=None, before_commit=None):
     if not len(argv) > 1:
-        print("Usage: distdir [docfile] -- <custom version>")
+        print('Usage: distdir [docfile] -- <custom version>')
         sys.exit(0)
 
     args = []
     for arg in argv:
-        if arg.startswith("--before-commit="):
+        if arg.startswith('--before-commit='):
             _, before_commit = arg.split('=')
         else:
             args.append(arg)
 
-    if "--" in args:
+    if '--' in args:
         c = args.index('--')
         version = args[c + 1]
         argv = args[:c]
     bump(*args[1:], version=version, before_commit=before_commit)
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()

+ 1 - 1
funtests/stress/stress/app.py

@@ -174,4 +174,4 @@ def marker(s, sep='-'):
         try:
             return _marker.delay(s, sep)
         except Exception as exc:
-            print("Retrying marker.delay(). It failed to start: %s" % exc)
+            print('Retrying marker.delay(). It failed to start: %s' % exc)
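
One way to audit for leftover double-quoted literals after a sweep like this
is the stdlib tokenizer. A hypothetical helper (Python 3 assumed; not part
of the commit):

    import tokenize

    def double_quoted_strings(path):
        """Yield (position, token) for double-quoted string literals in a file."""
        with tokenize.open(path) as fh:
            for tok in tokenize.generate_tokens(fh.readline):
                text = tok.string.lstrip('rbuRBU')  # drop r/b/u string prefixes
                if tok.type == tokenize.STRING and text.startswith('"'):
                    yield tok.start, tok.string

Run over the 40 files above, the remaining hits should only be literals that
contain a single quote, which this commit deliberately leaves double-quoted.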