
Merge branch 'master' into master

Omer Katz, 7 years ago
parent commit 3af6a635cf
100 files changed with 3558 additions and 868 deletions
  1. .bumpversion.cfg (+2, -3)
  2. .coveragerc (+0, -3)
  3. .travis.yml (+35, -13)
  4. CONTRIBUTING.rst (+113, -113)
  5. CONTRIBUTORS.txt (+13, -0)
  6. Changelog (+174, -231)
  7. Makefile (+1, -1)
  8. README.rst (+8, -8)
  9. bandit.json (+2467, -0)
  10. celery/__init__.py (+4, -3)
  11. celery/__main__.py (+3, -1)
  12. celery/_state.py (+4, -2)
  13. celery/app/__init__.py (+2, -2)
  14. celery/app/amqp.py (+9, -9)
  15. celery/app/annotations.py (+2, -1)
  16. celery/app/backends.py (+5, -2)
  17. celery/app/base.py (+36, -29)
  18. celery/app/builtins.py (+2, -1)
  19. celery/app/control.py (+4, -1)
  20. celery/app/defaults.py (+8, -4)
  21. celery/app/events.py (+2, -0)
  22. celery/app/log.py (+4, -7)
  23. celery/app/registry.py (+4, -2)
  24. celery/app/routes.py (+4, -1)
  25. celery/app/task.py (+15, -10)
  26. celery/app/trace.py (+46, -27)
  27. celery/app/utils.py (+10, -13)
  28. celery/apps/beat.py (+7, -5)
  29. celery/apps/multi.py (+3, -5)
  30. celery/apps/worker.py (+7, -7)
  31. celery/backends/amqp.py (+3, -4)
  32. celery/backends/async.py (+2, -3)
  33. celery/backends/base.py (+35, -40)
  34. celery/backends/cache.py (+12, -7)
  35. celery/backends/cassandra.py (+11, -5)
  36. celery/backends/consul.py (+9, -7)
  37. celery/backends/couchbase.py (+10, -1)
  38. celery/backends/couchdb.py (+7, -1)
  39. celery/backends/database/__init__.py (+4, -4)
  40. celery/backends/database/models.py (+6, -2)
  41. celery/backends/database/session.py (+3, -2)
  42. celery/backends/dynamodb.py (+28, -24)
  43. celery/backends/elasticsearch.py (+7, -2)
  44. celery/backends/filesystem.py (+5, -2)
  45. celery/backends/mongodb.py (+12, -8)
  46. celery/backends/redis.py (+94, -22)
  47. celery/backends/riak.py (+6, -1)
  48. celery/backends/rpc.py (+2, -2)
  49. celery/beat.py (+39, -18)
  50. celery/bin/__init__.py (+1, -1)
  51. celery/bin/amqp.py (+4, -6)
  52. celery/bin/base.py (+19, -25)
  53. celery/bin/beat.py (+6, -4)
  54. celery/bin/call.py (+2, -0)
  55. celery/bin/celery.py (+7, -11)
  56. celery/bin/celeryd_detach.py (+6, -3)
  57. celery/bin/control.py (+3, -1)
  58. celery/bin/events.py (+4, -2)
  59. celery/bin/graph.py (+5, -2)
  60. celery/bin/list.py (+1, -0)
  61. celery/bin/logtool.py (+3, -1)
  62. celery/bin/migrate.py (+2, -0)
  63. celery/bin/multi.py (+4, -1)
  64. celery/bin/purge.py (+2, -1)
  65. celery/bin/result.py (+1, -0)
  66. celery/bin/shell.py (+3, -1)
  67. celery/bin/upgrade.py (+2, -0)
  68. celery/bin/worker.py (+3, -1)
  69. celery/bootsteps.py (+1, -1)
  70. celery/canvas.py (+60, -52)
  71. celery/concurrency/__init__.py (+1, -1)
  72. celery/concurrency/asynpool.py (+4, -5)
  73. celery/concurrency/base.py (+2, -2)
  74. celery/concurrency/eventlet.py (+8, -9)
  75. celery/concurrency/gevent.py (+4, -1)
  76. celery/concurrency/prefork.py (+6, -6)
  77. celery/concurrency/solo.py (+3, -1)
  78. celery/contrib/abortable.py (+2, -1)
  79. celery/contrib/migrate.py (+3, -4)
  80. celery/contrib/pytest.py (+7, -18)
  81. celery/contrib/rdb.py (+7, -3)
  82. celery/contrib/sphinx.py (+28, -7)
  83. celery/contrib/testing/app.py (+4, -2)
  84. celery/contrib/testing/manager.py (+0, -1)
  85. celery/contrib/testing/mocks.py (+2, -0)
  86. celery/contrib/testing/tasks.py (+1, -0)
  87. celery/contrib/testing/worker.py (+3, -1)
  88. celery/events/__init__.py (+2, -2)
  89. celery/events/cursesmon.py (+3, -5)
  90. celery/events/dispatcher.py (+1, -2)
  91. celery/events/dumper.py (+3, -1)
  92. celery/events/event.py (+4, -2)
  93. celery/events/receiver.py (+1, -2)
  94. celery/events/snapshot.py (+4, -2)
  95. celery/events/state.py (+3, -2)
  96. celery/exceptions.py (+12, -6)
  97. celery/five.py (+3, -0)
  98. celery/fixups/django.py (+6, -8)
  99. celery/loaders/__init__.py (+1, -1)
  100. celery/loaders/app.py (+2, -1)

+ 2 - 3
.bumpversion.cfg

@@ -1,9 +1,9 @@
 [bumpversion]
-current_version = 4.0.2
+current_version = 4.1.0
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z]+)?
-serialize = 
+serialize =
 	{major}.{minor}.{patch}{releaselevel}
 	{major}.{minor}.{patch}
 
@@ -12,4 +12,3 @@ serialize =
 [bumpversion:file:docs/includes/introduction.txt]
 
 [bumpversion:file:README.rst]
-

+ 0 - 3
.coveragerc

@@ -14,9 +14,6 @@ omit =
     *celery/task/base.py
     *celery/five.py
     *celery/contrib/sphinx.py
-    *celery/backends/couchdb.py
-    *celery/backends/couchbase.py
-    *celery/backends/riak.py
     *celery/concurrency/asynpool.py
     *celery/utils/debug.py
     *celery/contrib/testing/*

+ 35 - 13
.travis.yml

@@ -9,6 +9,9 @@ python:
   - '3.6'
 os:
     - linux
+stages:
+  - lint
+  - test
 env:
   global:
   - PYTHONUNBUFFERED=yes
@@ -20,40 +23,52 @@ env:
 matrix:
   include:
   - python: '3.5'
-    env: TOXENV=pypy-unit PYPY_VERSION="5.3"
+    env: TOXENV=pypy-unit PYPY_VERSION="pypy2.7-5.8.0"
   - python: '3.5'
-    env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="5.3"
+    env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="pypy2.7-5.8.0"
   - python: '3.5'
-    env: TOXENV=pypy-integration-redis PYPY_VERSION="5.3"
+    env: TOXENV=pypy-integration-redis PYPY_VERSION="pypy2.7-5.8.0"
   - python: '3.5'
-    env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="5.3"
+    env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="pypy2.7-5.8.0"
   - python: '3.5'
     env: TOXENV=flake8
+    stage: lint
   - python: '3.5'
     env: TOXENV=flakeplus
+    stage: lint
   - python: '3.5'
     env: TOXENV=apicheck
+    stage: lint
   - python: '3.5'
     env: TOXENV=configcheck
+    stage: lint
+  - python: '3.5'
+    env: TOXENV=bandit
+    stage: lint
   - python: '3.5'
     env: TOXENV=pydocstyle
+    stage: lint
+  - python: '3.5'
+    env: TOXENV=isort-check
+    stage: lint
 before_install:
     - if [[ -v MATRIX_TOXENV ]]; then export TOXENV=${TRAVIS_PYTHON_VERSION}-${MATRIX_TOXENV}; fi; env
     - |
-          if [ "$TOXENV" = "pypy" ]; then
+          if [[ "$TOXENV" =~ "pypy" ]]; then
             export PYENV_ROOT="$HOME/.pyenv"
             if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
               cd "$PYENV_ROOT" && git pull
             else
-              rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
+              rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/pyenv/pyenv.git "$PYENV_ROOT"
             fi
-            "$PYENV_ROOT/bin/pyenv" install "pypy-$PYPY_VERSION"
-            virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
-            source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
+            "$PYENV_ROOT/bin/pyenv" install "$PYPY_VERSION"
+            virtualenv --python="$PYENV_ROOT/versions/$PYPY_VERSION/bin/python" "$HOME/virtualenvs/$PYPY_VERSION"
+            source "$HOME/virtualenvs/$PYPY_VERSION/bin/activate"
+            which python
           fi
     - |
           if [[ "$TOXENV" == *dynamodb ]]; then
-              sudo apt-get update && apt-get install -y default-jre supervisor
+              sudo apt-get update && sudo apt-get install -y default-jre supervisor
               mkdir /opt/dynamodb-local
               cd /opt/dynamodb-local && curl --retry 5 --retry-delay 1 -L http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar zx
               cd -
@@ -65,10 +80,17 @@ before_install:
               sleep 10
               curl localhost:8000
           fi
+    - |
+          wget -qO - https://packages.couchbase.com/ubuntu/couchbase.key | sudo apt-key add -
+          sudo apt-add-repository -y 'deb http://packages.couchbase.com/ubuntu trusty trusty/main'
+          sudo apt-get update && sudo apt-get install -y libcouchbase-dev
 after_success:
-  - .tox/$TRAVIS_PYTHON_VERSION/bin/coverage xml
-  - .tox/$TRAVIS_PYTHON_VERSION/bin/codecov -e TOXENV
-install: travis_retry pip install -U tox
+  - |
+          if [[ -v MATRIX_TOXENV || "$TOXENV" =~ "pypy" ]]; then
+              .tox/$TOXENV/bin/coverage xml
+              .tox/$TOXENV/bin/codecov -e TOXENV
+          fi;
+install: travis_retry pip install -U tox | cat
 script: tox -v -- -v
 notifications:
   irc:

+ 113 - 113
CONTRIBUTING.rst

@@ -161,7 +161,7 @@ If you'd like to submit the information encrypted our PGP key is::
 Other bugs
 ----------
 
-Bugs can always be described to the `mailing-list`_, but the best
+Bugs can always be described to the :ref:`mailing-list`, but the best
 way to report an issue and to ensure a timely response is to use the
 issue tracker.
 
@@ -175,7 +175,7 @@ and participate in the discussion.
 2) **Determine if your bug is really a bug**.
 
 You shouldn't file a bug if you're requesting support. For that you can use
-the `mailing-list`_, or `irc-channel`_.
+the :ref:`mailing-list`, or :ref:`irc-channel`.
 
 3) **Make sure your bug hasn't already been reported**.
 
@@ -206,16 +206,16 @@ spelling or other errors on the website/docs/code.
        hard to get or might not be that useful. Try to inspect the process to
        get more diagnostic data. Some ideas:
 
-       * Enable Celery's ``breakpoint_signal`` and use it
+       * Enable Celery's :ref:`breakpoint signal <breakpoint_signal>` and use it
          to inspect the process's state. This will allow you to open a
-         ``pdb`` session.
+         :mod:`pdb` session.
        * Collect tracing data using `strace`_(Linux),
-         ``dtruss`` (macOS), and ``ktrace`` (BSD),
+         :command:`dtruss` (macOS), and :command:`ktrace` (BSD),
          `ltrace`_, and `lsof`_.
 
-    D) Include the output from the ``celery report`` command:
+    D) Include the output from the :command:`celery report` command:
 
-        ::
+        .. code-block:: console
 
             $ celery -A proj report
 
@@ -245,16 +245,16 @@ Issue Trackers
 Bugs for a package in the Celery ecosystem should be reported to the relevant
 issue tracker.
 
-* ``celery``: https://github.com/celery/celery/issues/
-* ``kombu``: https://github.com/celery/kombu/issues
-* ``amqp``: https://github.com/celery/py-amqp/issues
-* ``vine``: https://github.com/celery/vine/issues
-* ``librabbitmq``: https://github.com/celery/librabbitmq/issues
-* ``django-celery-beat``: https://github.com/celery/django-celery-beat/issues
-* ``django-celery-results``: https://github.com/celery/django-celery-results/issues
+* :pypi:`celery`: https://github.com/celery/celery/issues/
+* :pypi:`kombu`: https://github.com/celery/kombu/issues
+* :pypi:`amqp`: https://github.com/celery/py-amqp/issues
+* :pypi:`vine`: https://github.com/celery/vine/issues
+* :pypi:`librabbitmq`: https://github.com/celery/librabbitmq/issues
+* :pypi:`django-celery-beat`: https://github.com/celery/django-celery-beat/issues
+* :pypi:`django-celery-results`: https://github.com/celery/django-celery-results/issues
 
 If you're unsure of the origin of the bug you can ask the
-`mailing-list`_, or just use the Celery issue tracker.
+:ref:`mailing-list`, or just use the Celery issue tracker.
 
 Contributors guide to the code base
 ===================================
@@ -262,7 +262,7 @@ Contributors guide to the code base
 There's a separate section for internal details,
 including details about the code base and a style guide.
 
-Read `internals-guide`_ for more!
+Read :ref:`internals-guide` for more!
 
 .. _versions:
 
@@ -296,7 +296,7 @@ You can see the state of any branch by looking at the Changelog:
 If the branch is in active development the topmost version info should
 contain meta-data like:
 
-::
+.. code-block:: restructuredtext
 
     2.4.0
     ======
@@ -356,17 +356,17 @@ An archived version is named ``X.Y-archived``.
 
 Our currently archived branches are:
 
-* ``2.5-archived``
+* :github_branch:`2.5-archived`
 
-* ``2.4-archived``
+* :github_branch:`2.4-archived`
 
-* ``2.3-archived``
+* :github_branch:`2.3-archived`
 
-* ``2.1-archived``
+* :github_branch:`2.1-archived`
 
-* ``2.0-archived``
+* :github_branch:`2.0-archived`
 
-* ``1.0-archived``
+* :github_branch:`1.0-archived`
 
 Feature branches
 ----------------
@@ -413,14 +413,14 @@ is in the GitHub Guide: `Fork a Repo`_.
 After you have cloned the repository you should checkout your copy
 to a directory on your machine:
 
-::
+.. code-block:: console
 
     $ git clone git@github.com:username/celery.git
 
 When the repository is cloned enter the directory to set up easy access
 to upstream changes:
 
-::
+.. code-block:: console
 
     $ cd celery
     $ git remote add upstream git://github.com/celery/celery.git
@@ -429,7 +429,7 @@ to upstream changes:
 If you need to pull in new changes from upstream you should
 always use the ``--rebase`` option to ``git pull``:
 
-::
+.. code-block:: console
 
     git pull --rebase upstream master
 
@@ -455,12 +455,12 @@ Running the unit test suite
 
 To run the Celery test suite you need to install a few dependencies.
 A complete list of the dependencies needed are located in
-``requirements/test.txt``.
+:file:`requirements/test.txt`.
 
 If you're working on the development version, then you need to
 install the development requirements first:
 
-::
+.. code-block:: console
 
     $ pip install -U -r requirements/dev.txt
 
@@ -469,19 +469,19 @@ THIS REQUIREMENT FILE MAY NOT BE PRESENT, SKIP IF NOT FOUND.
 Both the stable and the development version have testing related
 dependencies, so install these next:
 
-::
+.. code-block:: console
 
     $ pip install -U -r requirements/test.txt
     $ pip install -U -r requirements/default.txt
 
 After installing the dependencies required, you can now execute
-the test suite by calling ``py.test <pytest>``:
+the test suite by calling :pypi:`py.test <pytest>`:
 
-::
+.. code-block:: console
 
     $ py.test
 
-Some useful options to ``py.test`` are:
+Some useful options to :command:`py.test` are:
 
 * ``-x``
 
@@ -498,7 +498,7 @@ Some useful options to ``py.test`` are:
 If you want to run the tests for a single test file only
 you can do so like this:
 
-::
+.. code-block:: console
 
     $ py.test t/unit/worker/test_worker_job.py
 
@@ -524,57 +524,57 @@ the steps outlined here: https://bit.ly/koJoso
 Calculating test coverage
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
-To calculate test coverage you must first install the ``pytest-cov`` module.
+To calculate test coverage you must first install the :pypi:`pytest-cov` module.
 
-Installing the ``pytest-cov`` module:
+Installing the :pypi:`pytest-cov` module:
 
-::
+.. code-block:: console
 
     $ pip install -U pytest-cov
 
 Code coverage in HTML format
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-#. Run ``py.test`` with the ``--cov-report=html`` argument enabled:
+#. Run :command:`py.test` with the ``--cov-report=html`` argument enabled:
 
-    ::
+    .. code-block:: console
 
         $ py.test --cov=celery --cov-report=html
 
-#. The coverage output will then be located in the ``htmlcov/`` directory:
+#. The coverage output will then be located in the :file:`htmlcov/` directory:
 
-    ::
+    .. code-block:: console
 
         $ open htmlcov/index.html
 
 Code coverage in XML (Cobertura-style)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-#. Run ``py.test`` with the ``--cov-report=xml`` argument enabled:
+#. Run :command:`py.test` with the ``--cov-report=xml`` argument enabled:
 
-::
+.. code-block:: console
 
     $ py.test --cov=celery --cov-report=xml
 
-#. The coverage XML output will then be located in the ``coverage.xml`` file.
+#. The coverage XML output will then be located in the :file:`coverage.xml` file.
 
 .. _contributing-tox:
 
 Running the tests on all supported Python versions
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-There's a ``tox`` configuration file in the top directory of the
+There's a :pypi:`tox` configuration file in the top directory of the
 distribution.
 
 To run the tests for all supported Python versions simply execute:
 
-::
+.. code-block:: console
 
     $ tox
 
 Use the ``tox -e`` option if you only want to test specific Python versions:
 
-::
+.. code-block:: console
 
     $ tox -e 2.7
 
@@ -582,9 +582,9 @@ Building the documentation
 --------------------------
 
 To build the documentation you need to install the dependencies
-listed in ``requirements/docs.txt`` and ``requirements/default.txt``:
+listed in :file:`requirements/docs.txt` and :file:`requirements/default.txt`:
 
-::
+.. code-block:: console
 
     $ pip install -U -r requirements/docs.txt
     $ pip install -U -r requirements/default.txt
@@ -592,14 +592,14 @@ listed in ``requirements/docs.txt`` and ``requirements/default.txt``:
 After these dependencies are installed you should be able to
 build the docs by running:
 
-::
+.. code-block:: console
 
     $ cd docs
     $ rm -rf _build
     $ make html
 
 Make sure there are no errors or warnings in the build output.
-After building succeeds the documentation is available at ``_build/html``.
+After building succeeds the documentation is available at :file:`_build/html`.
 
 .. _contributing-verify:
 
@@ -607,28 +607,28 @@ Verifying your contribution
 ---------------------------
 
 To use these tools you need to install a few dependencies. These dependencies
-can be found in ``requirements/pkgutils.txt``.
+can be found in :file:`requirements/pkgutils.txt`.
 
 Installing the dependencies:
 
-::
+.. code-block:: console
 
     $ pip install -U -r requirements/pkgutils.txt
 
 pyflakes & PEP-8
 ~~~~~~~~~~~~~~~~
 
-To ensure that your changes conform to ``8`` and to run pyflakes
+To ensure that your changes conform to :pep:`8` and to run pyflakes
 execute:
 
-::
+.. code-block:: console
 
     $ make flakecheck
 
 To not return a negative exit code when this command fails use
 the ``flakes`` target instead:
 
-::
+.. code-block:: console
 
     $ make flakes
 
@@ -638,7 +638,7 @@ API reference
 To make sure that all modules have a corresponding section in the API
 reference please execute:
 
-::
+.. code-block:: console
 
     $ make apicheck
     $ make indexcheck
@@ -646,8 +646,8 @@ reference please execute:
 If files are missing you can add them by copying an existing reference file.
 
 If the module is internal it should be part of the internal reference
-located in ``docs/internals/reference/``. If the module is public
-it should be located in ``docs/reference/``.
+located in :file:`docs/internals/reference/`. If the module is public
+it should be located in :file:`docs/reference/`.
 
 For example if reference is missing for the module ``celery.worker.awesome``
 and this module is considered part of the public API, use the following steps:
@@ -655,14 +655,14 @@ and this module is considered part of the public API, use the following steps:
 
 Use an existing file as a template:
 
-::
+.. code-block:: console
 
     $ cd docs/reference/
     $ cp celery.schedules.rst celery.worker.awesome.rst
 
 Edit the file using your favorite editor:
 
-::
+.. code-block:: console
 
     $ vim celery.worker.awesome.rst
 
@@ -672,7 +672,7 @@ Edit the file using your favorite editor:
 
 Edit the index using your favorite editor:
 
-::
+.. code-block:: console
 
     $ vim index.rst
 
@@ -681,7 +681,7 @@ Edit the index using your favorite editor:
 
 Commit your changes:
 
-::
+.. code-block:: console
 
     # Add the file to git
     $ git add celery.worker.awesome.rst
@@ -698,17 +698,17 @@ You should probably be able to pick up the coding style
 from surrounding code, but it is a good idea to be aware of the
 following conventions.
 
-* All Python code must follow the ``8`` guidelines.
+* All Python code must follow the :pep:`8` guidelines.
 
-``pep8`` is a utility you can use to verify that your code
+:pypi:`pep8` is a utility you can use to verify that your code
 is following the conventions.
 
-* Docstrings must follow the ``257`` conventions, and use the following
+* Docstrings must follow the :pep:`257` conventions, and use the following
   style.
 
     Do this:
 
-    ::
+    .. code-block:: python
 
         def method(self, arg):
             """Short description.
@@ -719,7 +719,7 @@ is following the conventions.
 
     or:
 
-    ::
+    .. code-block:: python
 
         def method(self, arg):
             """Short description."""
@@ -727,7 +727,7 @@ is following the conventions.
 
     but not this:
 
-    ::
+    .. code-block:: python
 
         def method(self, arg):
             """
@@ -736,9 +736,9 @@ is following the conventions.
 
 * Lines shouldn't exceed 78 columns.
 
-  You can enforce this in ``vim`` by setting the ``textwidth`` option:
+  You can enforce this in :command:`vim` by setting the ``textwidth`` option:
 
-  ::
+  .. code-block:: vim
 
         set textwidth=78
 
@@ -765,7 +765,7 @@ is following the conventions.
 
     Example:
 
-    ::
+    .. code-block:: python
 
         import threading
         import time
@@ -786,7 +786,7 @@ is following the conventions.
 
         from __future__ import absolute_import
 
-    * If the module uses the ``with`` statement and must be compatible
+    * If the module uses the :keyword:`with` statement and must be compatible
       with Python 2.5 (celery isn't) then it must also enable that::
 
         from __future__ import with_statement
@@ -811,7 +811,7 @@ is following the conventions.
 
     This requires Python 2.5 or later:
 
-    ::
+    .. code-block:: python
 
         from . import submodule
 
@@ -830,9 +830,9 @@ that require third-party libraries must be added.
 1) Add a new requirements file in `requirements/extras`
 
     For the Cassandra backend this is
-    ``requirements/extras/cassandra.txt``, and the file looks like this:
+    :file:`requirements/extras/cassandra.txt`, and the file looks like this:
 
-    ::
+    .. code-block:: text
 
         pycassa
 
@@ -840,7 +840,7 @@ that require third-party libraries must be added.
     multiple packages are separated by newline. A more complex example could
     be:
 
-    ::
+    .. code-block:: text
 
         # pycassa 2.0 breaks Foo
         pycassa>=1.0,<2.0
@@ -849,22 +849,22 @@ that require third-party libraries must be added.
 2) Modify ``setup.py``
 
     After the requirements file is added you need to add it as an option
-    to ``setup.py`` in the ``extras_require`` section::
+    to :file:`setup.py` in the ``extras_require`` section::
 
         extra['extras_require'] = {
             # ...
             'cassandra': extras('cassandra.txt'),
         }
 
-3) Document the new feature in ``docs/includes/installation.txt``
+3) Document the new feature in :file:`docs/includes/installation.txt`
 
-    You must add your feature to the list in the `bundles`_ section
-    of ``docs/includes/installation.txt``.
+    You must add your feature to the list in the :ref:`bundles` section
+    of :file:`docs/includes/installation.txt`.
 
     After you've made changes to this file you need to render
-    the distro ``README`` file:
+    the distro :file:`README` file:
 
-    ::
+    .. code-block:: console
 
         $ pip install -U requirements/pkgutils.txt
         $ make readme
@@ -872,10 +872,10 @@ that require third-party libraries must be added.
 
 That's all that needs to be done, but remember that if your feature
 adds additional configuration options then these needs to be documented
-in ``docs/configuration.rst``. Also all settings need to be added to the
-``celery/app/defaults.py`` module.
+in :file:`docs/configuration.rst`. Also all settings need to be added to the
+:file:`celery/app/defaults.py` module.
 
-Result backends require a separate section in the ``docs/configuration.rst``
+Result backends require a separate section in the :file:`docs/configuration.rst`
 file.
 
 .. _contact_information:
@@ -888,7 +888,7 @@ regarding the official git repositories, PyPI packages
 Read the Docs pages.
 
 If the issue isn't an emergency then it's better
-to `report an issue`_.
+to :ref:`report an issue <reporting-bugs>`.
 
 
 Committers
@@ -966,7 +966,7 @@ Packages
 :git: https://github.com/celery/celery
 :CI: https://travis-ci.org/#!/celery/celery
 :Windows-CI: https://ci.appveyor.com/project/ask/celery
-:PyPI: ``celery``
+:PyPI: :pypi:`celery`
 :docs: http://docs.celeryproject.org
 
 ``kombu``
@@ -977,7 +977,7 @@ Messaging library.
 :git: https://github.com/celery/kombu
 :CI: https://travis-ci.org/#!/celery/kombu
 :Windows-CI: https://ci.appveyor.com/project/ask/kombu
-:PyPI: ``kombu``
+:PyPI: :pypi:`kombu`
 :docs: https://kombu.readthedocs.io
 
 ``amqp``
@@ -988,7 +988,7 @@ Python AMQP 0.9.1 client.
 :git: https://github.com/celery/py-amqp
 :CI: https://travis-ci.org/#!/celery/py-amqp
 :Windows-CI: https://ci.appveyor.com/project/ask/py-amqp
-:PyPI: ``amqp``
+:PyPI: :pypi:`amqp`
 :docs: https://amqp.readthedocs.io
 
 ``vine``
@@ -999,7 +999,7 @@ Promise/deferred implementation.
 :git: https://github.com/celery/vine/
 :CI: https://travis-ci.org/#!/celery/vine/
 :Windows-CI: https://ci.appveyor.com/project/ask/vine
-:PyPI: ``vine``
+:PyPI: :pypi:`vine`
 :docs: https://vine.readthedocs.io
 
 ``billiard``
@@ -1011,7 +1011,7 @@ that'll eventually be merged into the Python stdlib.
 :git: https://github.com/celery/billiard
 :CI: https://travis-ci.org/#!/celery/billiard/
 :Windows-CI: https://ci.appveyor.com/project/ask/billiard
-:PyPI: ``billiard``
+:PyPI: :pypi:`billiard`
 
 ``django-celery-beat``
 ----------------------
@@ -1021,7 +1021,7 @@ Database-backed Periodic Tasks with admin interface using the Django ORM.
 :git: https://github.com/celery/django-celery-beat
 :CI: https://travis-ci.org/#!/celery/django-celery-beat
 :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-beat
-:PyPI: ``django-celery-beat``
+:PyPI: :pypi:`django-celery-beat`
 
 ``django-celery-results``
 -------------------------
@@ -1031,7 +1031,7 @@ Store task results in the Django ORM, or using the Django Cache Framework.
 :git: https://github.com/celery/django-celery-results
 :CI: https://travis-ci.org/#!/celery/django-celery-results
 :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-results
-:PyPI: ``django-celery-results``
+:PyPI: :pypi:`django-celery-results`
 
 ``librabbitmq``
 ---------------
@@ -1039,7 +1039,7 @@ Store task results in the Django ORM, or using the Django Cache Framework.
 Very fast Python AMQP client written in C.
 
 :git: https://github.com/celery/librabbitmq
-:PyPI: ``librabbitmq``
+:PyPI: :pypi:`librabbitmq`
 
 ``cell``
 --------
@@ -1047,7 +1047,7 @@ Very fast Python AMQP client written in C.
 Actor library.
 
 :git: https://github.com/celery/cell
-:PyPI: ``cell``
+:PyPI: :pypi:`cell`
 
 ``cyme``
 --------
@@ -1055,7 +1055,7 @@ Actor library.
 Distributed Celery Instance manager.
 
 :git: https://github.com/celery/cyme
-:PyPI: ``cyme``
+:PyPI: :pypi:`cyme`
 :docs: https://cyme.readthedocs.io/
 
 
@@ -1065,45 +1065,45 @@ Deprecated
 - ``django-celery``
 
 :git: https://github.com/celery/django-celery
-:PyPI: ``django-celery``
+:PyPI: :pypi:`django-celery`
 :docs: http://docs.celeryproject.org/en/latest/django
 
 - ``Flask-Celery``
 
 :git: https://github.com/ask/Flask-Celery
-:PyPI: ``Flask-Celery``
+:PyPI: :pypi:`Flask-Celery`
 
 - ``celerymon``
 
 :git: https://github.com/celery/celerymon
-:PyPI: ``celerymon``
+:PyPI: :pypi:`celerymon`
 
 - ``carrot``
 
 :git: https://github.com/ask/carrot
-:PyPI: ``carrot``
+:PyPI: :pypi:`carrot`
 
 - ``ghettoq``
 
 :git: https://github.com/ask/ghettoq
-:PyPI: ``ghettoq``
+:PyPI: :pypi:`ghettoq`
 
 - ``kombu-sqlalchemy``
 
 :git: https://github.com/ask/kombu-sqlalchemy
-:PyPI: ``kombu-sqlalchemy``
+:PyPI: :pypi:`kombu-sqlalchemy`
 
 - ``django-kombu``
 
 :git: https://github.com/ask/django-kombu
-:PyPI: ``django-kombu``
+:PyPI: :pypi:`django-kombu`
 
 - ``pylibrabbitmq``
 
-Old name for ``librabbitmq``.
+Old name for :pypi:`librabbitmq`.
 
-:git: ``None``
-:PyPI: ``pylibrabbitmq``
+:git: :const:`None`
+:PyPI: :pypi:`pylibrabbitmq`
 
 .. _release-procedure:
 
@@ -1116,27 +1116,27 @@ Updating the version number
 
 The version number must be updated two places:
 
-    * ``celery/__init__.py``
-    * ``docs/include/introduction.txt``
+    * :file:`celery/__init__.py`
+    * :file:`docs/include/introduction.txt`
 
 After you have changed these files you must render
-the ``README`` files. There's a script to convert sphinx syntax
+the :file:`README` files. There's a script to convert sphinx syntax
 to generic reStructured Text syntax, and the make target `readme`
 does this for you:
 
-::
+.. code-block:: console
 
     $ make readme
 
 Now commit the changes:
 
-::
+.. code-block:: console
 
     $ git commit -a -m "Bumps version to X.Y.Z"
 
 and make a new version tag:
 
-::
+.. code-block:: console
 
     $ git tag vX.Y.Z
     $ git push --tags
@@ -1146,7 +1146,7 @@ Releasing
 
 Commands to make a new public stable release:
 
-::
+.. code-block:: console
 
     $ make distcheck  # checks pep8, autodoc index, runs tests and more
     $ make dist  # NOTE: Runs git clean -xdf and removes files not in the repo.

+ 13 - 0
CONTRIBUTORS.txt

@@ -236,8 +236,21 @@ Jianjian Yu, 2017/04/09
 Brian May, 2017/04/10
 Dmytro Petruk, 2017/04/12
 Joey Wilhelm, 2017/04/12
+Yoichi Nakayama, 2017/04/25
 Simon Schmidt, 2017/05/19
 Anthony Lukach, 2017/05/23
 Samuel Dion-Girardeau, 2017/05/29
 Aydin Sen, 2017/06/14
 Vinod Chandru, 2017/07/11
+Preston Moore, 2017/06/18
+Nicolas Mota, 2017/08/10
+David Davis, 2017/08/11
+Martial Pageau, 2017/08/16
+Sammie S. Taunton, 2017/08/17
+Kxrr, 2017/08/18
+Mads Jensen, 2017/08/20
+Markus Kaiserswerth, 2017/08/30
+Andrew Wong, 2017/09/07
+Arpan Shah, 2017/09/12
+Tobias 'rixx' Kunze, 2017/08/20
+Mikhail Wolfson, 2017/12/11

+ 174 - 231
Changelog

@@ -5,19 +5,164 @@
 ================
 
 This document contains change notes for bugfix releases in
-the 4.0.x series (latentcall), please see :ref:`whatsnew-4.0` for
-an overview of what's new in Celery 4.0.
+the 4.1.x series (latentcall), please see :ref:`whatsnew-4.1` for
+an overview of what's new in Celery 4.1.
 
-.. _version-4.0.3:
+.. _version-4.1.0:
 
-4.0.3
+4.1.0
 =====
-:release-date: 2017-??-?? ??:?? PM PST
-:release-by: Ask Solem
+:release-date: 2017-07-25 00:00 PM PST
+:release-by: Omer Katz
+
+
+- **Configuration**: CELERY_SEND_EVENTS instead of CELERYD_SEND_EVENTS for 3.1.x compatibility (#3997)
+
+ Contributed by **abhinav nilaratna**.
+
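A minimal sketch of what this entry restores, assuming the standard uppercase-to-lowercase setting mapping; the app name is hypothetical:

.. code-block:: python

    from celery import Celery

    app = Celery('proj')  # hypothetical app name

    # The 3.1-era name is recognized again for compatibility:
    app.conf.update(CELERY_SEND_EVENTS=True)
    # which corresponds to the new-style lowercase setting:
    # app.conf.worker_send_task_events = True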
+- **App**: Restore behavior so Broadcast queues work. (#3934)
+
+ Contributed by **Patrick Cloke**.
+
+- **Sphinx**: Make appstr use standard format (#4134) (#4139)
+
+ Contributed by **Preston Moore**.
+
+- **App**: Make id, name always accessible from logging.Formatter via extra (#3994)
+
+ Contributed by **Yoichi NAKAYAMA**.
+
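A minimal sketch of a log format relying on these fields, assuming the ``task_id``/``task_name`` record attributes Celery supplies via ``extra``:

.. code-block:: python

    import logging

    # ``task_id`` and ``task_name`` are now always present on the record,
    # so a format referencing them no longer fails outside a task context:
    formatter = logging.Formatter(
        '[%(asctime)s: %(levelname)s] %(task_name)s[%(task_id)s]: %(message)s')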
+- **Worker**: Add worker_shutting_down signal (#3998)
+
+ Contributed by **Daniel Huang**.
+
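A minimal sketch of hooking the new signal; the handler arguments (``sig``, ``how``, ``exitcode``) follow the payload the worker sends:

.. code-block:: python

    from celery.signals import worker_shutting_down

    @worker_shutting_down.connect
    def on_shutting_down(sender=None, sig=None, how=None, exitcode=None,
                         **kwargs):
        # Called once when the worker begins shutdown.
        print('Worker %s shutting down: sig=%r how=%r exitcode=%r' % (
            sender, sig, how, exitcode))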
+- **PyPy**: Support PyPy version 5.8.0 (#4128)
+
+ Contributed by **Omer Katz**.
+
+- **Results**: Elasticsearch: Fix serializing keys (#3924)
+
+ Contributed by :github_user:`staticfox`.
+
+- **Canvas**: Deserialize all tasks in a chain (#4015)
+
+ Contributed by :github_user:`fcoelho`.
+
+- **Systemd**: Recover loglevel for ExecStart in systemd config (#4023)
+
+ Contributed by **Yoichi NAKAYAMA**.
+
+- **Sphinx**: Use the Sphinx add_directive_to_domain API. (#4037)
+
+ Contributed by **Patrick Cloke**.
+
+- **App**: Pass properties to before_task_publish signal (#4035)
+
+ Contributed by **Javier Domingo Cansino**.
+
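A minimal sketch of reading the newly passed ``properties`` argument; the other keyword arguments follow the existing signal signature:

.. code-block:: python

    from celery.signals import before_task_publish

    @before_task_publish.connect
    def on_before_publish(sender=None, headers=None, body=None,
                          properties=None, **kwargs):
        # ``properties`` carries the message properties,
        # e.g. ``correlation_id`` and ``reply_to``.
        print(sender, properties)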
+- **Results**: Add SSL option for Redis backends (#3831)
+
+ Contributed by **Chris Kuehl**.
+
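A hedged configuration sketch: the ``redis_backend_use_ssl`` setting takes the same keys as redis-py's SSL options, and the certificate paths here are hypothetical:

.. code-block:: python

    import ssl

    from celery import Celery

    app = Celery('proj', backend='redis://localhost:6379/0')
    app.conf.redis_backend_use_ssl = {
        'ssl_cert_reqs': ssl.CERT_REQUIRED,
        'ssl_ca_certs': '/etc/ssl/certs/ca.pem',
        'ssl_certfile': '/etc/ssl/client.pem',
        'ssl_keyfile': '/etc/ssl/client.key',
    }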
+- **Beat**: celery.schedule.crontab: fix reduce (#3826) (#3827)
+
+ Contributed by **Taylor C. Richberger**.
+
+- **State**: Fix celery issues when using flower REST API
+
+ Contributed by **Thierry RAMORASOAVINA**.
+
+- **Results**: Elasticsearch: Fix serializing document id.
+
+ Contributed by **Acey9**.
+
+- **Beat**: Make shallow copy of schedules dictionary
+
+ Contributed by **Brian May**.
+
+- **Beat**: Populate heap when periodic tasks are changed
+
+ Contributed by **Wojciech Żywno**.
+
+- **Task**: Allow class methods to define tasks (#3952)
+
+ Contributed by **georgepsarakis**.
+
+- **Platforms**: Always return boolean value when checking if signal is supported (#3962).
+
+ Contributed by **Jian Yu**.
+
+- **Canvas**: Avoid duplicating chains in chords (#3779)
+
+ Contributed by **Ryan Hiebert**.
+
+- **Canvas**: Lookup task only if list has items (#3847)
+
+ Contributed by **Marc Gibbons**.
+
+- **Results**: Allow unicode message for exception raised in task (#3903)
+
+ Contributed by **George Psarakis**.
+
+- **Python3**: Support for Python 3.6 (#3904, #3903, #3736)
+
+ Contributed by **Jon Dufresne**, **George Psarakis**, **Asif Saifuddin Auvi**, **Omer Katz**.
+
+- **App**: Fix retried tasks with expirations (#3790)
+
+ Contributed by **Brendan MacDonell**.
+
+- * Fixes items format route in docs (#3875)
+
+ Contributed by **Slam**.
+
+- **Utils**: Fix maybe_make_aware (#3850)
+
+ Contributed by **Taylor C. Richberger**.
+
+- **Task**: Fix task ETA issues when timezone is defined in configuration (#3867)
+
+ Contributed by **George Psarakis**.
+
+- **Concurrency**: Consumer does not shutdown properly when embedded in gevent application (#3746)
+
+ Contributed by **Arcadiy Ivanov**.
+
+- **Canvas**: Fix #3725: Task replaced with group does not complete (#3731)
+
+ Contributed by **Morgan Doocy**.
+
+- **Task**: Correct order in chains with replaced tasks (#3730)
+
+ Contributed by **Morgan Doocy**.
+
+- **Result**: Enable synchronous execution of sub-tasks (#3696)
+
+ Contributed by **shalev67**.
+
+- **Task**: Fix request context for blocking task apply (added hostname) (#3716)
+
+ Contributed by **Marat Sharafutdinov**.
+
+- **Utils**: Fix task argument handling (#3678) (#3693)
+
+ Contributed by **Roman Sichny**.
+
+- **Beat**: Provide a transparent method to update the Scheduler heap (#3721)
+
+ Contributed by **Alejandro Pernin**.
+
+- **Beat**: Specify default value for pidfile option of celery beat. (#3722)
+
+ Contributed by **Arnaud Rocher**.
+
+- **Results**: Elasticsearch: Stop generating a new field every time when a new result is being put (#3708)
+
+ Contributed by **Mike Chen**.
 
 - **Requirements**
 
-    - Now depends on :ref:`Kombu 4.0.3 <kombu:version-4.0.3>`.
+    - Now depends on :ref:`Kombu 4.1.0 <kombu:version-4.1.0>`.
 
 - **Results**: Elasticsearch now reuses fields when new results are added.
 
@@ -29,7 +174,7 @@ an overview of what's new in Celery 4.0.
     Contributed by **Andrew de Quincey**.
 
 - **Worker**: Making missing ``*args`` and ``**kwargs`` in Task protocol 1
-return empty value in protocol 2 (Issue #3687).
+  return empty value in protocol 2 (Issue #3687).
 
     Contributed by **Roman Sichny**.
 
@@ -71,7 +216,7 @@ return empty value in protocol 2 (Issue #3687).
 
     Contributed by **Arcadiy Ivanov**.
 
-- **Results**: Added support for using AWS DynamoDB as a result backend.
+- **Results**: Added support for using AWS DynamoDB as a result backend (#3736).
 
     Contributed by **George Psarakis**.
 
@@ -139,248 +284,46 @@ return empty value in protocol 2 (Issue #3687).
 
     - **Bruno Alla**
     - **Jamie Alessio**
+    - **Vivek Anand**
     - **Peter Bittner**
+    - **Kalle Bronsen**
     - **Jon Dufresne**
+    - **James Michael DuPont**
     - **Sergey Fursov**
+    - **Samuel Dion-Girardeau**
     - **Daniel Hahler**
     - **Mike Helmick**
     - **Marc Hörsken**
     - **Christopher Hoskin**
+    - **Daniel Huang**
+    - **Primož Kerin**
     - **Michal Kuffa**
     - **Simon Legner**
+    - **Anthony Lukach**
     - **Ed Morley**
+    - **Jay McGrath**
+    - **Rico Moorman**
+    - **Viraj Navkal**
+    - **Ross Patterson**
     - **Dmytro Petruk**
+    - **Luke Plant**
+    - **Eric Poelke**
     - **Salvatore Rinchiera**
     - **Arnaud Rocher**
+    - **Kirill Romanov**
+    - **Simon Schmidt**
+    - **Tamer Sherif**
     - **YuLun Shih**
+    - **Ask Solem**
     - **Tom 'Biwaa' Riat**
     - **Arthur Vigil**
     - **Joey Wilhelm**
     - **Jian Yu**
+    - **YuLun Shih**
+    - **Arthur Vigil**
+    - **Joey Wilhelm**
     - :github_user:`baixuexue123`
     - :github_user:`bronsen`
     - :github_user:`michael-k`
     - :github_user:`orf`
     - :github_user:`3lnc`
-
-.. _version-4.0.3:
-
-4.0.2
-=====
-:release-date: 2016-12-15 03:40 PM PST
-:release-by: Ask Solem
-
-- **Requirements**
-
-    - Now depends on :ref:`Kombu 4.0.2 <kombu:version-4.0.2>`.
-
-- **Tasks**: Fixed problem with JSON serialization of `group`
-  (``keys must be string`` error, Issue #3688).
-
-- **Worker**: Fixed JSON serialization issue when using ``inspect active``
-  and friends (Issue #3667).
-
-- **App**: Fixed saferef errors when using signals (Issue #3670).
-
-- **Prefork**: Fixed bug with pack requiring bytes argument
-  on Python 2.7.5 and earlier (Issue #3674).
-
-- **Tasks**: Saferepr did not handle unicode in bytestrings on Python 2
-  (Issue #3676).
-
-- **Testing**: Added new ``celery_worker_paremeters`` fixture.
-
-    Contributed by **Michael Howitz**.
-
-- **Tasks**: Added new ``app`` argument to ``GroupResult.restore``
-  (Issue #3669).
-
-    This makes the restore method behave the same way as the ``GroupResult``
-    constructor.
-
-    Contributed by **Andreas Pelme**.
-
-- **Tasks**: Fixed type checking crash when task takes ``*args`` on Python 3
-  (Issue #3678).
-
-- Documentation and examples improvements by:
-
-    - **BLAGA Razvan-Paul**
-    - **Michael Howitz**
-    - :github_user:`paradox41`
-
-.. _version-4.0.1:
-
-4.0.1
-=====
-:release-date: 2016-12-08 05:22 PM PST
-:release-by: Ask Solem
-
-* [Security: `CELERYSA-0003`_] Insecure default configuration
-
-    The default :setting:`accept_content` setting was set to allow
-    deserialization of pickled messages in Celery 4.0.0.
-
-    The insecure default has been fixed in 4.0.1, and you can also
-    configure the 4.0.0 version to explicitly only allow json serialized
-    messages:
-
-    .. code-block:: python
-
-        app.conf.accept_content = ['json']
-
-.. _`CELERYSA-0003`:
-    https://github.com/celery/celery/tree/master/docs/sec/CELERYSA-0003.txt
-
-- **Tasks**: Added new method to register class-based tasks (Issue #3615).
-
-    To register a class based task you should now call ``app.register_task``:
-
-    .. code-block:: python
-
-        from celery import Celery, Task
-
-        app = Celery()
-
-        class CustomTask(Task):
-
-            def run(self):
-                return 'hello'
-
-        app.register_task(CustomTask())
-
-- **Tasks**: Argument checking now supports keyword-only arguments on Python3
-  (Issue #3658).
-
-    Contributed by :github_user:`sww`.
-
-- **Tasks**: The ``task-sent`` event was not being sent even if
-  configured to do so (Issue #3646).
-
-- **Worker**: Fixed AMQP heartbeat support for eventlet/gevent pools
-  (Issue #3649).
-
-- **App**: ``app.conf.humanize()`` would not work if configuration
-  not finalized (Issue #3652).
-
-- **Utils**: ``saferepr`` attempted to show iterables as lists
-  and mappings as dicts.
-
-- **Utils**: ``saferepr`` did not handle unicode-errors
-  when attempting to format ``bytes`` on Python 3 (Issue #3610).
-
-- **Utils**: ``saferepr`` should now properly represent byte strings
-  with non-ascii characters (Issue #3600).
-
-- **Results**: Fixed bug in elasticsearch where _index method missed
-  the body argument (Issue #3606).
-
-    Fix contributed by **何翔宇** (Sean Ho).
-
-- **Canvas**: Fixed :exc:`ValueError` in chord with single task header
-  (Issue #3608).
-
-    Fix contributed by **Viktor Holmqvist**.
-
-- **Task**: Ensure class-based task has name prior to registration
-  (Issue #3616).
-
-    Fix contributed by **Rick Wargo**.
-
-- **Beat**: Fixed problem with strings in shelve (Issue #3644).
-
-    Fix contributed by **Alli**.
-
-- **Worker**: Fixed :exc:`KeyError` in ``inspect stats`` when ``-O`` argument
-  set to something other than ``fast`` or ``fair`` (Issue #3621).
-
-- **Task**: Retried tasks were no longer sent to the original queue
-  (Issue #3622).
-
-- **Worker**: Python 3: Fixed None/int type comparison in
-  :file:`apps/worker.py` (Issue #3631).
-
-- **Results**: Redis has a new :setting:`redis_socket_connect_timeout`
-  setting.
-
-- **Results**: Redis result backend passed the ``socket_connect_timeout``
-  argument to UNIX socket based connections by mistake, causing a crash.
-
-- **Worker**: Fixed missing logo in worker splash screen when running on
-  Python 3.x (Issue #3627).
-
-    Fix contributed by **Brian Luan**.
-
-- **Deps**: Fixed ``celery[redis]`` bundle installation (Issue #3643).
-
-    Fix contributed by **Rémi Marenco**.
-
-- **Deps**: Bundle ``celery[sqs]`` now also requires :pypi:`pycurl`
-  (Issue #3619).
-
-- **Worker**: Hard time limits were no longer being respected (Issue #3618).
-
-- **Worker**: Soft time limit log showed ``Trues`` instead of the number
-  of seconds.
-
-- **App**: ``registry_cls`` argument no longer had any effect (Issue #3613).
-
-- **Worker**: Event producer now uses ``connection_for_Write`` (Issue #3525).
-
-- **Results**: Redis/memcache backends now uses :setting:`result_expires`
-  to expire chord counter (Issue #3573).
-
-    Contributed by **Tayfun Sen**.
-
-- **Django**: Fixed command for upgrading settings with Django (Issue #3563).
-
-    Fix contributed by **François Voron**.
-
-- **Testing**: Added a ``celery_parameters`` test fixture to be able to use
-  customized ``Celery`` init parameters. (#3626)
-
-    Contributed by **Steffen Allner**.
-
-- Documentation improvements contributed by
-
-    - :github_user:`csfeathers`
-    - **Moussa Taifi**
-    - **Yuhannaa**
-    - **Laurent Peuch**
-    - **Christian**
-    - **Bruno Alla**
-    - **Steven Johns**
-    - :github_user:`tnir`
-    - **GDR!**
-
-.. _version-4.0.0:
-
-4.0.0
-=====
-:release-date: 2016-11-04 02:00 P.M PDT
-:release-by: Ask Solem
-
-See :ref:`whatsnew-4.0` (in :file:`docs/whatsnew-4.0.rst`).
-
-.. _version-4.0.0rc7:
-
-4.0.0rc7
-========
-:release-date: 2016-11-02 01:30 P.M PDT
-
-Important notes
----------------
-
-- Database result backend related setting names changed from
-  ``sqlalchemy_*`` -> ``database_*``.
-
-    The ``sqlalchemy_`` named settings won't work at all in this
-    version so you need to rename them.  This is a last minute change,
-    and as they were not supported in 3.1 we will not be providing
-    aliases.
-
-- ``chain(A, B, C)`` now works the same way as ``A | B | C``.
-
-    This means calling ``chain()`` might not actually return a chain,
-    it can return a group or any other type depending on how the
-    workflow can be optimized.

+ 1 - 1
Makefile

@@ -46,7 +46,7 @@ help:
 	@echo "readme               - Regenerate README.rst file."
 	@echo "contrib              - Regenerate CONTRIBUTING.rst file"
 	@echo "clean-dist --------- - Clean all distribution build artifacts."
-	@echo "  clean-git-force    - Remove all uncomitted files."
+	@echo "  clean-git-force    - Remove all uncommitted files."
 	@echo "  clean ------------ - Non-destructive clean"
 	@echo "    clean-pyc        - Remove .pyc/__pycache__ files"
 	@echo "    clean-docs       - Remove documentation build artifacts."

+ 8 - 8
README.rst

@@ -1,8 +1,8 @@
 .. image:: http://docs.celeryproject.org/en/latest/_images/celery-banner-small.png
 
-|build-status| |license| |wheel| |pyversion| |pyimp|
+|build-status| |coverage| |license| |wheel| |pyversion| |pyimp|
 
-:Version: 4.0.2 (latentcall)
+:Version: 4.1.0 (latentcall)
 :Web: http://celeryproject.org/
 :Download: https://pypi.python.org/pypi/celery/
 :Source: https://github.com/celery/celery/
@@ -40,10 +40,10 @@ in such a way that the client enqueues an URL to be requested by a worker.
 What do I need?
 ===============
 
-Celery version 4.0 runs on,
+Celery version 4.1 runs on,
 
-- Python (2.7, 3.4, 3.5)
-- PyPy (5.4, 5.5)
+- Python (2.7, 3.4, 3.5, 3.6)
+- PyPy (5.8)
 
 
 This is the last version to support Python 2.7,
@@ -72,7 +72,7 @@ Get Started
 ===========
 
 If this is the first time you're trying to use Celery, or you're
-new to Celery 4.0 coming from previous versions then you should read our
+new to Celery 4.1 coming from previous versions then you should read our
 getting started tutorials:
 
 - `First steps with Celery`_
@@ -273,7 +273,7 @@ Transports and Backends
 :``celery[sqs]``:
     for using Amazon SQS as a message transport (*experimental*).
 
-:``celery[tblib``]
+:``celery[tblib``]:
     for using the ``task_remote_tracebacks`` feature.
 
 :``celery[memcache]``:
@@ -309,7 +309,7 @@ Transports and Backends
 :``celery[consul]``:
     for using the Consul.io Key/Value store as a message transport or result backend (*experimental*).
 
-:``celery[django]``
+:``celery[django]``:
     specifies the lowest version possible for Django support.
 
     You should probably not use this in your requirements, it's here

+ 2467 - 0
bandit.json

@@ -0,0 +1,2467 @@
+{
+  "errors": [],
+  "generated_at": "2017-12-12T18:18:35Z",
+  "metrics": {
+    "_totals": {
+      "CONFIDENCE.HIGH": 41.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 2.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 1.0,
+      "SEVERITY.LOW": 40.0,
+      "SEVERITY.MEDIUM": 2.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 28375,
+      "nosec": 0
+    },
+    "celery/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 132,
+      "nosec": 0
+    },
+    "celery/__main__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 13,
+      "nosec": 0
+    },
+    "celery/_state.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 120,
+      "nosec": 0
+    },
+    "celery/app/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 59,
+      "nosec": 0
+    },
+    "celery/app/amqp.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 522,
+      "nosec": 0
+    },
+    "celery/app/annotations.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 41,
+      "nosec": 0
+    },
+    "celery/app/backends.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 59,
+      "nosec": 0
+    },
+    "celery/app/base.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 983,
+      "nosec": 0
+    },
+    "celery/app/builtins.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 150,
+      "nosec": 0
+    },
+    "celery/app/control.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 350,
+      "nosec": 0
+    },
+    "celery/app/defaults.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 324,
+      "nosec": 0
+    },
+    "celery/app/events.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 30,
+      "nosec": 0
+    },
+    "celery/app/log.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 199,
+      "nosec": 0
+    },
+    "celery/app/registry.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 48,
+      "nosec": 0
+    },
+    "celery/app/routes.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 106,
+      "nosec": 0
+    },
+    "celery/app/task.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 714,
+      "nosec": 0
+    },
+    "celery/app/trace.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 482,
+      "nosec": 0
+    },
+    "celery/app/utils.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 289,
+      "nosec": 0
+    },
+    "celery/apps/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 0,
+      "nosec": 0
+    },
+    "celery/apps/beat.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 130,
+      "nosec": 0
+    },
+    "celery/apps/multi.py": {
+      "CONFIDENCE.HIGH": 2.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 2.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 406,
+      "nosec": 0
+    },
+    "celery/apps/worker.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 1.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 288,
+      "nosec": 0
+    },
+    "celery/backends/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 18,
+      "nosec": 0
+    },
+    "celery/backends/amqp.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 257,
+      "nosec": 0
+    },
+    "celery/backends/async.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 231,
+      "nosec": 0
+    },
+    "celery/backends/base.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 626,
+      "nosec": 0
+    },
+    "celery/backends/cache.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 119,
+      "nosec": 0
+    },
+    "celery/backends/cassandra.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 176,
+      "nosec": 0
+    },
+    "celery/backends/consul.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 75,
+      "nosec": 0
+    },
+    "celery/backends/couchbase.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 85,
+      "nosec": 0
+    },
+    "celery/backends/couchdb.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 80,
+      "nosec": 0
+    },
+    "celery/backends/database/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 153,
+      "nosec": 0
+    },
+    "celery/backends/database/models.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 56,
+      "nosec": 0
+    },
+    "celery/backends/database/session.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 47,
+      "nosec": 0
+    },
+    "celery/backends/dynamodb.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 223,
+      "nosec": 0
+    },
+    "celery/backends/elasticsearch.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 113,
+      "nosec": 0
+    },
+    "celery/backends/filesystem.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 67,
+      "nosec": 0
+    },
+    "celery/backends/mongodb.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 233,
+      "nosec": 0
+    },
+    "celery/backends/redis.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 318,
+      "nosec": 0
+    },
+    "celery/backends/riak.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 99,
+      "nosec": 0
+    },
+    "celery/backends/rpc.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 252,
+      "nosec": 0
+    },
+    "celery/beat.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 513,
+      "nosec": 0
+    },
+    "celery/bin/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 3,
+      "nosec": 0
+    },
+    "celery/bin/amqp.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 290,
+      "nosec": 0
+    },
+    "celery/bin/base.py": {
+      "CONFIDENCE.HIGH": 2.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 1.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 498,
+      "nosec": 0
+    },
+    "celery/bin/beat.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 86,
+      "nosec": 0
+    },
+    "celery/bin/call.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 64,
+      "nosec": 0
+    },
+    "celery/bin/celery.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 360,
+      "nosec": 0
+    },
+    "celery/bin/celeryd_detach.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 1.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 113,
+      "nosec": 0
+    },
+    "celery/bin/control.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 195,
+      "nosec": 0
+    },
+    "celery/bin/events.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 120,
+      "nosec": 0
+    },
+    "celery/bin/graph.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 167,
+      "nosec": 0
+    },
+    "celery/bin/list.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 36,
+      "nosec": 0
+    },
+    "celery/bin/logtool.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 133,
+      "nosec": 0
+    },
+    "celery/bin/migrate.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 54,
+      "nosec": 0
+    },
+    "celery/bin/multi.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 356,
+      "nosec": 0
+    },
+    "celery/bin/purge.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 59,
+      "nosec": 0
+    },
+    "celery/bin/result.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 33,
+      "nosec": 0
+    },
+    "celery/bin/shell.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 137,
+      "nosec": 0
+    },
+    "celery/bin/upgrade.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 78,
+      "nosec": 0
+    },
+    "celery/bin/worker.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 256,
+      "nosec": 0
+    },
+    "celery/bootsteps.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 312,
+      "nosec": 0
+    },
+    "celery/canvas.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 1047,
+      "nosec": 0
+    },
+    "celery/concurrency/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 14,
+      "nosec": 0
+    },
+    "celery/concurrency/asynpool.py": {
+      "CONFIDENCE.HIGH": 17.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 17.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 977,
+      "nosec": 0
+    },
+    "celery/concurrency/base.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 126,
+      "nosec": 0
+    },
+    "celery/concurrency/eventlet.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 114,
+      "nosec": 0
+    },
+    "celery/concurrency/gevent.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 93,
+      "nosec": 0
+    },
+    "celery/concurrency/prefork.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 128,
+      "nosec": 0
+    },
+    "celery/concurrency/solo.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 20,
+      "nosec": 0
+    },
+    "celery/contrib/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 0,
+      "nosec": 0
+    },
+    "celery/contrib/abortable.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 115,
+      "nosec": 0
+    },
+    "celery/contrib/migrate.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 324,
+      "nosec": 0
+    },
+    "celery/contrib/pytest.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 132,
+      "nosec": 0
+    },
+    "celery/contrib/rdb.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 144,
+      "nosec": 0
+    },
+    "celery/contrib/sphinx.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 64,
+      "nosec": 0
+    },
+    "celery/contrib/testing/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 0,
+      "nosec": 0
+    },
+    "celery/contrib/testing/app.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 82,
+      "nosec": 0
+    },
+    "celery/contrib/testing/manager.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 142,
+      "nosec": 0
+    },
+    "celery/contrib/testing/mocks.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 82,
+      "nosec": 0
+    },
+    "celery/contrib/testing/tasks.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 7,
+      "nosec": 0
+    },
+    "celery/contrib/testing/worker.py": {
+      "CONFIDENCE.HIGH": 2.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 2.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 126,
+      "nosec": 0
+    },
+    "celery/events/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 13,
+      "nosec": 0
+    },
+    "celery/events/cursesmon.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 449,
+      "nosec": 0
+    },
+    "celery/events/dispatcher.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 195,
+      "nosec": 0
+    },
+    "celery/events/dumper.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 87,
+      "nosec": 0
+    },
+    "celery/events/event.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 42,
+      "nosec": 0
+    },
+    "celery/events/receiver.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 111,
+      "nosec": 0
+    },
+    "celery/events/snapshot.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 89,
+      "nosec": 0
+    },
+    "celery/events/state.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 570,
+      "nosec": 0
+    },
+    "celery/exceptions.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 159,
+      "nosec": 0
+    },
+    "celery/five.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 5,
+      "nosec": 0
+    },
+    "celery/fixups/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 1,
+      "nosec": 0
+    },
+    "celery/fixups/django.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 144,
+      "nosec": 0
+    },
+    "celery/loaders/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 15,
+      "nosec": 0
+    },
+    "celery/loaders/app.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 6,
+      "nosec": 0
+    },
+    "celery/loaders/base.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 200,
+      "nosec": 0
+    },
+    "celery/loaders/default.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 32,
+      "nosec": 0
+    },
+    "celery/local.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 439,
+      "nosec": 0
+    },
+    "celery/platforms.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 606,
+      "nosec": 0
+    },
+    "celery/result.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 799,
+      "nosec": 0
+    },
+    "celery/schedules.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 678,
+      "nosec": 0
+    },
+    "celery/security/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 46,
+      "nosec": 0
+    },
+    "celery/security/certificate.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 64,
+      "nosec": 0
+    },
+    "celery/security/key.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 14,
+      "nosec": 0
+    },
+    "celery/security/serialization.py": {
+      "CONFIDENCE.HIGH": 3.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 3.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 76,
+      "nosec": 0
+    },
+    "celery/security/utils.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 22,
+      "nosec": 0
+    },
+    "celery/signals.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 121,
+      "nosec": 0
+    },
+    "celery/states.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 96,
+      "nosec": 0
+    },
+    "celery/task/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 39,
+      "nosec": 0
+    },
+    "celery/task/base.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 189,
+      "nosec": 0
+    },
+    "celery/utils/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 20,
+      "nosec": 0
+    },
+    "celery/utils/abstract.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 100,
+      "nosec": 0
+    },
+    "celery/utils/collections.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 624,
+      "nosec": 0
+    },
+    "celery/utils/debug.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 151,
+      "nosec": 0
+    },
+    "celery/utils/deprecated.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 91,
+      "nosec": 0
+    },
+    "celery/utils/dispatch/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 4,
+      "nosec": 0
+    },
+    "celery/utils/dispatch/signal.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 267,
+      "nosec": 0
+    },
+    "celery/utils/dispatch/weakref_backports.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 54,
+      "nosec": 0
+    },
+    "celery/utils/encoding.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 6,
+      "nosec": 0
+    },
+    "celery/utils/functional.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 1.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 263,
+      "nosec": 0
+    },
+    "celery/utils/graph.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 247,
+      "nosec": 0
+    },
+    "celery/utils/imports.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 121,
+      "nosec": 0
+    },
+    "celery/utils/iso8601.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 63,
+      "nosec": 0
+    },
+    "celery/utils/log.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 215,
+      "nosec": 0
+    },
+    "celery/utils/nodenames.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 72,
+      "nosec": 0
+    },
+    "celery/utils/objects.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 106,
+      "nosec": 0
+    },
+    "celery/utils/saferepr.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 198,
+      "nosec": 0
+    },
+    "celery/utils/serialization.py": {
+      "CONFIDENCE.HIGH": 5.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 4.0,
+      "SEVERITY.MEDIUM": 1.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 216,
+      "nosec": 0
+    },
+    "celery/utils/static/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 9,
+      "nosec": 0
+    },
+    "celery/utils/sysinfo.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 33,
+      "nosec": 0
+    },
+    "celery/utils/term.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 131,
+      "nosec": 0
+    },
+    "celery/utils/text.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 127,
+      "nosec": 0
+    },
+    "celery/utils/threads.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 258,
+      "nosec": 0
+    },
+    "celery/utils/time.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 303,
+      "nosec": 0
+    },
+    "celery/utils/timer2.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 118,
+      "nosec": 0
+    },
+    "celery/worker/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 4,
+      "nosec": 0
+    },
+    "celery/worker/autoscale.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 129,
+      "nosec": 0
+    },
+    "celery/worker/components.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 190,
+      "nosec": 0
+    },
+    "celery/worker/consumer/__init__.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 15,
+      "nosec": 0
+    },
+    "celery/worker/consumer/agent.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 15,
+      "nosec": 0
+    },
+    "celery/worker/consumer/connection.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 26,
+      "nosec": 0
+    },
+    "celery/worker/consumer/consumer.py": {
+      "CONFIDENCE.HIGH": 1.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 1.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 477,
+      "nosec": 0
+    },
+    "celery/worker/consumer/control.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 24,
+      "nosec": 0
+    },
+    "celery/worker/consumer/events.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 50,
+      "nosec": 0
+    },
+    "celery/worker/consumer/gossip.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 171,
+      "nosec": 0
+    },
+    "celery/worker/consumer/heart.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 27,
+      "nosec": 0
+    },
+    "celery/worker/consumer/mingle.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 60,
+      "nosec": 0
+    },
+    "celery/worker/consumer/tasks.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 46,
+      "nosec": 0
+    },
+    "celery/worker/control.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 425,
+      "nosec": 0
+    },
+    "celery/worker/heartbeat.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 47,
+      "nosec": 0
+    },
+    "celery/worker/loops.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 86,
+      "nosec": 0
+    },
+    "celery/worker/pidbox.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 97,
+      "nosec": 0
+    },
+    "celery/worker/request.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 443,
+      "nosec": 0
+    },
+    "celery/worker/state.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 199,
+      "nosec": 0
+    },
+    "celery/worker/strategy.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 130,
+      "nosec": 0
+    },
+    "celery/worker/worker.py": {
+      "CONFIDENCE.HIGH": 0.0,
+      "CONFIDENCE.LOW": 0.0,
+      "CONFIDENCE.MEDIUM": 0.0,
+      "CONFIDENCE.UNDEFINED": 0.0,
+      "SEVERITY.HIGH": 0.0,
+      "SEVERITY.LOW": 0.0,
+      "SEVERITY.MEDIUM": 0.0,
+      "SEVERITY.UNDEFINED": 0.0,
+      "loc": 337,
+      "nosec": 0
+    }
+  },
+  "results": [
+    {
+      "code": "10 from functools import partial\n11 from subprocess import Popen\n12 from time import sleep\n",
+      "filename": "celery/apps/multi.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Consider possible security implications associated with Popen module.",
+      "line_number": 11,
+      "line_range": [
+        11
+      ],
+      "test_id": "B404",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "195         maybe_call(on_spawn, self, argstr=' '.join(argstr), env=env)\n196         pipe = Popen(argstr, env=env)\n197         return self.handle_process_exit(\n",
+      "filename": "celery/apps/multi.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "subprocess call - check for execution of untrusted input.",
+      "line_number": 196,
+      "line_range": [
+        196
+      ],
+      "test_id": "B603",
+      "test_name": "subprocess_without_shell_equals_true"
+    },
+    {
+      "code": "320     ])\n321     os.execv(sys.executable, [sys.executable] + sys.argv)\n322 \n",
+      "filename": "celery/apps/worker.py",
+      "issue_confidence": "MEDIUM",
+      "issue_severity": "LOW",
+      "issue_text": "Starting a process without a shell.",
+      "line_number": 321,
+      "line_range": [
+        321
+      ],
+      "test_id": "B606",
+      "test_name": "start_process_with_no_shell"
+    },
+    {
+      "code": "66             self.set(key, b'test value')\n67             assert self.get(key) == b'test value'\n68             self.delete(key)\n",
+      "filename": "celery/backends/filesystem.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 67,
+      "line_range": [
+        67
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "341         while 1:\n342             val = input(p).lower()\n343             if val in choices:\n",
+      "filename": "celery/bin/base.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "HIGH",
+      "issue_text": "The input method in Python 2 will read from standard input, evaluate and run the resulting string as python source code. This is similar, though in many ways worse, then using eval. On Python 2, use raw_input instead, input is safe in Python 3.",
+      "line_number": 342,
+      "line_range": [
+        342
+      ],
+      "test_id": "B322",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "536                     in_option = m.groups()[0].strip()\n537                 assert in_option, 'missing long opt'\n538             elif in_option and line.startswith(' ' * 4):\n",
+      "filename": "celery/bin/base.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 537,
+      "line_range": [
+        537
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "38                 path = executable\n39             os.execv(path, [path] + argv)\n40         except Exception:  # pylint: disable=broad-except\n",
+      "filename": "celery/bin/celeryd_detach.py",
+      "issue_confidence": "MEDIUM",
+      "issue_severity": "LOW",
+      "issue_text": "Starting a process without a shell.",
+      "line_number": 39,
+      "line_range": [
+        39
+      ],
+      "test_id": "B606",
+      "test_name": "start_process_with_no_shell"
+    },
+    {
+      "code": "28 from numbers import Integral\n29 from pickle import HIGHEST_PROTOCOL\n30 from time import sleep\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Consider possible security implications associated with HIGHEST_PROTOCOL module.",
+      "line_number": 29,
+      "line_range": [
+        29
+      ],
+      "test_id": "B403",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "563                     proc in waiting_to_start):\n564                 assert proc.outqR_fd in fileno_to_outq\n565                 assert fileno_to_outq[proc.outqR_fd] is proc\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 564,
+      "line_range": [
+        564
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "564                 assert proc.outqR_fd in fileno_to_outq\n565                 assert fileno_to_outq[proc.outqR_fd] is proc\n566                 assert proc.outqR_fd in hub.readers\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 565,
+      "line_range": [
+        565
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "565                 assert fileno_to_outq[proc.outqR_fd] is proc\n566                 assert proc.outqR_fd in hub.readers\n567                 error('Timed out waiting for UP message from %r', proc)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 566,
+      "line_range": [
+        566
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "586 \n587             assert not isblocking(proc.outq._reader)\n588 \n589             # handle_result_event is called when the processes outqueue is\n590             # readable.\n591             add_reader(proc.outqR_fd, handle_result_event, proc.outqR_fd)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 587,
+      "line_range": [
+        587,
+        588,
+        589,
+        590
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1038         synq = None\n1039         assert isblocking(inq._reader)\n1040         assert not isblocking(inq._writer)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1039,
+      "line_range": [
+        1039
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1039         assert isblocking(inq._reader)\n1040         assert not isblocking(inq._writer)\n1041         assert not isblocking(outq._reader)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1040,
+      "line_range": [
+        1040
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1040         assert not isblocking(inq._writer)\n1041         assert not isblocking(outq._reader)\n1042         assert isblocking(outq._writer)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1041,
+      "line_range": [
+        1041
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1041         assert not isblocking(outq._reader)\n1042         assert isblocking(outq._writer)\n1043         if self.synack:\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1042,
+      "line_range": [
+        1042
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1044             synq = _SimpleQueue(wnonblock=True)\n1045             assert isblocking(synq._reader)\n1046             assert not isblocking(synq._writer)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1045,
+      "line_range": [
+        1045
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1045             assert isblocking(synq._reader)\n1046             assert not isblocking(synq._writer)\n1047         return inq, outq, synq\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1046,
+      "line_range": [
+        1046
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1057             return logger.warning('process with pid=%s already exited', pid)\n1058         assert proc.inqW_fd not in self._fileno_to_inq\n1059         assert proc.inqW_fd not in self._all_inqueues\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1058,
+      "line_range": [
+        1058
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1058         assert proc.inqW_fd not in self._fileno_to_inq\n1059         assert proc.inqW_fd not in self._all_inqueues\n1060         self._waiting_to_start.discard(proc)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1059,
+      "line_range": [
+        1059
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1137         \"\"\"Mark new ownership for ``queues`` to update fileno indices.\"\"\"\n1138         assert queues in self._queues\n1139         b = len(self._queues)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1138,
+      "line_range": [
+        1138
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1140         self._queues[queues] = proc\n1141         assert b == len(self._queues)\n1142 \n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1141,
+      "line_range": [
+        1141
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1220                 pass\n1221             assert len(self._queues) == before\n1222 \n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1221,
+      "line_range": [
+        1221
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "1227         \"\"\"\n1228         assert not proc._is_alive()\n1229         self._waiting_to_start.discard(proc)\n",
+      "filename": "celery/concurrency/asynpool.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 1228,
+      "line_range": [
+        1228
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "81             with allow_join_result():\n82                 assert ping.delay().get(timeout=ping_task_timeout) == 'pong'\n83 \n",
+      "filename": "celery/contrib/testing/worker.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 82,
+      "line_range": [
+        82
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "102     setup_app_for_worker(app, loglevel, logfile)\n103     assert 'celery.ping' in app.tasks\n104     # Make sure we can connect to the broker\n105     with app.connection() as conn:\n",
+      "filename": "celery/contrib/testing/worker.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 103,
+      "line_range": [
+        103,
+        104
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "173                 return self.win.getkey().upper()\n174             except Exception:  # pylint: disable=broad-except\n175                 pass\n",
+      "filename": "celery/events/cursesmon.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Try, Except, Pass detected.",
+      "line_number": 174,
+      "line_range": [
+        174
+      ],
+      "test_id": "B110",
+      "test_name": "try_except_pass"
+    },
+    {
+      "code": "479         max_groups = os.sysconf('SC_NGROUPS_MAX')\n480     except Exception:  # pylint: disable=broad-except\n481         pass\n",
+      "filename": "celery/platforms.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Try, Except, Pass detected.",
+      "line_number": 480,
+      "line_range": [
+        480
+      ],
+      "test_id": "B110",
+      "test_name": "try_except_pass"
+    },
+    {
+      "code": "21     def __init__(self, cert):\n22         assert crypto is not None\n23         with reraise_errors('Invalid certificate: {0!r}'):\n",
+      "filename": "celery/security/certificate.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 22,
+      "line_range": [
+        22
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "30         \"\"\"Serialize data structure into string.\"\"\"\n31         assert self._key is not None\n32         assert self._cert is not None\n",
+      "filename": "celery/security/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 31,
+      "line_range": [
+        31
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "31         assert self._key is not None\n32         assert self._cert is not None\n33         with reraise_errors('Unable to serialize: {0!r}', (Exception,)):\n",
+      "filename": "celery/security/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 32,
+      "line_range": [
+        32
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "46         \"\"\"Deserialize data structure from string.\"\"\"\n47         assert self._cert_store is not None\n48         with reraise_errors('Unable to deserialize: {0!r}', (Exception,)):\n",
+      "filename": "celery/security/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 47,
+      "line_range": [
+        47
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "21     \"\"\"Context reraising crypto errors as :exc:`SecurityError`.\"\"\"\n22     assert crypto is not None\n23     errors = (crypto.Error,) if errors is None else errors\n",
+      "filename": "celery/security/utils.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 22,
+      "line_range": [
+        22
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "165     def _connect_signal(self, receiver, sender, weak, dispatch_uid):\n166         assert callable(receiver), 'Signal receivers must be callable'\n167         if not fun_accepts_kwargs(receiver):\n",
+      "filename": "celery/utils/dispatch/signal.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 166,
+      "line_range": [
+        166
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "283     # Tasks are rarely, if ever, created at runtime - exec here is fine.\n284     exec(definition, namespace)\n285     result = namespace[name]\n",
+      "filename": "celery/utils/functional.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "MEDIUM",
+      "issue_text": "Use of exec detected.",
+      "line_number": 284,
+      "line_range": [
+        284
+      ],
+      "test_id": "B102",
+      "test_name": "exec_used"
+    },
+    {
+      "code": "21 try:\n22     import cPickle as pickle\n23 except ImportError:\n",
+      "filename": "celery/utils/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Consider possible security implications associated with cPickle module.",
+      "line_number": 22,
+      "line_range": [
+        22
+      ],
+      "test_id": "B403",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "23 except ImportError:\n24     import pickle  # noqa\n25 \n",
+      "filename": "celery/utils/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Consider possible security implications associated with pickle module.",
+      "line_number": 24,
+      "line_range": [
+        24
+      ],
+      "test_id": "B403",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "69             loads(dumps(superexc))\n70         except Exception:  # pylint: disable=broad-except\n71             pass\n",
+      "filename": "celery/utils/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Try, Except, Pass detected.",
+      "line_number": 70,
+      "line_range": [
+        70
+      ],
+      "test_id": "B110",
+      "test_name": "try_except_pass"
+    },
+    {
+      "code": "149     try:\n150         pickle.loads(pickle.dumps(exc))\n151     except Exception:  # pylint: disable=broad-except\n",
+      "filename": "celery/utils/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "MEDIUM",
+      "issue_text": "Pickle library appears to be in use, possible security issue.",
+      "line_number": 150,
+      "line_range": [
+        150
+      ],
+      "test_id": "B301",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "150         pickle.loads(pickle.dumps(exc))\n151     except Exception:  # pylint: disable=broad-except\n152         pass\n",
+      "filename": "celery/utils/serialization.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Try, Except, Pass detected.",
+      "line_number": 151,
+      "line_range": [
+        151
+      ],
+      "test_id": "B110",
+      "test_name": "try_except_pass"
+    },
+    {
+      "code": "403     if full_jitter:\n404         countdown = random.randrange(countdown + 1)\n405     # Adjust according to maximum wait time and account for negative values.\n",
+      "filename": "celery/utils/time.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Standard pseudo-random generators are not suitable for security/cryptographic purposes.",
+      "line_number": 404,
+      "line_range": [
+        404
+      ],
+      "test_id": "B311",
+      "test_name": "blacklist"
+    },
+    {
+      "code": "75 \n76         assert self.keepalive, 'cannot scale down too fast.'\n77 \n",
+      "filename": "celery/worker/autoscale.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
+      "line_number": 76,
+      "line_range": [
+        76
+      ],
+      "test_id": "B101",
+      "test_name": "assert_used"
+    },
+    {
+      "code": "346             self.connection.collect()\n347         except Exception:  # pylint: disable=broad-except\n348             pass\n",
+      "filename": "celery/worker/consumer/consumer.py",
+      "issue_confidence": "HIGH",
+      "issue_severity": "LOW",
+      "issue_text": "Try, Except, Pass detected.",
+      "line_number": 347,
+      "line_range": [
+        347
+      ],
+      "test_id": "B110",
+      "test_name": "try_except_pass"
+    }
+  ]
+}
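
The entries above close out bandit.json: a Bandit static-analysis report checked in with this merge, where each result records the offending snippet, file, line, severity, confidence and test id. As a minimal sketch of how such a report can be summarized — assuming the file lives at bandit.json and was produced with a recursive run such as `bandit -r celery -f json -o bandit.json` — one could do:

    import json
    from collections import Counter

    # Tally Bandit findings by test id and severity; the top-level
    # 'results' key is where Bandit's JSON formatter puts the issues.
    with open('bandit.json') as fh:
        report = json.load(fh)

    counts = Counter(
        (issue['test_id'], issue['issue_severity'])
        for issue in report['results']
    )
    for (test_id, severity), n in counts.most_common():
        print('{0} ({1}): {2}'.format(test_id, severity, n))

Most findings visible here are LOW-severity B101 (assert_used) and B110 (try/except/pass), plus a few MEDIUM pickle/exec warnings.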

+ 4 - 3
celery/__init__.py

@@ -14,7 +14,7 @@ from collections import namedtuple
 
 SERIES = 'latentcall'
 
-__version__ = '4.0.2'
+__version__ = '4.1.0'
 __author__ = 'Ask Solem'
 __contact__ = 'ask@celeryproject.org'
 __homepage__ = 'http://celeryproject.org'
@@ -23,12 +23,12 @@ __keywords__ = 'task job queue distributed messaging actor'
 
 # -eof meta-
 
-__all__ = [
+__all__ = (
     'Celery', 'bugreport', 'shared_task', 'task',
     'current_app', 'current_task', 'maybe_signature',
     'chain', 'chord', 'chunks', 'group', 'signature',
     'xmap', 'xstarmap', 'uuid',
-]
+)
 
 VERSION_BANNER = '{0} ({1})'.format(__version__, SERIES)
 
@@ -145,6 +145,7 @@ def maybe_patch_concurrency(argv=sys.argv,
         from celery import concurrency
         concurrency.get_implementation(pool)
 
+
 # Lazy loading
 from . import local  # noqa
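
One pattern repeated throughout this merge: every module's `__all__` changes from a list to a tuple. The exported names stay the same; the diff doesn't state the motivation, but the usual rationale is that a tuple makes the export list immutable, e.g.:

    __all__ = ('main',)   # a tuple cannot be mutated in place:
    # __all__.append('extra')  -> AttributeError on a tuple,
    # whereas a list __all__ could silently grow at runtime.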
 

+ 3 - 1
celery/__main__.py

@@ -1,9 +1,11 @@
 """Entry-point for the :program:`celery` umbrella command."""
 from __future__ import absolute_import, print_function, unicode_literals
+
 import sys
+
 from . import maybe_patch_concurrency
 
-__all__ = ['main']
+__all__ = ('main',)
 
 
 def main():

+ 4 - 2
celery/_state.py

@@ -7,18 +7,20 @@ like the ``current_app``, and ``current_task``.
 This module shouldn't be used directly.
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 import os
 import sys
 import threading
 import weakref
+
 from celery.local import Proxy
 from celery.utils.threads import LocalStack
 
-__all__ = [
+__all__ = (
     'set_default_app', 'get_current_app', 'get_current_task',
     'get_current_worker_task', 'current_app', 'current_task',
     'connect_on_app_finalize',
-]
+)
 
 #: Global default app used when no current app.
 default_app = None

+ 2 - 2
celery/app/__init__.py

@@ -10,11 +10,11 @@ from celery._state import (
 from .base import Celery
 from .utils import AppPickler
 
-__all__ = [
+__all__ = (
     'Celery', 'AppPickler', 'app_or_default', 'default_app',
     'bugreport', 'enable_trace', 'disable_trace', 'shared_task',
     'push_current_task', 'pop_current_task',
-]
+)
 
 #: Proxy always returning the app set as default.
 default_app = Proxy(lambda: _state.default_app)

+ 9 - 9
celery/app/amqp.py

@@ -4,13 +4,11 @@ from __future__ import absolute_import, unicode_literals
 
 import numbers
 import sys
-
 from collections import Mapping, namedtuple
 from datetime import timedelta
 from weakref import WeakValueDictionary
 
-from kombu import pools
-from kombu import Connection, Consumer, Exchange, Producer, Queue
+from kombu import Connection, Consumer, Exchange, Producer, Queue, pools
 from kombu.common import Broadcast
 from kombu.utils.functional import maybe_list
 from kombu.utils.objects import cached_property
@@ -25,7 +23,7 @@ from celery.utils.time import maybe_make_aware
 
 from . import routes as _routes
 
-__all__ = ['AMQP', 'Queues', 'task_message']
+__all__ = ('AMQP', 'Queues', 'task_message')
 
 PY3 = sys.version_info[0] == 3
 
@@ -198,9 +196,9 @@ class Queues(dict):
         if exclude:
             exclude = maybe_list(exclude)
             if self._consume_from is None:
-                # using selection
+                # using all queues
                 return self.select(k for k in self if k not in exclude)
-            # using all queues
+            # using selection
             for queue in exclude:
                 self._consume_from.pop(queue, None)
 
@@ -356,6 +354,7 @@ class AMQP(object):
                 'lang': 'py',
                 'task': name,
                 'id': task_id,
+                'shadow': shadow,
                 'eta': eta,
                 'expires': expires,
                 'group': group_id,
@@ -398,7 +397,8 @@ class AMQP(object):
                    chord=None, callbacks=None, errbacks=None, reply_to=None,
                    time_limit=None, soft_time_limit=None,
                    create_sent_event=False, root_id=None, parent_id=None,
-                   shadow=None, now=None, timezone=None):
+                   shadow=None, now=None, timezone=None,
+                   **compat_kwargs):
         args = args or ()
         kwargs = kwargs or {}
         utc = self.utc
@@ -521,8 +521,8 @@ class AMQP(object):
                     exchange_type = 'direct'
 
             # convert to anon-exchange, when exchange not set and direct ex.
-            if not exchange or not routing_key and exchange_type == 'direct':
-                    exchange, routing_key = '', qname
+            if (not exchange or not routing_key) and exchange_type == 'direct':
+                exchange, routing_key = '', qname
             elif exchange is None:
                 # not topic exchange, and exchange not undefined
                 exchange = queue.exchange.name or default_exchange
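
Note the parenthesization fix in the anon-exchange test above: `and` binds tighter than `or`, so the old unparenthesized condition grouped as `not exchange or (not routing_key and exchange_type == 'direct')`. A standalone illustration of the difference:

    # With exchange unset but a routing key and a non-direct exchange type:
    exchange, routing_key, exchange_type = '', 'tasks', 'topic'

    old = not exchange or not routing_key and exchange_type == 'direct'
    new = (not exchange or not routing_key) and exchange_type == 'direct'

    print(old, new)  # True False -- only the new form requires a direct exchange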

+ 2 - 1
celery/app/annotations.py

@@ -8,6 +8,7 @@ This prepares and performs the annotations in the
 :setting:`task_annotations` setting.
 """
 from __future__ import absolute_import, unicode_literals
+
 from celery.five import string_t
 from celery.utils.functional import firstmethod, mlazy
 from celery.utils.imports import instantiate
@@ -15,7 +16,7 @@ from celery.utils.imports import instantiate
 _first_match = firstmethod('annotate')
 _first_match_any = firstmethod('annotate_any')
 
-__all__ = ['MapAnnotation', 'prepare', 'resolve_all']
+__all__ = ('MapAnnotation', 'prepare', 'resolve_all')
 
 
 class MapAnnotation(dict):

+ 5 - 2
celery/app/backends.py

@@ -1,14 +1,16 @@
 # -*- coding: utf-8 -*-
 """Backend selection."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
 import types
-from celery.exceptions import ImproperlyConfigured
+
 from celery._state import current_app
+from celery.exceptions import ImproperlyConfigured
 from celery.five import reraise
 from celery.utils.imports import load_extension_class_names, symbol_by_name
 
-__all__ = ['by_name', 'by_url']
+__all__ = ('by_name', 'by_url')
 
 UNKNOWN_BACKEND = """
 Unknown result backend: {0!r}.  Did you spell that correctly? ({1!r})
@@ -19,6 +21,7 @@ BACKEND_ALIASES = {
     'rpc': 'celery.backends.rpc.RPCBackend',
     'cache': 'celery.backends.cache:CacheBackend',
     'redis': 'celery.backends.redis:RedisBackend',
+    'sentinel': 'celery.backends.redis:SentinelBackend',
     'mongodb': 'celery.backends.mongodb:MongoBackend',
     'db': 'celery.backends.database:DatabaseBackend',
     'database': 'celery.backends.database:DatabaseBackend',
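
The new 'sentinel' alias points at the SentinelBackend added to celery/backends/redis.py in this merge. A configuration sketch — the host names and master name are placeholders, and the transport options go through the `result_backend_transport_options` setting introduced in celery/app/defaults.py below:

    from celery import Celery

    app = Celery('proj')
    app.conf.result_backend = (
        'sentinel://sentinel-1:26379;'
        'sentinel://sentinel-2:26379;'
        'sentinel://sentinel-3:26379'
    )
    # Tell the backend which monitored Redis master to use.
    app.conf.result_backend_transport_options = {'master_name': 'mymaster'}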

+ 36 - 29
celery/app/base.py

@@ -5,8 +5,8 @@ from __future__ import absolute_import, unicode_literals
 import os
 import threading
 import warnings
-
 from collections import defaultdict, deque
+from datetime import datetime
 from operator import attrgetter
 
 from kombu import pools
@@ -18,44 +18,37 @@ from kombu.utils.uuid import uuid
 from vine import starpromise
 from vine.utils import wraps
 
-from celery import platforms
-from celery import signals
-from celery._state import (
-    _task_stack, get_current_app, _set_current_app, set_default_app,
-    _register_app, _deregister_app,
-    get_current_worker_task, connect_on_app_finalize,
-    _announce_app_finalized,
-)
+from celery import platforms, signals
+from celery._state import (_announce_app_finalized, _deregister_app,
+                           _register_app, _set_current_app, _task_stack,
+                           connect_on_app_finalize, get_current_app,
+                           get_current_worker_task, set_default_app)
 from celery.exceptions import AlwaysEagerIgnored, ImproperlyConfigured
-from celery.five import (
-    UserDict, bytes_if_py2, python_2_unicode_compatible, values,
-)
+from celery.five import (UserDict, bytes_if_py2, python_2_unicode_compatible,
+                         values)
 from celery.loaders import get_loader_cls
 from celery.local import PromiseProxy, maybe_evaluate
 from celery.utils import abstract
 from celery.utils.collections import AttributeDictMixin
 from celery.utils.dispatch import Signal
-from celery.utils.functional import first, maybe_list, head_from_fun
-from celery.utils.time import timezone
+from celery.utils.functional import first, head_from_fun, maybe_list
 from celery.utils.imports import gen_task_name, instantiate, symbol_by_name
 from celery.utils.log import get_logger
 from celery.utils.objects import FallbackContext, mro_lookup
+from celery.utils.time import (get_exponential_backoff_interval, timezone,
+                               to_utc)
 
-from .annotations import prepare as prepare_annotations
+# Load all builtin tasks
+from . import builtins  # noqa
 from . import backends
+from .annotations import prepare as prepare_annotations
 from .defaults import find_deprecated_settings
 from .registry import TaskRegistry
-from .utils import (
-    AppPickler, Settings,
-    bugreport, _unpickle_app, _unpickle_app_v2,
-    _old_key_to_new, _new_key_to_old,
-    appstr, detect_settings,
-)
+from .utils import (AppPickler, Settings, _new_key_to_old, _old_key_to_new,
+                    _unpickle_app, _unpickle_app_v2, appstr, bugreport,
+                    detect_settings)
 
-# Load all builtin tasks
-from . import builtins  # noqa
-
-__all__ = ['Celery']
+__all__ = ('Celery',)
 
 logger = get_logger(__name__)
 
@@ -463,6 +456,9 @@ class Celery(object):
 
             autoretry_for = tuple(options.get('autoretry_for', ()))
             retry_kwargs = options.get('retry_kwargs', {})
+            retry_backoff = int(options.get('retry_backoff', False))
+            retry_backoff_max = int(options.get('retry_backoff_max', 600))
+            retry_jitter = options.get('retry_jitter', True)
 
             if autoretry_for and not hasattr(task, '_orig_run'):
 
@@ -471,6 +467,13 @@ class Celery(object):
                     try:
                         return task._orig_run(*args, **kwargs)
                     except autoretry_for as exc:
+                        if retry_backoff:
+                            retry_kwargs['countdown'] = \
+                                get_exponential_backoff_interval(
+                                    factor=retry_backoff,
+                                    retries=task.request.retries,
+                                    maximum=retry_backoff_max,
+                                    full_jitter=retry_jitter)
                         raise task.retry(exc=exc, **retry_kwargs)
 
                 task._orig_run, task.run = task.run, run
@@ -644,7 +647,7 @@ class Celery(object):
             baz/__init__.py
                 models.py
 
-        Then calling ``app.autodiscover_tasks(['foo', bar', 'baz'])`` will
+        Then calling ``app.autodiscover_tasks(['foo', 'bar', 'baz'])`` will
         result in the modules ``foo.tasks`` and ``bar.tasks`` being imported.
 
         Arguments:
@@ -727,6 +730,8 @@ class Celery(object):
             reply_to or self.oid, time_limit, soft_time_limit,
             self.conf.task_send_sent_event,
             root_id, parent_id, shadow, chain,
+            argsrepr=options.get('argsrepr'),
+            kwargsrepr=options.get('kwargsrepr'),
         )
 
         if connection:
@@ -816,7 +821,7 @@ class Celery(object):
             port or conf.broker_port,
             transport=transport or conf.broker_transport,
             ssl=self.either('broker_use_ssl', ssl),
-            heartbeat=heartbeat,
+            heartbeat=heartbeat or self.conf.broker_heartbeat,
             login_method=login_method or conf.broker_login_method,
             failover_strategy=(
                 failover_strategy or conf.broker_failover_strategy
@@ -870,8 +875,8 @@ class Celery(object):
 
     def now(self):
         """Return the current time and date as a datetime."""
-        from datetime import datetime
-        return datetime.utcnow().replace(tzinfo=self.timezone)
+        now_in_utc = to_utc(datetime.utcnow())
+        return now_in_utc.astimezone(self.timezone)
 
     def select_queues(self, queues=None):
         """Select subset of queues.
@@ -1252,4 +1257,6 @@ class Celery(object):
                 if not conf.timezone:
                     return timezone.local
         return timezone.get_timezone(tz)
+
+
 App = Celery  # noqa: E305 XXX compat
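
The three new options wire task autoretries into get_exponential_backoff_interval(): retry_backoff doubles the countdown on every retry (int(True) == 1, so the base factor is one second), retry_backoff_max caps it, and retry_jitter randomizes it. A minimal usage sketch; the task body and exception type are illustrative only:

    from celery import Celery

    app = Celery('proj', broker='amqp://')

    @app.task(autoretry_for=(ConnectionError,),
              retry_backoff=True,      # 1s, 2s, 4s, ... before jitter
              retry_backoff_max=600,   # never delay more than 10 minutes
              retry_jitter=True)       # full jitter: randomize each delay
    def fetch(url):
        import requests
        return requests.get(url).text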

+ 2 - 1
celery/app/builtins.py

@@ -4,10 +4,11 @@
 The built-in tasks are always available in all app instances.
 """
 from __future__ import absolute_import, unicode_literals
+
 from celery._state import connect_on_app_finalize
 from celery.utils.log import get_logger
 
-__all__ = []
+__all__ = ()
 logger = get_logger(__name__)
 
 

+ 4 - 1
celery/app/control.py

@@ -5,15 +5,18 @@ Client for worker remote control commands.
 Server implementation is in :mod:`celery.worker.control`.
 """
 from __future__ import absolute_import, unicode_literals
+
 import warnings
+
 from billiard.common import TERM_SIGNAME
 from kombu.pidbox import Mailbox
 from kombu.utils.functional import lazy
 from kombu.utils.objects import cached_property
+
 from celery.exceptions import DuplicateNodenameWarning
 from celery.utils.text import pluralize
 
-__all__ = ['Inspect', 'Control', 'flatten_reply']
+__all__ = ('Inspect', 'Control', 'flatten_reply')
 
 W_DUPNODE = """\
 Received multiple replies from node {0}: {1}.

+ 8 - 4
celery/app/defaults.py

@@ -1,14 +1,16 @@
 # -*- coding: utf-8 -*-
 """Configuration introspection and defaults."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
 from collections import deque, namedtuple
 from datetime import timedelta
+
 from celery.five import items, keys, python_2_unicode_compatible
 from celery.utils.functional import memoize
 from celery.utils.serialization import strtobool
 
-__all__ = ['Option', 'NAMESPACES', 'flatten', 'find']
+__all__ = ('Option', 'NAMESPACES', 'flatten', 'find')
 
 is_jython = sys.platform.startswith('java')
 is_pypy = hasattr(sys, 'pypy_version_info')
@@ -56,8 +58,8 @@ class Option(object):
     deprecate_by = None
     remove_by = None
     old = set()
-    typemap = dict(string=str, int=int, float=float, any=lambda v: v,
-                   bool=strtobool, dict=dict, tuple=tuple)
+    typemap = {'string': str, 'int': int, 'float': float, 'any': lambda v: v,
+               'bool': strtobool, 'dict': dict, 'tuple': tuple}
 
     def __init__(self, default=None, *args, **kwargs):
         self.default = default
@@ -126,6 +128,7 @@ NAMESPACES = Namespace(
         write_consistency=Option(type='string'),
         auth_provider=Option(type='string'),
         auth_kwargs=Option(type='string'),
+        options=Option({}, type='dict'),
     ),
     control=Namespace(
         queue_ttl=Option(300.0, type='float'),
@@ -178,6 +181,7 @@ NAMESPACES = Namespace(
         ),
         persistent=Option(None, type='bool'),
         serializer=Option('json'),
+        backend_transport_options=Option({}, type='dict'),
     ),
     elasticsearch=Namespace(
         __old__=old_ns('celery_elasticsearch'),
@@ -285,7 +289,7 @@ NAMESPACES = Namespace(
             'WARNING', old={'celery_redirect_stdouts_level'},
         ),
         send_task_events=Option(
-            False, type='bool', old={'celeryd_send_events'},
+            False, type='bool', old={'celery_send_events'},
         ),
         state_db=Option(),
         task_log_format=Option(DEFAULT_TASK_LOG_FMT),

+ 2 - 0
celery/app/events.py

@@ -1,6 +1,8 @@
 """Implementation for the app.events shortcuts."""
 from __future__ import absolute_import, unicode_literals
+
 from contextlib import contextmanager
+
 from kombu.utils.objects import cached_property
 
 

+ 4 - 7
celery/app/log.py

@@ -12,7 +12,6 @@ from __future__ import absolute_import, unicode_literals
 import logging
 import os
 import sys
-
 from logging.handlers import WatchedFileHandler
 
 from kombu.utils.encoding import set_default_encoding_file
@@ -22,15 +21,13 @@ from celery._state import get_current_task
 from celery.five import string_t
 from celery.local import class_property
 from celery.platforms import isatty
-from celery.utils.log import (
-    get_logger, mlevel,
-    ColorFormatter, LoggingProxy, get_multiprocessing_logger,
-    reset_multiprocessing_logger,
-)
+from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger,
+                              get_multiprocessing_logger, mlevel,
+                              reset_multiprocessing_logger)
 from celery.utils.nodenames import node_format
 from celery.utils.term import colored
 
-__all__ = ['TaskFormatter', 'Logging']
+__all__ = ('TaskFormatter', 'Logging')
 
 MP_LOG = os.environ.get('MP_LOG', False)
 

+ 4 - 2
celery/app/registry.py

@@ -1,13 +1,15 @@
 # -*- coding: utf-8 -*-
 """Registry of available tasks."""
 from __future__ import absolute_import, unicode_literals
+
 import inspect
 from importlib import import_module
+
 from celery._state import get_current_app
-from celery.exceptions import NotRegistered, InvalidTaskError
+from celery.exceptions import InvalidTaskError, NotRegistered
 from celery.five import items
 
-__all__ = ['TaskRegistry']
+__all__ = ('TaskRegistry',)
 
 
 class TaskRegistry(dict):

+ 4 - 1
celery/app/routes.py

@@ -4,17 +4,20 @@
 Contains utilities for working with task routers, (:setting:`task_routes`).
 """
 from __future__ import absolute_import, unicode_literals
+
 import re
 import string
 from collections import Mapping, OrderedDict
+
 from kombu import Queue
+
 from celery.exceptions import QueueNotFound
 from celery.five import items, string_t
 from celery.utils.collections import lpmerge
 from celery.utils.functional import maybe_evaluate, mlazy
 from celery.utils.imports import symbol_by_name
 
-__all__ = ['MapRoute', 'Router', 'prepare']
+__all__ = ('MapRoute', 'Router', 'prepare')
 
 
 def glob_to_re(glob, quote=string.punctuation.replace('*', '')):

+ 15 - 10
celery/app/task.py

@@ -8,14 +8,13 @@ from billiard.einfo import ExceptionInfo
 from kombu.exceptions import OperationalError
 from kombu.utils.uuid import uuid
 
-from celery import current_app, group
-from celery import states
+from celery import current_app, group, states
 from celery._state import _task_stack
 from celery.canvas import signature
 from celery.exceptions import Ignore, MaxRetriesExceededError, Reject, Retry
 from celery.five import items, python_2_unicode_compatible
 from celery.local import class_property
-from celery.result import EagerResult
+from celery.result import EagerResult, denied_join_result
 from celery.utils import abstract
 from celery.utils.functional import mattrgetter, maybe_list
 from celery.utils.imports import instantiate
@@ -26,7 +25,7 @@ from .annotations import resolve_all as resolve_all_annotations
 from .registry import _unpickle_task_v2
 from .utils import appstr
 
-__all__ = ['Context', 'Task']
+__all__ = ('Context', 'Task')
 
 #: extracts attributes related to publishing a message from an object.
 extract_exec_options = mattrgetter(
@@ -132,7 +131,7 @@ class Context(object):
 
     @property
     def children(self):
-        # children must be an empy list for every thread
+        # children must be an empty list for every thread
         if self._children is None:
             self._children = []
         return self._children
@@ -158,6 +157,9 @@ class Task(object):
     #: Execution strategy used, or the qualified name of one.
     Strategy = 'celery.worker.strategy:default'
 
+    #: Request class used, or the qualified name of one.
+    Request = 'celery.worker.request:Request'
+
     #: This is the instance bound to if the task is a method of a class.
     __self__ = None
 
@@ -383,7 +385,7 @@ class Task(object):
             _task_stack.pop()
 
     def __reduce__(self):
-        # - tasks are pickled into the name of the task only, and the reciever
+        # - tasks are pickled into the name of the task only, and the receiver
         # - simply grabs it from the local registry.
         # - in later versions the module of the task is also included,
         # - and the receiving side tries to import that module so that
@@ -496,7 +498,7 @@ class Task(object):
             headers (Dict): Message headers to be included in the message.
 
         Returns:
-            ~@AsyncResult: Promise of future evaluation.
+            celery.result.AsyncResult: Promise of future evaluation.
 
         Raises:
             TypeError: If not enough arguments are passed, or too many
@@ -519,8 +521,9 @@ class Task(object):
 
         app = self._get_app()
         if app.conf.task_always_eager:
-            return self.apply(args, kwargs, task_id=task_id or uuid(),
-                              link=link, link_error=link_error, **options)
+            with denied_join_result():
+                return self.apply(args, kwargs, task_id=task_id or uuid(),
+                                  link=link, link_error=link_error, **options)
         # add 'self' if this is a "task_method".
         if self.__self__ is not None:
             args = args if isinstance(args, tuple) else tuple(args or ())
@@ -616,7 +619,7 @@ class Task(object):
                 If no exception was raised it will raise the ``exc``
                 argument provided.
             countdown (float): Time in seconds to delay the retry for.
-            eta (~datetime.dateime): Explicit time and date to run the
+            eta (~datetime.datetime): Explicit time and date to run the
                 retry at.
             max_retries (int): If set, overrides the default retry limit for
                 this execution.  Changes to this parameter don't propagate to
@@ -1006,4 +1009,6 @@ class Task(object):
     @property
     def __name__(self):
         return self.__class__.__name__
+
+
 BaseTask = Task  # noqa: E305 XXX compat alias
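
Besides guarding always-eager apply_async() with denied_join_result(), this hunk adds a Request class attribute, making the request class overridable per task. A sketch of what that enables — the on_timeout() hook is used here only as an example of a request callback, assuming it keeps its 4.x signature:

    from celery import Celery, Task
    from celery.worker.request import Request

    app = Celery('proj')

    class LoggingRequest(Request):
        def on_timeout(self, soft, timeout):
            print('task {0} hit its time limit'.format(self.id))
            super(LoggingRequest, self).on_timeout(soft, timeout)

    class BaseWithLogging(Task):
        Request = LoggingRequest  # resolved via the new class attribute

    @app.task(base=BaseWithLogging)
    def slow():
        pass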

+ 46 - 27
celery/app/trace.py

@@ -6,50 +6,50 @@ errors are recorded, handlers are applied and so on.
 """
 from __future__ import absolute_import, unicode_literals
 
-# ## ---
-# This is the heart of the worker, the inner loop so to speak.
-# It used to be split up into nice little classes and methods,
-# but in the end it only resulted in bad performance and horrible tracebacks,
-# so instead we now use one closure per task class.
-
-# pylint: disable=redefined-outer-name
-# We cache globals and attribute lookups, so disable this warning.
-# pylint: disable=broad-except
-# We know what we're doing...
-
 import logging
 import os
 import sys
-
 from collections import namedtuple
 from warnings import warn
 
 from billiard.einfo import ExceptionInfo
 from kombu.exceptions import EncodeError
-from kombu.serialization import loads as loads_message, prepare_accept_content
+from kombu.serialization import loads as loads_message
+from kombu.serialization import prepare_accept_content
 from kombu.utils.encoding import safe_repr, safe_str
 
-from celery import current_app, group
-from celery import states, signals
+from celery import current_app, group, signals, states
 from celery._state import _task_stack
-from celery.app.task import Task as BaseTask, Context
-from celery.exceptions import Ignore, Reject, Retry, InvalidTaskError
+from celery.app.task import Context
+from celery.app.task import Task as BaseTask
+from celery.exceptions import Ignore, InvalidTaskError, Reject, Retry
 from celery.five import monotonic, text_t
 from celery.utils.log import get_logger
 from celery.utils.nodenames import gethostname
 from celery.utils.objects import mro_lookup
 from celery.utils.saferepr import saferepr
-from celery.utils.serialization import (
-    get_pickleable_exception, get_pickled_exception, get_pickleable_etype,
-)
+from celery.utils.serialization import (get_pickleable_etype,
+                                        get_pickleable_exception,
+                                        get_pickled_exception)
+
+# ## ---
+# This is the heart of the worker, the inner loop so to speak.
+# It used to be split up into nice little classes and methods,
+# but in the end it only resulted in bad performance and horrible tracebacks,
+# so instead we now use one closure per task class.
 
-__all__ = [
+# pylint: disable=redefined-outer-name
+# We cache globals and attribute lookups, so disable this warning.
+# pylint: disable=broad-except
+# We know what we're doing...
+
+
+__all__ = (
     'TraceInfo', 'build_tracer', 'trace_task',
     'setup_worker_optimizations', 'reset_worker_optimizations',
-]
+)
 
 logger = get_logger(__name__)
-info = logger.info
 
 #: Format string used to log task success.
 LOG_SUCCESS = """\
@@ -116,6 +116,14 @@ _patched = {}
 trace_ok_t = namedtuple('trace_ok_t', ('retval', 'info', 'runtime', 'retstr'))
 
 
+def info(fmt, context):
+    """Log 'fmt % context' with severity 'INFO'.
+
+    'context' is also passed in extra with key 'data' for custom handlers.
+    """
+    logger.info(fmt, context, extra={'data': context})
+
+
 def task_has_custom(task, attr):
     """Return true if the task overrides ``attr``."""
     return mro_lookup(task.__class__, attr, stop={BaseTask, object},
@@ -135,6 +143,13 @@ def get_log_policy(task, einfo, exc):
         return log_policy_unexpected
 
 
+def get_task_name(request, default):
+    """Use 'shadow' in request for the task name if applicable."""
+    # request.shadow could be None or an empty string.
+    # If so, we should use default.
+    return getattr(request, 'shadow', None) or default
+
+
 class TraceInfo(object):
     """Information about task execution."""
 
@@ -179,7 +194,7 @@ class TraceInfo(object):
                                     reason=reason, einfo=einfo)
             info(LOG_RETRY, {
                 'id': req.id,
-                'name': task.name,
+                'name': get_task_name(req, task.name),
                 'exc': text_t(reason),
             })
             return einfo
@@ -227,7 +242,7 @@ class TraceInfo(object):
         context = {
             'hostname': req.hostname,
             'id': req.id,
-            'name': task.name,
+            'name': get_task_name(req, task.name),
             'exc': exception,
             'traceback': traceback,
             'args': sargs,
@@ -437,8 +452,10 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                             send_success(sender=task, result=retval)
                         if _does_info:
                             info(LOG_SUCCESS, {
-                                'id': uuid, 'name': name,
-                                'return_value': Rstr, 'runtime': T,
+                                'id': uuid,
+                                'name': get_task_name(task_request, name),
+                                'return_value': Rstr,
+                                'runtime': T,
                             })
 
                 # -* POST *-
@@ -508,6 +525,8 @@ def _trace_task_ret(name, uuid, request, body, content_type,
     R, I, T, Rstr = trace_task(app.tasks[name],
                                uuid, args, kwargs, request, app=app)
     return (1, R, T) if I else (0, Rstr, T)
+
+
 trace_task_ret = _trace_task_ret  # noqa: E305
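
The new get_task_name() helper makes the worker log a task's shadow name, when one was supplied at call time, in the retry, failure and success messages alike (the v2 message protocol change in celery/app/amqp.py above carries the shadow field). Shadow names are per invocation; a short sketch:

    from celery import Celery

    app = Celery('proj', broker='amqp://')

    @app.task
    def add(x, y):
        return x + y

    # The worker logs this call as 'add:2+2' rather than the
    # registered task name, while routing is unaffected.
    add.apply_async((2, 2), shadow='add:2+2')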
 
 

+ 10 - 13
celery/app/utils.py

@@ -5,7 +5,6 @@ from __future__ import absolute_import, unicode_literals
 import os
 import platform as _platform
 import re
-
 from collections import Mapping, namedtuple
 from copy import deepcopy
 from types import ModuleType
@@ -16,18 +15,16 @@ from celery.exceptions import ImproperlyConfigured
 from celery.five import items, keys, string_t, values
 from celery.platforms import pyimplementation
 from celery.utils.collections import ConfigurationView
+from celery.utils.imports import import_from_cwd, qualname, symbol_by_name
 from celery.utils.text import pretty
-from celery.utils.imports import import_from_cwd, symbol_by_name, qualname
 
-from .defaults import (
-    _TO_NEW_KEY, _TO_OLD_KEY, _OLD_DEFAULTS, _OLD_SETTING_KEYS,
-    DEFAULTS, SETTING_KEYS, find,
-)
+from .defaults import (_OLD_DEFAULTS, _OLD_SETTING_KEYS, _TO_NEW_KEY,
+                       _TO_OLD_KEY, DEFAULTS, SETTING_KEYS, find)
 
-__all__ = [
+__all__ = (
     'Settings', 'appstr', 'bugreport',
     'filter_hidden_settings', 'find_app',
-]
+)
 
 #: Format used to generate bug-report information.
 BUGREPORT_INFO = """
@@ -70,7 +67,7 @@ FMT_REPLACE_SETTING = '{replace:<36} -> {with_}'
 
 def appstr(app):
     """String used in __repr__ etc, to id app instances."""
-    return '{0}:{1:#x}'.format(app.main or '__main__', id(app))
+    return '{0} at {1:#x}'.format(app.main or '__main__', id(app))
 
 
 class Settings(ConfigurationView):
@@ -278,10 +275,10 @@ class AppPickler(object):
     def build_standard_kwargs(self, main, changes, loader, backend, amqp,
                               events, log, control, accept_magic_kwargs,
                               config_source=None):
-        return dict(main=main, loader=loader, backend=backend, amqp=amqp,
-                    changes=changes, events=events, log=log, control=control,
-                    set_as_current=False,
-                    config_source=config_source)
+        return {'main': main, 'loader': loader, 'backend': backend,
+                'amqp': amqp, 'changes': changes, 'events': events,
+                'log': log, 'control': control, 'set_as_current': False,
+                'config_source': config_source}
 
     def construct(self, cls, **kwargs):
         return cls(**kwargs)

+ 7 - 5
celery/apps/beat.py

@@ -8,17 +8,19 @@ as an actual application, like installing signal handlers
 and so on.
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 import numbers
 import socket
 import sys
 from datetime import datetime
-from celery import VERSION_BANNER, platforms, beat
+
+from celery import VERSION_BANNER, beat, platforms
 from celery.five import text_t
 from celery.utils.imports import qualname
 from celery.utils.log import LOG_LEVELS, get_logger
 from celery.utils.time import humanize_seconds
 
-__all__ = ['Beat']
+__all__ = ('Beat',)
 
 STARTUP_INFO_FMT = """
 LocalTime -> {timestamp}
@@ -115,9 +117,9 @@ class Beat(object):
         c = self.colored
         return text_t(  # flake8: noqa
             c.blue('__    ', c.magenta('-'),
-            c.blue('    ... __   '), c.magenta('-'),
-            c.blue('        _\n'),
-            c.reset(self.startup_info(service))),
+                   c.blue('    ... __   '), c.magenta('-'),
+                   c.blue('        _\n'),
+                   c.reset(self.startup_info(service))),
         )
 
     def init_loader(self):

+ 3 - 5
celery/apps/multi.py

@@ -6,7 +6,6 @@ import os
 import shlex
 import signal
 import sys
-
 from collections import OrderedDict, defaultdict
 from functools import partial
 from subprocess import Popen
@@ -17,12 +16,11 @@ from kombu.utils.objects import cached_property
 
 from celery.five import UserList, items
 from celery.platforms import IS_WINDOWS, Pidfile, signal_name
-from celery.utils.nodenames import (
-    gethostname, host_format, node_format, nodesplit,
-)
+from celery.utils.nodenames import (gethostname, host_format, node_format,
+                                    nodesplit)
 from celery.utils.saferepr import saferepr
 
-__all__ = ['Cluster', 'Node']
+__all__ = ('Cluster', 'Node')
 
 CELERY_EXE = 'celery'
 

+ 7 - 7
celery/apps/worker.py

@@ -13,30 +13,26 @@ import logging
 import os
 import platform as _platform
 import sys
-
 from datetime import datetime
 from functools import partial
 
 from billiard.process import current_process
 from kombu.utils.encoding import safe_str
 
-from celery import VERSION_BANNER
-from celery import platforms
-from celery import signals
+from celery import VERSION_BANNER, platforms, signals
 from celery.app import trace
 from celery.exceptions import WorkerShutdown, WorkerTerminate
 from celery.five import string, string_t
 from celery.loaders.app import AppLoader
 from celery.platforms import EX_FAILURE, EX_OK, check_privileges, isatty
-from celery.utils import static
-from celery.utils import term
+from celery.utils import static, term
 from celery.utils.debug import cry
 from celery.utils.imports import qualname
 from celery.utils.log import get_logger, in_sighandler, set_in_sighandler
 from celery.utils.text import pluralize
 from celery.worker import WorkController
 
-__all__ = ['Worker']
+__all__ = ('Worker',)
 
 logger = get_logger(__name__)
 is_jython = sys.platform.startswith('java')
@@ -277,6 +273,10 @@ def _shutdown_handler(worker, sig='TERM', how='Warm',
                 if callback:
                     callback(worker)
                 safe_say('worker: {0} shutdown (MainProcess)'.format(how))
+                signals.worker_shutting_down.send(
+                    sender=worker.hostname, sig=sig, how=how,
+                    exitcode=exitcode,
+                )
             if active_thread_count() > 1:
                 setattr(state, {'Warm': 'should_stop',
                                 'Cold': 'should_terminate'}[how], exitcode)
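
The shutdown handler now emits the new worker_shutting_down signal before the warm/cold decision. A receiver sketch built from the keyword arguments sent in the hunk above:

    from celery.signals import worker_shutting_down

    @worker_shutting_down.connect
    def log_shutdown(sender=None, sig=None, how=None, exitcode=None, **kwargs):
        # sender is the worker hostname; how is 'Warm' or 'Cold'.
        print('worker {0}: {1} shutdown ({2}), exitcode {3}'.format(
            sender, how, sig, exitcode))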

+ 3 - 4
celery/backends/amqp.py

@@ -3,21 +3,20 @@
 from __future__ import absolute_import, unicode_literals
 
 import socket
-
 from collections import deque
 from operator import itemgetter
 
-from kombu import Exchange, Queue, Producer, Consumer
+from kombu import Consumer, Exchange, Producer, Queue
 
 from celery import states
 from celery.exceptions import TimeoutError
-from celery.five import range, monotonic
+from celery.five import monotonic, range
 from celery.utils import deprecated
 from celery.utils.log import get_logger
 
 from .base import BaseBackend
 
-__all__ = ['BacklogLimitExceeded', 'AMQPBackend']
+__all__ = ('BacklogLimitExceeded', 'AMQPBackend')
 
 logger = get_logger(__name__)
 

+ 2 - 3
celery/backends/async.py

@@ -3,7 +3,6 @@ from __future__ import absolute_import, unicode_literals
 
 import socket
 import threading
-
 from collections import deque
 from time import sleep
 from weakref import WeakKeyDictionary
@@ -16,10 +15,10 @@ from celery.exceptions import TimeoutError
 from celery.five import Empty, monotonic
 from celery.utils.threads import THREAD_TIMEOUT_MAX
 
-__all__ = [
+__all__ = (
     'AsyncBackendMixin', 'BaseResultConsumer', 'Drainer',
     'register_drainer',
-]
+)
 
 drainers = {}
 

+ 35 - 40
celery/backends/base.py

@@ -10,39 +10,31 @@ from __future__ import absolute_import, unicode_literals
 
 import sys
 import time
-
 from collections import namedtuple
 from datetime import timedelta
 from weakref import WeakValueDictionary
 
 from billiard.einfo import ExceptionInfo
-from kombu.serialization import (
-    dumps, loads, prepare_accept_content,
-    registry as serializer_registry,
-)
+from kombu.serialization import dumps, loads, prepare_accept_content
+from kombu.serialization import registry as serializer_registry
 from kombu.utils.encoding import bytes_to_str, ensure_bytes, from_utf8
 from kombu.utils.url import maybe_sanitize_url
 
-from celery import states
-from celery import current_app, group, maybe_signature
+from celery import current_app, group, maybe_signature, states
 from celery._state import get_current_task
-from celery.exceptions import (
-    ChordError, TimeoutError, TaskRevokedError, ImproperlyConfigured,
-)
-from celery.five import items, string
-from celery.result import (
-    GroupResult, ResultBase, allow_join_result, result_from_tuple,
-)
+from celery.exceptions import (ChordError, ImproperlyConfigured,
+                               TaskRevokedError, TimeoutError)
+from celery.five import items
+from celery.result import (GroupResult, ResultBase, allow_join_result,
+                           result_from_tuple)
 from celery.utils.collections import BufferMap
 from celery.utils.functional import LRUCache, arity_greater
 from celery.utils.log import get_logger
-from celery.utils.serialization import (
-    get_pickled_exception,
-    get_pickleable_exception,
-    create_exception_cls,
-)
+from celery.utils.serialization import (create_exception_cls,
+                                        get_pickleable_exception,
+                                        get_pickled_exception)
 
-__all__ = ['BaseBackend', 'KeyValueStoreBackend', 'DisabledBackend']
+__all__ = ('BaseBackend', 'KeyValueStoreBackend', 'DisabledBackend')
 
 EXCEPTION_ABLE_CODECS = frozenset({'pickle'})
 PY3 = sys.version_info >= (3, 0)
@@ -237,14 +229,24 @@ class Backend(object):
         serializer = self.serializer if serializer is None else serializer
         if serializer in EXCEPTION_ABLE_CODECS:
             return get_pickleable_exception(exc)
-        return {'exc_type': type(exc).__name__, 'exc_message': string(exc)}
+        return {'exc_type': type(exc).__name__,
+                'exc_message': exc.args,
+                'exc_module': type(exc).__module__}
 
     def exception_to_python(self, exc):
         """Convert serialized exception to Python exception."""
         if exc:
             if not isinstance(exc, BaseException):
-                exc = create_exception_cls(
-                    from_utf8(exc['exc_type']), __name__)(exc['exc_message'])
+                exc_module = exc.get('exc_module')
+                if exc_module is None:
+                    cls = create_exception_cls(
+                        from_utf8(exc['exc_type']), __name__)
+                else:
+                    exc_module = from_utf8(exc_module)
+                    exc_type = from_utf8(exc['exc_type'])
+                    cls = getattr(sys.modules[exc_module], exc_type)
+                exc_msg = exc['exc_message']
+                exc = cls(*exc_msg if isinstance(exc_msg, tuple) else exc_msg)
             if self.serializer in EXCEPTION_ABLE_CODECS:
                 exc = get_pickled_exception(exc)
         return exc
@@ -410,23 +412,19 @@ class Backend(object):
     def on_chord_part_return(self, request, state, result, **kwargs):
         pass
 
-    def fallback_chord_unlock(self, group_id, body, result=None,
-                              countdown=1, **kwargs):
-        kwargs['result'] = [r.as_tuple() for r in result]
+    def fallback_chord_unlock(self, header_result, body, countdown=1,
+                              **kwargs):
+        kwargs['result'] = [r.as_tuple() for r in header_result]
         self.app.tasks['celery.chord_unlock'].apply_async(
-            (group_id, body,), kwargs, countdown=countdown,
+            (header_result.id, body,), kwargs, countdown=countdown,
         )
 
     def ensure_chords_allowed(self):
         pass
 
-    def apply_chord(self, header, partial_args, group_id, body,
-                    options={}, **kwargs):
+    def apply_chord(self, header_result, body, **kwargs):
         self.ensure_chords_allowed()
-        fixed_options = {k: v for k, v in items(options) if k != 'task_id'}
-        result = header(*partial_args, task_id=group_id, **fixed_options or {})
-        self.fallback_chord_unlock(group_id, body, **kwargs)
-        return result
+        self.fallback_chord_unlock(header_result, body, **kwargs)
 
     def current_task_children(self, request=None):
         request = request or getattr(get_current_task(), 'request', None)
@@ -510,6 +508,8 @@ class SyncBackendMixin(object):
 
 class BaseBackend(Backend, SyncBackendMixin):
     """Base (synchronous) result backend."""
+
+
 BaseDictBackend = BaseBackend  # noqa: E305 XXX compat
 
 
@@ -679,14 +679,9 @@ class BaseKeyValueStoreBackend(Backend):
             meta['result'] = result_from_tuple(result, self.app)
             return meta
 
-    def _apply_chord_incr(self, header, partial_args, group_id, body,
-                          result=None, options={}, **kwargs):
+    def _apply_chord_incr(self, header_result, body, **kwargs):
         self.ensure_chords_allowed()
-        self.save_group(group_id, self.app.GroupResult(group_id, result))
-
-        fixed_options = {k: v for k, v in items(options) if k != 'task_id'}
-
-        return header(*partial_args, task_id=group_id, **fixed_options or {})
+        header_result.save(backend=self)
 
     def on_chord_part_return(self, request, state, result, **kwargs):
         if not self.implements_incr:
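
The serialized-exception format for non-pickle result serializers changes here: prepare_exception() now stores the exception's args tuple and defining module instead of a stringified message, and exception_to_python() uses the module to restore the original class rather than synthesizing one. A standalone sketch of the round trip the two hunks imply:

    import sys

    def exception_to_dict(exc):
        # mirrors the new prepare_exception() for json-style codecs
        return {'exc_type': type(exc).__name__,
                'exc_message': exc.args,
                'exc_module': type(exc).__module__}

    def dict_to_exception(data):
        # mirrors the new lookup path in exception_to_python()
        cls = getattr(sys.modules[data['exc_module']], data['exc_type'])
        return cls(*data['exc_message'])

    err = dict_to_exception(exception_to_dict(ValueError('bad', 42)))
    print(type(err).__name__, err.args)  # ValueError ('bad', 42)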

+ 12 - 7
celery/backends/cache.py

@@ -1,14 +1,18 @@
 # -*- coding: utf-8 -*-
 """Memcached and in-memory cache result backend."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
 from kombu.utils.encoding import bytes_to_str, ensure_bytes
 from kombu.utils.objects import cached_property
+
 from celery.exceptions import ImproperlyConfigured
 from celery.utils.functional import LRUCache
+
 from .base import KeyValueStoreBackend
 
-__all__ = ['CacheBackend']
+__all__ = ('CacheBackend',)
 
 _imp = [None]
 
@@ -128,10 +132,11 @@ class CacheBackend(KeyValueStoreBackend):
     def delete(self, key):
         return self.client.delete(key)
 
-    def _apply_chord_incr(self, header, partial_args, group_id, body, **opts):
-        self.client.set(self.get_key_for_chord(group_id), 0, time=self.expires)
+    def _apply_chord_incr(self, header_result, body, **kwargs):
+        chord_key = self.get_key_for_chord(header_result.id)
+        self.client.set(chord_key, 0, time=self.expires)
         return super(CacheBackend, self)._apply_chord_incr(
-            header, partial_args, group_id, body, **opts)
+            header_result, body, **kwargs)
 
     def incr(self, key):
         return self.client.incr(key)
@@ -147,9 +152,9 @@ class CacheBackend(KeyValueStoreBackend):
         servers = ';'.join(self.servers)
         backend = '{0}://{1}/'.format(self.backend, servers)
         kwargs.update(
-            dict(backend=backend,
-                 expires=self.expires,
-                 options=self.options))
+            {'backend': backend,
+             'expires': self.expires,
+             'options': self.options})
         return super(CacheBackend, self).__reduce__(args, kwargs)
 
     def as_uri(self, *args, **kwargs):

+ 11 - 5
celery/backends/cassandra.py

@@ -1,11 +1,15 @@
 # -* coding: utf-8 -*-
 """Apache Cassandra result store backend using the DataStax driver."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
 from celery import states
 from celery.exceptions import ImproperlyConfigured
 from celery.utils.log import get_logger
+
 from .base import BaseBackend
+
 try:  # pragma: no cover
     import cassandra
     import cassandra.auth
@@ -14,7 +18,7 @@ except ImportError:  # pragma: no cover
     cassandra = None   # noqa
 
 
-__all__ = ['CassandraBackend']
+__all__ = ('CassandraBackend',)
 
 logger = get_logger(__name__)
 
@@ -90,6 +94,7 @@ class CassandraBackend(BaseBackend):
         self.port = port or conf.get('cassandra_port', None)
         self.keyspace = keyspace or conf.get('cassandra_keyspace', None)
         self.table = table or conf.get('cassandra_table', None)
+        self.cassandra_options = conf.get('cassandra_options', {})
 
         if not self.servers or not self.keyspace or not self.table:
             raise ImproperlyConfigured('Cassandra backend not configured.')
@@ -141,7 +146,8 @@ class CassandraBackend(BaseBackend):
         try:
             self._connection = cassandra.cluster.Cluster(
                 self.servers, port=self.port,
-                auth_provider=self.auth_provider)
+                auth_provider=self.auth_provider,
+                **self.cassandra_options)
             self._session = self._connection.connect(self.keyspace)
 
             # We're forced to do concatenation below, as formatting would
@@ -224,7 +230,7 @@ class CassandraBackend(BaseBackend):
 
     def __reduce__(self, args=(), kwargs={}):
         kwargs.update(
-            dict(servers=self.servers,
-                 keyspace=self.keyspace,
-                 table=self.table))
+            {'servers': self.servers,
+             'keyspace': self.keyspace,
+             'table': self.table})
         return super(CassandraBackend, self).__reduce__(args, kwargs)
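
The backend now reads a cassandra_options dict (declared in celery/app/defaults.py above) and forwards it verbatim to cassandra.cluster.Cluster(). A configuration sketch — the option keys shown are ordinary DataStax driver Cluster() keyword arguments, picked purely as examples:

    from celery import Celery

    app = Celery('proj')
    app.conf.result_backend = 'cassandra://'
    app.conf.cassandra_servers = ['cassandra-1', 'cassandra-2']
    app.conf.cassandra_keyspace = 'celery'
    app.conf.cassandra_table = 'tasks'
    app.conf.cassandra_options = {   # passed as Cluster(..., **options)
        'protocol_version': 3,
        'connect_timeout': 10,
    }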

+ 9 - 7
celery/backends/consul.py

@@ -5,10 +5,14 @@
     in the key-value store of Consul.
 """
 from __future__ import absolute_import, unicode_literals
+
+from kombu.utils.encoding import bytes_to_str
 from kombu.utils.url import parse_url
+
+from celery.backends.base import KeyValueStoreBackend
 from celery.exceptions import ImproperlyConfigured
-from celery.backends.base import KeyValueStoreBackend, PY3
 from celery.utils.log import get_logger
+
 try:
     import consul
 except ImportError:
@@ -16,7 +20,7 @@ except ImportError:
 
 logger = get_logger(__name__)
 
-__all__ = ['ConsulBackend']
+__all__ = ('ConsulBackend',)
 
 CONSUL_MISSING = """\
 You need to install the python-consul library in order to use \
@@ -50,8 +54,7 @@ class ConsulBackend(KeyValueStoreBackend):
                                     consistency=self.consistency)
 
     def _key_to_consul_key(self, key):
-        if PY3:
-            key = key.encode('utf-8')
+        key = bytes_to_str(key)
         return key if self.path is None else '{0}/{1}'.format(self.path, key)
 
     def get(self, key):
@@ -78,9 +81,8 @@ class ConsulBackend(KeyValueStoreBackend):
         If the session expires it will remove the key so that results
         can auto expire from the K/V store
         """
-        session_name = key
-        if PY3:
-            session_name = key.decode('utf-8')
+        session_name = bytes_to_str(key)
+
         key = self._key_to_consul_key(key)
 
         logger.debug('Trying to create Consul session %s with TTL %d',

+ 10 - 1
celery/backends/couchbase.py

@@ -1,11 +1,20 @@
 # -*- coding: utf-8 -*-
 """Couchbase result store backend."""
 from __future__ import absolute_import, unicode_literals
+
 import logging
+
 from kombu.utils.encoding import str_t
 from kombu.utils.url import _parse_url
+
 from celery.exceptions import ImproperlyConfigured
+
 from .base import KeyValueStoreBackend
+
+try:
+    import couchbase_ffi # noqa
+except ImportError:
+    pass # noqa
 try:
     from couchbase import Couchbase
     from couchbase.connection import Connection
@@ -13,7 +22,7 @@ try:
 except ImportError:
     Couchbase = Connection = NotFoundError = None   # noqa
 
-__all__ = ['CouchbaseBackend']
+__all__ = ('CouchbaseBackend',)
 
 
 class CouchbaseBackend(KeyValueStoreBackend):

+ 7 - 1
celery/backends/couchdb.py

@@ -1,15 +1,20 @@
 # -*- coding: utf-8 -*-
 """CouchDB result store backend."""
 from __future__ import absolute_import, unicode_literals
+
+from kombu.utils.encoding import bytes_to_str
 from kombu.utils.url import _parse_url
+
 from celery.exceptions import ImproperlyConfigured
+
 from .base import KeyValueStoreBackend
+
 try:
     import pycouchdb
 except ImportError:
     pycouchdb = None  # noqa
 
-__all__ = ['CouchBackend']
+__all__ = ('CouchBackend',)
 
 ERR_LIB_MISSING = """\
 You need to install the pycouchdb library to use the CouchDB result backend\
@@ -82,6 +87,7 @@ class CouchBackend(KeyValueStoreBackend):
             return None
 
     def set(self, key, value):
+        key = bytes_to_str(key)
         data = {'_id': key, 'value': value}
         try:
             self.connection.save(data)

+ 4 - 4
celery/backends/database/__init__.py

@@ -28,7 +28,7 @@ except ImportError:  # pragma: no cover
 
 logger = logging.getLogger(__name__)
 
-__all__ = ['DatabaseBackend']
+__all__ = ('DatabaseBackend',)
 
 
 @contextmanager
@@ -182,7 +182,7 @@ class DatabaseBackend(BaseBackend):
 
     def __reduce__(self, args=(), kwargs={}):
         kwargs.update(
-            dict(dburi=self.url,
-                 expires=self.expires,
-                 engine_options=self.engine_options))
+            {'dburi': self.url,
+             'expires': self.expires,
+             'engine_options': self.engine_options})
         return super(DatabaseBackend, self).__reduce__(args, kwargs)

+ 6 - 2
celery/backends/database/models.py

@@ -1,14 +1,18 @@
 # -*- coding: utf-8 -*-
 """Database models used by the SQLAlchemy result store backend."""
 from __future__ import absolute_import, unicode_literals
-import sqlalchemy as sa
+
 from datetime import datetime
+
+import sqlalchemy as sa
 from sqlalchemy.types import PickleType
+
 from celery import states
 from celery.five import python_2_unicode_compatible
+
 from .session import ResultModelBase
 
-__all__ = ['Task', 'TaskSet']
+__all__ = ('Task', 'TaskSet')
 
 
 @python_2_unicode_compatible

+ 3 - 2
celery/backends/database/session.py

@@ -1,15 +1,16 @@
 # -*- coding: utf-8 -*-
 """SQLAlchemy session."""
 from __future__ import absolute_import, unicode_literals
+
+from kombu.utils.compat import register_after_fork
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy.pool import NullPool
-from kombu.utils.compat import register_after_fork
 
 ResultModelBase = declarative_base()
 
-__all__ = ['SessionManager']
+__all__ = ('SessionManager',)
 
 
 def _after_fork_cleanup_session(session):

+ 28 - 24
celery/backends/dynamodb.py

@@ -1,21 +1,25 @@
 # -*- coding: utf-8 -*-
 """AWS DynamoDB result store backend."""
 from __future__ import absolute_import, unicode_literals
+
 from collections import namedtuple
-from time import time, sleep
+from time import sleep, time
 
 from kombu.utils.url import _parse_url as parse_url
+
 from celery.exceptions import ImproperlyConfigured
-from celery.utils.log import get_logger
 from celery.five import string
+from celery.utils.log import get_logger
+
 from .base import KeyValueStoreBackend
+
 try:
     import boto3
     from botocore.exceptions import ClientError
 except ImportError:  # pragma: no cover
     boto3 = ClientError = None  # noqa
 
-__all__ = ['DynamoDBBackend']
+__all__ = ('DynamoDBBackend',)
 
 
 # Helper class that describes a DynamoDB attribute
@@ -126,14 +130,14 @@ class DynamoDBBackend(KeyValueStoreBackend):
     def _get_client(self, access_key_id=None, secret_access_key=None):
         """Get client connection."""
         if self._client is None:
-            client_parameters = dict(
-                region_name=self.aws_region
-            )
+            client_parameters = {
+                'region_name': self.aws_region
+            }
             if access_key_id is not None:
-                client_parameters.update(dict(
-                    aws_access_key_id=access_key_id,
-                    aws_secret_access_key=secret_access_key
-                ))
+                client_parameters.update({
+                    'aws_access_key_id': access_key_id,
+                    'aws_secret_access_key': secret_access_key
+                })
 
             if self.endpoint_url is not None:
                 client_parameters['endpoint_url'] = self.endpoint_url
@@ -147,25 +151,25 @@ class DynamoDBBackend(KeyValueStoreBackend):
 
     def _get_table_schema(self):
         """Get the boto3 structure describing the DynamoDB table schema."""
-        return dict(
-            AttributeDefinitions=[
+        return {
+            'AttributeDefinitions': [
                 {
                     'AttributeName': self._key_field.name,
                     'AttributeType': self._key_field.data_type
                 }
             ],
-            TableName=self.table_name,
-            KeySchema=[
+            'TableName': self.table_name,
+            'KeySchema': [
                 {
                     'AttributeName': self._key_field.name,
                     'KeyType': 'HASH'
                 }
             ],
-            ProvisionedThroughput={
+            'ProvisionedThroughput': {
                 'ReadCapacityUnits': self.read_capacity_units,
                 'WriteCapacityUnits': self.write_capacity_units
             }
-        )
+        }
 
     def _get_or_create_table(self):
         """Create table if not exists, otherwise return the description."""
@@ -215,20 +219,20 @@ class DynamoDBBackend(KeyValueStoreBackend):
 
     def _prepare_get_request(self, key):
         """Construct the item retrieval request parameters."""
-        return dict(
-            TableName=self.table_name,
-            Key={
+        return {
+            'TableName': self.table_name,
+            'Key': {
                 self._key_field.name: {
                     self._key_field.data_type: key
                 }
             }
-        )
+        }
 
     def _prepare_put_request(self, key, value):
         """Construct the item creation request parameters."""
-        return dict(
-            TableName=self.table_name,
-            Item={
+        return {
+            'TableName': self.table_name,
+            'Item': {
                 self._key_field.name: {
                     self._key_field.data_type: key
                 },
@@ -239,7 +243,7 @@ class DynamoDBBackend(KeyValueStoreBackend):
                     self._timestamp_field.data_type: str(time())
                 }
             }
-        )
+        }
 
     def _item_to_dict(self, raw_response):
         """Convert get_item() response to field-value pairs."""

+ 7 - 2
celery/backends/elasticsearch.py

@@ -1,18 +1,23 @@
 # -*- coding: utf-8 -*-
 """Elasticsearch result store backend."""
 from __future__ import absolute_import, unicode_literals
+
 from datetime import datetime
-from kombu.utils.url import _parse_url
+
 from kombu.utils.encoding import bytes_to_str
+from kombu.utils.url import _parse_url
+
 from celery.exceptions import ImproperlyConfigured
 from celery.five import items
+
 from .base import KeyValueStoreBackend
+
 try:
     import elasticsearch
 except ImportError:
     elasticsearch = None  # noqa
 
-__all__ = ['ElasticsearchBackend']
+__all__ = ('ElasticsearchBackend',)
 
 E_LIB_MISSING = """\
 You need to install the elasticsearch library to use the Elasticsearch \

+ 5 - 2
celery/backends/filesystem.py

@@ -1,12 +1,15 @@
 # -*- coding: utf-8 -*-
 """File-system result store backend."""
 from __future__ import absolute_import, unicode_literals
-import os
+
 import locale
+import os
+
 from kombu.utils.encoding import ensure_bytes
+
 from celery import uuid
-from celery.exceptions import ImproperlyConfigured
 from celery.backends.base import KeyValueStoreBackend
+from celery.exceptions import ImproperlyConfigured
 
 # Python 2 does not have FileNotFoundError and IsADirectoryError
 try:

+ 12 - 8
celery/backends/mongodb.py

@@ -1,13 +1,17 @@
 # -*- coding: utf-8 -*-
 """MongoDB result store backend."""
 from __future__ import absolute_import, unicode_literals
+
 from datetime import datetime, timedelta
+
+from kombu.exceptions import EncodeError
 from kombu.utils.objects import cached_property
 from kombu.utils.url import maybe_sanitize_url
-from kombu.exceptions import EncodeError
+
 from celery import states
 from celery.exceptions import ImproperlyConfigured
-from celery.five import string_t, items
+from celery.five import items, string_t
+
 from .base import BaseBackend
 
 try:
@@ -27,7 +31,7 @@ else:                                       # pragma: no cover
     class InvalidDocument(Exception):       # noqa
         pass
 
-__all__ = ['MongoBackend']
+__all__ = ('MongoBackend',)
 
 BINARY_CODECS = frozenset(['pickle', 'msgpack'])
 
@@ -117,11 +121,11 @@ class MongoBackend(BaseBackend):
             self.options.update(config)
 
     def _prepare_client_options(self):
-            if pymongo.version_tuple >= (3,):
-                return {'maxPoolSize': self.max_pool_size}
-            else:  # pragma: no cover
-                return {'max_pool_size': self.max_pool_size,
-                        'auto_start_request': False}
+        if pymongo.version_tuple >= (3,):
+            return {'maxPoolSize': self.max_pool_size}
+        else:  # pragma: no cover
+            return {'max_pool_size': self.max_pool_size,
+                    'auto_start_request': False}
 
     def _get_connection(self):
         """Connect to the MongoDB server."""

+ 94 - 22
celery/backends/redis.py

@@ -18,30 +18,38 @@ from celery.utils.functional import dictfilter
 from celery.utils.log import get_logger
 from celery.utils.time import humanize_seconds
 
-from . import async
-from . import base
+from . import async, base
 
 try:
     import redis
     from kombu.transport.redis import get_redis_error_classes
-except ImportError:                 # pragma: no cover
-    redis = None                    # noqa
+except ImportError:  # pragma: no cover
+    redis = None  # noqa
     get_redis_error_classes = None  # noqa
 
-__all__ = ['RedisBackend']
+try:
+    from redis import sentinel
+except ImportError:
+    sentinel = None
+
+__all__ = ('RedisBackend', 'SentinelBackend')
 
 E_REDIS_MISSING = """
 You need to install the redis library in order to use \
 the Redis result store backend.
 """
 
+E_REDIS_SENTINEL_MISSING = """
+You need to install the redis library with sentinel support in order \
+to use the Redis result store backend.
+"""
+
 E_LOST = 'Connection to Redis lost: Retry (%s/%s) %s.'
 
 logger = get_logger(__name__)
 
 
 class ResultConsumer(async.BaseResultConsumer):
-
     _pubsub = None
 
     def __init__(self, *args, **kwargs):
@@ -243,15 +251,13 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
             raise ChordError('Dependency {0} raised {1!r}'.format(tid, retval))
         return retval
 
-    def apply_chord(self, header, partial_args, group_id, body,
-                    result=None, options={}, **kwargs):
+    def apply_chord(self, header_result, body, **kwargs):
         # Overrides this to avoid calling GroupResult.save
         # pylint: disable=method-hidden
         # Note that KeyValueStoreBackend.__init__ sets self.apply_chord
         # if the implements_incr attr is set.  Redis backend doesn't set
         # this flag.
-        options['task_id'] = group_id
-        return header(*partial_args, **options or {})
+        pass
 
     def on_chord_part_return(self, request, state, result,
                              propagate=None, **kwargs):
@@ -265,12 +271,12 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
         tkey = self.get_key_for_group(gid, '.t')
         result = self.encode_result(result, state)
         with client.pipeline() as pipe:
-            _, readycount, totaldiff, _, _ = pipe                           \
-                .rpush(jkey, self.encode([1, tid, state, result]))          \
-                .llen(jkey)                                                 \
-                .get(tkey)                                                  \
-                .expire(jkey, self.expires)                                 \
-                .expire(tkey, self.expires)                                 \
+            _, readycount, totaldiff, _, _ = pipe \
+                .rpush(jkey, self.encode([1, tid, state, result])) \
+                .llen(jkey) \
+                .get(tkey) \
+                .expire(jkey, self.expires) \
+                .expire(tkey, self.expires) \
                 .execute()
 
         totaldiff = int(totaldiff or 0)
@@ -281,10 +287,10 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
             if readycount == total:
                 decode, unpack = self.decode, self._unpack_chord_result
                 with client.pipeline() as pipe:
-                    resl, _, _ = pipe               \
-                        .lrange(jkey, 0, total)     \
-                        .delete(jkey)               \
-                        .delete(tkey)               \
+                    resl, _, _ = pipe \
+                        .lrange(jkey, 0, total) \
+                        .delete(jkey) \
+                        .delete(tkey) \
                         .execute()
                 try:
                     callback.delay([unpack(tup, decode) for tup in resl])
@@ -306,10 +312,16 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
             )
 
     def _create_client(self, **params):
-        return self.redis.StrictRedis(
-            connection_pool=self.ConnectionPool(**params),
+        return self._get_client()(
+            connection_pool=self._get_pool(**params),
         )
 
+    def _get_client(self):
+        return self.redis.StrictRedis
+
+    def _get_pool(self, **params):
+        return self.ConnectionPool(**params)
+
     @property
     def ConnectionPool(self):
         if self._ConnectionPool is None:
@@ -340,3 +352,63 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
     @deprecated.Property(4.0, 5.0)
     def password(self):
         return self.connparams['password']
+
+
+class SentinelBackend(RedisBackend):
+    """Redis sentinel task result store."""
+
+    sentinel = sentinel
+
+    def __init__(self, *args, **kwargs):
+        if self.sentinel is None:
+            raise ImproperlyConfigured(E_REDIS_SENTINEL_MISSING.strip())
+
+        super(SentinelBackend, self).__init__(*args, **kwargs)
+
+    def _params_from_url(self, url, defaults):
+        # URL looks like sentinel://0.0.0.0:26347/3;sentinel://0.0.0.0:26348/3.
+        chunks = url.split(";")
+        connparams = dict(defaults, hosts=[])
+        for chunk in chunks:
+            data = super(SentinelBackend, self)._params_from_url(
+                url=chunk, defaults=defaults)
+            connparams['hosts'].append(data)
+        for p in ("host", "port", "db", "password"):
+            connparams.pop(p)
+
+        # Add db/password to connparams so we connect to the right instance
+        for p in ("db", "password"):
+            if connparams['hosts'] and p in connparams['hosts'][0]:
+                connparams[p] = connparams['hosts'][0].get(p)
+        return connparams
+
+    def _get_sentinel_instance(self, **params):
+        connparams = params.copy()
+
+        hosts = connparams.pop("hosts")
+        result_backend_transport_opts = self.app.conf.get(
+            "result_backend_transport_options", {})
+        min_other_sentinels = result_backend_transport_opts.get(
+            "min_other_sentinels", 0)
+        sentinel_kwargs = result_backend_transport_opts.get(
+            "sentinel_kwargs", {})
+
+        sentinel_instance = self.sentinel.Sentinel(
+            [(cp['host'], cp['port']) for cp in hosts],
+            min_other_sentinels=min_other_sentinels,
+            sentinel_kwargs=sentinel_kwargs,
+            **connparams)
+
+        return sentinel_instance
+
+    def _get_pool(self, **params):
+        sentinel_instance = self._get_sentinel_instance(**params)
+
+        result_backend_transport_opts = self.app.conf.get(
+            "result_backend_transport_options", {})
+        master_name = result_backend_transport_opts.get("master_name", None)
+
+        return sentinel_instance.master_for(
+            service_name=master_name,
+            redis_class=self._get_client(),
+        ).connection_pool
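
Editorial note: the new ``SentinelBackend`` reuses ``RedisBackend`` but resolves its connection pool through redis-py's ``Sentinel`` client. A minimal configuration sketch (the service name ``mymaster`` and the sentinel addresses are illustrative assumptions):

    from celery import Celery

    # Multiple sentinel nodes are separated by ';' in the URL, exactly as
    # SentinelBackend._params_from_url() splits them above.
    app = Celery(
        'proj',
        backend='sentinel://0.0.0.0:26347/3;sentinel://0.0.0.0:26348/3',
    )
    # master_name picks the monitored service; min_other_sentinels and
    # sentinel_kwargs are forwarded to redis.sentinel.Sentinel() by
    # _get_sentinel_instance().
    app.conf.result_backend_transport_options = {
        'master_name': 'mymaster',
    }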

+ 6 - 1
celery/backends/riak.py

@@ -1,10 +1,15 @@
 # -*- coding: utf-8 -*-
 """Riak result store backend."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
 from kombu.utils.url import _parse_url
+
 from celery.exceptions import ImproperlyConfigured
+
 from .base import KeyValueStoreBackend
+
 try:
     import riak
     from riak import RiakClient
@@ -12,7 +17,7 @@ try:
 except ImportError:  # pragma: no cover
     riak = RiakClient = last_written_resolver = None  # noqa
 
-__all__ = ['RiakBackend']
+__all__ = ('RiakBackend',)
 
 E_BUCKET_NAME = """\
 Riak bucket names must be composed of ASCII characters only, not: {0!r}\

+ 2 - 2
celery/backends/rpc.py

@@ -5,9 +5,9 @@ RPC-style result backend, using reply-to and one queue per client.
 """
 from __future__ import absolute_import, unicode_literals
 
-import kombu
 import time
 
+import kombu
 from kombu.common import maybe_declare
 from kombu.utils.compat import register_after_fork
 from kombu.utils.objects import cached_property
@@ -19,7 +19,7 @@ from celery.five import items, range
 from . import base
 from .async import AsyncBackendMixin, BaseResultConsumer
 
-__all__ = ['BacklogLimitExceeded', 'RPCBackend']
+__all__ = ('BacklogLimitExceeded', 'RPCBackend')
 
 E_NO_CHORD_SUPPORT = """
 The "rpc" result backend does not support chords!

+ 39 - 18
celery/beat.py

@@ -6,36 +6,32 @@ import copy
 import errno
 import heapq
 import os
-import time
 import shelve
 import sys
+import time
 import traceback
-
 from collections import namedtuple
 from functools import total_ordering
 from threading import Event, Thread
 
 from billiard import ensure_multiprocessing
-from billiard.context import Process
 from billiard.common import reset_signals
+from billiard.context import Process
 from kombu.utils.functional import maybe_evaluate, reprcall
 from kombu.utils.objects import cached_property
 
-from . import __version__
-from . import platforms
-from . import signals
-from .five import (
-    items, monotonic, python_2_unicode_compatible, reraise, values,
-)
-from .schedules import maybe_schedule, crontab
+from . import __version__, platforms, signals
+from .five import (items, monotonic, python_2_unicode_compatible, reraise,
+                   values)
+from .schedules import crontab, maybe_schedule
 from .utils.imports import load_extension_class_names, symbol_by_name
-from .utils.time import humanize_seconds
 from .utils.log import get_logger, iter_open_logger_fds
+from .utils.time import humanize_seconds
 
-__all__ = [
+__all__ = (
     'SchedulingError', 'ScheduleEntry', 'Scheduler',
     'PersistentScheduler', 'Service', 'EmbeddedService',
-]
+)
 
 event_t = namedtuple('event_t', ('time', 'priority', 'entry'))
 
@@ -97,17 +93,18 @@ class ScheduleEntry(object):
         self.kwargs = kwargs
         self.options = options
         self.schedule = maybe_schedule(schedule, relative, app=self.app)
-        self.last_run_at = last_run_at or self._default_now()
+        self.last_run_at = last_run_at or self.default_now()
         self.total_run_count = total_run_count or 0
 
-    def _default_now(self):
+    def default_now(self):
         return self.schedule.now() if self.schedule else self.app.now()
+    _default_now = default_now  # compat
 
     def _next_instance(self, last_run_at=None):
         """Return new instance, with date and count fields updated."""
         return self.__class__(**dict(
             self,
-            last_run_at=last_run_at or self._default_now(),
+            last_run_at=last_run_at or self.default_now(),
             total_run_count=self.total_run_count + 1,
         ))
     __next__ = next = _next_instance  # for 2to3
@@ -155,6 +152,28 @@ class ScheduleEntry(object):
             return id(self) < id(other)
         return NotImplemented
 
+    def editable_fields_equal(self, other):
+        for attr in ('task', 'args', 'kwargs', 'options', 'schedule'):
+            if getattr(self, attr) != getattr(other, attr):
+                return False
+        return True
+
+    def __eq__(self, other):
+        """Test schedule entries equality.
+
+        Will only compare "editable" fields:
+        ``task``, ``schedule``, ``args``, ``kwargs``, ``options``.
+        """
+        return self.editable_fields_equal(other)
+
+    def __ne__(self, other):
+        """Test schedule entries inequality.
+
+        Will only compare "editable" fields:
+        ``task``, ``schedule``, ``args``, ``kwargs``, ``options``.
+        """
+        return not self == other
+
 
 class Scheduler(object):
     """Scheduler for periodic tasks.
@@ -237,7 +256,7 @@ class Scheduler(object):
     def _when(self, entry, next_time_to_run, mktime=time.mktime):
         adjust = self.adjust
 
-        return (mktime(entry.schedule.now().timetuple()) +
+        return (mktime(entry.default_now().timetuple()) +
                 (adjust(next_time_to_run) or 0))
 
     def populate_heap(self, event_t=event_t, heapify=heapq.heapify):
@@ -290,7 +309,9 @@ class Scheduler(object):
             return False
         for name, old_entry in old_schedules.items():
             new_entry = new_schedules.get(name)
-            if not new_entry or old_entry.schedule != new_entry.schedule:
+            if not new_entry:
+                return False
+            if new_entry != old_entry:
                 return False
         return True
 

+ 1 - 1
celery/bin/__init__.py

@@ -1,4 +1,4 @@
 from __future__ import absolute_import, unicode_literals
 from .base import Option
 
-__all__ = ['Option']
+__all__ = ('Option',)

+ 4 - 6
celery/bin/amqp.py

@@ -6,22 +6,20 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import cmd as _cmd
-import sys
-import shlex
 import pprint
-
+import shlex
+import sys
 from functools import partial
 from itertools import count
 
 from kombu.utils.encoding import safe_str
 
-from celery.utils.functional import padlist
-
 from celery.bin.base import Command
 from celery.five import string_t
+from celery.utils.functional import padlist
 from celery.utils.serialization import strtobool
 
-__all__ = ['AMQPAdmin', 'AMQShell', 'Spec', 'amqp']
+__all__ = ('AMQPAdmin', 'AMQShell', 'Spec', 'amqp')
 
 # Map to coerce strings to other types.
 COERCE = {bool: strtobool}

+ 19 - 25
celery/bin/base.py

@@ -3,33 +3,27 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import argparse
+import json
 import os
 import random
 import re
 import sys
 import warnings
-import json
-
 from collections import defaultdict
 from heapq import heappush
 from pprint import pformat
 
-from celery import VERSION_BANNER, Celery, maybe_patch_concurrency
-from celery import signals
+from celery import VERSION_BANNER, Celery, maybe_patch_concurrency, signals
 from celery.exceptions import CDeprecationWarning, CPendingDeprecationWarning
-from celery.five import (
-    getfullargspec, items, python_2_unicode_compatible,
-    string, string_t, text_t, long_t,
-)
+from celery.five import (getfullargspec, items, long_t,
+                         python_2_unicode_compatible, string, string_t,
+                         text_t)
 from celery.platforms import EX_FAILURE, EX_OK, EX_USAGE, isatty
-from celery.utils import imports
-from celery.utils import term
-from celery.utils import text
+from celery.utils import imports, term, text
 from celery.utils.functional import dictfilter
-from celery.utils.nodenames import node_format, host_format
+from celery.utils.nodenames import host_format, node_format
 from celery.utils.objects import Bunch
 
-
 # Option is here for backwards compatibility, as third-party commands
 # may import it from here.
 try:
@@ -42,9 +36,9 @@ try:
 except NameError:  # pragma: no cover
     pass
 
-__all__ = [
+__all__ = (
     'Error', 'UsageError', 'Extensions', 'Command', 'Option', 'daemon_options',
-]
+)
 
 # always enable DeprecationWarnings, so our users can see them.
 for warning in (CDeprecationWarning, CPendingDeprecationWarning):
@@ -85,18 +79,18 @@ def _add_optparse_argument(parser, opt, typemap={
     # store_true sets value to "('NO', 'DEFAULT')" for some
     # crazy reason, so not to set a sane default here.
     if opt.action == 'store_true' and opt.default is None:
-            opt.default = False
+        opt.default = False
     parser.add_argument(
         *opt._long_opts + opt._short_opts,
-        **dictfilter(dict(
-            action=opt.action,
-            type=typemap.get(opt.type, opt.type),
-            dest=opt.dest,
-            nargs=opt.nargs,
-            choices=opt.choices,
-            help=opt.help,
-            metavar=opt.metavar,
-            default=opt.default)))
+        **dictfilter({
+            'action': opt.action,
+            'type': typemap.get(opt.type, opt.type),
+            'dest': opt.dest,
+            'nargs': opt.nargs,
+            'choices': opt.choices,
+            'help': opt.help,
+            'metavar': opt.metavar,
+            'default': opt.default}))
 
 
 def _add_compat_options(parser, options):

+ 6 - 4
celery/bin/beat.py

@@ -20,7 +20,7 @@
 .. cmdoption:: -S, --scheduler
 
     Scheduler class to use.
-    Default is :class:`celery.beat.PersistentScheduler`.
+    Default is :class:`{default}`.
 
 .. cmdoption:: --max-interval
 
@@ -65,11 +65,13 @@
     Executable to use for the detached process.
 """
 from __future__ import absolute_import, unicode_literals
+
 from functools import partial
-from celery.platforms import detached, maybe_drop_privileges
+
 from celery.bin.base import Command, daemon_options
+from celery.platforms import detached, maybe_drop_privileges
 
-__all__ = ['beat']
+__all__ = ('beat',)
 
 HELP = __doc__
 
@@ -113,7 +115,7 @@ class beat(Command):
         bopts.add_argument(
             '-s', '--schedule', default=c.beat_schedule_filename)
         bopts.add_argument('--max-interval', type=float)
-        bopts.add_argument('-S', '--scheduler')
+        bopts.add_argument('-S', '--scheduler', default=c.beat_scheduler)
         bopts.add_argument('-l', '--loglevel', default='WARN')
 
         daemon_options(parser, default_pidfile='celerybeat.pid')
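
Editorial note: since ``-S/--scheduler`` now defaults to the app's ``beat_scheduler`` setting, the ``{default}`` placeholder in the docstring and the CLI stay in sync with app configuration. A sketch (placeholder app; the class path shown is the stock default):

    from celery import Celery

    app = Celery('proj')
    # This setting now also supplies the CLI default for `celery beat -S`.
    app.conf.beat_scheduler = 'celery.beat.PersistentScheduler'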

+ 2 - 0
celery/bin/call.py

@@ -1,6 +1,8 @@
 """The ``celery call`` program used to send tasks from the command-line."""
 from __future__ import absolute_import, unicode_literals
+
 from kombu.utils.json import loads
+
 from celery.bin.base import Command
 from celery.five import string_t
 from celery.utils.time import maybe_iso8601

+ 7 - 11
celery/bin/celery.py

@@ -253,22 +253,16 @@ in any command that also has a `--detach` option.
 
     Destination routing key (defaults to the queue routing key).
 """
-from __future__ import absolute_import, unicode_literals, print_function
+from __future__ import absolute_import, print_function, unicode_literals
 
 import numbers
 import sys
-
 from functools import partial
 
-from celery.platforms import EX_OK, EX_FAILURE, EX_USAGE
-from celery.utils import term
-from celery.utils import text
-
-# Cannot use relative imports here due to a Windows issue (#1111).
-from celery.bin.base import Command, Extensions
-
 # Import commands from other modules
 from celery.bin.amqp import amqp
+# Cannot use relative imports here due to a Windows issue (#1111).
+from celery.bin.base import Command, Extensions
 from celery.bin.beat import beat
 from celery.bin.call import call
 from celery.bin.control import _RemoteControl  # noqa
@@ -281,10 +275,12 @@ from celery.bin.migrate import migrate
 from celery.bin.purge import purge
 from celery.bin.result import result
 from celery.bin.shell import shell
-from celery.bin.worker import worker
 from celery.bin.upgrade import upgrade
+from celery.bin.worker import worker
+from celery.platforms import EX_FAILURE, EX_OK, EX_USAGE
+from celery.utils import term, text
 
-__all__ = ['CeleryCommand', 'main']
+__all__ = ('CeleryCommand', 'main')
 
 HELP = """
 ---- -- - - ---- Commands- -------------- --- ------------

+ 6 - 3
celery/bin/celeryd_detach.py

@@ -6,16 +6,19 @@ leads to weird issues (it was a long time ago now, but it
 could have something to do with the threading mutex bug)
 """
 from __future__ import absolute_import, unicode_literals
+
 import argparse
-import celery
 import os
 import sys
+
+import celery
+from celery.bin.base import daemon_options
 from celery.platforms import EX_FAILURE, detached
 from celery.utils.log import get_logger
 from celery.utils.nodenames import default_nodename, node_format
-from celery.bin.base import daemon_options
 
-__all__ = ['detached_celeryd', 'detach']
+__all__ = ('detached_celeryd', 'detach')
+
 logger = get_logger(__name__)
 C_FAKEFORK = os.environ.get('C_FAKEFORK')
 

+ 3 - 1
celery/bin/control.py

@@ -1,9 +1,11 @@
 """The ``celery control``, ``. inspect`` and ``. status`` programs."""
 from __future__ import absolute_import, unicode_literals
+
 from kombu.utils.json import dumps
 from kombu.utils.objects import cached_property
-from celery.five import items, string_t
+
 from celery.bin.base import Command
+from celery.five import items, string_t
 from celery.platforms import EX_UNAVAILABLE, EX_USAGE
 from celery.utils import text
 

+ 4 - 2
celery/bin/events.py

@@ -66,12 +66,14 @@
     Executable to use for the detached process.
 """
 from __future__ import absolute_import, unicode_literals
+
 import sys
 from functools import partial
-from celery.platforms import detached, set_process_title, strargv
+
 from celery.bin.base import Command, daemon_options
+from celery.platforms import detached, set_process_title, strargv
 
-__all__ = ['events']
+__all__ = ('events',)
 
 HELP = __doc__
 

+ 5 - 2
celery/bin/graph.py

@@ -4,12 +4,15 @@
 .. program:: celery graph
 """
 from __future__ import absolute_import, unicode_literals
+
 from operator import itemgetter
+
 from celery.five import items, python_2_unicode_compatible
 from celery.utils.graph import DependencyGraph, GraphFormatter
+
 from .base import Command
 
-__all__ = ['graph']
+__all__ = ('graph',)
 
 
 class graph(Command):
@@ -161,7 +164,7 @@ class graph(Command):
             workers = args['nodes']
             threads = args.get('threads') or []
         except KeyError:
-            replies = self.app.control.inspect().stats()
+            replies = self.app.control.inspect().stats() or {}
             workers, threads = [], []
             for worker, reply in items(replies):
                 workers.append(worker)

+ 1 - 0
celery/bin/list.py

@@ -1,5 +1,6 @@
 """The ``celery list bindings`` command, used to inspect queue bindings."""
 from __future__ import absolute_import, unicode_literals
+
 from celery.bin.base import Command
 
 

+ 3 - 1
celery/bin/logtool.py

@@ -5,12 +5,14 @@
 """
 
 from __future__ import absolute_import, unicode_literals
+
 import re
 from collections import Counter
 from fileinput import FileInput
+
 from .base import Command
 
-__all__ = ['logtool']
+__all__ = ('logtool',)
 
 RE_LOG_START = re.compile(r'^\[\d\d\d\d\-\d\d-\d\d ')
 RE_TASK_RECEIVED = re.compile(r'.+?\] Received')

+ 2 - 0
celery/bin/migrate.py

@@ -1,7 +1,9 @@
 """The ``celery migrate`` command, used to filter and move messages."""
 from __future__ import absolute_import, unicode_literals
+
 from celery.bin.base import Command
 
+
 MIGRATE_PROGRESS_FMT = """\
 Migrating task {state.count}/{state.strtotal}: \
 {body[task]}[{body[id]}]\

+ 4 - 1
celery/bin/multi.py

@@ -94,18 +94,21 @@ Examples
     celery worker -n xuzzy@myhost -c 3
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 import os
 import signal
 import sys
 from functools import wraps
+
 from kombu.utils.objects import cached_property
+
 from celery import VERSION_BANNER
 from celery.apps.multi import Cluster, MultiParser, NamespacedOptionParser
 from celery.platforms import EX_FAILURE, EX_OK, signals
 from celery.utils import term
 from celery.utils.text import pluralize
 
-__all__ = ['MultiTool']
+__all__ = ('MultiTool',)
 
 USAGE = """\
 usage: {prog_name} start <node1 node2 nodeN|range> [worker options]

+ 2 - 1
celery/bin/purge.py

@@ -1,7 +1,8 @@
 """The ``celery purge`` program, used to delete messages from queues."""
 from __future__ import absolute_import, unicode_literals
-from celery.five import keys
+
 from celery.bin.base import Command
+from celery.five import keys
 from celery.utils import text
 
 

+ 1 - 0
celery/bin/result.py

@@ -1,5 +1,6 @@
 """The ``celery result`` program, used to inspect task results."""
 from __future__ import absolute_import, unicode_literals
+
 from celery.bin.base import Command
 
 

+ 3 - 1
celery/bin/shell.py

@@ -1,10 +1,12 @@
 """The ``celery shell`` program, used to start a REPL."""
 from __future__ import absolute_import, unicode_literals
+
 import os
 import sys
 from importlib import import_module
-from celery.five import values
+
 from celery.bin.base import Command
+from celery.five import values
 
 
 class shell(Command):  # pragma: no cover

+ 2 - 0
celery/bin/upgrade.py

@@ -1,6 +1,8 @@
 """The ``celery upgrade`` command, used to upgrade from previous versions."""
 from __future__ import absolute_import, print_function, unicode_literals
+
 import codecs
+
 from celery.app import defaults
 from celery.bin.base import Command
 from celery.utils.functional import pass1

+ 3 - 1
celery/bin/worker.py

@@ -176,7 +176,9 @@ The :program:`celery worker` command (previously known as ``celeryd``)
     Executable to use for the detached process.
 """
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
 from celery import concurrency
 from celery.bin.base import Command, daemon_options
 from celery.bin.celeryd_detach import detached_celeryd
@@ -185,7 +187,7 @@ from celery.platforms import maybe_drop_privileges
 from celery.utils.log import LOG_LEVELS, mlevel
 from celery.utils.nodenames import default_nodename
 
-__all__ = ['worker', 'main']
+__all__ = ('worker', 'main')
 
 HELP = __doc__
 

+ 1 - 1
celery/bootsteps.py

@@ -21,7 +21,7 @@ except ImportError:  # pragma: no cover
 else:
     IGNORE_ERRORS = (GreenletExit,)
 
-__all__ = ['Blueprint', 'Step', 'StartStopStep', 'ConsumerStep']
+__all__ = ('Blueprint', 'Step', 'StartStopStep', 'ConsumerStep')
 
 #: States
 RUN = 0x1

+ 60 - 52
celery/canvas.py

@@ -10,10 +10,10 @@ from __future__ import absolute_import, unicode_literals
 import itertools
 import operator
 import sys
-
 from collections import MutableSequence, deque
 from copy import deepcopy
-from functools import partial as _partial, reduce
+from functools import partial as _partial
+from functools import reduce
 from operator import itemgetter
 
 from kombu.utils.functional import fxrange, reprcall
@@ -26,17 +26,17 @@ from celery.five import python_2_unicode_compatible
 from celery.local import try_import
 from celery.result import GroupResult
 from celery.utils import abstract
-from celery.utils.functional import (
-    maybe_list, is_list, _regen, regen, chunks as _chunks,
-    seq_concat_seq, seq_concat_item,
-)
+from celery.utils.functional import _regen
+from celery.utils.functional import chunks as _chunks
+from celery.utils.functional import (is_list, maybe_list, regen,
+                                     seq_concat_item, seq_concat_seq)
 from celery.utils.objects import getitem_property
-from celery.utils.text import truncate, remove_repeating_from_task
+from celery.utils.text import remove_repeating_from_task, truncate
 
-__all__ = [
+__all__ = (
     'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
     'group', 'chord', 'signature', 'maybe_signature',
-]
+)
 
 PY3 = sys.version_info[0] == 3
 
@@ -375,69 +375,53 @@ class Signature(dict):
     def __or__(self, other):
         # These could be implemented in each individual class,
         # I'm sure, but for now we have this.
-        if isinstance(other, chord) and len(other.tasks) == 1:
-            # chord with one header -> header[0] | body
-            other = other.tasks[0] | other.body
-
         if isinstance(self, group):
             if isinstance(other, group):
                 # group() | group() -> single group
                 return group(
                     itertools.chain(self.tasks, other.tasks), app=self.app)
             # group() | task -> chord
-            if len(self.tasks) == 1:
-                # group(ONE.s()) | other -> ONE.s() | other
-                # Issue #3323
-                return self.tasks[0] | other
             return chord(self, body=other, app=self._app)
         elif isinstance(other, group):
             # unroll group with one member
             other = maybe_unroll_group(other)
             if isinstance(self, _chain):
                 # chain | group() -> chain
-                sig = self.clone()
-                sig.tasks.append(other)
-                return sig
+                return _chain(seq_concat_item(
+                    self.unchain_tasks(), other), app=self._app)
             # task | group() -> chain
             return _chain(self, other, app=self.app)
 
         if not isinstance(self, _chain) and isinstance(other, _chain):
             # task | chain -> chain
-            return _chain(
-                seq_concat_seq((self,), other.tasks), app=self._app)
+            return _chain(seq_concat_seq(
+                (self,), other.unchain_tasks()), app=self._app)
         elif isinstance(other, _chain):
             # chain | chain -> chain
-            sig = self.clone()
-            if isinstance(sig.tasks, tuple):
-                sig.tasks = list(sig.tasks)
-            sig.tasks.extend(other.tasks)
-            return sig
+            return _chain(seq_concat_seq(
+                self.unchain_tasks(), other.unchain_tasks()), app=self._app)
         elif isinstance(self, chord):
-            # chord(ONE, body) | other -> ONE | body | other
-            # chord with one header task is unecessary.
-            if len(self.tasks) == 1:
-                return self.tasks[0] | self.body | other
             # chord | task ->  attach to body
             sig = self.clone()
             sig.body = sig.body | other
             return sig
         elif isinstance(other, Signature):
             if isinstance(self, _chain):
-                if isinstance(self.tasks[-1], group):
+                if self.tasks and isinstance(self.tasks[-1], group):
                     # CHAIN [last item is group] | TASK -> chord
                     sig = self.clone()
                     sig.tasks[-1] = chord(
                         sig.tasks[-1], other, app=self._app)
                     return sig
-                elif isinstance(self.tasks[-1], chord):
+                elif self.tasks and isinstance(self.tasks[-1], chord):
                     # CHAIN [last item is chord] -> chain with chord body.
                     sig = self.clone()
                     sig.tasks[-1].body = sig.tasks[-1].body | other
                     return sig
                 else:
                     # chain | task -> chain
-                    return _chain(
-                        seq_concat_item(self.tasks, other), app=self._app)
+                    return _chain(seq_concat_item(
+                        self.unchain_tasks(), other), app=self._app)
             # task | task -> chain
             return _chain(self, other, app=self._app)
         return NotImplemented
@@ -557,6 +541,15 @@ class _chain(Signature):
         ]
         return s
 
+    def unchain_tasks(self):
+        # Clone the chain's tasks, assigning the signatures from
+        # link_error to each task.
+        tasks = [t.clone() for t in self.tasks]
+        for sig in self.options.get('link_error', []):
+            for task in tasks:
+                task.link_error(sig)
+        return tasks
+
     def apply_async(self, args=(), kwargs={}, **options):
         # python is best at unpacking kwargs, so .run is here to do that.
         app = self.app
@@ -950,6 +943,8 @@ class group(Signature):
             tasks = tasks[0]
             if isinstance(tasks, group):
                 tasks = tasks.tasks
+            if isinstance(tasks, abstract.CallableSignature):
+                tasks = [tasks.clone()]
             if not isinstance(tasks, _regen):
                 tasks = regen(tasks)
         Signature.__init__(
@@ -1174,8 +1169,9 @@ class chord(Signature):
 
     @classmethod
     def from_dict(cls, d, app=None):
-        args, d['kwargs'] = cls._unpack_args(**d['kwargs'])
-        return _upgrade(d, cls(*args, app=app, **d))
+        options = d.copy()
+        args, options['kwargs'] = cls._unpack_args(**options['kwargs'])
+        return _upgrade(d, cls(*args, app=app, **options))
 
     @staticmethod
     def _unpack_args(header=None, body=None, **kwargs):
@@ -1187,8 +1183,8 @@ class chord(Signature):
                  args=(), kwargs={}, app=None, **options):
         Signature.__init__(
             self, task, args,
-            dict(kwargs=kwargs, header=_maybe_group(header, app),
-                 body=maybe_signature(body, app=app)), app=app, **options
+            {'kwargs': kwargs, 'header': _maybe_group(header, app),
+             'body': maybe_signature(body, app=app)}, app=app, **options
         )
         self.subtask_type = 'chord'
 
@@ -1235,11 +1231,6 @@ class chord(Signature):
         if app.conf.task_always_eager:
             return self.apply(args, kwargs,
                               body=body, task_id=task_id, **options)
-        if len(self.tasks) == 1:
-            # chord([A], B) can be optimized as A | B
-            # - Issue #3323
-            return (self.tasks[0] | body).set(task_id=task_id).apply_async(
-                args, kwargs, **options)
         # chord([A, B, ...], C)
         return self.run(tasks, body, args, task_id=task_id, **options)
 
@@ -1277,19 +1268,32 @@ class chord(Signature):
             options.pop('task_id', None)
             body.options.update(options)
 
-        results = header.freeze(
-            group_id=group_id, chord=body, root_id=root_id).results
         bodyres = body.freeze(task_id, root_id=root_id)
 
         # Chains should not be passed to the header tasks. See #3771
         options.pop('chain', None)
+        # Neither should chords, for deeply nested chords to work
+        options.pop('chord', None)
+        options.pop('task_id', None)
+
+        header.freeze(group_id=group_id, chord=body, root_id=root_id)
+        header_result = header(*partial_args, task_id=group_id, **options)
+
+        if len(header_result) > 0:
+            app.backend.apply_chord(
+                header_result,
+                body,
+                interval=interval,
+                countdown=countdown,
+                max_retries=max_retries,
+            )
+        # The execution of a chord body is normally triggered by its header's
+        # tasks completing. If the header is empty this will never happen, so
+        # we execute the body manually here.
+        else:
+            body.delay([])
 
-        parent = app.backend.apply_chord(
-            header, partial_args, group_id, body,
-            interval=interval, countdown=countdown,
-            options=options, max_retries=max_retries,
-            result=results)
-        bodyres.parent = parent
+        bodyres.parent = header_result
         return bodyres
 
     def clone(self, *args, **kwargs):
@@ -1364,6 +1368,8 @@ def signature(varies, *args, **kwargs):
             return varies.clone()
         return Signature.from_dict(varies, app=app)
     return Signature(varies, *args, **kwargs)
+
+
 subtask = signature  # noqa: E305 XXX compat
 
 
@@ -1392,4 +1398,6 @@ def maybe_signature(d, app=None, clone=False):
         if app is not None:
             d._app = app
     return d
+
+
 maybe_subtask = maybe_signature  # noqa: E305 XXX compat
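
Editorial note: ``unchain_tasks()`` means ``|`` no longer appends to a cloned chain in place; composition rebuilds the chain from cloned tasks and copies any ``link_error`` callback onto each of them. A sketch with placeholder task names:

    from celery import signature

    a, b, c, on_err = (signature(name) for name in
                       ('tasks.a', 'tasks.b', 'tasks.c', 'tasks.on_err'))

    ch = a | b             # task | task -> chain
    ch.link_error(on_err)  # stored in ch.options['link_error']

    # chain | task rebuilds the chain from unchain_tasks(), so the error
    # callback attached to the chain is cloned onto every member task.
    longer = ch | c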

+ 1 - 1
celery/concurrency/__init__.py

@@ -7,7 +7,7 @@ from __future__ import absolute_import, unicode_literals
 # too much (e.g., for eventlet patching)
 from kombu.utils.imports import symbol_by_name
 
-__all__ = ['get_implementation']
+__all__ = ('get_implementation',)
 
 ALIASES = {
     'prefork': 'celery.concurrency.prefork:TaskPool',

+ 4 - 5
celery/concurrency/asynpool.py

@@ -23,7 +23,6 @@ import socket
 import struct
 import sys
 import time
-
 from collections import deque, namedtuple
 from io import BytesIO
 from numbers import Integral
@@ -31,11 +30,11 @@ from pickle import HIGHEST_PROTOCOL
 from time import sleep
 from weakref import WeakValueDictionary, ref
 
-from billiard.pool import RUN, TERMINATE, ACK, NACK, WorkersJoined
 from billiard import pool as _pool
-from billiard.compat import buf_t, setblocking, isblocking
+from billiard.compat import buf_t, isblocking, setblocking
+from billiard.pool import ACK, NACK, RUN, TERMINATE, WorkersJoined
 from billiard.queues import _SimpleQueue
-from kombu.async import WRITE, ERR
+from kombu.async import ERR, WRITE
 from kombu.serialization import pickle as _pickle
 from kombu.utils.eventio import SELECT_BAD_FD
 from kombu.utils.functional import fxrange
@@ -76,7 +75,7 @@ except (ImportError, NameError):  # pragma: no cover
     def unpack_from(fmt, iobuf, unpack=struct.unpack):  # noqa
         return unpack(fmt, iobuf.getvalue())  # <-- BytesIO
 
-__all__ = ['AsynPool']
+__all__ = ('AsynPool',)
 
 logger = get_logger(__name__)
 error, debug = logger.error, logger.debug

+ 2 - 2
celery/concurrency/base.py

@@ -13,10 +13,10 @@ from kombu.utils.encoding import safe_repr
 from celery.exceptions import WorkerShutdown, WorkerTerminate
 from celery.five import monotonic, reraise
 from celery.utils import timer2
-from celery.utils.text import truncate
 from celery.utils.log import get_logger
+from celery.utils.text import truncate
 
-__all__ = ['BasePool', 'apply_target']
+__all__ = ('BasePool', 'apply_target')
 
 logger = get_logger('celery.pool')
 

+ 8 - 9
celery/concurrency/eventlet.py

@@ -1,10 +1,17 @@
 # -*- coding: utf-8 -*-
 """Eventlet execution pool."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
+from kombu.async import timer as _timer  # noqa
 from kombu.five import monotonic
 
-__all__ = ['TaskPool']
+from celery import signals  # noqa
+
+from . import base  # noqa
+
+__all__ = ('TaskPool',)
 
 W_RACE = """\
 Celery module with %s imported before eventlet patched\
@@ -20,14 +27,6 @@ for mod in (mod for mod in sys.modules if mod.startswith(RACE_MODS)):
             import warnings
             warnings.warn(RuntimeWarning(W_RACE % side))
 
-# idiotic pep8.py does not allow expressions before imports
-# so have to silence errors here
-from kombu.async import timer as _timer  # noqa
-
-from celery import signals  # noqa
-
-from . import base  # noqa
-
 
 def apply_target(target, args=(), kwargs={}, callback=None,
                  accept_callback=None, getpid=None):

+ 4 - 1
celery/concurrency/gevent.py

@@ -1,15 +1,18 @@
 # -*- coding: utf-8 -*-
 """Gevent execution pool."""
 from __future__ import absolute_import, unicode_literals
+
 from kombu.async import timer as _timer
 from kombu.five import monotonic
+
 from . import base
+
 try:
     from gevent import Timeout
 except ImportError:  # pragma: no cover
     Timeout = None  # noqa
 
-__all__ = ['TaskPool']
+__all__ = ('TaskPool',)
 
 # pylint: disable=redefined-outer-name
 # We cache globals and attribute lookups, so disable this warning.

+ 6 - 6
celery/concurrency/prefork.py

@@ -7,13 +7,13 @@ from __future__ import absolute_import, unicode_literals
 
 import os
 
-from billiard.common import REMAP_SIGTERM, TERM_SIGNAME
 from billiard import forking_enable
-from billiard.pool import RUN, CLOSE, Pool as BlockingPool
+from billiard.common import REMAP_SIGTERM, TERM_SIGNAME
+from billiard.pool import CLOSE, RUN
+from billiard.pool import Pool as BlockingPool
 
-from celery import platforms
-from celery import signals
-from celery._state import set_default_app, _set_task_join_will_block
+from celery import platforms, signals
+from celery._state import _set_task_join_will_block, set_default_app
 from celery.app import trace
 from celery.concurrency.base import BasePool
 from celery.five import items
@@ -22,7 +22,7 @@ from celery.utils.log import get_logger
 
 from .asynpool import AsynPool
 
-__all__ = ['TaskPool', 'process_initializer', 'process_destructor']
+__all__ = ('TaskPool', 'process_initializer', 'process_destructor')
 
 #: List of signals to reset when a child process starts.
 WORKER_SIGRESET = {

+ 3 - 1
celery/concurrency/solo.py

@@ -1,10 +1,12 @@
 # -*- coding: utf-8 -*-
 """Single-threaded execution pool."""
 from __future__ import absolute_import, unicode_literals
+
 import os
+
 from .base import BasePool, apply_target
 
-__all__ = ['TaskPool']
+__all__ = ('TaskPool',)
 
 
 class TaskPool(BasePool):

+ 2 - 1
celery/contrib/abortable.py

@@ -84,10 +84,11 @@ have it block until the task is finished.
    database backends.
 """
 from __future__ import absolute_import, unicode_literals
+
 from celery import Task
 from celery.result import AsyncResult
 
-__all__ = ['AbortableAsyncResult', 'AbortableTask']
+__all__ = ('AbortableAsyncResult', 'AbortableTask')
 
 
 """

+ 3 - 4
celery/contrib/migrate.py

@@ -3,11 +3,10 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import socket
-
 from functools import partial
 from itertools import cycle, islice
 
-from kombu import eventloop, Queue
+from kombu import Queue, eventloop
 from kombu.common import maybe_declare
 from kombu.utils.encoding import ensure_bytes
 
@@ -16,12 +15,12 @@ from celery.five import python_2_unicode_compatible, string, string_t
 from celery.utils.nodenames import worker_direct
 from celery.utils.text import str_to_list
 
-__all__ = [
+__all__ = (
     'StopFiltering', 'State', 'republish', 'migrate_task',
     'migrate_tasks', 'move', 'task_id_eq', 'task_id_in',
     'start_filter', 'move_task_by_id', 'move_by_idmap',
     'move_by_taskmap', 'move_direct', 'move_direct_by_id',
-]
+)
 
 MOVING_PROGRESS_FMT = """\
 Moving task {state.filtered}/{state.strtotal}: \

+ 7 - 18
celery/contrib/pytest.py

@@ -1,8 +1,11 @@
 """Fixtures and testing utilities for :pypi:`py.test <pytest>`."""
 from __future__ import absolute_import, unicode_literals
+
 import os
-import pytest
 from contextlib import contextmanager
+
+import pytest
+
 from .testing import worker
 from .testing.app import TestApp, setup_default_app
 
@@ -13,8 +16,7 @@ NO_WORKER = os.environ.get('NO_WORKER')
 
 
 @contextmanager
-def _create_app(request,
-                enable_logging=False,
+def _create_app(enable_logging=False,
                 use_trap=False,
                 parameters={},
                 **config):
@@ -26,18 +28,7 @@ def _create_app(request,
         config=config,
         **parameters
     )
-    # request.module is not defined for session
-    _module = getattr(request, 'module', None)
-    _cls = getattr(request, 'cls', None)
-    _function = getattr(request, 'function', None)
     with setup_default_app(test_app, use_trap=use_trap):
-        is_not_contained = any([
-            not getattr(_module, 'app_contained', True),
-            not getattr(_cls, 'app_contained', True),
-            not getattr(_function, 'app_contained', True)
-        ])
-        if is_not_contained:
-            test_app.set_current()
         yield test_app
 
 
@@ -62,8 +53,7 @@ def celery_session_app(request,
     """Session Fixture: Return app for session fixtures."""
     mark = request.node.get_marker('celery')
     config = dict(celery_config, **mark.kwargs if mark else {})
-    with _create_app(request,
-                     enable_logging=celery_enable_logging,
+    with _create_app(enable_logging=celery_enable_logging,
                      use_trap=use_celery_app_trap,
                      parameters=celery_parameters,
                      **config) as app:
@@ -163,8 +153,7 @@ def celery_app(request,
     """Fixture creating a Celery application instance."""
     mark = request.node.get_marker('celery')
     config = dict(celery_config, **mark.kwargs if mark else {})
-    with _create_app(request,
-                     enable_logging=celery_enable_logging,
+    with _create_app(enable_logging=celery_enable_logging,
                      use_trap=use_celery_app_trap,
                      parameters=celery_parameters,
                      **config) as app:
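
Editorial note: with the ``request`` plumbing gone, ``_create_app()`` is driven purely by the fixture values. Overriding the configuration still works through the documented fixtures; a sketch (broker/backend values are illustrative):

    import pytest

    @pytest.fixture(scope='session')
    def celery_config():
        # Picked up by celery_session_app/celery_app through the
        # celery_config fixture parameter above.
        return {
            'broker_url': 'memory://',
            'result_backend': 'cache+memory://',
        }

    def test_something(celery_app, celery_worker):
        # celery_app is created by _create_app() with the config above.
        assert celery_app.conf.broker_url == 'memory://'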

+ 7 - 3
celery/contrib/rdb.py

@@ -5,7 +5,7 @@ Introduction
 ============
 
 This is a remote debugger for Celery tasks running in multiprocessing
-pool workers.  Inspired by http://snippets.dzone.com/posts/show/7248
+pool workers.  Inspired by a lost post on dzone.com.
 
 Usage
 -----
@@ -42,18 +42,21 @@ Environment Variables
     base port.  The selected port will be logged by the worker.
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 import errno
 import os
 import socket
 import sys
 from pdb import Pdb
+
 from billiard.process import current_process
+
 from celery.five import range
 
-__all__ = [
+__all__ = (
     'CELERY_RDB_HOST', 'CELERY_RDB_PORT', 'DEFAULT_PORT',
     'Rdb', 'debugger', 'set_trace',
-]
+)
 
 DEFAULT_PORT = 6899
 
@@ -124,6 +127,7 @@ class Rdb(Pdb):
         this_port = None
         for i in range(search_limit):
             _sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            _sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
             this_port = port + skew + i
             try:
                 _sock.bind((host, this_port))
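
Editorial note: setting ``SO_REUSEADDR`` lets the debugger rebind a port that is still in ``TIME_WAIT`` after a previous session. Typical usage, as a sketch (the app and task below are placeholders):

    from celery import Celery
    from celery.contrib import rdb

    app = Celery('proj')

    @app.task
    def add(x, y):
        result = x + y
        # Blocks here and listens on CELERY_RDB_HOST:CELERY_RDB_PORT;
        # with SO_REUSEADDR set above, a restarted worker can rebind
        # the same port immediately.
        rdb.set_trace()
        return result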

+ 28 - 7
celery/contrib/sphinx.py

@@ -22,18 +22,23 @@ then you can change the ``celery_task_prefix`` configuration value:
     celery_task_prefix = '(task)'  # < default
 
 With the extension installed `autodoc` will automatically find
-task decorated objects and generate the correct (as well as
-add a ``(task)`` prefix), and you can also refer to the tasks
-using `:task:proj.tasks.add` syntax.
+task decorated objects (e.g. when using the automodule directive)
+and generate the correct documentation for them (as well as add a
+``(task)`` prefix), and you can also refer to the tasks using the
+`:task:proj.tasks.add` syntax.
 
-Use ``.. autotask::`` to manually document a task.
+Use ``.. autotask::`` to manually document a task instead.
 """
 from __future__ import absolute_import, unicode_literals
-from inspect import formatargspec
+
+from celery.app.task import BaseTask
 from sphinx.domains.python import PyModulelevel
 from sphinx.ext.autodoc import FunctionDocumenter
-from celery.app.task import BaseTask
-from celery.five import getfullargspec
+
+try:  # pragma: no cover
+    from inspect import formatargspec, getfullargspec
+except ImportError:  # Py2
+    from inspect import formatargspec, getargspec as getfullargspec  # noqa
 
 
 class TaskDocumenter(FunctionDocumenter):
@@ -50,6 +55,8 @@ class TaskDocumenter(FunctionDocumenter):
         wrapped = getattr(self.object, '__wrapped__', None)
         if wrapped is not None:
             argspec = getfullargspec(wrapped)
+            if argspec[0] and argspec[0][0] in ('cls', 'self'):
+                del argspec[0][0]
             fmt = formatargspec(*argspec)
             fmt = fmt.replace('\\', '\\\\')
             return fmt
@@ -58,6 +65,16 @@ class TaskDocumenter(FunctionDocumenter):
     def document_members(self, all_members=False):
         pass
 
+    def check_module(self):
+        # Normally this checks whether *self.object* is really defined in
+        # the module given by *self.modname*.  But functions decorated with
+        # the @task decorator are instances living in the celery.local
+        # module, so we check for that and simply agree to document them.
+        modname = self.get_attr(self.object, '__module__', None)
+        if modname and modname == 'celery.local':
+            return True
+        return super(TaskDocumenter, self).check_module()
+
 
 class TaskDirective(PyModulelevel):
     """Sphinx task directive."""
@@ -71,3 +88,7 @@ def setup(app):
     app.add_autodocumenter(TaskDocumenter)
     app.add_directive_to_domain('py', 'task', TaskDirective)
     app.add_config_value('celery_task_prefix', '(task)', True)
+
+    return {
+        'parallel_read_safe': True
+    }
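
Editorial note: returning ``parallel_read_safe`` above lets ``sphinx-build -j`` read sources in parallel. To enable the extension, it is registered in the Sphinx project configuration; a minimal ``conf.py`` sketch:

    # docs/conf.py
    extensions = [
        'sphinx.ext.autodoc',
        'celery.contrib.sphinx',
    ]

    # Prefix shown before documented tasks; '(task)' is the default.
    celery_task_prefix = '(task)'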

+ 4 - 2
celery/contrib/testing/app.py

@@ -1,11 +1,13 @@
 """Create Celery app instances used for testing."""
 from __future__ import absolute_import, unicode_literals
+
 import weakref
 from contextlib import contextmanager
 from copy import deepcopy
+
 from kombu.utils.imports import symbol_by_name
-from celery import Celery
-from celery import _state
+
+from celery import Celery, _state
 
 #: Contains the default configuration values for the test app.
 DEFAULT_TEST_CONFIG = {

+ 0 - 1
celery/contrib/testing/manager.py

@@ -3,7 +3,6 @@ from __future__ import absolute_import, print_function, unicode_literals
 
 import socket
 import sys
-
 from collections import defaultdict
 from functools import partial
 from itertools import count

+ 2 - 0
celery/contrib/testing/mocks.py

@@ -1,7 +1,9 @@
 """Useful mocks for unit testing."""
 from __future__ import absolute_import, unicode_literals
+
 import numbers
 from datetime import datetime, timedelta
+
 try:
     from case import Mock
 except ImportError:

+ 1 - 0
celery/contrib/testing/tasks.py

@@ -1,5 +1,6 @@
 """Helper tasks for integration tests."""
 from __future__ import absolute_import, unicode_literals
+
 from celery import shared_task
 
 

+ 3 - 1
celery/contrib/testing/worker.py

@@ -1,10 +1,12 @@
 """Embedded workers for integration tests."""
 from __future__ import absolute_import, unicode_literals
+
 import os
 import threading
 from contextlib import contextmanager
+
 from celery import worker
-from celery.result import allow_join_result, _set_task_join_will_block
+from celery.result import _set_task_join_will_block, allow_join_result
 from celery.utils.dispatch import Signal
 from celery.utils.nodenames import anon_nodename
 

+ 2 - 2
celery/events/__init__.py

@@ -10,7 +10,7 @@ from .dispatcher import EventDispatcher
 from .event import Event, event_exchange, get_exchange, group_from
 from .receiver import EventReceiver
 
-__all__ = [
+__all__ = (
     'Event', 'EventDispatcher', 'EventReceiver',
     'event_exchange', 'get_exchange', 'group_from',
-]
+)

+ 3 - 5
celery/events/cursesmon.py

@@ -5,20 +5,18 @@ from __future__ import absolute_import, print_function, unicode_literals
 import curses
 import sys
 import threading
-
 from datetime import datetime
 from itertools import count
+from math import ceil
 from textwrap import wrap
 from time import time
-from math import ceil
 
-from celery import VERSION_BANNER
-from celery import states
+from celery import VERSION_BANNER, states
 from celery.app import app_or_default
 from celery.five import items, values
 from celery.utils.text import abbr, abbrtask
 
-__all__ = ['CursesMonitor', 'evtop']
+__all__ = ('CursesMonitor', 'evtop')
 
 BORDER_SPACING = 4
 LEFT_BORDER_OFFSET = 3

+ 1 - 2
celery/events/dispatcher.py

@@ -4,7 +4,6 @@ from __future__ import absolute_import, unicode_literals
 import os
 import threading
 import time
-
 from collections import defaultdict, deque
 
 from kombu import Producer
@@ -16,7 +15,7 @@ from celery.utils.time import utcoffset
 
 from .event import Event, get_exchange, group_from
 
-__all__ = ['EventDispatcher']
+__all__ = ('EventDispatcher',)
 
 
 class EventDispatcher(object):

+ 3 - 1
celery/events/dumper.py

@@ -5,13 +5,15 @@ This is a simple program that dumps events to the console
 as they happen.  Think of it like a `tcpdump` for Celery events.
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 import sys
 from datetime import datetime
+
 from celery.app import app_or_default
 from celery.utils.functional import LRUCache
 from celery.utils.time import humanize_seconds
 
-__all__ = ['Dumper', 'evdump']
+__all__ = ('Dumper', 'evdump')
 
 TASK_NAMES = LRUCache(limit=0xFFF)
 

+ 4 - 2
celery/events/event.py

@@ -1,12 +1,14 @@
 """Creating events, and event exchange definition."""
 from __future__ import absolute_import, unicode_literals
+
 import time
 from copy import copy
+
 from kombu import Exchange
 
-__all__ = [
+__all__ = (
     'Event', 'event_exchange', 'get_exchange', 'group_from',
-]
+)
 
 #: Exchange used to send events on.
 #: Note: Use :func:`get_exchange` instead, as the type of

+ 1 - 2
celery/events/receiver.py

@@ -2,7 +2,6 @@
 from __future__ import absolute_import, unicode_literals
 
 import time
-
 from operator import itemgetter
 
 from kombu import Queue
@@ -15,7 +14,7 @@ from celery.utils.time import adjust_timestamp
 
 from .event import get_exchange
 
-__all__ = ['EventReceiver']
+__all__ = ('EventReceiver',)
 
 CLIENT_CLOCK_SKEW = -1
 

+ 4 - 2
celery/events/snapshot.py

@@ -8,16 +8,18 @@ implementation of this writing the snapshots to a database
 in :mod:`djcelery.snapshots` in the `django-celery` distribution.
 """
 from __future__ import absolute_import, print_function, unicode_literals
+
 from kombu.utils.limits import TokenBucket
+
 from celery import platforms
 from celery.app import app_or_default
-from celery.utils.timer2 import Timer
 from celery.utils.dispatch import Signal
 from celery.utils.imports import instantiate
 from celery.utils.log import get_logger
 from celery.utils.time import rate
+from celery.utils.timer2 import Timer
 
-__all__ = ['Polaroid', 'evcam']
+__all__ = ('Polaroid', 'evcam')
 
 logger = get_logger('celery.evcam')
 

+ 3 - 2
celery/events/state.py

@@ -18,7 +18,6 @@ from __future__ import absolute_import, unicode_literals
 import bisect
 import sys
 import threading
-
 from collections import Callable, defaultdict
 from datetime import datetime
 from decimal import Decimal
@@ -35,7 +34,7 @@ from celery.five import items, python_2_unicode_compatible, values
 from celery.utils.functional import LRUCache, memoize, pass1
 from celery.utils.log import get_logger
 
-__all__ = ['Worker', 'Task', 'State', 'heartbeat_expires']
+__all__ = ('Worker', 'Task', 'State', 'heartbeat_expires')
 
 # pylint: disable=redefined-outer-name
 # We cache globals and attribute lookups, so disable this warning.
@@ -101,6 +100,8 @@ class CallableDefaultdict(defaultdict):
 
     def __call__(self, *args, **kwargs):
         return self.fun(*args, **kwargs)
+
+
 Callable.register(CallableDefaultdict)  # noqa: E305
 
 

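Note: two separate things happen in the last state.py hunk. The added blank lines satisfy pycodestyle E305 (two blank lines between a class body and following top-level code), leaving the `# noqa: E305` marker redundant but harmless. The unchanged line after them uses ABC virtual-subclass registration: `Callable.register(CallableDefaultdict)` makes `issubclass` and `isinstance` checks against the `Callable` ABC succeed without actual inheritance. A self-contained sketch of the technique, simplified from the hunk:

    from collections import defaultdict
    try:
        from collections.abc import Callable   # Python 3.3+
    except ImportError:
        from collections import Callable       # Python 2, as imported in the hunk

    class CallableDefaultdict(defaultdict):
        """A defaultdict that can also be called like a function."""

        def __init__(self, fun, *args, **kwargs):
            self.fun = fun
            super(CallableDefaultdict, self).__init__(*args, **kwargs)

        def __call__(self, *args, **kwargs):
            return self.fun(*args, **kwargs)


    Callable.register(CallableDefaultdict)  # registers a virtual subclass of the ABC
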
+ 12 - 6
celery/exceptions.py

@@ -48,14 +48,16 @@ Error Hierarchy
         - :exc:`~celery.exceptions.WorkerShutdown`
 """
 from __future__ import absolute_import, unicode_literals
+
 import numbers
-from .five import python_2_unicode_compatible, string_t
-from billiard.exceptions import (
-    SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError, Terminated,
-)
+
+from billiard.exceptions import (SoftTimeLimitExceeded, Terminated,
+                                 TimeLimitExceeded, WorkerLostError)
 from kombu.exceptions import OperationalError
 
-__all__ = [
+from .five import python_2_unicode_compatible, string_t
+
+__all__ = (
     # Warnings
     'CeleryWarning',
     'AlwaysEagerIgnored', 'DuplicateNodenameWarning',
@@ -86,7 +88,7 @@ __all__ = [
 
     # Worker shutdown semi-predicates (inherits from SystemExit).
     'WorkerShutdown', 'WorkerTerminate',
-]
+)
 
 UNREGISTERED_FMT = """\
 Task of kind {0} never registered, please make sure it's imported.\
@@ -159,6 +161,8 @@ class Retry(TaskPredicate):
 
     def __reduce__(self):
         return self.__class__, (self.message, self.excs, self.when)
+
+
 RetryTaskError = Retry  # noqa: E305 XXX compat
 
 
@@ -242,6 +246,8 @@ class CDeprecationWarning(DeprecationWarning):
 
 class WorkerTerminate(SystemExit):
     """Signals that the worker should terminate immediately."""
+
+
 SystemTerminate = WorkerTerminate  # noqa: E305 XXX compat
 
 

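Note: the two `# noqa: E305` sites in exceptions.py receive the same blank-line fix as state.py. The lines they guard are deliberate backwards-compatibility aliases: the old exception name is bound to the new class so existing `except` clauses and imports keep working. The pattern in miniature:

    class Retry(Exception):              # simplified; the real base is TaskPredicate
        """The task is to be retried later."""


    RetryTaskError = Retry               # legacy name still importable and catchable
    assert RetryTaskError is Retry       # the same class object, not a copy
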
+ 3 - 0
celery/five.py

@@ -1,6 +1,9 @@
 # -*- coding: utf-8 -*-
 """Python 2/3 compatibility utilities."""
 from __future__ import absolute_import, unicode_literals
+
 import sys
+
 import vine.five
+
 sys.modules[__name__] = vine.five

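Note: beyond the blank-line grouping, five.py's one real statement deserves a gloss: assigning to `sys.modules[__name__]` replaces this module object with `vine.five` at import time, so both `import celery.five` and `from celery.five import ...` transparently resolve against vine's compatibility module. The trick in isolation (module names here are made up for illustration):

    # shim.py -- alias this module to another at import time
    import sys

    import real_module  # hypothetical target module

    sys.modules[__name__] = real_module
    # from now on, `import shim` hands callers real_module instead
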
+ 6 - 8
celery/fixups/django.py

@@ -4,18 +4,16 @@ from __future__ import absolute_import, unicode_literals
 import os
 import sys
 import warnings
+from datetime import datetime
+from importlib import import_module
 
 from kombu.utils.imports import symbol_by_name
 from kombu.utils.objects import cached_property
 
-from datetime import datetime
-from importlib import import_module
-
-from celery import _state
-from celery import signals
+from celery import _state, signals
 from celery.exceptions import FixupWarning, ImproperlyConfigured
 
-__all__ = ['DjangoFixup', 'fixup']
+__all__ = ('DjangoFixup', 'fixup')
 
 ERR_NOT_INSTALLED = """\
 Environment variable DJANGO_SETTINGS_MODULE is defined
@@ -183,7 +181,7 @@ class DjangoWorkerFixup(object):
     def _close_database(self):
         for conn in self._db.connections.all():
             try:
-                conn.close()
+                conn.close_if_unusable_or_obsolete()
             except self.interface_errors:
                 pass
             except self.DatabaseError as exc:
@@ -193,7 +191,7 @@ class DjangoWorkerFixup(object):
 
     def close_cache(self):
         try:
-            self._cache.cache.close()
+            self._cache.close_caches()
         except (TypeError, AttributeError):
             pass
 

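Note: the two replacements in fixups/django.py are behavioural, not stylistic. `close_if_unusable_or_obsolete()` (Django 1.6+) closes a connection only if it is broken or has outlived CONN_MAX_AGE, so healthy persistent connections now survive between tasks where the old unconditional `conn.close()` dropped them; `close_caches()` closes every configured cache backend instead of only the default one. A sketch of what the fixup now effectively does, written against the plain Django modules (`self._db` and `self._cache` are lazy imports of these):

    from django import db
    from django.core import cache

    def close_database_connections():
        for conn in db.connections.all():
            conn.close_if_unusable_or_obsolete()  # spare healthy persistent connections

    def close_all_caches():
        cache.close_caches()  # close every configured cache, not just caches['default']
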
+ 1 - 1
celery/loaders/__init__.py

@@ -7,7 +7,7 @@ when workers start, when tasks are executed and so on.
 from __future__ import absolute_import, unicode_literals
 from celery.utils.imports import symbol_by_name, import_from_cwd
 
-__all__ = ['get_loader_cls']
+__all__ = ('get_loader_cls',)
 
 LOADER_ALIASES = {
     'app': 'celery.loaders.app:AppLoader',

+ 2 - 1
celery/loaders/app.py

@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 """The default loader used with custom app instances."""
 from __future__ import absolute_import, unicode_literals
+
 from .base import BaseLoader
 
-__all__ = ['AppLoader']
+__all__ = ('AppLoader',)
 
 
 class AppLoader(BaseLoader):

Some files were not shown because too many files changed in this diff