
Distribution cleanup and prepare for release

Ask Solem, 11 years ago
Parent commit: f8dace1ba9

+ 7 - 1
MANIFEST.in

@@ -8,7 +8,13 @@ include setup.cfg
 include setup.py
 recursive-include celery *.py
 recursive-include docs *
-recursive-include extra *
+recursive-include extra/bash-completion *
+recursive-include extra/centos *
+recursive-include extra/generic-init.d *
+recursive-include extra/osx *
+recursive-include extra/supervisord *
+recursive-include extra/systemd *
+recursive-include extra/zsh-completion *
 recursive-include examples *
 recursive-include requirements *.txt
 prune *.pyc

+ 0 - 8
TODO

@@ -1,10 +1,2 @@
 Please see our Issue Tracker at GitHub:
     http://github.com/celery/celery/issues
-
-Celerymon
-=========
-
-Create a UI for celerymon using js, with statistics and so on.
-Needs a graphing library, maybe highcharts
-(http://www.highcharts.com/: awesome but weird license) or InfoVis
-(http://thejit.org/; BSD licensed).

+ 0 - 58
celery/__init__.py

@@ -143,61 +143,3 @@ old_module, new_module = recreate_module(  # pragma: no cover
     maybe_patch_concurrency=maybe_patch_concurrency,
     _find_option_with_arg=_find_option_with_arg,
 )
-
-
-if sys.version_info[0:2] == (3, 2):
-    # There is a problem in Python3's import system where it
-    # returns the raw module object instead of the one
-    # kept in ``sys.modules``.
-
-    # This breaks our dynamically generated modules because we insert
-    # them into sys.modules, and expect the import statement to return
-    # that.
-
-    # I'm not entirely sure of why, or when it happens, but this import hook
-    # fixes the problem.  The bug can be reproduced by disabling the hook
-    # and doing the following:
-    #
-    #   >>> import celery
-    #   >>> from celery.task import sets
-    #   >>> from celery import task
-    #   >>> type(celery.task)
-    #   <class 'celery.task'>
-    #   >>> import sys
-    #   >>> import celery
-    #   >>> sys.modules.pop('celery.task')
-    #   <module 'celery.task' from 'celery/task/__init__.py'>
-    #   >>> from celery.task import sets
-    #   Traceback (most recent call last):
-    #     File "<stdin>", line 1, in <module>
-    #   ImportError: cannot import name sets
-    #   >>> type(celery.task)
-    #   <class 'module'>      # <-- where did this come from?!?
-
-    # Note that popping the module from sys.modules is just a way to force
-    # this to happen and I'm sure it happens in other cases too.
-
-    # [ask]
-
-    import imp
-
-    class FixBrokenImportHook(object):
-        generated_modules = ('celery', 'celery.task')
-
-        def load_module(self, name, *args):
-            try:
-                return sys.modules[name]
-            except KeyError:
-                modname, path = name, None
-                if '.' in name:
-                    modname, path = name.split('.')[-1], __path__
-                module_info = imp.find_module(modname, path)
-                imp.load_module(name, *module_info)
-                return sys.modules[name]
-
-        def find_module(self, name, path):
-            if name in self.generated_modules:
-                return self
-            return None
-
-    sys.meta_path.insert(0, FixBrokenImportHook())

+ 1 - 1
celery/datastructures.py

@@ -52,7 +52,7 @@ class GraphFormatter(object):
         'shape': 'box',
         'arrowhead': 'vee',
         'style': 'filled',
-        'fontname': 'Helvetica Neue',
+        'fontname': 'HelveticaNeue',
     }
     edge_scheme = {
         'color': 'darkseagreen4',

+ 1 - 5
celery/tests/case.py

@@ -47,7 +47,7 @@ from celery.utils.imports import qualname
 __all__ = [
     'Case', 'AppCase', 'Mock', 'patch', 'call', 'skip_unless_module',
     'wrap_logger', 'with_environ', 'sleepdeprived',
-    'skip_if_environ', 'skip_if_quick', 'todo', 'skip', 'skip_if',
+    'skip_if_environ', 'todo', 'skip', 'skip_if',
     'skip_unless', 'mask_modules', 'override_stdouts', 'mock_module',
     'replace_module_value', 'sys_platform', 'reset_modules',
     'patch_modules', 'mock_context', 'mock_open', 'patch_many',
@@ -452,10 +452,6 @@ def skip_if_environ(env_var_name):
     return _wrap_test
 
 
-def skip_if_quick(fun):
-    return skip_if_environ('QUICKTEST')(fun)
-
-
 def _skip_test(reason, sign):
 
     def _wrap_test(fun):

+ 1 - 2
celery/tests/tasks/test_result.py

@@ -15,7 +15,7 @@ from celery.result import (
 from celery.utils import uuid
 from celery.utils.serialization import pickle
 
-from celery.tests.case import AppCase, depends_on_current_app, skip_if_quick
+from celery.tests.case import AppCase, depends_on_current_app
 
 
 def mock_task(name, state, result):
@@ -650,7 +650,6 @@ class test_pending_Group(AppCase):
         with self.assertRaises(TimeoutError):
             self.ts.join(timeout=0.001)
 
-    @skip_if_quick
     def x_join_longer(self):
         with self.assertRaises(TimeoutError):
             self.ts.join(timeout=1)

+ 1 - 2
celery/tests/utils/test_timer2.py

@@ -5,7 +5,7 @@ import time
 
 import celery.utils.timer2 as timer2
 
-from celery.tests.case import Case, Mock, patch, skip_if_quick
+from celery.tests.case import Case, Mock, patch
 from kombu.tests.case import redirect_stdouts
 
 
@@ -68,7 +68,6 @@ class test_Schedule(Case):
 
 class test_Timer(Case):
 
-    @skip_if_quick
     def test_enter_after(self):
         t = timer2.Timer()
         try:

+ 1 - 1
docs/.templates/page.html

@@ -2,7 +2,7 @@
 {% block body %}
 <div class="deck">
 
-    {% if version == "3.1" or version == "4.0" %}
+    {% if version == "3.2" or version == "4.0" %}
         <p class="developmentversion">
         This document is for Celery's development version, which can be
         significantly different from previous releases. Get old docs here:

BIN
docs/images/Celery-Overview-v4.jpg


BIN
docs/images/icon-paypal.png


BIN
docs/images/worker_graph_full.png


+ 0 - 1
docs/internals/reference/index.rst

@@ -22,7 +22,6 @@
     celery.concurrency.gevent
     celery.concurrency.base
     celery.concurrency.threads
-    celery.beat
     celery.backends
     celery.backends.base
     celery.backends.rpc

+ 0 - 0
docs/internals/reference/celery.beat.rst → docs/reference/celery.beat.rst


+ 0 - 10
docs/reference/celery.task.base.rst

@@ -1,10 +0,0 @@
-===================================
- celery.task.base (Deprecated)
-===================================
-
-.. contents::
-    :local:
-.. currentmodule:: celery.task.base
-
-.. automodule:: celery.task.base
-    :members: BaseTask, PeriodicTask, TaskType

+ 0 - 18
docs/reference/celery.task.rst

@@ -1,18 +0,0 @@
-=====================================================
- celery.task
-=====================================================
-
-.. contents::
-    :local:
-.. currentmodule:: celery.task
-
-.. automodule:: celery.task
-
-    .. autofunction:: task
-
-    .. autofunction:: periodic_task
-
-    .. autoclass:: Task
-
-        .. seealso::
-            :class:`celery.task.base.BaseTask`.

+ 1 - 2
docs/reference/index.rst

@@ -21,8 +21,6 @@
     celery.app.log
     celery.app.utils
     celery.bootsteps
-    celery.task
-    celery.task.base
     celery.result
     celery.task.http
     celery.schedules
@@ -43,6 +41,7 @@
     celery.contrib.methods
     celery.events
     celery.events.state
+    celery.beat
     celery.apps.worker
     celery.apps.beat
     celery.worker

+ 3 - 3
docs/tutorials/daemonizing.rst

@@ -382,10 +382,10 @@ you should :ref:`report it <reporting-bugs>`).
 launchd (OS X)
 ==============
 
-* `extra/mac/`_
+* `extra/osx`_
 
-.. _`extra/mac/`:
-    http://github.com/celery/celery/tree/3.1/extra/mac/
+.. _`extra/osx`:
+    http://github.com/celery/celery/tree/3.1/extra/osx/
 
 
 .. _daemon-windows:

+ 6 - 0
docs/whatsnew-3.1.rst

@@ -578,6 +578,12 @@ You install extras by specifying them inside brackets:
 The above will install the dependencies for Redis and MongoDB.  You can list
 as many extras as you want.
 
+
+.. warning::
+
+    You can't use the ``celery-with-*`` packages anymore, as these will not be
+    updated to use Celery 3.1.
+
 +-------------+-------------------------+---------------------------+
 | Extension   | Requirement entry       | Type                      |
 +=============+=========================+===========================+

+ 0 - 53
docs/xreftest.rst

@@ -1,53 +0,0 @@
-xreftest
-========
-
-Must not be in public docs
---------------------------
-
-hello, how do you do3
-
-
-``meth @Task.retry``: :meth:`@Task.retry`
-
-``meth @-Task.retry``: :meth:`@-Task.retry`
-
-``meth ~@Task.retry``: :meth:`~@Task.retry`
-
-
-``class @Celery``: :class:`@Celery`
-
-``class @-Celery``: :class:`@-Celery`
-
-``class ~@Celery``: :class:`~@Celery`
-
-
-``meth @Celery.config_from_object``: :meth:`@Celery.config_from_object`
-
-``meth @-Celery.config_from_object``: :meth:`@-Celery.config_from_object`
-
-``meth ~@Celery.config_from_object``: :meth:`~@Celery.config_from_object`
-
-``meth celery.Celery.config_from_object``: :meth:`@Celery.send_task`
-
-:class:`celery.Celery`
-
-:class:`celery.subtask.link`
-
-
-``attr @amqp``:   :attr:`@amqp`
-
-``attr @-amqp``:   :attr:`@-amqp`
-
-``attr ~@amqp``:   :attr:`~@amqp`
-
-
-``meth @amqp.TaskConsumer``:  :meth:`@amqp.TaskConsumer`
-
-``meth @-amqp.TaskConsumer``: :meth:`@-amqp.TaskConsumer`
-
-``meth ~@amqp.TaskConsumer``: :meth:`~@amqp.TaskConsumer`
-
-
-``exc @NotRegistered``: :exc:`@NotRegistered`
-
-``exc @-NotRegistered``: :exc:`@-NotRegistered`

+ 0 - 2
extra/debian/README.rst

@@ -1,2 +0,0 @@
-These init scripts have been deprecated,
-please use ../generic-init.d instead.

+ 0 - 72
extra/logtools/find-unprocessed-tasks-debug.sh

@@ -1,72 +0,0 @@
-#!/bin/bash
-#--------------------------------------------------------------------#
-# Find all currently unprocessed tasks by searching the celeryd
-# log file.
-#
-# Please note that this will also include tasks that raised an exception,
-# or is just under active processing (will finish soon).
-#
-# Usage:
-#
-#     # Using default log file /var/log/celeryd.log
-#     $ bash find-unprocessed-tasks.sh
-#
-#     # Using a custom logfile
-#     # bash find-unprocessed-tasks.sh ./celeryd.log
-#
-#--------------------------------------------------------------------#
-
-DEFAULT_LOGFILE=/var/log/celeryd.log
-export CELERYD_LOGFILE=${1:-$DEFAULT_LOGFILE}
-
-
-get_start_date_by_task_id() {
-    task_id="$1"
-    grep Apply $CELERYD_LOGFILE | \
-        grep "$task_id" | \
-        perl -nle'
-            /^\[(.+?): DEBUG/; print $1' | \
-        sed 's/\s*$//'
-}
-
-
-get_end_date_by_task_id() {
-    task_id="$1"
-    grep processed $CELERYD_LOGFILE | \
-        grep "$task_id" | \
-        perl -nle'
-            /^\[(.+?): INFO/; print $1 ' | \
-        sed 's/\s*$//'
-}
-
-
-get_all_task_ids() {
-    grep Apply $CELERYD_LOGFILE | perl -nle"/'task_id': '(.+?)'/; print \$1"
-}
-
-
-search_logs_for_task_id() {
-    grep "$task_id" $CELERYD_LOGFILE
-}
-
-
-report_unprocessed_task() {
-    task_id="$1"
-    date_start="$2"
-
-    cat <<EOFTEXT
-"---------------------------------------------------------------------------------"
-| UNFINISHED TASK: $task_id [$date_start]
-"---------------------------------------------------------------------------------"
-Related logs:
-EOFTEXT
-	search_logs_for_task_id "$task_id"
-}
-
-for task_id in $(get_all_task_ids); do
-    date_start=$(get_start_date_by_task_id "$task_id")
-    date_end=$(get_end_date_by_task_id "$task_id")
-    if [ -z "$date_end" ]; then
-        report_unprocessed_task "$task_id" "$date_start"
-    fi
-done

+ 0 - 73
extra/logtools/find-unprocessed-tasks.sh

@@ -1,73 +0,0 @@
-#!/bin/bash
-#--------------------------------------------------------------------#
-# Find all currently unprocessed tasks by searching the celeryd
-# log file.
-#
-# Please note that this will also include tasks that raised an exception,
-# or is just under active processing (will finish soon).
-#
-# Usage:
-#
-#     # Using default log file /var/log/celeryd.log
-#     $ bash find-unprocessed-tasks.sh
-#
-#     # Using a custom logfile
-#     # bash find-unprocessed-tasks.sh ./celeryd.log
-#
-#--------------------------------------------------------------------#
-
-DEFAULT_LOGFILE=/var/log/celeryd.log
-export CELERYD_LOGFILE=${1:-$DEFAULT_LOGFILE}
-
-
-get_start_date_by_task_id() {
-    task_id="$1"
-    grep 'Received task:' $CELERYD_LOGFILE | \
-        grep "$task_id" | \
-        perl -nle'
-            /^\[(.+?): INFO/; print $1' | \
-        sed 's/\s*$//'
-}
-
-
-get_end_date_by_task_id() {
-    task_id="$1"
-    grep processed $CELERYD_LOGFILE | \
-        grep "$task_id" | \
-        perl -nle'
-            /^\[(.+?): INFO/; print $1 ' | \
-        sed 's/\s*$//'
-}
-
-
-get_all_task_ids() {
- grep 'Received task:' $CELERYD_LOGFILE | \
-	perl -nle'/Received task:.+?\[(.+?)\]/; print($1)'
-}
-
-
-search_logs_for_task_id() {
-    grep "$task_id" $CELERYD_LOGFILE
-}
-
-
-report_unprocessed_task() {
-    task_id="$1"
-    date_start="$2"
-
-    cat <<EOFTEXT
-"---------------------------------------------------------------------------------"
-| UNFINISHED TASK: $task_id [$date_start]
-"---------------------------------------------------------------------------------"
-Related logs:
-EOFTEXT
-	search_logs_for_task_id "$task_id"
-}
-
-for task_id in $(get_all_task_ids); do
-    date_start=$(get_start_date_by_task_id "$task_id")
-    date_end=$(get_end_date_by_task_id "$task_id")
-    if [ -z "$date_end" ]; then
-        report_unprocessed_task "$task_id" "$date_start"
-    fi
-done

+ 0 - 66
extra/logtools/periodic-task-runtimes.sh

@@ -1,66 +0,0 @@
-#!/bin/bash
-#---------------------------------------------------------------------------#
-#
-# Tool to find race conditions in the Periodic Task system.
-# Outputs times of all runs of a certain task (by searching for task name
-# using a search query).
-#
-# Usage:
-#
-#   $ bash periodic-task-runtimes.sh query host1 [host2 ... hostN]
-#
-# Example usage:
-#
-#   $ bash periodic-task-runtimes.sh refresh_all_feeds host1 host2 host3
-#
-# The output is sorted.
-#
-#---------------------------------------------------------------------------#
-
-USER="root"
-CELERYD_LOGFILE="/var/log/celeryd.log"
-
-query="$1"
-shift
-hosts="$*"
-
-usage () {
-    echo "$(basename $0) task_name_query host1 [host2 ... hostN]"
-    exit 1
-}
-
-[ -z "$query" -o -z "$hosts" ] && usage
-
-
-get_received_date_for_task () {
-    host="$1"
-    ssh "$USER@$host" "
-        grep '$query' $CELERYD_LOGFILE | \
-            grep 'Received task:' | \
-            perl -nle'
-                /^\[(.+?): INFO.+?Received task:(.+?)\s*/;
-                print \"[\$1] $host \$2\"' | \
-            sed 's/\s*$//'
-    "
-}
-
-get_processed_date_for_task () {
-    host="$1"
-    ssh "$USER@$host" "
-        grep '$query' $CELERYD_LOGFILE | \
-            grep 'processed:' | \
-            perl -nle'
-                /^\[(.+?): INFO.+?Task\s+(.+?)\s*/;
-                print \"[\$1] $host \$2\"' | \
-            sed 's/\s*$//'
-    "
-}
-
-get_processed_for_all_hosts () {
-    for_hosts="$*"
-    for host in $for_hosts; do
-        get_processed_date_for_task $host
-    done
-}
-
-get_processed_for_all_hosts $hosts | sort

+ 0 - 43
extra/mac/watch-workers.applescript

@@ -1,43 +0,0 @@
-set broker to "h8.opera.com"
-set workers to {"h6.opera.com", "h8.opera.com", "h9.opera.com", "h10.opera.com"}
-set clock to "h6.opera.com"
-tell application "iTerm"
-    activate
-    set myterm to (make new terminal)
-    tell myterm
-        set number of columns to 80
-        set number of rows to 50
-        repeat with workerhost in workers
-            set worker to (make new session at the end of sessions)
-            tell worker
-                set name to workerhost
-                set foreground color to "white"
-                set background color to "black"
-                set transparency to 0.1
-                exec command "/bin/sh -i"
-                write text "ssh root@" & workerhost & " 'tail -f /var/log/celeryd.log'"
-            end tell
-        end repeat
-        set celerybeat to (make new session at the end of sessions)
-        tell celerybeat
-            set name to "celerybeat.log"
-            set foreground color to "white"
-            set background color to "black"
-            set transparency to 0.1
-            exec command "/bin/sh -i"
-            write text "ssh root@" & clock & " 'tail -f /var/log/celerybeat.log'"
-        end tell
-        set rabbit to (make new session at the end of sessions)
-        tell rabbit
-            set name to "rabbit.log"
-            set foreground color to "white"
-            set background color to "black"
-            set transparency to 0.1
-            exec command "/bin/sh -i"
-            write text "ssh root@" & broker & " 'tail -f /var/log/rabbitmq/rabbit.log'"
-        end tell
-        tell the first session
-            activate
-        end tell
-    end tell
-end tell

+ 0 - 0
extra/mac/org.celeryq.beat.plist → extra/osx/org.celeryq.beat.plist


+ 0 - 0
extra/mac/org.celeryq.worker.plist → extra/osx/org.celeryq.worker.plist


+ 0 - 57
extra/release/core-modules.txt

@@ -1,57 +0,0 @@
-celery//__init__.py
-celery//app/__init__.py
-celery//app/abstract.py
-celery//app/amqp.py
-celery//app/annotations.py
-celery//app/base.py
-celery//app/builtins.py
-celery//app/control.py
-celery//app/defaults.py
-celery//app/log.py
-celery//app/registry.py
-celery//app/routes.py
-celery//app/task.py
-celery//app/utils.py
-celery//apps/__init__.py
-celery//apps/worker.py
-celery//backends/__init__.py
-celery//backends/base.py
-celery//bin/__init__.py
-celery//bin/base.py
-celery//bin/worker.py
-celery//canvas.py
-celery//concurrency/__init__.py
-celery//concurrency/base.py
-celery//concurrency/processes/__init__.py
-celery//concurrency/processes/_win.py
-celery//concurrency/solo.py
-celery//datastructures.py
-celery//events/__init__.py
-celery//events/state.py
-celery//exceptions.py
-celery//loaders/__init__.py
-celery//loaders/app.py
-celery//loaders/base.py
-celery//loaders/default.py
-celery//result.py
-celery//security/__init__.py
-celery//security/certificate.py
-celery//security/key.py
-celery//security/serialization.py
-celery//security/utils.py
-celery//signals.py
-celery//state.py
-celery//states.py
-celery//task/trace.py
-celery//worker/__init__.py
-celery//worker/abstract.py
-celery//worker/autoreload.py
-celery//worker/autoscale.py
-celery//worker/buckets.py
-celery//worker/consumer.py
-celery//worker/control.py
-celery//worker/heartbeat.py
-celery//worker/job.py
-celery//worker/mediator.py
-celery//worker/state.py
-celery//worker/strategy.py

+ 0 - 0
extra/security/gen-cert.sh → extra/release/gen-cert.sh


+ 2 - 4
funtests/stress/stress/__main__.py

@@ -33,11 +33,9 @@ class Stress(Command):
             Option('-g', '--group', default='all',
                    help='Specify test group (all|green)'),
             Option('--diag', default=False, action='store_true',
-                   help='Enable diagnostics (slow)',
-            ),
+                   help='Enable diagnostics (slow)'),
             Option('-J', '--no-join', default=False, action='store_true',
-                   help='Do not wait for task results',
-            ),
+                   help='Do not wait for task results'),
         )
 
 

+ 1 - 1
funtests/stress/stress/templates.py

@@ -92,5 +92,5 @@ class pickle(default):
 
 @template()
 class confirms(default):
-    BROKER_URL='pyamqp://'
+    BROKER_URL = 'pyamqp://'
     BROKER_TRANSPORT_OPTIONS = {'confirm_publish': True}

+ 2 - 5
funtests/suite/test_leak.py

@@ -18,7 +18,6 @@ from celery.tests.utils import unittest
 import suite  # noqa
 
 GET_RSIZE = '/bin/ps -p {pid} -o rss='
-QUICKTEST = int(os.environ.get('QUICKTEST', 0))
 
 
 class Sizes(list):
@@ -92,8 +91,7 @@ class LeakFunCase(unittest.TestCase):
 
 class test_leaks(LeakFunCase):
 
-    def test_task_apply_leak(self):
-        its = QUICKTEST and 10 or 1000
+    def test_task_apply_leak(self, its=1000):
         self.assertNotEqual(self.app.conf.BROKER_TRANSPORT, 'memory')
 
         @self.app.task
@@ -112,8 +110,7 @@ class test_leaks(LeakFunCase):
         finally:
             self.app.conf.BROKER_POOL_LIMIT = pool_limit
 
-    def test_task_apply_leak_with_pool(self):
-        its = QUICKTEST and 10 or 1000
+    def test_task_apply_leak_with_pool(self, its=1000):
         self.assertNotEqual(self.app.conf.BROKER_TRANSPORT, 'memory')
 
         @self.app.task

+ 4 - 11
pavement.py

@@ -107,15 +107,12 @@ def bump(options):
 @task
 @cmdopts([
     ('coverage', 'c', 'Enable coverage'),
-    ('quick', 'q', 'Quick test'),
     ('verbose', 'V', 'Make more noise'),
 ])
 def test(options):
     cmd = 'CELERY_LOADER=default nosetests'
     if getattr(options, 'coverage', False):
         cmd += ' --with-coverage3'
-    if getattr(options, 'quick', False):
-        cmd = 'QUICKTEST=1 SKIP_RLIMITS=1 {0}'.format(cmd)
     if getattr(options, 'verbose', False):
         cmd += ' --verbosity=2'
     sh(cmd)
@@ -139,9 +136,10 @@ def removepyc(options):
 
 
 @task
-def update_graphs(options):
-    sh('celery worker_graph | dot -Tpng -o docs/images/worker_graph.png')
-    sh('celery consumer_graph | dot -Tpng -o docs/images/consumer_graph.png')
+def update_graphs(options, dest='docs/images/worker_graph_full.png'):
+    sh('celery graph bootsteps | dot -Tpng -o {dest}'.format(
+        dest=dest,
+    ))
 
 
 @task
@@ -168,11 +166,6 @@ def verify_authors(options):
     sh('git shortlog -se | cut -f2 | extra/release/attribution.py')
 
 
-@task
-def coreloc(options):
-    sh('xargs sloccount < extra/release/core-modules.txt')
-
-
 @task
 def testloc(options):
     sh('sloccount celery/tests')

+ 1 - 14
setup.py

@@ -24,7 +24,6 @@ if sys.version_info < (2, 6):
 
 downgrade_packages = [
     'celery.app.task',
-    'celery.concurrency.processes',
 ]
 orig_path = sys.path[:]
 for path in (os.path.curdir, os.getcwd()):
@@ -71,8 +70,8 @@ classes = """
     Programming Language :: Python :: 2.6
     Programming Language :: Python :: 2.7
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.2
     Programming Language :: Python :: 3.3
+    Programming Language :: Python :: 3.4
     Programming Language :: Python :: Implementation :: CPython
     Programming Language :: Python :: Implementation :: PyPy
     Programming Language :: Python :: Implementation :: Jython
@@ -126,17 +125,6 @@ try:
 finally:
     meta_fh.close()
 
-# -*- Custom Commands -*-
-
-
-class quicktest(test):
-    extra_env = dict(SKIP_RLIMITS=1, QUICKTEST=1)
-
-    def run(self, *args, **kwargs):
-        for env_name, env_value in self.extra_env.items():
-            os.environ[env_name] = str(env_value)
-        test.run(self, *args, **kwargs)
-
 # -*- Installation Requires -*-
 
 py_version = sys.version_info
@@ -224,7 +212,6 @@ setup(
     install_requires=install_requires,
     tests_require=tests_require,
     test_suite='nose.collector',
-    cmdclass={'quicktest': quicktest},
     classifiers=classifiers,
     entry_points=entrypoints,
     long_description=long_description,