
Reorganized distribution contrib/ directory

Ask Solem 15 years ago
parent
commit
811ddad346

+ 0 - 61
contrib/bump

@@ -1,61 +0,0 @@
-#!/bin/bash
-# Bump version of python package in current directory.
-# Updates the version in package/__init__.py, and the version embedded as
-# reStructuredText in README.
-#
-# Usage: BUMP package_name [new_version] [-c]
-# If new_version is not specified the release part of the version will
-# be incremented.
-# If -c is set, the change will be committed and pushed.
-
-bump_version () {
-    commit=0
-    while getopts "c" flag; do
-        case $flag in
-            c)
-                commit=1
-            ;;
-        esac
-    done
-    shift $(($OPTIND - 1))
-    package="$1"
-    new_version="$2"
-    [ "$commit" -eq 1 ] && git pull origin master
-    current=$(python -c "
-import $package
-print($package.__version__)
-    ")
-    cur_major=$(echo "$current" | cut -d. -f 1)
-    cur_minor=$(echo "$current" | cut -d. -f 2)
-    cur_release=$(echo "$current" | cut -d. -f 3)
-    if [ -z "$new_version" ]; then
-        new_version="$cur_major.$cur_minor.$(($cur_release + 1))";
-        new_as_tuple="($cur_major, $cur_minor, $(($cur_release + 1)))";
-    fi
-    new_major=$(echo "$new_version" | cut -d. -f 1)
-    new_minor=$(echo "$new_version" | cut -d. -f 2)
-    new_release=$(echo "$new_version" | cut -d. -f 3)
-
-    new_as_tuple="($new_major, $new_minor, $new_release)"
-
-    echo "$package: $current -> $new_version"
-
-    perl -pi -e"s/(VERSION\s*=\s*)\((.+?)\);?/\$1$new_as_tuple/" \
-        "$package/__init__.py"
-    perl -pi -e"s/(:Version:)\s*(.+?)(\s*$)/\$1 $new_version\$3/i" README
-
-    [ "$commit" -eq 1 ] && (
-        git commit "$package/__init__.py" README \
-            -m "Bumped version to $new_version";
-        git push;
-    )
-    
-}
-
-if [ -z "$1" ]; then
-    echo "Usage: $(basename $0) package_name [new_version]"
-    exit 1
-fi
-
-bump_version $*
-
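
For reference, the removed bump script was invoked from the repository root with the package name, an optional explicit version, and an optional -c flag, as documented in its header comment and used by the pavement.py bump task below (the 1.0.3 version here is purely illustrative):

    ./contrib/bump -c celery      # increment the release part, then commit and push
    ./contrib/bump celery 1.0.3   # set an explicit version; no commit/push without -c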

+ 0 - 12
contrib/doc2ghpages

@@ -1,12 +0,0 @@
-#!/bin/bash
-
-(cd docs;
-    rm -rf .build;
-    make html;
-    (cd .build/html;
-        sphinx-to-github;))
-git checkout gh-pages
-cp -r docs/.build/html/* .
-git commit . -m "Autogenerated documentation for github."
-git push origin gh-pages
-git checkout master

+ 0 - 0
contrib/find-unprocessed-tasks-debug.sh → contrib/logtools/find-unprocessed-tasks-debug.sh


+ 0 - 0
contrib/find-unprocessed-tasks.sh → contrib/logtools/find-unprocessed-tasks.sh


+ 0 - 0
contrib/periodic-task-runtimes.sh → contrib/logtools/periodic-task-runtimes.sh


+ 0 - 0
contrib/doc4allmods → contrib/release/doc4allmods


+ 0 - 0
contrib/sphinx-to-rst.py → contrib/release/sphinx-to-rst.py


+ 0 - 0
contrib/verify-reference-index.sh → contrib/release/verify-reference-index.sh


+ 0 - 130
contrib/testconn.py

@@ -1,130 +0,0 @@
-import settings
-from django.core.management import setup_environ
-setup_environ(settings)
-from carrot.connection import DjangoBrokerConnection
-from carrot.messaging import Messaging
-from amqplib import client_0_8 as amqp
-from celery.task import dmap
-import operator
-import simplejson
-import time
-import multiprocessing
-import logging
-
-
-def get_logger():
-    logger = multiprocessing.get_logger()
-    logger.setLevel(logging.INFO)
-    multiprocessing.log_to_stderr()
-    return logger
-
-
-class MyMessager(Messaging):
-    queue = "conntest"
-    exchange = "conntest"
-    routing_key = "conntest"
-
-
-def _create_conn():
-    from django.conf import settings
-    conn = amqp.Connection(host=settings.BROKER_SERVER,
-                           userid=settings.BROKER_USER,
-                           password=settings.BROKER_PASSWORD,
-                           virtual_host=settings.BROKER_VHOST,
-                           insist=False)
-    return conn
-
-
-def _send2(msg):
-    conn = _create_conn()
-    channel = conn.channel()
-    msg = amqp.Message(msg)
-    msg.properties["delivery_mode"] = 2
-    channel.basic_publish(msg, exchange="conntest", routing_key="conntest")
-    conn.close()
-
-
-def _recv2():
-    conn = _create_conn()
-    channel = conn.channel()
-    channel.queue_declare(queue="conntest", durable=True, exclusive=False,
-                          auto_delete=False)
-    channel.exchange_declare(exchange="conntest", type="direct",
-                             durable=True, auto_delete=False)
-    channel.queue_bind(queue="conntest", exchange="conntest",
-                       routing_key="conntest")
-    m = channel.basic_get("conntest")
-    if m:
-        channel.basic_ack(m.delivery_tag)
-        print("RECEIVED MSG: %s" % m.body)
-    conn.close()
-
-
-def send_a_message(msg):
-    conn = DjangoBrokerConnection()
-    MyMessager(connection=conn).send({"message": msg})
-    conn.close()
-
-
-def discard_all():
-    conn = DjangoBrokerConnection()
-    MyMessager(connection=conn).consumer.discard_all()
-    conn.close()
-
-
-def receive_a_message():
-    logger = get_logger()
-    conn = DjangoBrokerConnection()
-    m = MyMessager(connection=conn).fetch()
-    if m:
-        msg = simplejson.loads(m.body)
-        logger.info("Message receieved: %s" % msg.get("message"))
-        m.ack()
-    conn.close()
-
-
-def connection_stress_test():
-    message_count = 0
-    discard_all()
-    while True:
-        send_a_message("FOOBARBAZ!!!")
-        time.sleep(0.1)
-        receive_a_message()
-        message_count += 1
-        print("Sent %d message(s)" % message_count)
-
-
-def connection_stress_test_mp():
-    message_count = 0
-    pool = multiprocessing.Pool(10)
-    discard_all()
-    while True:
-        pool.apply(send_a_message, ["FOOBARBAZ!!!"])
-        time.sleep(0.1)
-        r = pool.apply(receive_a_message)
-
-        message_count += 1
-        print("Sent %d message(s)" % message_count)
-
-
-def connection_stress_test2():
-    message_count = 0
-    while True:
-        _send2("FOOBARBAZ!!!")
-        time.sleep(0.1)
-        _recv2()
-        message_count += 1
-        print("Sent %d message(s)" % message_count)
-
-
-def task_stress_test():
-    task_count = 0
-    while True:
-        r = dmap(operator.add, [[2, 2], [4, 4], [8, 8]])
-        print("[2+2, 4+4, 8+8] = %s" % r)
-        task_count += 3
-        print("Executed %d task(s)" % task_count)
-
-if __name__ == "__main__":
-    #connection_stress_test_mp()
-    task_stress_test()
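
For context, the removed testconn.py expected to run against a Django project: it imports the project's settings module directly and, when executed as a script, runs task_stress_test(). A hypothetical invocation (project name and path are illustrative; the broker settings and the carrot/amqplib dependencies must be in place):

    cd myproject/                                             # hypothetical Django project containing settings.py
    PYTHONPATH=. python /path/to/celery/contrib/testconn.py   # runs task_stress_test() by default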

+ 0 - 48
contrib/testdynpool.py

@@ -1,48 +0,0 @@
-from billiard.pool import DynamicPool
-from multiprocessing import get_logger, log_to_stderr
-import logging
-
-
-def setup_logger():
-    log_to_stderr()
-    logger = get_logger()
-    logger.setLevel(logging.DEBUG)
-    return logger
-
-
-def target(n):
-    r = n * n
-    setup_logger().info("%d * %d = %d" % (n, n, r))
-    return r
-
-
-def exit_process():
-    setup_logger().error("EXITING NOW!")
-    import os
-    os._exit(0)
-
-
-def send_exit(pool):
-    pool.apply_async(exit_process)
-
-
-def do_work(pool):
-    results = [pool.apply_async(target, args=[i]) for i in range(10)]
-    [result.get() for result in results]
-
-
-def workpool():
-    pool = DynamicPool(2)
-    do_work(pool)
-    print("GROWING")
-    pool.grow(1)
-    do_work(pool)
-    send_exit(pool)
-    import time
-    time.sleep(2)
-    pool.replace_dead_workers()
-    do_work(pool)
-
-
-if __name__ == "__main__":
-    workpool()

+ 4 - 4
pavement.py

@@ -51,12 +51,12 @@ def upload_docs(options):
 
 @task
 def autodoc(options):
-    sh("contrib/doc4allmods/celery")
+    sh("contrib/release/doc4allmods/celery")
 
 
 @task
 def verifyindex(options):
-    sh("contrib/verify-reference-index.sh")
+    sh("contrib/release/verify-reference-index.sh")
 
 
 @task
@@ -73,14 +73,14 @@ def clean_readme(options):
 @task
 @needs("clean_readme")
 def readme(options):
-    sh("python contrib/sphinx-to-rst.py docs/templates/readme.txt \
+    sh("python contrib/release/sphinx-to-rst.py docs/templates/readme.txt \
             > README.txt")
     sh("ln -s README.rst README")
 
 
 @task
 def bump(options):
-    sh("contrib/bump -c celery")
+    sh("bump -c celery")
 
 
 @task
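
These paver tasks are run by name from the project root, for example (assuming paver is installed; note that after this change the bump task expects a bump command on the PATH instead of the removed contrib/bump script):

    paver autodoc       # runs contrib/release/doc4allmods/celery
    paver verifyindex   # runs contrib/release/verify-reference-index.sh
    paver bump          # runs: bump -c celery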