
PEP8ify + pyflakes

Ask Solem committed 14 years ago
commit 785ab7925a
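Most of the deletions below are unused imports and names of the kind pyflakes reports; the rest are PEP 8 whitespace and comment-placement fixes. As a hypothetical illustration (not taken from this commit), pyflakes flags code like:

    import sys    # pyflakes: 'sys' imported but unused

    def double(x):
        return x * 2

Running pyflakes over a file (for example, pyflakes celery/tests/test_utils.py) prints one warning per unused import or undefined name.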

+ 2 - 1
celery/backends/amqp.py

@@ -154,7 +154,8 @@ class AMQPBackend(BaseDictBackend):
                 payload = self._cache[task_id] = result.payload
                 return payload
             elif task_id in self._cache:
-                return self._cache[task_id]     # use previously received state.
+                # use previously received state.
+                return self._cache[task_id]
             return {"status": states.PENDING, "result": None}
         finally:
             channel.close()

+ 1 - 1
celery/bin/celeryd_detach.py

@@ -1,7 +1,7 @@
 import os
 import sys
 
-from optparse import OptionParser, BadOptionError, make_option as Option
+from optparse import OptionParser, BadOptionError
 
 from celery import __version__
 from celery.bin.base import daemon_options

+ 0 - 3
celery/concurrency/base.py

@@ -81,7 +81,6 @@ class BasePool(object):
                              error_callback=on_worker_error,
                              waitforslot=self.putlocks)
 
-
     def on_ready(self, callbacks, errbacks, ret_value):
         """What to do when a worker task is ready and its return value has
         been collected."""
@@ -121,5 +120,3 @@ class BasePool(object):
     @property
     def active(self):
         return self._state == self.RUN
-
-

+ 5 - 10
celery/concurrency/evlet.py

@@ -2,6 +2,11 @@ import sys
 
 from time import time
 
+import eventlet
+import eventlet.debug
+eventlet.monkey_patch()
+eventlet.debug.hub_prevent_multiple_readers(False)
+
 from eventlet import GreenPool
 from eventlet.greenthread import spawn, spawn_after_local
 from greenlet import GreenletExit
@@ -94,13 +99,3 @@ class TaskPool(BasePool):
 
     def blocking(self, fun, *args, **kwargs):
         return spawn(fun, *args, **kwargs).wait()
-
-    @classmethod
-    def on_import(cls):
-        import eventlet
-        import eventlet.debug
-        eventlet.monkey_patch()
-        eventlet.debug.hub_prevent_multiple_readers(False)
-TaskPool.on_import()
-
-
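
The evlet.py change is more than style: eventlet.monkey_patch() now runs at module import time, before the eventlet and greenlet imports below it, instead of inside an on_import() classmethod called after the class body. Patching must happen before other modules bind references to the blocking stdlib, and hub_prevent_multiple_readers(False) relaxes eventlet's guard against two greenthreads waiting on the same file descriptor. A minimal sketch of the import-order rule (module and function names assumed):

    import eventlet
    eventlet.monkey_patch()    # replaces socket, select, time, thread, ...

    import socket              # imported after patching: the green version

    def resolve(host):
        # this blocking call now cooperatively yields to the eventlet hub
        return socket.gethostbyname(host)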

+ 0 - 1
celery/concurrency/threads.py

@@ -1,4 +1,3 @@
-import threading
 from threadpool import ThreadPool, WorkRequest
 
 from celery.concurrency.base import apply_target, BasePool

+ 1 - 3
celery/events/__init__.py

@@ -13,6 +13,7 @@ from celery.utils import gen_unique_id
 
 event_exchange = Exchange("celeryev", type="topic")
 
+
 def create_event(type, fields):
     std = {"type": type,
            "timestamp": fields.get("timestamp") or time.time()}
@@ -156,7 +157,6 @@ class EventReceiver(object):
             by calling `consumer.channel.close()`.
 
         """
-        conf = self.app.conf
         consumer = Consumer(self.connection.channel(),
                             queues=[self.queue],
                             no_ack=True)
@@ -188,7 +188,6 @@ class EventReceiver(object):
                               timeout=timeout,
                               wakeup=wakeup))
 
-
     def wakeup_workers(self, channel=None):
         self.app.control.broadcast("heartbeat",
                                    connection=self.connection,
@@ -211,7 +210,6 @@ class EventReceiver(object):
         self.process(type, create_event(type, message_data))
 
 
-
 class Events(object):
 
     def __init__(self, app):

+ 0 - 2
celery/result.py

@@ -192,7 +192,6 @@ class AsyncResult(BaseAsyncResult):
                                           task_name=task_name, app=app)
 
 
-
 class TaskSetResult(object):
     """Working with :class:`~celery.task.sets.TaskSet` results.
 
@@ -383,7 +382,6 @@ class TaskSetResult(object):
 
         return list(results)
 
-
     def save(self, backend=None):
         """Save taskset result for later retrieval using :meth:`restore`.
 

+ 1 - 1
celery/schedules.py

@@ -251,7 +251,7 @@ class crontab(schedule):
     def remaining_estimate(self, last_run_at):
         """Returns when the periodic task should run next as a timedelta."""
         weekday = last_run_at.isoweekday()
-        if weekday == 7: # Sunday is day 0, not day 7.
+        if weekday == 7:    # Sunday is day 0, not day 7.
             weekday = 0
 
         execute_this_hour = (weekday in self.day_of_week and
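
The comment-spacing fix above sits next to a real subtlety: datetime.isoweekday() numbers Monday=1 through Sunday=7, while crontab's day_of_week sets use Sunday=0 through Saturday=6, hence the remap. A small illustration (sample date assumed):

    from datetime import datetime

    last_run_at = datetime(2010, 11, 7)    # a Sunday
    weekday = last_run_at.isoweekday()     # 7 under ISO numbering
    if weekday == 7:    # Sunday is day 0, not day 7.
        weekday = 0
    assert weekday == 0    # crontab's convention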

+ 0 - 1
celery/tests/test_concurrency_processes.py

@@ -4,7 +4,6 @@ import unittest2 as unittest
 from itertools import cycle
 
 from celery.concurrency import processes as mp
-from celery.concurrency.base import BasePool
 from celery.datastructures import ExceptionInfo
 from celery.utils import noop
 

+ 0 - 3
celery/tests/test_utils.py

@@ -1,12 +1,9 @@
 import pickle
-import sys
 import unittest2 as unittest
 
 from celery import utils
 from celery.utils import promise, mpromise, maybe_promise
 
-from celery.tests.utils import execute_context, mask_modules
-
 
 def double(x):
     return x * 2

+ 0 - 1
celery/tests/test_worker.py

@@ -517,7 +517,6 @@ class test_WorkController(unittest.TestCase):
         self.assertRaises(KeyboardInterrupt, worker.process_task, task)
         self.assertEqual(worker._state, worker.TERMINATE)
 
-
     def test_process_task_raise_regular(self):
         worker = self.worker
         worker.pool = MockPool(raise_regular=True)

+ 0 - 1
celery/worker/__init__.py

@@ -250,7 +250,6 @@ class WorkController(object):
             self.stop()
             raise exc
 
-
     def process_task(self, wrapper):
         """Process task by sending it to the pool of workers."""
         try:

+ 4 - 1
examples/eventlet/webcrawler.py

@@ -18,11 +18,14 @@ from eventlet import Timeout
 from eventlet.green import urllib2
 
 # http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
-url_regex = re.compile(r'\b(([\w-]+://?|www[.])[^\s()<>]+(?:\([\w\d]+\)|([^[:punct:]\s]|/)))')
+url_regex = re.compile(
+    r'\b(([\w-]+://?|www[.])[^\s()<>]+(?:\([\w\d]+\)|([^[:punct:]\s]|/)))')
+
 
 def domain(url):
     return urlparse.urlsplit(url)[1].split(":")[0]
 
+
 @task
 def crawl(url):
     print("crawling: %r" % (url, ))
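
The wrapped pattern is the original one-liner split only at the call's argument, so matching behavior is unchanged. A quick sanity check (sample text assumed):

    import re

    # John Gruber's liberal URL regex, wrapped as above.
    url_regex = re.compile(
        r'\b(([\w-]+://?|www[.])[^\s()<>]+(?:\([\w\d]+\)|([^[:punct:]\s]|/)))')

    text = "crawl http://example.com/page and www.python.org today"
    print([match[0] for match in url_regex.findall(text)])
    # -> ['http://example.com/page', 'www.python.org']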