
PEP8ify + pyflakes

Ask Solem authored 14 years ago · parent · commit d16c9061db

+ 0 - 1
celery/app/amqp.py

@@ -140,7 +140,6 @@ class Queues(dict):
         return cls(queues)
 
 
-
 class TaskPublisher(messaging.Publisher):
     auto_declare = True
     retry = False

+ 2 - 1
celery/app/base.py

@@ -108,7 +108,8 @@ class Settings(datastructures.ConfigurationView):
 
     @property
     def BROKER_TRANSPORT(self):
-        """Resolves compat aliases ``BROKER_BACKEND`` and ``CARROT_BACKEND``."""
+        """Resolves compat aliases :setting:`BROKER_BACKEND`
+        and :setting:`CARROT_BACKEND`."""
         return (self.get("BROKER_TRANSPORT") or
                 self.get("BROKER_BACKEND") or
                 self.get("CARROT_BACKEND"))

+ 0 - 3
celery/app/task/__init__.py

@@ -7,7 +7,6 @@ from celery.exceptions import MaxRetriesExceededError, RetryTaskError
 from celery.execute.trace import TaskTrace
 from celery.registry import tasks, _unpickle_task
 from celery.result import EagerResult
-from celery.schedules import maybe_schedule
 from celery.utils import mattrgetter, gen_unique_id, fun_takes_kwargs
 
 extract_exec_options = mattrgetter("queue", "routing_key",
@@ -705,5 +704,3 @@ class BaseTask(object):
     @property
     def __name__(self):
         return self.__class__.__name__
-
-
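
This hunk, like several below (celery/loaders/app.py, celery/loaders/default.py, celery/utils/compat.py and the test modules), drops an import that pyflakes reports as unused and trims trailing blank lines for PEP 8. A minimal, hypothetical module showing the kind of line pyflakes flags:

    # demo.py -- hypothetical; pyflakes reports "'os' imported but unused"
    # because the name is never referenced after the import.
    import os

    def greet(name):
        return "Hello, %s" % name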

+ 1 - 1
celery/apps/worker.py

@@ -302,7 +302,7 @@ def install_worker_int_again_handler(worker):
             worker.terminate(in_sighandler=True)
         raise SystemTerminate()
 
-    platforms.signals["SIGINT"] =_stop
+    platforms.signals["SIGINT"] = _stop
 
 
 def install_worker_term_handler(worker):

+ 1 - 1
celery/backends/base.py

@@ -300,7 +300,7 @@ class KeyValueStoreBackend(BaseDictBackend):
             else:
                 if cached["status"] in states.READY_STATES:
                     yield task_id, cached
-                    cached_ids.add(taskid)
+                    cached_ids.add(task_id)
 
         ids ^= cached_ids
         while ids:
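
The real fix here is a pyflakes-caught undefined name: the loop variable is task_id, not taskid. For readers of the surrounding lines, the ids ^= cached_ids step then prunes the already-yielded ids from the set that still needs polling; a small sketch, assuming cached_ids only ever contains ids that are also in ids:

    ids = set(["a", "b", "c"])
    cached_ids = set(["b"])
    ids ^= cached_ids              # symmetric difference: drops the cached id
    assert ids == set(["a", "c"])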

+ 1 - 1
celery/beat.py

@@ -98,6 +98,7 @@ class ScheduleEntry(object):
         self.__dict__.update({"task": other.task, "schedule": other.schedule,
                               "args": other.args, "kwargs": other.kwargs,
                               "options": other.options})
+
     def is_due(self):
         """See :meth:`celery.task.base.PeriodicTask.is_due`."""
         return self.schedule.is_due(self.last_run_at)
@@ -233,7 +234,6 @@ class Scheduler(object):
         finally:
             self._last_sync = time.time()
 
-
     def sync(self):
         pass
 

+ 1 - 2
celery/bin/celeryd_detach.py

@@ -20,8 +20,7 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
         except Exception:
             import logging
             from celery.log import setup_logger
-            logger = setup_logger(logfile=self.logfile,
-                                  loglevel=logging.ERROR)
+            logger = setup_logger(logfile=logfile, loglevel=logging.ERROR)
             logger.critical("Can't exec %r" % (
                     " ".join([path] + argv), ),
                     exc_info=sys.exc_info())
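
Here self.logfile referred to a name that does not exist in this scope; the fix uses the logfile argument that detach() actually receives and re-wraps the call to fit the line limit. A hypothetical reduction of the same bug:

    # self only exists inside methods; in a plain function it is an
    # undefined name, reported statically by pyflakes and raising
    # NameError at call time.
    def log_error(logfile=None):
        return self.logfile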

+ 2 - 1
celery/concurrency/processes/__init__.py

@@ -15,7 +15,8 @@ if platform.system() == "Windows":
     # On Windows os.kill calls TerminateProcess which cannot be
     # handled by # any process, so this is needed to terminate the task
     # *and its children* (if any).
-    from celery.concurrency.processes._win import kill_processtree as _kill
+    from celery.concurrency.processes import _win
+    _kill = _win.kill_processtree  # noqa
 
 
 class TaskPool(BasePool):

+ 2 - 1
celery/execute/trace.py

@@ -95,7 +95,8 @@ class TaskTrace(object):
                                  trace.exc_type, trace.tb, trace.strtb)
         return r
 
-    def handle_after_return(self, status, retval, type_, tb, strtb, einfo=None):
+    def handle_after_return(self, status, retval, type_, tb, strtb,
+            einfo=None):
         if status in states.EXCEPTION_STATES:
             einfo = ExceptionInfo((retval, type_, tb))
         self.task.after_return(status, retval, self.task_id,

+ 0 - 4
celery/loaders/app.py

@@ -1,9 +1,5 @@
 from __future__ import absolute_import
 
-import os
-
-from celery.datastructures import DictAttribute
-from celery.exceptions import ImproperlyConfigured
 from celery.loaders.base import BaseLoader
 
 

+ 0 - 2
celery/loaders/default.py

@@ -3,8 +3,6 @@ from __future__ import absolute_import
 import os
 import warnings
 
-from importlib import import_module
-
 from celery.datastructures import AttributeDict
 from celery.exceptions import NotConfigured
 from celery.loaders.base import BaseLoader

+ 2 - 1
celery/platforms.py

@@ -273,7 +273,8 @@ class Signals(object):
         """Get signal number from signal name."""
         if isinstance(signal_name, int):
             return signal_name
-        if not isinstance(signal_name, basestring) or not signal_name.isupper():
+        if not isinstance(signal_name, basestring) \
+                or not signal_name.isupper():
             raise TypeError("signal name must be uppercase string.")
         if not signal_name.startswith("SIG"):
             signal_name = "SIG" + signal_name

+ 1 - 2
celery/result.py

@@ -7,7 +7,6 @@ from itertools import imap
 
 from celery import current_app
 from celery import states
-from celery import current_app
 from celery.app import app_or_default
 from celery.exceptions import TimeoutError
 from celery.registry import _unpickle_task
@@ -310,7 +309,7 @@ class ResultSet(object):
         """Revoke all tasks in the set."""
         with self.app.default_connection(connection, connect_timeout) as conn:
             for result in self.results:
-                result.revoke(connection=connection)
+                result.revoke(connection=conn)
 
     def __iter__(self):
         return self.iterate()
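
Besides dropping the duplicated current_app import, this hunk fixes a genuine bug: the with block opens conn, but each revoke() call was still handed the original connection argument (often None), so the freshly acquired connection was never reused. A hypothetical free-standing sketch of the corrected pattern:

    # Acquire one connection for the whole loop and pass *that* handle to
    # every call, not the (possibly None) argument it was derived from.
    def revoke_all(results, app, connection=None, connect_timeout=None):
        with app.default_connection(connection, connect_timeout) as conn:
            for result in results:
                result.revoke(connection=conn)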

+ 1 - 1
celery/task/base.py

@@ -1,5 +1,5 @@
 from celery import current_app
-from celery.app.task import Context, TaskType, BaseTask
+from celery.app.task import Context, TaskType, BaseTask  # noqa
 from celery.schedules import maybe_schedule
 from celery.utils import deprecated
 from celery.utils import timeutils

+ 1 - 1
celery/task/http.py

@@ -5,7 +5,7 @@ from urlparse import urlparse
 try:
     from urlparse import parse_qsl
 except ImportError:
-    from cgi import parse_qsl
+    from cgi import parse_qsl  # noqa
 
 from anyjson import deserialize
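
This hunk and the celery/task/base.py one above only add "# noqa" markers, which flake8-style checkers honour to skip warnings on a line: the imports are a compatibility fallback and a re-export respectively, so they would otherwise be reported even though they are intentional. A minimal, hypothetical illustration of the fallback idiom:

    # Made-up module pair; only the pattern matters.  Without the marker
    # the checker flags the second import as redefining an unused name
    # (or, for pure re-exports, as "imported but unused").
    try:
        from simplejson import loads
    except ImportError:
        from json import loads  # noqa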
 

+ 1 - 1
celery/tests/test_app/test_loaders.py

@@ -204,7 +204,7 @@ class TestDefaultLoader(unittest.TestCase):
             def find_module(self, name):
                 raise ImportError(name)
 
-        with catch_warnings(record=True) as log:
+        with catch_warnings(record=True):
             l = _Loader()
             self.assertDictEqual(l.conf, {})
             context_executed[0] = True

+ 0 - 2
celery/tests/test_bin/test_celeryd.py

@@ -11,8 +11,6 @@ try:
 except ImportError:
     current_process = None  # noqa
 
-from functools import wraps
-
 from nose import SkipTest
 from kombu.tests.utils import redirect_stdouts
 

+ 0 - 1
celery/tests/test_compat/test_log.py

@@ -4,7 +4,6 @@ import sys
 import logging
 from celery.tests.utils import unittest
 from tempfile import mktemp
-from celery.tests.utils import StringIO
 
 from celery import log
 from celery.log import (setup_logger, setup_task_logger,

+ 0 - 1
celery/tests/test_task/test_chord.py

@@ -43,7 +43,6 @@ class test_unlock_chord_task(AppCase):
         self.assertIn("celery.chord_unlock", tasks)
 
 
-
 class test_chord(AppCase):
 
     def test_apply(self):

+ 3 - 4
celery/tests/test_worker/test_worker.py

@@ -260,16 +260,15 @@ class test_Consumer(unittest.TestCase):
             self.assertTrue(log)
             self.assertIn("unknown message", log[0].message.args[0])
 
-
     @patch("celery.utils.timer2.to_timestamp")
     def test_receive_message_eta_OverflowError(self, to_timestamp):
         to_timestamp.side_effect = OverflowError()
         l = MyKombuConsumer(self.ready_queue, self.eta_schedule, self.logger,
                              send_events=False)
         m = create_message(Mock(), task=foo_task.name,
-                                    args=("2, 2"),
-                                    kwargs={},
-                                    eta=datetime.now().isoformat())
+                                   args=("2, 2"),
+                                   kwargs={},
+                                   eta=datetime.now().isoformat())
         l.event_dispatcher = Mock()
         l.pidbox_node = MockNode()
 

+ 2 - 3
celery/utils/__init__.py

@@ -387,13 +387,12 @@ def find_module(module, path=None, imp=None):
             last = None
             parts = module.split(".")
             for i, part in enumerate(parts[:-1]):
-                path = imp(".".join(parts[:i+1])).__path__
-                last = _imp.find_module(parts[i+1], path)
+                path = imp(".".join(parts[:i + 1])).__path__
+                last = _imp.find_module(parts[i + 1], path)
             return last
         return _imp.find_module(module)
 
 
-
 def import_from_cwd(module, imp=None):
     """Import module, but make sure it finds modules
     located in the current directory.
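
Only the whitespace around "+" changes, and an extra blank line between the two functions is dropped. For readers of the hunk, the slice arithmetic walks a dotted module name one package at a time: import the parent package, then look the next segment up on its __path__. A small sketch of what the joins produce for a hypothetical name:

    parts = "celery.utils.compat".split(".")
    steps = [(".".join(parts[:i + 1]), parts[i + 1])
             for i, _ in enumerate(parts[:-1])]
    print(steps)   # [('celery', 'utils'), ('celery.utils', 'compat')]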

+ 0 - 1
celery/utils/compat.py

@@ -236,7 +236,6 @@ except ImportError:
     OrderedDict = CompatOrderedDict  # noqa
 
 ############## logging.LoggerAdapter ########################################
-import inspect
 import logging
 try:
     import multiprocessing

+ 2 - 1
celery/worker/job.py

@@ -536,7 +536,8 @@ class TaskRequest(object):
                     tuple(map(get_symbol_by_name, whitelist))):
                 subject = self.email_subject.strip() % context
                 body = self.email_body.strip() % context
-                self.app.mail_admins(subject, body, fail_silently=fail_silently)
+                self.app.mail_admins(subject, body,
+                                     fail_silently=fail_silently)
 
     def repr_result(self, result, maxlen=46):
         # 46 is the length needed to fit