Ask Solem committed 8 years ago (commit 0b4c8ad4a8)

+ 2 - 0
celery/backends/cache.py

@@ -46,6 +46,8 @@ def import_best_memcache():
 
 
 def get_best_memcache(*args, **kwargs):
+    # pylint: disable=unpacking-non-sequence
+    #   This is most definitely a sequence, but pylint thinks it's not.
     is_pylibmc, memcache, key_t = import_best_memcache()
     Client = _Client = memcache.Client
 

+ 5 - 6
celery/bin/amqp.py

@@ -5,7 +5,7 @@
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
-import cmd
+import cmd as _cmd
 import sys
 import shlex
 import pprint
@@ -111,7 +111,7 @@ def format_declare_queue(ret):
     return 'ok. queue:{0} messages:{1} consumers:{2}.'.format(*ret)
 
 
-class AMQShell(cmd.Cmd):
+class AMQShell(_cmd.Cmd):
     """AMQP API Shell.
 
     Arguments:
@@ -124,7 +124,7 @@ class AMQShell(cmd.Cmd):
     conn = None
     chan = None
     prompt_fmt = '{self.counter}> '
-    identchars = cmd.IDENTCHARS = '.'
+    identchars = _cmd.IDENTCHARS = '.'
     needs_reconnect = False
     counter = 1
     inc_counter = count(2)
@@ -186,7 +186,7 @@ class AMQShell(cmd.Cmd):
         self.connect = kwargs.pop('connect')
         self.silent = kwargs.pop('silent', False)
         self.out = kwargs.pop('out', sys.stderr)
-        cmd.Cmd.__init__(self, *args, **kwargs)
+        _cmd.Cmd.__init__(self, *args, **kwargs)
         self._reconnect()
 
     def note(self, m):
@@ -284,7 +284,7 @@ class AMQShell(cmd.Cmd):
             self.respond(self.dispatch(cmd, arg))
         except (AttributeError, KeyError) as exc:
             self.default(line)
-        except Exception as exc:
+        except Exception as exc:  # pylint: disable=broad-except
             self.say(exc)
             self.needs_reconnect = True
 
@@ -336,7 +336,6 @@ class AMQPAdmin(object):
             return shell.cmdloop()
         except KeyboardInterrupt:
             self.note('(bibi)')
-            pass
 
     def note(self, m):
         if not self.silent:
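
Why the alias: the hunk around line 284 already shows a local variable named cmd being dispatched, which would shadow the stdlib cmd module and trip pylint's redefinition checks; importing the module as _cmd sidesteps that. A short, runnable sketch of the pattern with a hypothetical shell class:

    import cmd as _cmd   # aliased so a local variable named `cmd` cannot shadow the module

    class EchoShell(_cmd.Cmd):
        prompt = '> '

        def onecmd(self, line):
            # `cmd` is now just a plain local name, no clash with the module.
            cmd, arg, line = self.parseline(line)
            if cmd == 'quit':
                return True
            print('{0!r} -> {1!r}'.format(cmd, arg))
            return False

    shell = EchoShell()
    shell.onecmd('declare queue')   # prints 'declare' -> 'queue'
    shell.onecmd('quit')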

+ 4 - 0
celery/bin/base.py

@@ -191,9 +191,13 @@ class Command(object):
         raise NotImplementedError('subclass responsibility')
 
     def on_error(self, exc):
+        # pylint: disable=method-hidden
+        #   on_error argument to __init__ may override this method.
         self.error(self.colored.red('Error: {0}'.format(exc)))
 
     def on_usage_error(self, exc):
+        # pylint: disable=method-hidden
+        #   on_usage_error argument to __init__ may override this method.
         self.handle_error(exc)
 
     def on_concurrency_setup(self):
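
The method-hidden suppressions above exist because, as the added comments say, an on_error/on_usage_error callback passed to __init__ may be assigned to the instance and deliberately shadow the method. A minimal sketch of that pattern (hypothetical, not the real Command class):

    class Command(object):
        def __init__(self, on_error=None):
            if on_error is not None:
                # The instance attribute intentionally hides the method below.
                self.on_error = on_error

        def on_error(self, exc):
            # pylint: disable=method-hidden
            #   The on_error argument to __init__ may override this method.
            print('Error: {0}'.format(exc))

    Command().on_error(ValueError('boom'))                          # uses the method
    Command(on_error=lambda exc: print('custom!')).on_error(None)   # uses the callback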

+ 20 - 4
celery/canvas.py

@@ -197,10 +197,9 @@ class Signature(dict):
                  type=None, subtask_type=None, immutable=False,
                  app=None, **ex):
         self._app = app
-        init = dict.__init__
 
         if isinstance(task, dict):
-            init(self, task)  # works like dict(d)
+            super(Signature, self).__init__(task)  # works like dict(d)
         else:
             # Also supports using task class/instance instead of string name.
             try:
@@ -210,8 +209,7 @@ class Signature(dict):
             else:
                 self._type = task
 
-            init(
-                self,
+            super(Signature, self).__init__(
                 task=task_name, args=tuple(args or ()),
                 kwargs=kwargs or {},
                 options=dict(options or {}, **ex),
@@ -264,6 +262,8 @@ class Signature(dict):
             args, kwargs, options = self._merge(args, kwargs, options)
         else:
             args, kwargs, options = self.args, self.kwargs, self.options
+        # pylint: disable=too-many-function-args
+        #   Borks on this, as it's a property
         return _apply(args, kwargs, **options)
 
     def _merge(self, args=(), kwargs={}, options={}, force=False):
@@ -308,6 +308,8 @@ class Signature(dict):
         Returns:
             ~@AsyncResult: promise of future evaluation.
         """
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         opts = self.options
         try:
             tid = opts['task_id']
@@ -323,6 +325,8 @@ class Signature(dict):
             opts['group_id'] = group_id
         if chord:
             opts['chord'] = chord
+        # pylint: disable=too-many-function-args
+        #   Borks on this, as it's a property.
         return self.AsyncResult(tid)
     _freeze = freeze
 
@@ -595,6 +599,8 @@ class chain(Signature):
     def run(self, args=(), kwargs={}, group_id=None, chord=None,
             task_id=None, link=None, link_error=None, publisher=None,
             producer=None, root_id=None, parent_id=None, app=None, **options):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         app = app or self.app
         use_link = self._use_link
         if use_link is None and app.conf.task_protocol == 1:
@@ -620,6 +626,8 @@ class chain(Signature):
 
     def freeze(self, _id=None, group_id=None, chord=None,
                root_id=None, parent_id=None):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         _, results = self._frozen = self.prepare_steps(
             self.args, self.tasks, root_id, parent_id, None,
             self.app, _id, group_id, chord, clone=False,
@@ -947,6 +955,8 @@ class group(Signature):
 
     def _apply_tasks(self, tasks, producer=None, app=None, p=None,
                      add_to_parent=None, chord=None, **options):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         app = app or self.app
         with app.producer_or_acquire(producer) as producer:
             for sig, res in tasks:
@@ -1033,6 +1043,8 @@ class group(Signature):
         return self.apply_async(partial_args, **options)
 
     def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         stack = deque(self.tasks)
         while stack:
             task = maybe_signature(stack.popleft(), app=self._app).clone()
@@ -1046,6 +1058,8 @@ class group(Signature):
 
     def freeze(self, _id=None, group_id=None, chord=None,
                root_id=None, parent_id=None):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         opts = self.options
         try:
             gid = opts['task_id']
@@ -1133,6 +1147,8 @@ class chord(Signature):
 
     def freeze(self, _id=None, group_id=None, chord=None,
                root_id=None, parent_id=None):
+        # pylint: disable=redefined-outer-name
+        #   XXX chord is also a class in outer scope.
         if not isinstance(self.tasks, group):
             self.tasks = group(self.tasks, app=self.app)
         bodyres = self.body.freeze(_id, parent_id=self.id, root_id=root_id)
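
Two recurring changes in this file: dict.__init__ is now called through super() instead of through a cached local, and every method taking a chord argument documents that it shadows the module-level chord class. A condensed, runnable sketch of both, using a stripped-down Signature (hypothetical, not the real canvas.py):

    class Signature(dict):
        def __init__(self, task=None, args=(), kwargs=None, **options):
            if isinstance(task, dict):
                super(Signature, self).__init__(task)   # works like dict(d)
            else:
                super(Signature, self).__init__(
                    task=task, args=tuple(args), kwargs=kwargs or {},
                    options=options,
                )

        def freeze(self, chord=None):
            # pylint: disable=redefined-outer-name
            #   `chord` is also a class in the outer scope of the real module.
            if chord is not None:
                self['options'] = dict(self['options'], chord=chord)
            return self

    print(Signature('tasks.add', (2, 2)).freeze(chord='chord-id'))
    print(Signature({'task': 'tasks.mul', 'args': (3, 3)}))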

+ 10 - 7
celery/concurrency/asynpool.py

@@ -301,7 +301,9 @@ class ResultHandler(_pool.ResultHandler):
     def register_with_event_loop(self, hub):
         self.handle_event = self._make_process_result(hub)
 
-    def handle_event(self, fileno):
+    def handle_event(self, *args):
+        # pylint: disable=method-hidden
+        #   register_with_event_loop overrides this
         raise RuntimeError('Not registered with event loop')
 
     def on_stop_not_started(self):
@@ -500,7 +502,7 @@ class AsynPool(_pool.Pool):
             try:
                 tref = trefs.pop(job)
                 tref.cancel()
-                del(tref)
+                del tref
             except (KeyError, AttributeError):
                 pass  # out of scope
         self._discard_tref = _discard_tref
@@ -842,7 +844,7 @@ class AsynPool(_pool.Pool):
                 while Hw < 4:
                     try:
                         Hw += send(header, Hw)
-                    except Exception as exc:
+                    except Exception as exc:  # pylint: disable=broad-except
                         if getattr(exc, 'errno', None) not in UNAVAIL:
                             raise
                         # suspend until more data
@@ -858,7 +860,7 @@ class AsynPool(_pool.Pool):
                 while Bw < body_size:
                     try:
                         Bw += send(body, Bw)
-                    except Exception as exc:
+                    except Exception as exc:  # pylint: disable=broad-except
                         if getattr(exc, 'errno', None) not in UNAVAIL:
                             raise
                         # suspend until more data
@@ -907,7 +909,7 @@ class AsynPool(_pool.Pool):
                 while Hw < 4:
                     try:
                         Hw += send(header, Hw)
-                    except Exception as exc:
+                    except Exception as exc:  # pylint: disable=broad-except
                         if getattr(exc, 'errno', None) not in UNAVAIL:
                             raise
                         yield
@@ -916,7 +918,7 @@ class AsynPool(_pool.Pool):
                 while Bw < body_size:
                     try:
                         Bw += send(body, Bw)
-                    except Exception as exc:
+                    except Exception as exc:  # pylint: disable=broad-except
                         if getattr(exc, 'errno', None) not in UNAVAIL:
                             raise
                         # suspend until more data
@@ -1202,7 +1204,7 @@ class AsynPool(_pool.Pool):
         writer = _get_job_writer(job)
         if writer:
             self._active_writers.discard(writer)
-            del(writer)
+            del writer
 
         if not proc.dead:
             proc.dead = True
@@ -1262,6 +1264,7 @@ class AsynPool(_pool.Pool):
 
     @classmethod
     def _help_stuff_finish(cls, pool):
+        # pylint: disable=arguments-differ
         debug(
             'removing tasks from inqueue until task handler finished',
         )
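
The handle_event change is the same method-hidden pattern: the method is a guard that raises until register_with_event_loop() replaces it with a real handler bound as an instance attribute. A tiny runnable sketch of that idea (hypothetical hub, not the real asynpool machinery):

    class ResultHandler(object):
        def register_with_event_loop(self, hub):
            # From now on the instance attribute is called instead of the method.
            self.handle_event = lambda *args: hub.append(args)

        def handle_event(self, *args):
            # pylint: disable=method-hidden
            #   register_with_event_loop overrides this with a real handler.
            raise RuntimeError('Not registered with event loop')

    events = []
    handler = ResultHandler()
    handler.register_with_event_loop(events)
    handler.handle_event(3)
    print(events)   # [(3,)]

The broad-except markers are annotations rather than narrowed handlers because each body already re-raises anything whose errno is not in UNAVAIL.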

+ 9 - 7
celery/schedules.py

@@ -392,7 +392,7 @@ class crontab(BaseSchedule):
     """
 
     def __init__(self, minute='*', hour='*', day_of_week='*',
-                 day_of_month='*', month_of_year='*', nowfun=None, app=None):
+                 day_of_month='*', month_of_year='*', **kwargs):
         self._orig_minute = cronfield(minute)
         self._orig_hour = cronfield(hour)
         self._orig_day_of_week = cronfield(day_of_week)
@@ -403,8 +403,7 @@ class crontab(BaseSchedule):
         self.day_of_week = self._expand_cronspec(day_of_week, 7)
         self.day_of_month = self._expand_cronspec(day_of_month, 31, 1)
         self.month_of_year = self._expand_cronspec(month_of_year, 12, 1)
-        self.nowfun = nowfun
-        self._app = app
+        super(crontab, self).__init__(**kwargs)
 
     @staticmethod
     def _expand_cronspec(cronspec, max_, min_=0):
@@ -533,6 +532,8 @@ class crontab(BaseSchedule):
                                  self._orig_month_of_year), None)
 
     def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd):
+        # pylint: disable=redefined-outer-name
+        # caching global ffwd
         tz = tz or self.tz
         last_run_at = self.maybe_make_aware(last_run_at)
         now = self.maybe_make_aware(self.now())
@@ -595,6 +596,8 @@ class crontab(BaseSchedule):
         Returns when the periodic task should run next as a
         :class:`~datetime.timedelta`.
         """
+        # pylint: disable=redefined-outer-name
+        # caching global ffwd
         return remaining(*self.remaining_delta(last_run_at, ffwd=ffwd))
 
     def is_due(self, last_run_at):
@@ -645,7 +648,7 @@ def maybe_schedule(s, relative=False, app=None):
 
 
 @python_2_unicode_compatible
-class solar(schedule):
+class solar(BaseSchedule):
     """Solar event.
 
     A solar event can be used as the ``run_every`` value of a
@@ -720,13 +723,12 @@ class solar(schedule):
         'dusk_astronomical': True,
     }
 
-    def __init__(self, event, lat, lon, nowfun=None, app=None):
+    def __init__(self, event, lat, lon, **kwargs):
         self.ephem = __import__('ephem')
         self.event = event
         self.lat = lat
         self.lon = lon
-        self.nowfun = nowfun
-        self._app = app
+        super(solar, self).__init__(**kwargs)
 
         if event not in self._all_events:
             raise ValueError(SOLAR_INVALID_EVENT.format(
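
Both crontab and solar stop storing nowfun/_app themselves and instead forward them to BaseSchedule.__init__ via **kwargs; solar is also rebased directly on BaseSchedule. A minimal sketch of that shape (hypothetical, not the real schedules.py):

    class BaseSchedule(object):
        def __init__(self, nowfun=None, app=None):
            # The common keyword arguments now live in one place.
            self.nowfun = nowfun
            self._app = app

    class crontab(BaseSchedule):
        def __init__(self, minute='*', hour='*', **kwargs):
            self.minute, self.hour = minute, hour
            super(crontab, self).__init__(**kwargs)

    class solar(BaseSchedule):
        def __init__(self, event, lat, lon, **kwargs):
            self.event, self.lat, self.lon = event, lat, lon
            super(solar, self).__init__(**kwargs)

    s = solar('sunrise', 59.33, 18.06)
    print(s.event, s.nowfun, s._app)   # sunrise None None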

+ 1 - 1
celery/utils/functional.py

@@ -254,7 +254,7 @@ def head_from_fun(fun, bound=False, debug=False):
     if debug:  # pragma: no cover
         print(definition, file=sys.stderr)
     namespace = {'__name__': fun.__module__}
-    # pylint: disable=use-of-exec
+    # pylint: disable=exec-used
     # Tasks are rarely, if ever, created at runtime - exec here is fine.
     exec(definition, namespace)
     result = namespace[name]
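
The only change here is the disable name: pylint's message for exec() is exec-used, not use-of-exec, so the old comment silenced nothing. The suppressed pattern, reduced to a runnable sketch:

    definition = 'def add(x, y):\n    return x + y\n'
    namespace = {'__name__': __name__}
    # pylint: disable=exec-used
    #   The definition string is generated locally, so exec here is fine.
    exec(definition, namespace)
    print(namespace['add'](2, 3))   # 5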

+ 4 - 4
celery/utils/log.py

@@ -93,9 +93,9 @@ def logger_isa(l, p, max=1000):
     return False
 
 
-def _using_logger_parent(base_logger, logger_):
-    if not logger_isa(logger_, base_logger):
-        logger_.parent = base_logger
+def _using_logger_parent(parent_logger, logger_):
+    if not logger_isa(logger_, parent_logger):
+        logger_.parent = parent_logger
     return logger_
 
 
@@ -166,7 +166,7 @@ class ColorFormatter(logging.Formatter):
                     return safe_str(color(msg))
                 except UnicodeDecodeError:  # pragma: no cover
                     return safe_str(msg)  # skip colors
-            except Exception as exc:  # pylint: disable=broad-exc
+            except Exception as exc:  # pylint: disable=broad-except
                 prev_msg, record.exc_info, record.msg = (
                     record.msg, 1, '<Unrepresentable {0!r}: {1!r}>'.format(
                         type(msg), exc

+ 6 - 0
celery/worker/request.py

@@ -217,6 +217,8 @@ class Request(object):
             self.acknowledge()
 
         request = self.request_dict
+        # pylint: disable=unpacking-non-sequence
+        #    payload is a property, so pylint doesn't think it's a tuple.
         args, kwargs, embed = self._payload
         request.update({'loglevel': loglevel, 'logfile': logfile,
                         'hostname': self.hostname, 'is_eager': False,
@@ -474,6 +476,8 @@ class Request(object):
     def chord(self):
         # used by backend.mark_as_failure when failure is reported
         # by parent process
+        # pylint: disable=unpacking-non-sequence
+        #    payload is a property, so pylint doesn't think it's a tuple.
         _, _, embed = self._payload
         return embed.get('chord')
 
@@ -481,6 +485,8 @@ class Request(object):
     def errbacks(self):
         # used by backend.mark_as_failure when failure is reported
         # by parent process
+        # pylint: disable=unpacking-non-sequence
+        #    payload is a property, so pylint doesn't think it's a tuple.
         _, _, embed = self._payload
         return embed.get('errbacks')
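
All three hunks in this file suppress the same false positive: _payload is a property, so pylint cannot tell that it evaluates to the (args, kwargs, embed) tuple being unpacked. A stripped-down sketch of the pattern (hypothetical, not the real Request class):

    class Request(object):
        def __init__(self, body):
            self._body = body

        @property
        def _payload(self):
            return self._body   # always an (args, kwargs, embed) tuple

        @property
        def chord(self):
            # pylint: disable=unpacking-non-sequence
            #   _payload is a property, so pylint doesn't know it's a tuple.
            _, _, embed = self._payload
            return embed.get('chord')

    req = Request(((2, 2), {}, {'chord': 'chord-id'}))
    print(req.chord)   # chord-id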