
Use new magic super()

Ask Solem 8 years ago
parent
commit
5c7edfb422
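
Every hunk below makes the same mechanical change: the Python 2 compatible call super(Class, self) is replaced by the zero-argument super() form, which is valid only on Python 3, where the compiler provides an implicit __class__ cell and super() picks up the current instance from the enclosing method's first argument. A minimal sketch of the two spellings (the class names here are hypothetical, not taken from the Celery codebase):

    class Base:
        def __init__(self, *args, **kwargs):
            self.args, self.kwargs = args, kwargs

    class Child(Base):
        def __init__(self, *args, **kwargs):
            # Python 2 compatible spelling, as removed by this commit:
            #     super(Child, self).__init__(*args, **kwargs)
            # Python 3 only: class and instance are supplied implicitly.
            super().__init__(*args, **kwargs)

Both spellings resolve the next class in Child's MRO, so behaviour is unchanged; the zero-argument form simply avoids repeating the class name in every call site.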

+ 1 - 1
celery/apps/worker.py

@@ -99,7 +99,7 @@ class Worker(WorkController):
             'worker_redirect_stdouts', redirect_stdouts)
         self.redirect_stdouts_level = self.app.either(
             'worker_redirect_stdouts_level', redirect_stdouts_level)
-        super(Worker, self).setup_defaults(**kwargs)
+        super().setup_defaults(**kwargs)
         self.purge = purge
         self.no_color = no_color
         self._isatty = isatty(sys.stdout)

+ 1 - 1
celery/backends/amqp.py

@@ -23,7 +23,7 @@ class AMQPBackend(BaseRPCBackend):
         deprecated.warn(
             'The AMQP backend', deprecation='4.0', removal='5.0',
             alternative='Please use RPC backend or a persistent backend.')
-        super(AMQPBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def _create_exchange(self, name, type='direct', delivery_mode=2):
         return self.Exchange(name=name,

+ 1 - 1
celery/backends/base.py

@@ -482,7 +482,7 @@ class BaseKeyValueStoreBackend(Backend):
         if hasattr(self.key_t, '__func__'):  # pragma: no cover
             self.key_t = self.key_t.__func__  # remove binding
         self._encode_prefixes()
-        super(BaseKeyValueStoreBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if self.implements_incr:
             self.apply_chord = self._apply_chord_incr
 

+ 3 - 3
celery/backends/cache.py

@@ -88,7 +88,7 @@ class CacheBackend(KeyValueStoreBackend):
 
     def __init__(self, app, expires=None, backend=None,
                  options={}, url=None, **kwargs):
-        super(CacheBackend, self).__init__(app, **kwargs)
+        super().__init__(app, **kwargs)
         self.url = url
 
         self.options = dict(self.app.conf.cache_backend_options,
@@ -120,7 +120,7 @@ class CacheBackend(KeyValueStoreBackend):
 
     def _apply_chord_incr(self, header, partial_args, group_id, body, **opts):
         self.client.set(self.get_key_for_chord(group_id), 0, time=86400)
-        return super(CacheBackend, self)._apply_chord_incr(
+        return super()._apply_chord_incr(
             header, partial_args, group_id, body, **opts)
 
     def incr(self, key):
@@ -137,7 +137,7 @@ class CacheBackend(KeyValueStoreBackend):
             dict(backend=backend,
                  expires=self.expires,
                  options=self.options))
-        return super(CacheBackend, self).__reduce__(args, kwargs)
+        return super().__reduce__(args, kwargs)
 
     def as_uri(self, *args, **kwargs):
         """Return the backend as an URI.

+ 2 - 2
celery/backends/cassandra.py

@@ -77,7 +77,7 @@ class CassandraBackend(BaseBackend):
 
     def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None,
                  port=9042, **kwargs):
-        super(CassandraBackend, self).__init__(**kwargs)
+        super().__init__(**kwargs)
 
         if not cassandra:
             raise ImproperlyConfigured(E_NO_CASSANDRA)
@@ -224,4 +224,4 @@ class CassandraBackend(BaseBackend):
             dict(servers=self.servers,
                  keyspace=self.keyspace,
                  table=self.table))
-        return super(CassandraBackend, self).__reduce__(args, kwargs)
+        return super().__reduce__(args, kwargs)

+ 1 - 1
celery/backends/consul.py

@@ -35,7 +35,7 @@ class ConsulBackend(KeyValueStoreBackend):
     path = None
 
     def __init__(self, *args, **kwargs):
-        super(ConsulBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
         if self.consul is None:
             raise ImproperlyConfigured(CONSUL_MISSING)

+ 1 - 1
celery/backends/couchbase.py

@@ -39,7 +39,7 @@ class CouchbaseBackend(KeyValueStoreBackend):
     key_t = str_t
 
     def __init__(self, url=None, *args, **kwargs):
-        super(CouchbaseBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.url = url
 
         if Couchbase is None:

+ 1 - 1
celery/backends/couchdb.py

@@ -34,7 +34,7 @@ class CouchBackend(KeyValueStoreBackend):
     password = None
 
     def __init__(self, url=None, *args, **kwargs):
-        super(CouchBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.url = url
 
         if pycouchdb is None:

+ 2 - 2
celery/backends/database/__init__.py

@@ -68,7 +68,7 @@ class DatabaseBackend(BaseBackend):
     def __init__(self, dburi=None, engine_options=None, url=None, **kwargs):
         # The `url` argument was added later and is used by
         # the app to set backend by url (celery.backends.get_backend_by_url)
-        super(DatabaseBackend, self).__init__(
+        super().__init__(
             expires_type=maybe_timedelta, url=url, **kwargs)
         conf = self.app.conf
         self.url = url or dburi or conf.sqlalchemy_dburi
@@ -181,4 +181,4 @@ class DatabaseBackend(BaseBackend):
             dict(dburi=self.url,
                  expires=self.expires,
                  engine_options=self.engine_options))
-        return super(DatabaseBackend, self).__reduce__(args, kwargs)
+        return super().__reduce__(args, kwargs)

+ 1 - 1
celery/backends/elasticsearch.py

@@ -36,7 +36,7 @@ class ElasticsearchBackend(KeyValueStoreBackend):
     port = 9200
 
     def __init__(self, url=None, *args, **kwargs):
-        super(ElasticsearchBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.url = url
 
         if elasticsearch is None:

+ 1 - 1
celery/backends/filesystem.py

@@ -37,7 +37,7 @@ class FilesystemBackend(KeyValueStoreBackend):
 
     def __init__(self, url=None, open=open, unlink=os.unlink, sep=os.sep,
                  encoding=default_encoding, *args, **kwargs):
-        super(FilesystemBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.url = url
         path = self._find_path(url)
 

+ 4 - 4
celery/backends/mongodb.py

@@ -56,7 +56,7 @@ class MongoBackend(BaseBackend):
     def __init__(self, app=None, **kwargs):
         self.options = {}
 
-        super(MongoBackend, self).__init__(app, **kwargs)
+        super().__init__(app, **kwargs)
 
         if not pymongo:
             raise ImproperlyConfigured(
@@ -150,12 +150,12 @@ class MongoBackend(BaseBackend):
         if self.serializer == 'bson':
             # mongodb handles serialization
             return data
-        return super(MongoBackend, self).encode(data)
+        return super().encode(data)
 
     def decode(self, data):
         if self.serializer == 'bson':
             return data
-        return super(MongoBackend, self).decode(data)
+        return super().decode(data)
 
     def _store_result(self, task_id, result, state,
                       traceback=None, request=None, **kwargs):
@@ -240,7 +240,7 @@ class MongoBackend(BaseBackend):
         )
 
     def __reduce__(self, args=(), kwargs={}):
-        return super(MongoBackend, self).__reduce__(
+        return super().__reduce__(
             args, dict(kwargs, expires=self.expires, url=self.url))
 
     def _get_database(self):

+ 3 - 3
celery/backends/redis.py

@@ -45,7 +45,7 @@ class ResultConsumer(async.BaseResultConsumer):
     _pubsub = None
 
     def __init__(self, *args, **kwargs):
-        super(ResultConsumer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._get_key_for_task = self.backend.get_key_for_task
         self._decode_result = self.backend.decode_result
         self.subscribed_to = set()
@@ -105,7 +105,7 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
     def __init__(self, host=None, port=None, db=None, password=None,
                  max_connections=None, url=None,
                  connection_pool=None, **kwargs):
-        super(RedisBackend, self).__init__(expires_type=int, **kwargs)
+        super().__init__(expires_type=int, **kwargs)
         _get = self.app.conf.get
         if self.redis is None:
             raise ImproperlyConfigured(REDIS_MISSING)
@@ -305,7 +305,7 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
         return self._create_client(**self.connparams)
 
     def __reduce__(self, args=(), kwargs={}):
-        return super(RedisBackend, self).__reduce__(
+        return super().__reduce__(
             (self.url,), {'expires': self.expires},
         )
 

+ 1 - 1
celery/backends/riak.py

@@ -59,7 +59,7 @@ class RiakBackend(KeyValueStoreBackend):
 
     def __init__(self, host=None, port=None, bucket_name=None, protocol=None,
                  url=None, *args, **kwargs):
-        super(RiakBackend, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.url = url
 
         if not riak:

+ 3 - 3
celery/backends/rpc.py

@@ -38,7 +38,7 @@ class ResultConsumer(BaseResultConsumer):
     _consumer = None
 
     def __init__(self, *args, **kwargs):
-        super(ResultConsumer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._create_binding = self.backend._create_binding
 
     def start(self, initial_task_id, no_ack=True):
@@ -101,7 +101,7 @@ class BaseRPCBackend(base.Backend, AsyncBackendMixin):
 
     def __init__(self, app, connection=None, exchange=None, exchange_type=None,
                  persistent=None, serializer=None, auto_delete=True, **kwargs):
-        super(BaseRPCBackend, self).__init__(app, **kwargs)
+        super().__init__(app, **kwargs)
         conf = self.app.conf
         self._connection = connection
         self._out_of_band = {}
@@ -241,7 +241,7 @@ class BaseRPCBackend(base.Backend, AsyncBackendMixin):
             'delete_group is not supported by this backend.')
 
     def __reduce__(self, args=(), kwargs={}):
-        return super(BaseRPCBackend, self).__reduce__(args, dict(
+        return super().__reduce__(args, dict(
             kwargs,
             connection=self._connection,
             exchange=self.exchange.name,

+ 2 - 2
celery/beat.py

@@ -558,7 +558,7 @@ class _Threaded(Thread):
     """Embedded task scheduler using threading."""
 
     def __init__(self, app, **kwargs):
-        super(_Threaded, self).__init__()
+        super().__init__()
         self.app = app
         self.service = Service(app, **kwargs)
         self.daemon = True
@@ -580,7 +580,7 @@ else:
     class _Process(Process):    # noqa
 
         def __init__(self, app, **kwargs):
-            super(_Process, self).__init__()
+            super().__init__()
             self.app = app
             self.service = Service(app, **kwargs)
             self.name = 'Beat'

+ 1 - 1
celery/bin/base.py

@@ -55,7 +55,7 @@ class Error(Exception):
     def __init__(self, reason, status=None):
         self.reason = reason
         self.status = status if status is not None else self.status
-        super(Error, self).__init__(reason, status)
+        super().__init__(reason, status)
 
     def __str__(self):
         return self.reason

+ 2 - 2
celery/bin/celery.py

@@ -557,7 +557,7 @@ class _RemoteControl(Command):
     def __init__(self, *args, **kwargs):
         self.show_body = kwargs.pop('show_body', True)
         self.show_reply = kwargs.pop('show_reply', True)
-        super(_RemoteControl, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     @classmethod
     def get_command_info(self, command,
@@ -1157,7 +1157,7 @@ class CeleryCommand(Command):
             self.respects_app_option = False
         try:
             sys.exit(determine_exit_status(
-                super(CeleryCommand, self).execute_from_commandline(argv)))
+                super().execute_from_commandline(argv)))
         except KeyboardInterrupt:
             sys.exit(EX_FAILURE)
 

+ 1 - 1
celery/bootsteps.py

@@ -279,7 +279,7 @@ class StepType(type):
             __qualname__=qname,
             name=attrs.get('name') or qname,
         )
-        return super(StepType, cls).__new__(cls, name, bases, attrs)
+        return super().__new__(cls, name, bases, attrs)
 
     def __str__(self):
         return self.name
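
The StepType hunk above shows that the zero-argument form also works inside a metaclass __new__: the implicit __class__ cell refers to the class whose body defines the method (here StepType), not to the class being created, so super() still resolves to type. A hypothetical illustration, not taken from the commit:

    import logging

    class LoggingMeta(type):
        def __new__(cls, name, bases, attrs):
            # __class__ is LoggingMeta here, so super() resolves to type.
            logging.debug('creating class %r', name)
            return super().__new__(cls, name, bases, attrs)

    class Service(metaclass=LoggingMeta):
        pass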

+ 5 - 5
celery/concurrency/asynpool.py

@@ -197,7 +197,7 @@ class ResultHandler(_pool.ResultHandler):
     def __init__(self, *args, **kwargs):
         self.fileno_to_outq = kwargs.pop('fileno_to_outq')
         self.on_process_alive = kwargs.pop('on_process_alive')
-        super(ResultHandler, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # add our custom message handler
         self.state_handlers[WORKER_UP] = self.on_process_alive
 
@@ -351,7 +351,7 @@ class AsynPool(_pool.Pool):
     Worker = Worker
 
     def WorkerProcess(self, worker):
-        worker = super(AsynPool, self).WorkerProcess(worker)
+        worker = super().WorkerProcess(worker)
         worker.dead = False
         return worker
 
@@ -398,7 +398,7 @@ class AsynPool(_pool.Pool):
 
         self.write_stats = Counter()
 
-        super(AsynPool, self).__init__(processes, *args, **kwargs)
+        super().__init__(processes, *args, **kwargs)
 
         for proc in self._pool:
             # create initial mappings, these will be updated
@@ -415,7 +415,7 @@ class AsynPool(_pool.Pool):
 
     def _create_worker_process(self, i):
         gc.collect()  # Issue #2927
-        return super(AsynPool, self)._create_worker_process(i)
+        return super()._create_worker_process(i)
 
     def _event_process_exit(self, hub, proc):
         # This method is called whenever the process sentinel is readable.
@@ -1101,7 +1101,7 @@ class AsynPool(_pool.Pool):
                         raise
 
     def create_result_handler(self):
-        return super(AsynPool, self).create_result_handler(
+        return super().create_result_handler(
             fileno_to_outq=self._fileno_to_outq,
             on_process_alive=self.on_process_alive,
         )

+ 3 - 3
celery/concurrency/eventlet.py

@@ -40,7 +40,7 @@ class Timer(_timer.Timer):
     def __init__(self, *args, **kwargs):
         from eventlet.greenthread import spawn_after
         from greenlet import GreenletExit
-        super(Timer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
         self.GreenletExit = GreenletExit
         self._spawn_after = spawn_after
@@ -101,7 +101,7 @@ class TaskPool(base.BasePool):
         self.getpid = lambda: id(greenthread.getcurrent())
         self.spawn_n = greenthread.spawn_n
 
-        super(TaskPool, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def on_start(self):
         self._pool = self.Pool(self.limit)
@@ -135,7 +135,7 @@ class TaskPool(base.BasePool):
         self.limit = limit
 
     def _get_info(self):
-        info = super(TaskPool, self)._get_info()
+        info = super()._get_info()
         info.update({
             'max-concurrency': self.limit,
             'free-threads': self._pool.free(),

+ 2 - 2
celery/concurrency/gevent.py

@@ -37,7 +37,7 @@ class Timer(_timer.Timer):
 
         self._Greenlet = _Greenlet
         self._GreenletExit = GreenletExit
-        super(Timer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._queue = set()
 
     def _enter(self, eta, priority, entry):
@@ -83,7 +83,7 @@ class TaskPool(BasePool):
         self.Pool = Pool
         self.spawn_n = spawn_raw
         self.timeout = kwargs.get('timeout')
-        super(TaskPool, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def on_start(self):
         self._pool = self.Pool(self.limit)

+ 1 - 1
celery/concurrency/solo.py

@@ -12,7 +12,7 @@ class TaskPool(BasePool):
     body_can_be_buffer = True
 
     def __init__(self, *args, **kwargs):
-        super(TaskPool, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.on_apply = apply_target
         self.limit = 1
 

+ 1 - 1
celery/events/state.py

@@ -88,7 +88,7 @@ class CallableDefaultdict(defaultdict):
 
     def __init__(self, fun, *args, **kwargs):
         self.fun = fun
-        super(CallableDefaultdict, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def __call__(self, *args, **kwargs):
         return self.fun(*args, **kwargs)

+ 1 - 1
celery/exceptions.py

@@ -94,7 +94,7 @@ class Reject(TaskPredicate):
     def __init__(self, reason=None, requeue=False):
         self.reason = reason
         self.requeue = requeue
-        super(Reject, self).__init__(reason, requeue)
+        super().__init__(reason, requeue)
 
     def __repr__(self):
         return 'reject requeue=%s: %s' % (self.requeue, self.reason)

+ 2 - 2
celery/tests/app/test_builtins.py

@@ -93,7 +93,7 @@ class test_group(BuiltinsCase):
         self.app.producer_or_acquire.attach_mock(ContextMock(), 'return_value')
         self.app.conf.task_always_eager = True
         self.task = builtins.add_group_task(self.app)
-        super(test_group, self).setup()
+        super().setup()
 
     def test_apply_async_eager(self):
         self.task.apply = Mock(name='apply')
@@ -140,7 +140,7 @@ class test_chord(BuiltinsCase):
 
     def setup(self):
         self.task = builtins.add_chord_task(self.app)
-        super(test_chord, self).setup()
+        super().setup()
 
     def test_apply_async(self):
         x = chord([self.add.s(i, i) for i in range(10)], body=self.xsum.s())

+ 1 - 1
celery/tests/backends/test_base.py

@@ -121,7 +121,7 @@ class KVBackend(KeyValueStoreBackend):
 
     def __init__(self, app, *args, **kwargs):
         self.db = {}
-        super(KVBackend, self).__init__(app)
+        super().__init__(app)
 
     def get(self, key):
         return self.db.get(key)

+ 1 - 1
celery/tests/backends/test_cache.py

@@ -151,7 +151,7 @@ class MemcachedClient(DummyClient):
                 'Keys must be {0}, not {1}.  Convert your '
                 'strings using mystring.{2}(charset)!'.format(
                     must_be, not_be, cod))
-        return super(MemcachedClient, self).set(key, value, *args, **kwargs)
+        return super().set(key, value, *args, **kwargs)
 
 
 class MockCacheMixin:

+ 2 - 2
celery/tests/case.py

@@ -92,7 +92,7 @@ class Trap:
 class UnitLogging(symbol_by_name(Celery.log_cls)):
 
     def __init__(self, *args, **kwargs):
-        super(UnitLogging, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.already_setup = True
 
 
@@ -126,7 +126,7 @@ class AppCase(Case):
     _threads_at_startup = [None]
 
     def __init__(self, *args, **kwargs):
-        super(AppCase, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         setUp = self.__class__.__dict__.get('setUp')
         tearDown = self.__class__.__dict__.get('tearDown')
         if setUp and not hasattr(setUp, '__wrapped__'):

+ 1 - 1
celery/tests/events/test_state.py

@@ -114,7 +114,7 @@ class ev_logical_clock_ordering(replay):
     def __init__(self, state, offset=0, uids=None):
         self.offset = offset or 0
         self.uids = self.setuids(uids)
-        super(ev_logical_clock_ordering, self).__init__(state)
+        super().__init__(state)
 
     def setuids(self, uids):
         uids = self.tA, self.tB, self.tC = uids or [uuid(), uuid(), uuid()]

+ 3 - 4
celery/utils/collections.py

@@ -269,8 +269,7 @@ class ConfigurationView(ChainMap, AttributeDictMixin):
 
     def __init__(self, changes, defaults=None, key_t=None, prefix=None):
         defaults = [] if defaults is None else defaults
-        super(ConfigurationView, self).__init__(
-            changes, *defaults, **{'key_t': key_t})
+        super().__init__(changes, *defaults, **{'key_t': key_t})
         self.__dict__.update(
             prefix=prefix.rstrip('_') + '_' if prefix else prefix,
         )
@@ -284,7 +283,7 @@ class ConfigurationView(ChainMap, AttributeDictMixin):
 
     def __getitem__(self, key):
         keys = self._to_keys(key)
-        getitem = super(ConfigurationView, self).__getitem__
+        getitem = super().__getitem__
         for k in keys:
             try:
                 return getitem(k)
@@ -640,7 +639,7 @@ class BufferMap(OrderedDict, Evictable):
     bufmaxsize = None
 
     def __init__(self, maxsize, iterable=None, bufmaxsize=1000):
-        super(BufferMap, self).__init__()
+        super().__init__()
         self.maxsize = maxsize
         self.bufmaxsize = 1000
         if iterable:

+ 2 - 3
celery/utils/dispatch/saferef.py

@@ -95,7 +95,7 @@ class BoundMethodWeakref:  # pragma: no cover
             current.deletion_methods.append(on_delete)
             return current
         else:
-            base = super(BoundMethodWeakref, cls).__new__(cls)
+            base = super().__new__(cls)
             cls._all_instances[key] = base
             base.__init__(target, on_delete, *arguments, **named)
             return base
@@ -230,8 +230,7 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
                 which will be passed a pointer to this object.
         """
         assert getattr(target.__self__, target.__name__) == target
-        super(BoundNonDescriptorMethodWeakref, self).__init__(target,
-                                                              on_delete)
+        super().__init__(target, on_delete)
 
     def __call__(self):
         """Return a strong reference to the bound method

+ 1 - 1
celery/utils/threads.py

@@ -44,7 +44,7 @@ def default_socket_timeout(timeout):
 class bgThread(threading.Thread):
 
     def __init__(self, name=None, **kwargs):
-        super(bgThread, self).__init__()
+        super().__init__()
         self._is_shutdown = threading.Event()
         self._is_stopped = threading.Event()
         self.daemon = True

+ 1 - 1
celery/utils/timer2.py

@@ -34,7 +34,7 @@ class Timer(threading.Thread):
             import traceback
             print('- Timer starting')
             traceback.print_stack()
-            super(Timer, self).start(*args, **kwargs)
+            super().start(*args, **kwargs)
 
     def __init__(self, schedule=None, on_error=None, on_tick=None,
                  on_start=None, max_interval=None, **kwargs):

+ 1 - 1
celery/worker/consumer/gossip.py

@@ -96,7 +96,7 @@ class Gossip(bootsteps.ConsumerStep):
         self.dispatcher.send('worker-elect-ack', id=id_)
 
     def start(self, c):
-        super(Gossip, self).start(c)
+        super().start(c)
         self.dispatcher = c.event_dispatcher
 
     def on_elect_ack(self, event):

+ 1 - 1
docs/internals/protocol.rst

@@ -167,7 +167,7 @@ Changes from version 1
 
             def apply_async(self, args, kwargs, **options):
                 fun, real_args = self.unpack_args(*args)
-                return super(PickleTask, self).apply_async(
+                return super().apply_async(
                     (fun, real_args, kwargs), shadow=qualname(fun), **options
                 )
 

+ 1 - 1
docs/userguide/application.rst

@@ -507,7 +507,7 @@ class: :class:`celery.Task`.
 
         def __call__(self, *args, **kwargs):
             print('TASK STARTING: {0.name}[{0.request.id}]'.format(self))
-            return super(DebugTask, self).__call__(*args, **kwargs)
+            return super().__call__(*args, **kwargs)
 
 
 .. tip::

+ 1 - 1
docs/userguide/canvas.rst

@@ -943,7 +943,7 @@ implemented in other backends (suggestions welcome!).
 
         def after_return(self, *args, **kwargs):
             do_something()
-            super(MyTask, self).after_return(*args, **kwargs)
+            super().after_return(*args, **kwargs)
 
 .. _canvas-map:
 

+ 2 - 2
docs/userguide/tasks.rst

@@ -292,7 +292,7 @@ may contain:
         def gen_task_name(self, name, module):
             if module.endswith('.tasks'):
                 module = module[:-6]
-            return super(MyCelery, self).gen_task_name(name, module)
+            return super().gen_task_name(name, module)
 
     app = MyCelery('main')
 
@@ -1033,7 +1033,7 @@ you have to pass them as regular args:
             self.headers = headers
             self.body = body
 
-            super(HttpError, self).__init__(status_code, headers, body)
+            super().__init__(status_code, headers, body)
 
 .. _task-semipredicates:
 

+ 2 - 2
docs/whatsnew-3.1.rst

@@ -978,11 +978,11 @@ In Other News
         class Celery(celery.Celery):
 
             def __init__(self, *args, **kwargs):
-                super(Celery, self).__init__(*args, **kwargs)
+                super().__init__(*args, **kwargs)
                 self.foo = kwargs.get('foo')
 
             def __reduce_keys__(self):
-                return super(Celery, self).__reduce_keys__().update(
+                return super().__reduce_keys__().update(
                     foo=self.foo,
                 )