3.2: Use dict and set comprehensions (+ literals)

Ask Solem committed 11 years ago · commit f9e49a8f7a
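
For readers skimming the hunks below, here is a minimal sketch of the rewrite this commit applies throughout; the variable names are invented for illustration and are not taken from the Celery source. `dict((k, v) for ...)` and `set([...])` become dict/set comprehensions and set literals, which are available from Python 2.7/3.x onward and avoid the intermediate constructor call.

# Illustrative sketch only (not part of this diff); names are made up.
queues = {'default': 0, 'hipri': 1}

# Before: pass a generator expression to dict(), or a list to set().
consume_from_old = dict((name, q) for name, q in queues.items())
transports_old = set(['amqp', 'redis'])

# After: dict/set comprehensions and set literals.
consume_from_new = {name: q for name, q in queues.items()}
transports_new = {'amqp', 'redis'}

assert consume_from_old == consume_from_new
assert transports_old == transports_new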

+ 4 - 3
celery/app/amqp.py

@@ -65,7 +65,7 @@ class Queues(dict):
         self.ha_policy = ha_policy
         self.autoexchange = Exchange if autoexchange is None else autoexchange
         if isinstance(queues, (tuple, list)):
-            queues = dict((q.name, q) for q in queues)
+            queues = {q.name: q for q in queues}
         for name, q in items(queues or {}):
             self.add(q) if isinstance(q, Queue) else self.add_compat(name, **q)
 
@@ -156,8 +156,9 @@ class Queues(dict):
                         Can be iterable or string.
         """
         if include:
-            self._consume_from = dict((name, self[name])
-                                      for name in maybe_list(include))
+            self._consume_from = {
+                name: self[name] for name in maybe_list(include)
+            }
     select_subset = select  # XXX compat
 
     def deselect(self, exclude):

+ 1 - 1
celery/app/defaults.py

@@ -233,7 +233,7 @@ def flatten(d, ns=''):
                 stack.append((name + key + '_', value))
             else:
                 yield name + key, value
-DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES))
+DEFAULTS = {key: value.default for key, value in flatten(NAMESPACES)}
 
 
 def find_deprecated_settings(source):

+ 2 - 2
celery/app/registry.py

@@ -57,8 +57,8 @@ class TaskRegistry(dict):
         return self.filter_types('periodic')
 
     def filter_types(self, type):
-        return dict((name, task) for name, task in items(self)
-                    if getattr(task, 'type', 'regular') == type)
+        return {name: task for name, task in items(self)
+                if getattr(task, 'type', 'regular') == type}
 
 
 def _unpickle_task(name):

+ 4 - 3
celery/app/task.py

@@ -724,9 +724,10 @@ class Task(object):
                               'loglevel': options.get('loglevel', 0),
                               'delivery_info': {'is_eager': True}}
             supported_keys = fun_takes_kwargs(task.run, default_kwargs)
-            extend_with = dict((key, val)
-                               for key, val in items(default_kwargs)
-                               if key in supported_keys)
+            extend_with = {
+                key: val for key, val in items(default_kwargs)
+                if key in supported_keys
+            }
             kwargs.update(extend_with)
 
         tb = None

+ 4 - 4
celery/app/utils.py

@@ -117,11 +117,11 @@ class Settings(ConfigurationView):
 
     def table(self, with_defaults=False, censored=True):
         filt = filter_hidden_settings if censored else lambda v: v
-        return filt(dict(
-            (k, v) for k, v in items(
+        return filt({
+            k: v for k, v in items(
                 self if with_defaults else self.without_defaults())
             if k.isupper() and not k.startswith('_')
-        ))
+        })
 
     def humanize(self, with_defaults=False, censored=True):
         """Return a human readable string showing changes to the
@@ -182,7 +182,7 @@ def filter_hidden_settings(conf):
             return Connection(value).as_uri(mask=mask)
         return value
 
-    return dict((k, maybe_censor(k, v)) for k, v in items(conf))
+    return {k: maybe_censor(k, v) for k, v in items(conf)}
 
 
 def bugreport(app):

+ 9 - 7
celery/backends/base.py

@@ -435,14 +435,16 @@ class KeyValueStoreBackend(BaseBackend):
     def _mget_to_results(self, values, keys):
         if hasattr(values, 'items'):
             # client returns dict so mapping preserved.
-            return dict((self._strip_prefix(k), self.decode(v))
-                        for k, v in items(values)
-                        if v is not None)
+            return {
+                self._strip_prefix(k): self.decode(v)
+                for k, v in items(values) if v is not None
+            }
         else:
             # client returns list so need to recreate mapping.
-            return dict((bytes_to_str(keys[i]), self.decode(value))
-                        for i, value in enumerate(values)
-                        if value is not None)
+            return {
+                bytes_to_str(keys[i]): self.decode(value)
+                for i, value in enumerate(values) if value is not None
+            }
 
     def get_many(self, task_ids, timeout=None, interval=0.5, no_ack=True,
                  READY_STATES=states.READY_STATES):
@@ -467,7 +469,7 @@ class KeyValueStoreBackend(BaseBackend):
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
                                                  for k in keys]), keys)
             cache.update(r)
-            ids.difference_update(set(bytes_to_str(v) for v in r))
+            ids.difference_update({bytes_to_str(v) for v in r})
             for key, value in items(r):
                 yield bytes_to_str(key), value
             if timeout and iterations * interval >= timeout:

+ 1 - 1
celery/backends/cache.py

@@ -73,7 +73,7 @@ class DummyClient(object):
 
     def get_multi(self, keys):
         cache = self.cache
-        return dict((k, cache[k]) for k in keys if k in cache)
+        return {k: cache[k] for k in keys if k in cache}
 
     def set(self, key, value, *args, **kwargs):
         self.cache[key] = value

+ 4 - 3
celery/beat.py

@@ -295,9 +295,10 @@ class Scheduler(object):
         return self.Entry(**dict(entry, name=name, app=self.app))
 
     def update_from_dict(self, dict_):
-        self.schedule.update(dict(
-            (name, self._maybe_entry(name, entry))
-            for name, entry in items(dict_)))
+        self.schedule.update({
+            name: self._maybe_entry(name, entry)
+            for name, entry in items(dict_)
+        })
 
     def merge_inplace(self, b):
         schedule = self.schedule

+ 4 - 3
celery/bin/base.py

@@ -373,9 +373,10 @@ class Command(object):
 
     def prepare_args(self, options, args):
         if options:
-            options = dict((k, self.expanduser(v))
-                           for k, v in items(vars(options))
-                           if not k.startswith('_'))
+            options = {
+                k: self.expanduser(v)
+                for k, v in items(vars(options)) if not k.startswith('_')
+            }
         args = [self.expanduser(arg) for arg in args]
         self.check_args(args)
         return options, args

+ 4 - 4
celery/bin/celery.py

@@ -572,10 +572,10 @@ class shell(Command):  # pragma: no cover
                        'signature': celery.signature}
 
         if not without_tasks:
-            self.locals.update(dict(
-                (task.__name__, task) for task in values(self.app.tasks)
-                if not task.name.startswith('celery.')),
-            )
+            self.locals.update({
+                task.__name__: task for task in values(self.app.tasks)
+                if not task.name.startswith('celery.')
+            })
 
         if force_python:
             return self.invoke_fallback_shell()

+ 1 - 1
celery/bin/graph.py

@@ -34,7 +34,7 @@ class graph(Command):
 
     def bootsteps(self, *args, **kwargs):
         worker = self.app.WorkController()
-        include = set(arg.lower() for arg in args or ['worker', 'consumer'])
+        include = {arg.lower() for arg in args or ['worker', 'consumer']}
         if 'worker' in include:
             graph = worker.blueprint.graph
             if 'consumer' in include:

+ 3 - 3
celery/bin/multi.py

@@ -116,9 +116,9 @@ from celery.utils.text import pluralize
 
 __all__ = ['MultiTool']
 
-SIGNAMES = set(sig for sig in dir(signal)
-               if sig.startswith('SIG') and '_' not in sig)
-SIGMAP = dict((getattr(signal, name), name) for name in SIGNAMES)
+SIGNAMES = {sig for sig in dir(signal)
+            if sig.startswith('SIG') and '_' not in sig}
+SIGMAP = {getattr(signal, name): name for name in SIGNAMES}
 
 USAGE = """\
 usage: {prog_name} start <node1 node2 nodeN|range> [worker options]

+ 8 - 7
celery/concurrency/asynpool.py

@@ -347,8 +347,9 @@ class AsynPool(_pool.Pool):
         processes = self.cpu_count() if processes is None else processes
         self.synack = synack
         # create queue-pairs for all our processes in advance.
-        self._queues = dict((self.create_process_queues(), None)
-                            for _ in range(processes))
+        self._queues = {
+            self.create_process_queues(): None for _ in range(processes)
+        }
 
         # inqueue fileno -> process mapping
         self._fileno_to_inq = {}
@@ -912,7 +913,7 @@ class AsynPool(_pool.Pool):
             self._busy_workers.clear()
 
     def _flush_writer(self, proc, writer):
-        fds = set([proc.inq._writer])
+        fds = {proc.inq._writer}
         try:
             while fds:
                 if not proc._is_alive():
@@ -941,9 +942,9 @@ class AsynPool(_pool.Pool):
         """Grow the pool by ``n`` proceses."""
         diff = max(self._processes - len(self._queues), 0)
         if diff:
-            self._queues.update(
-                dict((self.create_process_queues(), None) for _ in range(diff))
-            )
+            self._queues.update({
+                self.create_process_queues(): None for _ in range(diff)
+            })
 
     def on_shrink(self, n):
         """Shrink the pool by ``n`` processes."""
@@ -1085,7 +1086,7 @@ class AsynPool(_pool.Pool):
         """
         resq = proc.outq._reader
         on_state_change = self._result_handler.on_state_change
-        fds = set([resq])
+        fds = {resq}
         while fds and not resq.closed and self._state != TERMINATE:
             readable, _, again = _select(fds, None, fds, timeout=0.01)
             if readable:

+ 1 - 1
celery/contrib/migrate.py

@@ -250,7 +250,7 @@ def start_filter(app, conn, filter, limit=None, timeout=1.0,
     if isinstance(tasks, string_t):
         tasks = set(tasks.split(','))
     if tasks is None:
-        tasks = set([])
+        tasks = set()
 
     def update_state(body, message):
         state.count += 1

+ 3 - 3
celery/datastructures.py

@@ -186,9 +186,9 @@ class DependencyGraph(object):
         graph = DependencyGraph()
         components = self._tarjan72()
 
-        NC = dict((node, component)
-                  for component in components
-                  for node in component)
+        NC = {
+            node: component for component in components for node in component
+        }
         for component in components:
             graph.add_arc(component)
         for node in self:

+ 2 - 2
celery/events/__init__.py

@@ -112,7 +112,7 @@ class EventDispatcher(object):
     You need to :meth:`close` this after use.
 
     """
-    DISABLED_TRANSPORTS = set(['sql'])
+    DISABLED_TRANSPORTS = {'sql'}
 
     app = None
 
@@ -300,7 +300,7 @@ class EventReceiver(ConsumerMixin):
         self.adjust_clock = self.clock.adjust
         self.forward_clock = self.clock.forward
         if accept is None:
-            accept = set([self.app.conf.CELERY_EVENT_SERIALIZER, 'json'])
+            accept = {self.app.conf.CELERY_EVENT_SERIALIZER, 'json'}
         self.accept = accept
 
     def _get_queue_arguments(self):

+ 13 - 12
celery/events/state.py

@@ -222,7 +222,7 @@ class Worker(object):
     def _defaults(cls):
         """Deprecated, to be removed in 3.3"""
         source = cls()
-        return dict((k, getattr(source, k)) for k in cls._fields)
+        return {k: getattr(source, k) for k in cls._fields}
 
 
 @with_unique_field('uuid')
@@ -295,9 +295,9 @@ class Task(object):
             # this state logically happens-before the current state, so merge.
             keep = self.merge_rules.get(state)
             if keep is not None:
-                fields = dict(
-                    (k, v) for k, v in items(fields) if k in keep
-                )
+                fields = {
+                    k: v for k, v in items(fields) if k in keep
+                }
             for key, value in items(fields):
                 setattr(self, key, value)
         else:
@@ -323,9 +323,9 @@ class Task(object):
 
     def as_dict(self):
         get = object.__getattribute__
-        return dict(
-            (k, get(self, k)) for k in self._fields
-        )
+        return {
+            k: get(self, k) for k in self._fields
+        }
 
     def __reduce__(self):
         return _depickle_task, (self.__class__, self.as_dict())
@@ -379,7 +379,7 @@ class Task(object):
     def merge(self, state, timestamp, fields):
         keep = self.merge_rules.get(state)
         if keep is not None:
-            fields = dict((k, v) for k, v in items(fields) if k in keep)
+            fields = {k: v for k, v in items(fields) if k in keep}
         for key, value in items(fields):
             setattr(self, key, value)
 
@@ -387,7 +387,7 @@ class Task(object):
     def _defaults(cls):
         """Deprecated, to be removed in 3.3."""
         source = cls()
-        return dict((k, getattr(source, k)) for k in source._fields)
+        return {k: getattr(source, k) for k in source._fields}
 
 
 class State(object):
@@ -436,9 +436,10 @@ class State(object):
 
     def _clear_tasks(self, ready=True):
         if ready:
-            in_progress = dict(
-                (uuid, task) for uuid, task in self.itertasks()
-                if task.state not in states.READY_STATES)
+            in_progress = {
+                uuid: task for uuid, task in self.itertasks()
+                if task.state not in states.READY_STATES
+            }
             self.tasks.clear()
             self.tasks.update(in_progress)
         else:

+ 12 - 9
celery/five.py

@@ -127,7 +127,7 @@ else:
     exec_("""def reraise(tp, value, tb=None): raise tp, value, tb""")
 
 
-def with_metaclass(Type, skip_attrs=set(['__dict__', '__weakref__'])):
+def with_metaclass(Type, skip_attrs={'__dict__', '__weakref__'}):
     """Class decorator to set metaclass.
 
     Works with both Python 2 and Python 3 and it does not add
@@ -137,8 +137,8 @@ def with_metaclass(Type, skip_attrs=set(['__dict__', '__weakref__'])):
     """
 
     def _clone_with_metaclass(Class):
-        attrs = dict((key, value) for key, value in items(vars(Class))
-                     if key not in skip_attrs)
+        attrs = {key: value for key, value in items(vars(Class))
+                 if key not in skip_attrs}
         return Type(Class.__name__, Class.__bases__, attrs)
 
     return _clone_with_metaclass
@@ -191,7 +191,7 @@ MODULE_DEPRECATED = """
 The module %s is deprecated and will be removed in a future version.
 """
 
-DEFAULT_ATTRS = set(['__file__', '__path__', '__doc__', '__all__'])
+DEFAULT_ATTRS = {'__file__', '__path__', '__doc__', '__all__'}
 
 # im_func is no longer available in Py3.
 # instead the unbound method itself can be used.
@@ -327,8 +327,10 @@ def create_module(name, attrs, cls_attrs=None, pkg=None,
     pkg, _, modname = name.rpartition('.')
     cls_attrs['__module__'] = pkg
 
-    attrs = dict((attr_name, prepare_attr(attr) if prepare_attr else attr)
-                 for attr_name, attr in items(attrs))
+    attrs = {
+        attr_name: (prepare_attr(attr) if prepare_attr else attr)
+        for attr_name, attr in items(attrs)
+    }
     module = sys.modules[fqdn] = type(modname, (base, ), cls_attrs)(fqdn)
     module.__dict__.update(attrs)
     return module
@@ -350,8 +352,9 @@ def recreate_module(name, compat_modules=(), by_module={}, direct={},
         ))),
     )
     new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
-    new_module.__dict__.update(dict((mod, get_compat_module(new_module, mod))
-                               for mod in compat_modules))
+    new_module.__dict__.update({
+        mod: get_compat_module(new_module, mod) for mod in compat_modules
+    })
     return old_module, new_module
 
 
@@ -375,7 +378,7 @@ def get_compat_module(pkg, name):
 def get_origins(defs):
     origins = {}
     for module, attrs in items(defs):
-        origins.update(dict((attr, module) for attr in attrs))
+        origins.update({attr: module for attr in attrs})
     return origins
 
 

+ 3 - 3
celery/result.py

@@ -676,9 +676,9 @@ class ResultSet(ResultBase):
 
         """
         assert_will_not_block()
-        order_index = None if callback else dict(
-            (result.id, i) for i, result in enumerate(self.results)
-        )
+        order_index = None if callback else {
+            result.id: i for i, result in enumerate(self.results)
+        }
         acc = None if callback else [None for _ in range(len(self))]
         for task_id, meta in self.iter_native(timeout, interval, no_ack):
             value = meta['result']

+ 2 - 2
celery/schedules.py

@@ -383,7 +383,7 @@ class crontab(schedule):
 
             int         (like 7)
             str         (like '3-5,*/15', '*', or 'monday')
-            set         (like set([0,15,30,45]))
+            set         (like {0,15,30,45})
             list        (like [8-17])
 
         And convert it to an (expanded) set representing all time unit
@@ -403,7 +403,7 @@ class crontab(schedule):
 
         """
         if isinstance(cronspec, numbers.Integral):
-            result = set([cronspec])
+            result = {cronspec}
         elif isinstance(cronspec, string_t):
             result = crontab_parser(max_, min_).parse(cronspec)
         elif isinstance(cronspec, set):

+ 5 - 5
celery/task/http.py

@@ -41,13 +41,13 @@ else:
 
     from urllib2 import Request, urlopen  # noqa
 
-    def utf8dict(tup):  # noqa
+    def utf8dict(tup, enc='utf-8'):  # noqa
         """With a dict's items() tuple return a new dict with any utf-8
         keys/values encoded."""
-        return dict(
-            (k.encode('utf-8'),
-             v.encode('utf-8') if isinstance(v, unicode) else v)  # noqa
-            for k, v in tup)
+        return {
+            k.encode(enc): (v.encode(enc) if isinstance(v, unicode) else v)
+            for k, v in tup
+        }
 
 
 class InvalidResponseError(Exception):

+ 2 - 2
celery/tests/app/test_amqp.py

@@ -14,11 +14,11 @@ class test_TaskConsumer(AppCase):
             self.app.conf.CELERY_ACCEPT_CONTENT = ['application/json']
             self.assertEqual(
                 self.app.amqp.TaskConsumer(conn).accept,
-                set(['application/json'])
+                {'application/json'},
             )
             self.assertEqual(
                 self.app.amqp.TaskConsumer(conn, accept=['json']).accept,
-                set(['application/json']),
+                {'application/json'},
             )
 
 

+ 49 - 51
celery/tests/app/test_schedules.py

@@ -54,65 +54,63 @@ class test_crontab_parser(AppCase):
 
     def test_parse_range_wraps(self):
         self.assertEqual(crontab_parser(12).parse('11-1'),
-                         set([11, 0, 1]))
+                         {11, 0, 1})
         self.assertEqual(crontab_parser(60, 1).parse('2-1'),
                          set(range(1, 60 + 1)))
 
     def test_parse_groups(self):
         self.assertEqual(crontab_parser().parse('1,2,3,4'),
-                         set([1, 2, 3, 4]))
+                         {1, 2, 3, 4})
         self.assertEqual(crontab_parser().parse('0,15,30,45'),
-                         set([0, 15, 30, 45]))
+                         {0, 15, 30, 45})
         self.assertEqual(crontab_parser(min_=1).parse('1,2,3,4'),
-                         set([1, 2, 3, 4]))
+                         {1, 2, 3, 4})
 
     def test_parse_steps(self):
         self.assertEqual(crontab_parser(8).parse('*/2'),
-                         set([0, 2, 4, 6]))
+                         {0, 2, 4, 6})
         self.assertEqual(crontab_parser().parse('*/2'),
-                         set(i * 2 for i in range(30)))
+                         {i * 2 for i in range(30)})
         self.assertEqual(crontab_parser().parse('*/3'),
-                         set(i * 3 for i in range(20)))
+                         {i * 3 for i in range(20)})
         self.assertEqual(crontab_parser(8, 1).parse('*/2'),
-                         set([1, 3, 5, 7]))
+                         {1, 3, 5, 7})
         self.assertEqual(crontab_parser(min_=1).parse('*/2'),
-                         set(i * 2 + 1 for i in range(30)))
+                         {i * 2 + 1 for i in range(30)})
         self.assertEqual(crontab_parser(min_=1).parse('*/3'),
-                         set(i * 3 + 1 for i in range(20)))
+                         {i * 3 + 1 for i in range(20)})
 
     def test_parse_composite(self):
-        self.assertEqual(crontab_parser(8).parse('*/2'), set([0, 2, 4, 6]))
-        self.assertEqual(crontab_parser().parse('2-9/5'), set([2, 7]))
-        self.assertEqual(crontab_parser().parse('2-10/5'), set([2, 7]))
+        self.assertEqual(crontab_parser(8).parse('*/2'), {0, 2, 4, 6})
+        self.assertEqual(crontab_parser().parse('2-9/5'), {2, 7})
+        self.assertEqual(crontab_parser().parse('2-10/5'), {2, 7})
         self.assertEqual(
             crontab_parser(min_=1).parse('55-5/3'),
-            set([55, 58, 1, 4]),
+            {55, 58, 1, 4},
         )
-        self.assertEqual(crontab_parser().parse('2-11/5,3'), set([2, 3, 7]))
+        self.assertEqual(crontab_parser().parse('2-11/5,3'), {2, 3, 7})
         self.assertEqual(
             crontab_parser().parse('2-4/3,*/5,0-21/4'),
-            set([0, 2, 4, 5, 8, 10, 12, 15, 16,
-                 20, 25, 30, 35, 40, 45, 50, 55]),
+            {0, 2, 4, 5, 8, 10, 12, 15, 16, 20, 25, 30, 35, 40, 45, 50, 55},
         )
         self.assertEqual(
             crontab_parser().parse('1-9/2'),
-            set([1, 3, 5, 7, 9]),
+            {1, 3, 5, 7, 9},
         )
-        self.assertEqual(crontab_parser(8, 1).parse('*/2'), set([1, 3, 5, 7]))
-        self.assertEqual(crontab_parser(min_=1).parse('2-9/5'), set([2, 7]))
-        self.assertEqual(crontab_parser(min_=1).parse('2-10/5'), set([2, 7]))
+        self.assertEqual(crontab_parser(8, 1).parse('*/2'), {1, 3, 5, 7})
+        self.assertEqual(crontab_parser(min_=1).parse('2-9/5'), {2, 7})
+        self.assertEqual(crontab_parser(min_=1).parse('2-10/5'), {2, 7})
         self.assertEqual(
             crontab_parser(min_=1).parse('2-11/5,3'),
-            set([2, 3, 7]),
+            {2, 3, 7},
         )
         self.assertEqual(
             crontab_parser(min_=1).parse('2-4/3,*/5,1-21/4'),
-            set([1, 2, 5, 6, 9, 11, 13, 16, 17,
-                 21, 26, 31, 36, 41, 46, 51, 56]),
+            {1, 2, 5, 6, 9, 11, 13, 16, 17, 21, 26, 31, 36, 41, 46, 51, 56},
         )
         self.assertEqual(
             crontab_parser(min_=1).parse('1-9/2'),
-            set([1, 3, 5, 7, 9]),
+            {1, 3, 5, 7, 9},
         )
 
     def test_parse_errors_on_empty_string(self):
@@ -148,11 +146,11 @@ class test_crontab_parser(AppCase):
     def test_expand_cronspec_eats_iterables(self):
         self.assertEqual(
             crontab._expand_cronspec(iter([1, 2, 3]), 100),
-            set([1, 2, 3]),
+            {1, 2, 3},
         )
         self.assertEqual(
             crontab._expand_cronspec(iter([1, 2, 3]), 100, 1),
-            set([1, 2, 3]),
+            {1, 2, 3},
         )
 
     def test_expand_cronspec_invalid_type(self):
@@ -408,7 +406,7 @@ class test_crontab_is_due(AppCase):
 
     def test_simple_crontab_spec(self):
         c = self.crontab(minute=30)
-        self.assertEqual(c.minute, set([30]))
+        self.assertEqual(c.minute, {30})
         self.assertEqual(c.hour, set(range(24)))
         self.assertEqual(c.day_of_week, set(range(7)))
         self.assertEqual(c.day_of_month, set(range(1, 32)))
@@ -416,13 +414,13 @@ class test_crontab_is_due(AppCase):
 
     def test_crontab_spec_minute_formats(self):
         c = self.crontab(minute=30)
-        self.assertEqual(c.minute, set([30]))
+        self.assertEqual(c.minute, {30})
         c = self.crontab(minute='30')
-        self.assertEqual(c.minute, set([30]))
+        self.assertEqual(c.minute, {30})
         c = self.crontab(minute=(30, 40, 50))
-        self.assertEqual(c.minute, set([30, 40, 50]))
-        c = self.crontab(minute=set([30, 40, 50]))
-        self.assertEqual(c.minute, set([30, 40, 50]))
+        self.assertEqual(c.minute, {30, 40, 50})
+        c = self.crontab(minute={30, 40, 50})
+        self.assertEqual(c.minute, {30, 40, 50})
 
     def test_crontab_spec_invalid_minute(self):
         with self.assertRaises(ValueError):
@@ -432,11 +430,11 @@ class test_crontab_is_due(AppCase):
 
     def test_crontab_spec_hour_formats(self):
         c = self.crontab(hour=6)
-        self.assertEqual(c.hour, set([6]))
+        self.assertEqual(c.hour, {6})
         c = self.crontab(hour='5')
-        self.assertEqual(c.hour, set([5]))
+        self.assertEqual(c.hour, {5})
         c = self.crontab(hour=(4, 8, 12))
-        self.assertEqual(c.hour, set([4, 8, 12]))
+        self.assertEqual(c.hour, {4, 8, 12})
 
     def test_crontab_spec_invalid_hour(self):
         with self.assertRaises(ValueError):
@@ -446,17 +444,17 @@ class test_crontab_is_due(AppCase):
 
     def test_crontab_spec_dow_formats(self):
         c = self.crontab(day_of_week=5)
-        self.assertEqual(c.day_of_week, set([5]))
+        self.assertEqual(c.day_of_week, {5})
         c = self.crontab(day_of_week='5')
-        self.assertEqual(c.day_of_week, set([5]))
+        self.assertEqual(c.day_of_week, {5})
         c = self.crontab(day_of_week='fri')
-        self.assertEqual(c.day_of_week, set([5]))
+        self.assertEqual(c.day_of_week, {5})
         c = self.crontab(day_of_week='tuesday,sunday,fri')
-        self.assertEqual(c.day_of_week, set([0, 2, 5]))
+        self.assertEqual(c.day_of_week, {0, 2, 5})
         c = self.crontab(day_of_week='mon-fri')
-        self.assertEqual(c.day_of_week, set([1, 2, 3, 4, 5]))
+        self.assertEqual(c.day_of_week, {1, 2, 3, 4, 5})
         c = self.crontab(day_of_week='*/2')
-        self.assertEqual(c.day_of_week, set([0, 2, 4, 6]))
+        self.assertEqual(c.day_of_week, {0, 2, 4, 6})
 
     def test_crontab_spec_invalid_dow(self):
         with self.assertRaises(ValueError):
@@ -470,13 +468,13 @@ class test_crontab_is_due(AppCase):
 
     def test_crontab_spec_dom_formats(self):
         c = self.crontab(day_of_month=5)
-        self.assertEqual(c.day_of_month, set([5]))
+        self.assertEqual(c.day_of_month, {5})
         c = self.crontab(day_of_month='5')
-        self.assertEqual(c.day_of_month, set([5]))
+        self.assertEqual(c.day_of_month, {5})
         c = self.crontab(day_of_month='2,4,6')
-        self.assertEqual(c.day_of_month, set([2, 4, 6]))
+        self.assertEqual(c.day_of_month, {2, 4, 6})
         c = self.crontab(day_of_month='*/5')
-        self.assertEqual(c.day_of_month, set([1, 6, 11, 16, 21, 26, 31]))
+        self.assertEqual(c.day_of_month, {1, 6, 11, 16, 21, 26, 31})
 
     def test_crontab_spec_invalid_dom(self):
         with self.assertRaises(ValueError):
@@ -490,15 +488,15 @@ class test_crontab_is_due(AppCase):
 
     def test_crontab_spec_moy_formats(self):
         c = self.crontab(month_of_year=1)
-        self.assertEqual(c.month_of_year, set([1]))
+        self.assertEqual(c.month_of_year, {1})
         c = self.crontab(month_of_year='1')
-        self.assertEqual(c.month_of_year, set([1]))
+        self.assertEqual(c.month_of_year, {1})
         c = self.crontab(month_of_year='2,4,6')
-        self.assertEqual(c.month_of_year, set([2, 4, 6]))
+        self.assertEqual(c.month_of_year, {2, 4, 6})
         c = self.crontab(month_of_year='*/2')
-        self.assertEqual(c.month_of_year, set([1, 3, 5, 7, 9, 11]))
+        self.assertEqual(c.month_of_year, {1, 3, 5, 7, 9, 11})
         c = self.crontab(month_of_year='2-12/2')
-        self.assertEqual(c.month_of_year, set([2, 4, 6, 8, 10, 12]))
+        self.assertEqual(c.month_of_year, {2, 4, 6, 8, 10, 12})
 
     def test_crontab_spec_invalid_moy(self):
         with self.assertRaises(ValueError):

+ 9 - 9
celery/tests/concurrency/test_prefork.py

@@ -153,13 +153,13 @@ class test_AsynPool(PoolCase):
         with patch('select.select') as select:
             select.return_value = ([3], [], [])
             self.assertEqual(
-                asynpool._select(set([3])),
+                asynpool._select({3}),
                 ([3], [], 0),
             )
 
             select.return_value = ([], [], [3])
             self.assertEqual(
-                asynpool._select(set([3]), None, set([3])),
+                asynpool._select({3}, None, {3}),
                 ([3], [], 0),
             )
 
@@ -167,13 +167,13 @@ class test_AsynPool(PoolCase):
             eintr.errno = errno.EINTR
             select.side_effect = eintr
 
-            readers = set([3])
+            readers = {3}
             self.assertEqual(asynpool._select(readers), ([], [], 1))
             self.assertIn(3, readers)
 
         with patch('select.select') as select:
             select.side_effect = ebadf
-            readers = set([3])
+            readers = {3}
             self.assertEqual(asynpool._select(readers), ([], [], 1))
             select.assert_has_calls([call([3], [], [], 0)])
             self.assertNotIn(3, readers)
@@ -181,7 +181,7 @@ class test_AsynPool(PoolCase):
         with patch('select.select') as select:
             select.side_effect = MemoryError()
             with self.assertRaises(MemoryError):
-                asynpool._select(set([1]))
+                asynpool._select({1})
 
         with patch('select.select') as select:
 
@@ -190,7 +190,7 @@ class test_AsynPool(PoolCase):
                 raise ebadf
             select.side_effect = se
             with self.assertRaises(MemoryError):
-                asynpool._select(set([3]))
+                asynpool._select({3})
 
         with patch('select.select') as select:
 
@@ -200,14 +200,14 @@ class test_AsynPool(PoolCase):
                 raise ebadf
             select.side_effect = se2
             with self.assertRaises(socket.error):
-                asynpool._select(set([3]))
+                asynpool._select({3})
 
         with patch('select.select') as select:
 
             select.side_effect = socket.error()
             select.side_effect.errno = 34134
             with self.assertRaises(socket.error):
-                asynpool._select(set([3]))
+                asynpool._select({3})
 
     def test_promise(self):
         fun = Mock()
@@ -309,7 +309,7 @@ class test_TaskPool(PoolCase):
         raise SkipTest('functional test')
 
         def get_pids(pool):
-            return set([p.pid for p in pool._pool._pool])
+            return {p.pid for p in pool._pool._pool}
 
         tp = self.TaskPool(5)
         time.sleep(0.5)

+ 1 - 1
celery/tests/utils/test_datastructures.py

@@ -220,7 +220,7 @@ class test_LimitedSet(Case):
             s.purge()
             hp.assert_called_with(s._heap)
         with patch('celery.datastructures.heappop') as hp:
-            s._data = dict((i * 2, i * 2) for i in range(10))
+            s._data = {i * 2: i * 2 for i in range(10)}
             s.purge()
             self.assertEqual(hp.call_count, 10)
 

+ 2 - 2
celery/tests/worker/test_control.py

@@ -141,7 +141,7 @@ class test_ControlPanel(AppCase):
         evd.groups = set()
         panel.handle('enable_events')
         self.assertIn('task', evd.groups)
-        evd.groups = set(['task'])
+        evd.groups = {'task'}
         self.assertIn('already enabled', panel.handle('enable_events')['ok'])
 
     def test_disable_events(self):
@@ -149,7 +149,7 @@ class test_ControlPanel(AppCase):
         panel = self.create_panel(consumer=consumer)
         evd = consumer.event_dispatcher
         evd.enabled = True
-        evd.groups = set(['task'])
+        evd.groups = {'task'}
         panel.handle('disable_events')
         self.assertNotIn('task', evd.groups)
         self.assertIn('already disabled', panel.handle('disable_events')['ok'])

+ 7 - 6
celery/utils/__init__.py

@@ -176,8 +176,8 @@ def lpmerge(L, R):
     """In place left precedent dictionary merge.
 
     Keeps values from `L`, if the value in `R` is :const:`None`."""
-    set = L.__setitem__
-    [set(k, v) for k, v in items(R) if v is not None]
+    setitem = L.__setitem__
+    [setitem(k, v) for k, v in items(R) if v is not None]
     return L
 
 
@@ -214,7 +214,7 @@ def cry(out=None, sepchr='=', seplen=49):  # pragma: no cover
 
     # get a map of threads by their ID so we can print their names
     # during the traceback dump
-    tmap = dict((t.ident, t) for t in threading.enumerate())
+    tmap = {t.ident: t for t in threading.enumerate()}
 
     sep = sepchr * seplen
     for tid, frame in items(sys._current_frames()):
@@ -276,9 +276,10 @@ def jsonify(obj,
     elif isinstance(obj, (tuple, list)):
         return [_jsonify(v) for v in obj]
     elif isinstance(obj, dict):
-        return dict((k, _jsonify(v, key=k))
-                    for k, v in items(obj)
-                    if (keyfilter(k) if keyfilter else 1))
+        return {
+            k: _jsonify(v, key=k) for k, v in items(obj)
+            if (keyfilter(k) if keyfilter else 1)
+        }
     elif isinstance(obj, datetime.datetime):
         # See "Date Time String Format" in the ECMA-262 specification.
         r = obj.isoformat()

+ 2 - 3
celery/utils/functional.py

@@ -265,8 +265,7 @@ def padlist(container, size, default=None):
 def mattrgetter(*attrs):
     """Like :func:`operator.itemgetter` but return :const:`None` on missing
     attributes instead of raising :exc:`AttributeError`."""
-    return lambda obj: dict((attr, getattr(obj, attr, None))
-                            for attr in attrs)
+    return lambda obj: {attr: getattr(obj, attr, None) for attr in attrs}
 
 
 def uniq(it):
@@ -303,4 +302,4 @@ class _regen(UserList, list):
 def dictfilter(d=None, **kw):
     """Remove all keys from dict ``d`` whose value is :const:`None`"""
     d = kw if d is None else (dict(d, **kw) if kw else d)
-    return dict((k, v) for k, v in items(d) if v is not None)
+    return {k: v for k, v in items(d) if v is not None}

+ 4 - 5
celery/worker/__init__.py

@@ -76,7 +76,7 @@ class WorkController(object):
     class Blueprint(bootsteps.Blueprint):
         """Worker bootstep blueprint."""
         name = 'Worker'
-        default_steps = set([
+        default_steps = {
             'celery.worker.components:Hub',
             'celery.worker.components:Queues',
             'celery.worker.components:Pool',
@@ -86,8 +86,7 @@ class WorkController(object):
             'celery.worker.components:Consumer',
             'celery.worker.autoscale:WorkerComponent',
             'celery.worker.autoreload:WorkerComponent',
-
-        ])
+        }
 
     def __init__(self, app=None, hostname=None, **kwargs):
         self.app = app or self.app
@@ -190,8 +189,8 @@ class WorkController(object):
             prev += tuple(includes)
             [self.app.loader.import_task_module(m) for m in includes]
         self.include = includes
-        task_modules = set(task.__class__.__module__
-                           for task in values(self.app.tasks))
+        task_modules = {task.__class__.__module__
+                        for task in values(self.app.tasks)}
         self.app.conf.CELERY_INCLUDE = tuple(set(prev) | task_modules)
 
     def prepare_args(self, **kwargs):

+ 7 - 6
celery/worker/autoreload.py

@@ -107,8 +107,8 @@ class StatMonitor(BaseMonitor):
 
     def find_changes(self):
         maybe_modified = self._maybe_modified
-        modified = dict((f, mt) for f, mt in self._mtimes()
-                        if maybe_modified(f, mt))
+        modified = {f: mt for f, mt in self._mtimes()
+                    if maybe_modified(f, mt)}
         if modified:
             self.on_change(modified)
             self.modify_times.update(modified)
@@ -131,7 +131,7 @@ class KQueueMonitor(BaseMonitor):
 
     def __init__(self, *args, **kwargs):
         super(KQueueMonitor, self).__init__(*args, **kwargs)
-        self.filemap = dict((f, None) for f in self.files)
+        self.filemap = {f: None for f in self.files}
         self.fdmap = {}
 
     def register_with_event_loop(self, hub):
@@ -257,13 +257,14 @@ class Autoreloader(bgThread):
 
     def on_init(self):
         files = self.file_to_module
-        files.update(dict(
-            (module_file(sys.modules[m]), m) for m in self.modules))
+        files.update({
+            module_file(sys.modules[m]): m for m in self.modules
+        })
 
         self._monitor = self.Monitor(
             files, self.on_change,
             shutdown_event=self._is_shutdown, **self.options)
-        self._hashes = dict([(f, file_hash(f)) for f in files])
+        self._hashes = {f: file_hash(f) for f in files}
 
     def register_with_event_loop(self, hub):
         if self._monitor is None:

+ 2 - 2
celery/worker/consumer.py

@@ -550,7 +550,7 @@ class Heart(bootsteps.StartStopStep):
 class Mingle(bootsteps.StartStopStep):
     label = 'Mingle'
     requires = (Events, )
-    compatible_transports = set(['amqp', 'redis'])
+    compatible_transports = {'amqp', 'redis'}
 
     def __init__(self, c, without_mingle=False, **kwargs):
         self.enabled = not without_mingle and self.compatible_transport(c.app)
@@ -643,7 +643,7 @@ class Gossip(bootsteps.ConsumerStep):
     _cons_stamp_fields = itemgetter(
         'id', 'clock', 'hostname', 'pid', 'topic', 'action', 'cver',
     )
-    compatible_transports = set(['amqp', 'redis'])
+    compatible_transports = {'amqp', 'redis'}
 
     def __init__(self, c, without_gossip=False, interval=5.0, **kwargs):
         self.enabled = not without_gossip and self.compatible_transport(c.app)

+ 12 - 12
celery/worker/control.py

@@ -56,15 +56,14 @@ def query_task(state, ids, **kwargs):
     def reqinfo(state, req):
         return state, req.info()
 
-    reqs = dict((req.id, ('reserved', req.info()))
-                for req in _find_requests_by_id(
-                    ids, worker_state.reserved_requests))
-    reqs.update(dict(
-        (req.id, ('active', req.info()))
-        for req in _find_requests_by_id(
-            ids, worker_state.active_requests,
-        )
-    ))
+    reqs = {
+        req.id: ('reserved', req.info())
+        for req in _find_requests_by_id(ids, worker_state.reserved_requests)
+    }
+    reqs.update({
+        req.id: ('active', req.info())
+        for req in _find_requests_by_id(ids, worker_state.active_requests)
+    })
 
     return reqs
 
@@ -280,9 +279,10 @@ def dump_tasks(state, taskinfoitems=None, **kwargs):
     taskinfoitems = taskinfoitems or DEFAULT_TASK_INFO_ITEMS
 
     def _extract_info(task):
-        fields = dict((field, str(getattr(task, field, None)))
-                      for field in taskinfoitems
-                      if getattr(task, field, None) is not None)
+        fields = {
+            field: str(getattr(task, field, None)) for field in taskinfoitems
+            if getattr(task, field, None) is not None
+        }
         if fields:
             info = ['='.join(f) for f in items(fields)]
             return '{0} [{1}]'.format(task.name, ' '.join(info))

+ 2 - 2
celery/worker/job.py

@@ -221,8 +221,8 @@ class Request(object):
                           'delivery_info': self.delivery_info}
         fun = self.task.run
         supported_keys = fun_takes_kwargs(fun, default_kwargs)
-        extend_with = dict((key, val) for key, val in items(default_kwargs)
-                           if key in supported_keys)
+        extend_with = {key: val for key, val in items(default_kwargs)
+                       if key in supported_keys}
         kwargs.update(extend_with)
         return kwargs