Ask Solem · 11 years ago
commit cac1d1893f

+ 1 - 1
celery/__init__.py

@@ -62,7 +62,7 @@ def _find_option_with_arg(argv, short_opts=None, long_opts=None):
     """Search argv for option specifying its short and longopt
     alternatives.
 
-    Returns the value of the option if found.
+    Return the value of the option if found.
 
     """
     for i, arg in enumerate(argv):
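
A rough illustration of what this helper does (a sketch; it assumes the usual ``-P``/``--pool=`` option style used by the celery command line):

    _find_option_with_arg(['celery', 'worker', '-P', 'eventlet'],
                          short_opts=['-P'], long_opts=['--pool'])
    # -> 'eventlet'  (the '--pool=eventlet' long form is matched as well)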

+ 3 - 3
celery/app/__init__.py

@@ -89,8 +89,8 @@ App = Celery  # XXX Compat
 
 
 def shared_task(*args, **kwargs):
-    """Task decorator that creates shared tasks,
-    and returns a proxy that always returns the task from the current apps
+    """Create shared tasks (decorator).
+    Will return a proxy that always takes the task from the current app's
     task registry.
 
     This can be used by library authors to create tasks that will work
@@ -126,7 +126,7 @@ def shared_task(*args, **kwargs):
                     with app._finalize_mutex:
                         app._task_from_fun(fun, **options)
 
-            # Returns a proxy that always gets the task from the current
+            # Return a proxy that always gets the task from the current
             # apps task registry.
             def task_by_cons():
                 app = current_app()
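
For context, a minimal sketch of how ``shared_task`` is typically used (app name and broker URL are made up):

    from celery import Celery, shared_task

    @shared_task
    def add(x, y):
        return x + y

    # The decorator gives back a proxy, not a concrete task: the real task
    # object is fetched from the current app's task registry on every access,
    # so the same function works with whichever app happens to be current.
    app = Celery('proj', broker='amqp://')
    add.delay(2, 2)   # dispatched through `app` (needs a running broker)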

+ 2 - 2
celery/app/amqp.py

@@ -371,7 +371,7 @@ class AMQP(object):
         )
 
     def Router(self, queues=None, create_missing=None):
-        """Returns the current task router."""
+        """Return the current task router."""
         return _routes.Router(self.routes, queues or self.queues,
                               self.app.either('CELERY_CREATE_MISSING_QUEUES',
                                               create_missing), app=self.app)
@@ -386,7 +386,7 @@ class AMQP(object):
 
     @cached_property
     def TaskProducer(self):
-        """Returns publisher used to send tasks.
+        """Return publisher used to send tasks.
 
         You should use `app.send_task` instead.
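
As the docstring says, sending by name should go through ``app.send_task`` rather than this producer directly; a small sketch (the task name ``proj.tasks.add`` is hypothetical):

    result = app.send_task('proj.tasks.add', args=(2, 2), kwargs={})
    result.get(timeout=10)   # -> 4, assuming a worker and a result backend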
 

+ 1 - 1
celery/app/base.py

@@ -478,7 +478,7 @@ class Celery(object):
         )
 
     def __reduce_keys__(self):
-        """Returns keyword arguments used to reconstruct the object
+        """Return keyword arguments used to reconstruct the object
         when unpickling."""
         return {
             'main': self.main,
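
These keys are what make the app instance picklable, e.g. for sending it to child processes; a quick sketch:

    import pickle

    app2 = pickle.loads(pickle.dumps(app))   # rebuilt from __reduce_keys__()
    assert app2.main == app.main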

+ 1 - 1
celery/app/control.py

@@ -171,7 +171,7 @@ class Control(object):
     def ping(self, destination=None, timeout=1, **kwargs):
         """Ping all (or specific) workers.
 
-        Returns answer from alive workers.
+        Will return the list of answers.
 
         See :meth:`broadcast` for supported keyword arguments.
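
Typical use, assuming the usual ``{'ok': 'pong'}`` reply payload:

    replies = app.control.ping(timeout=0.5)
    # e.g. [{'worker1@example.com': {'ok': 'pong'}}]
    alive = [host for reply in replies for host in reply]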
 

+ 1 - 1
celery/app/log.py

@@ -150,7 +150,7 @@ class Logging(object):
 
         If `logfile` is not specified, then `sys.stderr` is used.
 
-        Returns logger object.
+        Will return the base task logger object.
 
         """
         loglevel = mlevel(loglevel or self.loglevel)
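
The base task logger configured here is what ``get_task_logger`` children inherit from; the usual pattern in task code looks roughly like this:

    from celery.utils.log import get_task_logger

    logger = get_task_logger(__name__)

    @app.task
    def add(x, y):
        logger.info('Adding %s + %s', x, y)
        return x + y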

+ 1 - 1
celery/app/task.py

@@ -669,7 +669,7 @@ class Task(object):
                                            task_name=self.name, **kwargs)
 
     def subtask(self, args=None, *starargs, **starkwargs):
-        """Returns :class:`~celery.subtask` object for
+        """Return :class:`~celery.subtask` object for
         this task, wrapping arguments and execution options
         for a single task invocation."""
         starkwargs.setdefault('app', self.app)
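
For reference, ``subtask`` (and its ``.s()`` shortcut) wraps the arguments and options of a single invocation into a signature; a sketch assuming an ``add`` task defined with ``@app.task``:

    sig = add.subtask((2, 2), countdown=10)  # roughly add.s(2, 2).set(countdown=10)
    sig.delay()                              # apply the single invocation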

+ 3 - 3
celery/app/trace.py

@@ -59,7 +59,7 @@ _patched = {}
 
 
 def task_has_custom(task, attr):
-    """Returns true if the task or one of its bases
+    """Return true if the task or one of its bases
     defines ``attr`` (excluding the one in BaseTask)."""
     return mro_lookup(task.__class__, attr, stop=(BaseTask, object),
                       monkey_patched=['celery.app.task'])
@@ -125,7 +125,7 @@ class TraceInfo(object):
 def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                  Info=TraceInfo, eager=False, propagate=False, app=None,
                  IGNORE_STATES=IGNORE_STATES):
-    """Returns a function that traces task execution; catches all
+    """Return a function that traces task execution; catches all
     exceptions and updates result backend with the state and result
 
     If the call was successful, it saves the result to the task result
@@ -138,7 +138,7 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
     If the call results in an exception, it saves the exception as the task
     result, and sets the task state to `"FAILURE"`.
 
-    Returns a function that takes the following arguments:
+    Return a function that takes the following arguments:
 
         :param uuid: The id of the task.
         :param args: List of positional args to pass on to the function.

+ 4 - 4
celery/app/utils.py

@@ -72,7 +72,7 @@ class Settings(ConfigurationView):
         return self.first('CELERY_TIMEZONE', 'TIME_ZONE')
 
     def without_defaults(self):
-        """Returns the current configuration, but without defaults."""
+        """Return the current configuration, but without defaults."""
         # the last stash is the default settings, so just skip that
         return Settings({}, self._order[:-1])
 
@@ -96,7 +96,7 @@ class Settings(ConfigurationView):
         return self.get_by_parts(*self.find_option(name, namespace)[:-1])
 
     def get_by_parts(self, *parts):
-        """Returns the current value for setting specified as a path.
+        """Return the current value for setting specified as a path.
 
         Example::
 
@@ -107,7 +107,7 @@ class Settings(ConfigurationView):
         return self['_'.join(part for part in parts if part)]
 
     def humanize(self):
-        """Returns a human readable string showing changes to the
+        """Return a human readable string showing changes to the
         configuration."""
         return '\n'.join(
             '{0}: {1}'.format(key, pretty(value, width=50))
@@ -164,7 +164,7 @@ def filter_hidden_settings(conf):
 
 
 def bugreport(app):
-    """Returns a string containing information useful in bug reports."""
+    """Return a string containing information useful in bug reports."""
     import billiard
     import celery
     import kombu
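
These settings helpers mostly surface through the app object; roughly:

    print(app.conf.humanize())        # settings that differ from the defaults
    changed = app.conf.without_defaults()
    print(app.bugreport())            # celery/kombu/billiard versions and so on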

+ 1 - 1
celery/backends/base.py

@@ -46,7 +46,7 @@ PY3 = sys.version_info >= (3, 0)
 
 
 def unpickle_backend(cls, args, kwargs):
-    """Returns an unpickled backend."""
+    """Return an unpickled backend."""
     from celery import current_app
     return cls(*args, app=current_app._get_current_object(), **kwargs)
 

+ 1 - 1
celery/beat.py

@@ -98,7 +98,7 @@ class ScheduleEntry(object):
         return self.schedule.now() if self.schedule else self.app.now()
 
     def _next_instance(self, last_run_at=None):
-        """Returns a new instance of the same class, but with
+        """Return a new instance of the same class, but with
         its date and count fields updated."""
         return self.__class__(**dict(
             self,

+ 1 - 1
celery/bin/base.py

@@ -509,7 +509,7 @@ class Command(object):
         return options
 
     def with_pool_option(self, argv):
-        """Returns tuple of ``(short_opts, long_opts)`` if the command
+        """Return tuple of ``(short_opts, long_opts)`` if the command
         supports a pool argument, and used to monkey patch eventlet/gevent
         environments as early as possible.
 

+ 1 - 1
celery/bin/worker.py

@@ -159,7 +159,7 @@ class worker(Command):
     def maybe_detach(self, argv, dopts=['-D', '--detach']):
         if any(arg in argv for arg in dopts):
             argv = [v for v in argv if v not in dopts]
-            # never returns
+            # will never return
             detached_celeryd(self.app).execute_from_commandline(argv)
             raise SystemExit(0)
 

+ 3 - 3
celery/contrib/abortable.py

@@ -114,7 +114,7 @@ class AbortableAsyncResult(AsyncResult):
     """
 
     def is_aborted(self):
-        """Returns :const:`True` if the task is (being) aborted."""
+        """Return :const:`True` if the task is (being) aborted."""
         return self.state == ABORTED
 
     def abort(self):
@@ -146,14 +146,14 @@ class AbortableTask(Task):
     abstract = True
 
     def AsyncResult(self, task_id):
-        """Returns the accompanying AbortableAsyncResult instance."""
+        """Return the accompanying AbortableAsyncResult instance."""
         return AbortableAsyncResult(task_id, backend=self.backend)
 
     def is_aborted(self, **kwargs):
         """Checks against the backend whether this
         :class:`AbortableAsyncResult` is :const:`ABORTED`.
 
-        Always returns :const:`False` in case the `task_id` parameter
+        Will always return :const:`False` in case the `task_id` parameter
         refers to a regular (non-abortable) :class:`Task`.
 
         Be aware that invoking this method will cause a hit in the

+ 1 - 1
celery/contrib/rdb.py

@@ -165,7 +165,7 @@ class Rdb(Pdb):
 
 
 def debugger():
-    """Returns the current debugger instance (if any),
+    """Return the current debugger instance (if any),
     or creates a new one."""
     rdb = _current[0]
     if rdb is None or not rdb.active:
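
``debugger()`` is what backs the documented ``rdb.set_trace()`` helper; usage sketch:

    from celery.contrib import rdb

    @app.task
    def add(x, y):
        result = x + y
        rdb.set_trace()   # start (or reuse) a remote pdb session over telnet
        return result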

+ 2 - 2
celery/datastructures.py

@@ -187,7 +187,7 @@ class DependencyGraph(object):
         return [t[0] for t in graph._khan62()]
 
     def valency_of(self, obj):
-        """Returns the valency (degree) of a vertex in the graph."""
+        """Return the valency (degree) of a vertex in the graph."""
         try:
             l = [len(self[obj])]
         except KeyError:
@@ -207,7 +207,7 @@ class DependencyGraph(object):
                 self.add_edge(obj, dep)
 
     def edges(self):
-        """Returns generator that yields for all edges in the graph."""
+        """Return generator that yields for all edges in the graph."""
         return (obj for obj, adj in items(self) if adj)
 
     def _khan62(self):

+ 4 - 4
celery/events/state.py

@@ -323,7 +323,7 @@ class State(object):
     def get_or_create_worker(self, hostname, **kwargs):
         """Get or create worker by hostname.
 
-        Returns tuple of ``(worker, was_created)``.
+        Return tuple of ``(worker, was_created)``.
         """
         try:
             worker = self.workers[hostname]
@@ -415,7 +415,7 @@ class State(object):
     def tasks_by_type(self, name, limit=None):
         """Get all tasks by type.
 
-        Returns a list of ``(uuid, Task)`` tuples.
+        Return a list of ``(uuid, Task)`` tuples.
 
         """
         return islice(
@@ -435,11 +435,11 @@ class State(object):
         )
 
     def task_types(self):
-        """Returns a list of all seen task types."""
+        """Return a list of all seen task types."""
         return list(sorted(set(task.name for task in values(self.tasks))))
 
     def alive_workers(self):
-        """Returns a list of (seemingly) alive workers."""
+        """Return a list of (seemingly) alive workers."""
         return [w for w in values(self.workers) if w.alive]
 
     def __repr__(self):

+ 5 - 5
celery/platforms.py

@@ -61,7 +61,7 @@ Seems we're already running? (pid: {1})"""
 
 
 def pyimplementation():
-    """Returns string identifying the current Python implementation."""
+    """Return string identifying the current Python implementation."""
     if hasattr(_platform, 'python_implementation'):
         return _platform.python_implementation()
     elif sys.platform.startswith('java'):
@@ -80,7 +80,7 @@ class LockFailed(Exception):
 
 
 def get_fdmax(default=None):
-    """Returns the maximum number of open file descriptors
+    """Return the maximum number of open file descriptors
     on this system.
 
     :keyword default: Value returned if there's no file
@@ -120,7 +120,7 @@ class Pidfile(object):
     __enter__ = acquire
 
     def is_locked(self):
-        """Returns true if the pid lock exists."""
+        """Return true if the pid lock exists."""
         return os.path.exists(self.path)
 
     def release(self, *args):
@@ -129,7 +129,7 @@ class Pidfile(object):
     __exit__ = release
 
     def read_pid(self):
-        """Reads and returns the current pid."""
+        """Read and return the current pid."""
         with ignore_errno('ENOENT'):
             with open(self.path, 'r') as fh:
                 line = fh.readline()
@@ -540,7 +540,7 @@ class Signals(object):
         return _signal.alarm(0)
 
     def supported(self, signal_name):
-        """Returns true value if ``signal_name`` exists on this platform."""
+        """Return true value if ``signal_name`` exists on this platform."""
         try:
             return self.signum(signal_name)
         except AttributeError:
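
A small usage sketch for the platform helpers touched here:

    from celery.platforms import pyimplementation, get_fdmax, signals

    pyimplementation()         # e.g. 'CPython'
    get_fdmax(default=1024)    # fd limit for this process, or the default
    signals.supported('TERM')  # truthy if SIGTERM exists on this platform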

+ 2 - 2
celery/security/certificate.py

@@ -34,11 +34,11 @@ class Certificate(object):
         return self._cert.has_expired()
 
     def get_serial_number(self):
-        """Returns the certificates serial number."""
+        """Return the serial number in the certificate."""
         return self._cert.get_serial_number()
 
     def get_issuer(self):
-        """Returns issuer (CA) as a string"""
+        """Return issuer (CA) as a string"""
         return ' '.join(bytes_to_str(x[1]) for x in
                         self._cert.get_issuer().get_components())
 

+ 1 - 1
celery/task/http.py

@@ -138,7 +138,7 @@ class HttpDispatch(object):
         self.logger = kwargs.get('logger') or logger
 
     def make_request(self, url, method, params):
-        """Makes an HTTP request and returns the response."""
+        """Perform HTTP request and return the response."""
         request = Request(url, params)
         for key, val in items(self.http_headers):
             request.add_header(key, val)

+ 1 - 1
celery/utils/__init__.py

@@ -57,7 +57,7 @@ NODENAME_SEP = '@'
 
 
 def worker_direct(hostname):
-    """Returns :class:`kombu.Queue` that is a direct route to
+    """Return :class:`kombu.Queue` that is a direct route to
     a worker by hostname.
 
     :param hostname: The fully qualified node name of a worker

+ 2 - 2
celery/utils/debug.py

@@ -143,14 +143,14 @@ def humanbytes(s):
 
 
 def mem_rss():
-    """Returns RSS memory usage as a humanized string."""
+    """Return RSS memory usage as a humanized string."""
     p = ps()
     if p is not None:
         return humanbytes(p.get_memory_info().rss)
 
 
 def ps():
-    """Returns the global :class:`psutil.Process` instance,
+    """Return the global :class:`psutil.Process` instance,
     or :const:`None` if :mod:`psutil` is not installed."""
     global _process
     if _process is None and Process is not None:
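
Both helpers degrade gracefully when :mod:`psutil` is missing; roughly:

    from celery.utils.debug import mem_rss, ps

    print(mem_rss())   # e.g. '87.2MB', or None if psutil is not installed
    proc = ps()        # the shared psutil.Process instance, or None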

+ 2 - 2
celery/utils/dispatch/saferef.py

@@ -190,7 +190,7 @@ class BoundMethodWeakref(object):  # pragma: no cover
         """Return a strong reference to the bound method
 
         If the target cannot be retrieved, then will
-        return None, otherwise returns a bound instance
+        return None, otherwise return a bound instance
         method for our object and function.
 
         Note:
@@ -253,7 +253,7 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
         """Return a strong reference to the bound method
 
         If the target cannot be retrieved, then will
-        return None, otherwise returns a bound instance
+        return None, otherwise return a bound instance
         method for our object and function.
 
         Note:

+ 5 - 5
celery/utils/functional.py

@@ -186,7 +186,7 @@ def noop(*args, **kwargs):
 
 
 def first(predicate, it):
-    """Returns the first element in `iterable` that `predicate` returns a
+    """Return the first element in `iterable` that `predicate` gives a
     :const:`True` value for.
 
     If `predicate` is None it will return the first item that is not None.
@@ -199,8 +199,8 @@ def first(predicate, it):
 
 
 def firstmethod(method):
-    """Returns a function that with a list of instances,
-    finds the first instance that returns a value for the given method.
+    """Return a function that, given a list of instances,
+    finds the first instance that gives a value for the given method.
 
     The list can also contain lazy instances
     (:class:`~kombu.utils.functional.lazy`.)
@@ -259,14 +259,14 @@ def padlist(container, size, default=None):
 
 
 def mattrgetter(*attrs):
-    """Like :func:`operator.itemgetter` but returns :const:`None` on missing
+    """Like :func:`operator.itemgetter` but return :const:`None` on missing
     attributes instead of raising :exc:`AttributeError`."""
     return lambda obj: dict((attr, getattr(obj, attr, None))
                             for attr in attrs)
 
 
 def uniq(it):
-    """Returns all unique elements in ``it``, preserving order."""
+    """Return all unique elements in ``it``, preserving order."""
     seen = set()
     return (seen.add(obj) or obj for obj in it if obj not in seen)
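
A quick illustration of the semantics described above:

    from celery.utils.functional import first, mattrgetter, uniq

    first(lambda x: x > 2, [1, 2, 3, 4])   # -> 3
    first(None, [None, None, 'x'])         # -> 'x' (first item that is not None)
    list(uniq([1, 2, 1, 3, 2]))            # -> [1, 2, 3]
    mattrgetter('name', 'nope')(object())  # -> {'name': None, 'nope': None}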
 

+ 1 - 1
celery/utils/imports.py

@@ -109,6 +109,6 @@ def reload_from_cwd(module, reloader=None):
 
 
 def module_file(module):
-    """Returns the correct original file name of a module."""
+    """Return the correct original file name of a module."""
     name = module.__file__
     return name[:-1] if name.endswith('.pyc') else name
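
For example:

    import celery
    from celery.utils.imports import module_file

    module_file(celery)   # e.g. '.../celery/__init__.py', never the '.pyc' path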

+ 1 - 1
celery/utils/iso8601.py

@@ -4,7 +4,7 @@ Originally taken from pyiso8601 (http://code.google.com/p/pyiso8601/)
 Modified to match the behavior of dateutil.parser:
 
     - raise ValueError instead of ParseError
-    - returns naive datetimes by default
+    - return naive datetimes by default
     - uses pytz.FixedOffset
 
 This is the original License:

+ 1 - 1
celery/utils/log.py

@@ -202,7 +202,7 @@ class LoggingProxy(object):
         self.closed = True
 
     def isatty(self):
-        """Always returns :const:`False`. Just here for file support."""
+        """Always return :const:`False`. Just here for file support."""
         return False
 
 

+ 1 - 1
celery/utils/mail.py

@@ -175,7 +175,7 @@ py-celery at {{hostname}}.
         self.body = kwargs.get('body', self.body)
 
     def should_send(self, context, exc):
-        """Returns true or false depending on if a task error mail
+        """Return true or false depending on if a task error mail
         should be sent for this type of error."""
         return True
 

+ 1 - 1
celery/utils/objects.py

@@ -12,7 +12,7 @@ __all__ = ['mro_lookup']
 
 
 def mro_lookup(cls, attr, stop=(), monkey_patched=[]):
-    """Returns the first node by MRO order that defines an attribute.
+    """Return the first node by MRO order that defines an attribute.
 
     :keyword stop: A list of types that if reached will stop the search.
     :keyword monkey_patched: Use one of the stop classes if the attr's

+ 3 - 2
celery/utils/serialization.py

@@ -44,8 +44,9 @@ def find_pickleable_exception(exc, loads=pickle.loads,
 
     :param exc: An exception instance.
 
-    :returns: the nearest exception if it's not :exc:`Exception` or below,
-              if it is it returns :const:`None`.
+    Will return the nearest pickleable parent exception class
+    (except :exc:`Exception` and parents), or if the exception is
+    pickleable it will return :const:`None`.
 
     :rtype :exc:`Exception`:
 

+ 1 - 1
celery/utils/threads.py

@@ -183,7 +183,7 @@ class _LocalStack(object):
     item from the stack after using.  When the stack is empty it will
     no longer be bound to the current context (and as such released).
 
-    By calling the stack without arguments it returns a proxy that
+    By calling the stack without arguments it will return a proxy that
     resolves to the topmost item on the stack.
 
     """

+ 1 - 1
celery/utils/timeutils.py

@@ -265,7 +265,7 @@ def maybe_iso8601(dt):
 
 
 def is_naive(dt):
-    """Returns :const:`True` if the datetime is naive
+    """Return :const:`True` if the datetime is naive
     (does not have timezone information)."""
     return dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None
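
For example (using :mod:`pytz`, which Celery already depends on):

    from datetime import datetime
    from pytz import utc
    from celery.utils.timeutils import is_naive

    is_naive(datetime.utcnow())   # -> True  (no tzinfo attached)
    is_naive(datetime.now(utc))   # -> False (timezone aware)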
 

+ 1 - 1
celery/worker/control.py

@@ -335,7 +335,7 @@ def cancel_consumer(state, queue=None, **_):
 
 @Panel.register
 def active_queues(state):
-    """Returns the queues associated with each worker."""
+    """Return information about the queues a worker consumes from."""
     return [dict(queue.as_dict(recurse=True))
             for queue in state.consumer.task_consumer.queues]
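
This remote-control command is what ``app.control.inspect().active_queues()`` reports per worker; sketch:

    queues = app.control.inspect().active_queues()
    # e.g. {'worker1@example.com': [{'name': 'celery', 'routing_key': 'celery', ...}]}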
 

+ 9 - 7
docs/reference/celery.rst

@@ -121,7 +121,7 @@ and creating Celery applications.
 
     .. method:: Celery.bugreport
 
-        Returns a string with information useful for the Celery core
+        Return a string with information useful for the Celery core
         developers when reporting a bug.
 
     .. method:: Celery.config_from_object(obj, silent=False)
@@ -240,8 +240,8 @@ and creating Celery applications.
 
         .. admonition:: App Binding
 
-            For custom apps the task decorator returns proxy
-            objects, so that the act of creating the task is not performed
+            For custom apps the task decorator will return a proxy
+            object, so that the act of creating the task is not performed
             until the task is used or the task registry is accessed.
 
             If you are depending on binding to be deferred, then you must
@@ -333,7 +333,7 @@ and creating Celery applications.
 
     .. method:: Celery.now()
 
-        Returns the current time and date as a :class:`~datetime.datetime`
+        Return the current time and date as a :class:`~datetime.datetime`
         object.
 
     .. method:: Celery.set_current()
@@ -367,7 +367,9 @@ See :ref:`guide-canvas` for more about creating task workflows.
     A group is lazy so you must call it to take action and evaluate
     the group.
 
-    Calling the group returns :class:`~@GroupResult`.
+    Will return a `group` task that, when called, will then call all of
+    the tasks in the group (and return a :class:`GroupResult` instance
+    that can be used to inspect the state of the group).
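
A short sketch of that behaviour (``add`` is an assumed task):

    from celery import group

    g = group(add.s(i, i) for i in range(10))
    result = g()              # calling the group gives a GroupResult
    result.get(timeout=10)    # -> [0, 2, 4, 6, ...]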
 
 .. class:: chain(task1[, task2[, task3[,... taskN]]])
 
@@ -460,7 +462,7 @@ See :ref:`guide-canvas` for more about creating task workflows.
 
     .. method:: subtask.clone(args=(), kwargs={}, ...)
 
-        Returns a copy of this subtask.
+        Return a copy of this subtask.
 
         :keyword args: Partial args to be prepended to the existing args.
         :keyword kwargs: Partial kwargs to be merged with the existing kwargs.
@@ -490,7 +492,7 @@ See :ref:`guide-canvas` for more about creating task workflows.
 
         Set arbitrary options (same as ``.options.update(...)``).
 
-        This is a chaining method call (i.e. it returns itself).
+        This is a chaining method call (i.e. it will return ``self``).
 
     .. method:: subtask.flatten_links()
 

+ 6 - 6
docs/userguide/canvas.rst

@@ -639,16 +639,16 @@ It supports the following operations:
 
 * :meth:`~celery.result.GroupResult.successful`
 
-    Returns :const:`True` if all of the subtasks finished
+    Return :const:`True` if all of the subtasks finished
     successfully (e.g. did not raise an exception).
 
 * :meth:`~celery.result.GroupResult.failed`
 
-    Returns :const:`True` if any of the subtasks failed.
+    Return :const:`True` if any of the subtasks failed.
 
 * :meth:`~celery.result.GroupResult.waiting`
 
-    Returns :const:`True` if any of the subtasks
+    Return :const:`True` if any of the subtasks
     is not ready yet.
 
 * :meth:`~celery.result.GroupResult.ready`
@@ -658,15 +658,15 @@ It supports the following operations:
 
 * :meth:`~celery.result.GroupResult.completed_count`
 
-    Returns the number of completed subtasks.
+    Return the number of completed subtasks.
 
 * :meth:`~celery.result.GroupResult.revoke`
 
-    Revokes all of the subtasks.
+    Revoke all of the subtasks.
 
 * :meth:`~celery.result.GroupResult.iterate`
 
-    Iterates over the return values of the subtasks
+    Iterate over the return values of the subtasks
     as they finish, one by one.
 
 * :meth:`~celery.result.GroupResult.join`

+ 1 - 1
examples/eventlet/webcrawler.py

@@ -42,7 +42,7 @@ url_regex = re.compile(
 
 
 def domain(url):
-    """Returns the domain part of an URL."""
+    """Return the domain part of an URL."""
     return urlsplit(url)[1].split(':')[0]
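
For example:

    domain('http://example.com:8000/path')   # -> 'example.com'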