summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorJenkins <jenkins@review.openstack.org>2013-06-11 18:43:09 +0000
committerGerrit Code Review <review@openstack.org>2013-06-11 18:43:09 +0000
commit109e1b525dc96fd2d523dbc41c018ee975030e82 (patch)
treec33556b56380515caca0219f49e4f2f69bb3dcb8
parent165b98415d3949f8a9dddbdbc1f304fb7d257718 (diff)
parent7119e29cb535426c587eaf2cfc2cfcd11a422df0 (diff)
downloadoslo-109e1b525dc96fd2d523dbc41c018ee975030e82.tar.gz
oslo-109e1b525dc96fd2d523dbc41c018ee975030e82.tar.xz
oslo-109e1b525dc96fd2d523dbc41c018ee975030e82.zip
Merge "Enable hacking H404 test."
-rw-r--r--openstack/common/cfgfilter.py13
-rw-r--r--openstack/common/context.py3
-rw-r--r--openstack/common/db/sqlalchemy/session.py28
-rw-r--r--openstack/common/deprecated/wsgi.py31
-rw-r--r--openstack/common/exception.py3
-rw-r--r--openstack/common/gettextutils.py10
-rw-r--r--openstack/common/importutils.py5
-rw-r--r--openstack/common/log.py7
-rw-r--r--openstack/common/middleware/base.py17
-rw-r--r--openstack/common/middleware/context.py11
-rw-r--r--openstack/common/middleware/debug.py12
-rw-r--r--openstack/common/middleware/sizelimit.py3
-rw-r--r--openstack/common/network_utils.py4
-rw-r--r--openstack/common/policy.py207
-rw-r--r--openstack/common/processutils.py9
-rw-r--r--openstack/common/rootwrap/wrapper.py12
-rw-r--r--openstack/common/rpc/amqp.py31
-rw-r--r--openstack/common/rpc/common.py16
-rw-r--r--openstack/common/rpc/impl_zmq.py50
-rw-r--r--openstack/common/rpc/matchmaker.py122
-rw-r--r--openstack/common/rpc/matchmaker_redis.py12
-rw-r--r--openstack/common/rpc/matchmaker_ring.py8
-rw-r--r--openstack/common/strutils.py19
-rw-r--r--openstack/common/timeutils.py12
-rw-r--r--tests/unit/rpc/matchmaker_common.py13
-rw-r--r--tests/unit/rpc/test_common.py3
-rw-r--r--tests/unit/rpc/test_matchmaker_redis.py21
-rw-r--r--tests/unit/rpc/test_qpid.py12
-rw-r--r--tests/unit/rpc/test_zmq.py12
-rw-r--r--tests/unit/scheduler/fake_hosts.py3
-rw-r--r--tests/unit/scheduler/test_base_filter.py39
-rw-r--r--tests/unit/test_service.py4
-rw-r--r--tests/utils.py3
-rw-r--r--tox.ini2
34 files changed, 331 insertions, 426 deletions
diff --git a/openstack/common/cfgfilter.py b/openstack/common/cfgfilter.py
index 29aeac2..27ff471 100644
--- a/openstack/common/cfgfilter.py
+++ b/openstack/common/cfgfilter.py
@@ -60,9 +60,10 @@ from oslo.config import cfg
class ConfigFilter(collections.Mapping):
- """
- A helper class which wraps a ConfigOpts object and enforces the
- explicit declaration of dependencies on external options.
+ """A helper class which wraps a ConfigOpts object.
+
+ ConfigFilter enforces the explicit declaration of dependencies on external
+ options.
"""
def __init__(self, conf):
@@ -198,9 +199,9 @@ class ConfigFilter(collections.Mapping):
class GroupAttr(collections.Mapping):
- """
- A helper class representing the option values of a group as a mapping
- and attributes.
+ """Helper class to wrap a group object.
+
+ Represents the option values of a group as a mapping and attributes.
"""
def __init__(self, conf, group):
diff --git a/openstack/common/context.py b/openstack/common/context.py
index c872331..3899c2c 100644
--- a/openstack/common/context.py
+++ b/openstack/common/context.py
@@ -33,7 +33,8 @@ def generate_request_id():
class RequestContext(object):
- """
+ """Helper class to represent useful information about a request context.
+
Stores information about the security context under which the user
accesses the system, as well as additional request information.
"""
diff --git a/openstack/common/db/sqlalchemy/session.py b/openstack/common/db/sqlalchemy/session.py
index cb8fcf9..b5e10f1 100644
--- a/openstack/common/db/sqlalchemy/session.py
+++ b/openstack/common/db/sqlalchemy/session.py
@@ -384,8 +384,7 @@ def cleanup():
class SqliteForeignKeysListener(PoolListener):
- """
- Ensures that the foreign key constraints are enforced in SQLite.
+ """Ensures that the foreign key constraints are enforced in SQLite.
The foreign key constraints are disabled by default in SQLite,
so the foreign key constraints will be enabled here for every
@@ -444,7 +443,8 @@ _DUP_KEY_RE_DB = {
def _raise_if_duplicate_entry_error(integrity_error, engine_name):
- """
+ """Raise exception if two entries are duplicated.
+
In this function will be raised DBDuplicateEntry exception if integrity
error wrap unique constraint violation.
"""
@@ -487,7 +487,8 @@ _DEADLOCK_RE_DB = {
def _raise_if_deadlock_error(operational_error, engine_name):
- """
+ """Raise exception on deadlock condition.
+
Raise DBDeadlock exception if OperationalError contains a Deadlock
condition.
"""
@@ -566,19 +567,17 @@ def _add_regexp_listener(dbapi_con, con_record):
def _greenthread_yield(dbapi_con, con_record):
- """
- Ensure other greenthreads get a chance to execute by forcing a context
- switch. With common database backends (eg MySQLdb and sqlite), there is
- no implicit yield caused by network I/O since they are implemented by
- C libraries that eventlet cannot monkey patch.
+ """Ensure other greenthreads get a chance to be executed.
+
+ Force a context switch. With common database backends (eg MySQLdb and
+ sqlite), there is no implicit yield caused by network I/O since they are
+ implemented by C libraries that eventlet cannot monkey patch.
"""
greenthread.sleep(0)
def _ping_listener(dbapi_conn, connection_rec, connection_proxy):
- """
- Ensures that MySQL connections checked out of the
- pool are alive.
+ """Ensures that MySQL connections checked out of the pool are alive.
Borrowed from:
http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
@@ -716,8 +715,9 @@ def get_maker(engine, autocommit=True, expire_on_commit=False):
def _patch_mysqldb_with_stacktrace_comments():
- """Adds current stack trace as a comment in queries by patching
- MySQLdb.cursors.BaseCursor._do_query.
+ """Adds current stack trace as a comment in queries.
+
+ Patches MySQLdb.cursors.BaseCursor._do_query.
"""
import MySQLdb.cursors
import traceback
diff --git a/openstack/common/deprecated/wsgi.py b/openstack/common/deprecated/wsgi.py
index f9be97e..cf2e9ae 100644
--- a/openstack/common/deprecated/wsgi.py
+++ b/openstack/common/deprecated/wsgi.py
@@ -66,8 +66,7 @@ def run_server(application, port, **kwargs):
class Service(service.Service):
- """
- Provides a Service API for wsgi servers.
+ """Provides a Service API for wsgi servers.
This gives us the ability to launch wsgi servers with the
Launcher classes in service.py.
@@ -163,13 +162,10 @@ class Service(service.Service):
class Router(object):
- """
- WSGI middleware that maps incoming requests to WSGI apps.
- """
+ """WSGI middleware that maps incoming requests to WSGI apps."""
def __init__(self, mapper):
- """
- Create a router for the given routes.Mapper.
+ """Create a router for the given routes.Mapper.
Each route in `mapper` must specify a 'controller', which is a
WSGI app to call. You'll probably want to specify an 'action' as
@@ -197,8 +193,8 @@ class Router(object):
@webob.dec.wsgify
def __call__(self, req):
- """
- Route the incoming request to a controller based on self.map.
+ """Route the incoming request to a controller based on self.map.
+
If no match, return a 404.
"""
return self._router
@@ -206,9 +202,10 @@ class Router(object):
@staticmethod
@webob.dec.wsgify
def _dispatch(req):
- """
+ """Gets application from the environment.
+
Called by self._router after matching the incoming request to a route
- and putting the information into req.environ. Either returns 404
+ and putting the information into req.environ. Either returns 404
or the routed WSGI app's response.
"""
match = req.environ['wsgiorg.routing_args'][1]
@@ -263,8 +260,7 @@ class Request(webob.Request):
class Resource(object):
- """
- WSGI app that handles (de)serialization and controller dispatch.
+ """WSGI app that handles (de)serialization and controller dispatch.
Reads routing information supplied by RoutesMiddleware and calls
the requested action method upon its deserializer, controller,
@@ -280,7 +276,8 @@ class Resource(object):
serialized by requested content type.
"""
def __init__(self, controller, deserializer=None, serializer=None):
- """
+ """Initiates Resource object.
+
:param controller: object that implement methods created by routes lib
:param deserializer: object that supports webob request deserialization
through controller-like actions
@@ -387,7 +384,8 @@ class JSONDictSerializer(DictSerializer):
class XMLDictSerializer(DictSerializer):
def __init__(self, metadata=None, xmlns=None):
- """
+ """Initiates XMLDictSerializer object.
+
:param metadata: information needed to deserialize xml into
a dictionary.
:param xmlns: XML namespace to include with serialized xml
@@ -662,7 +660,8 @@ class JSONDeserializer(TextDeserializer):
class XMLDeserializer(TextDeserializer):
def __init__(self, metadata=None):
- """
+ """Initiates XMLDeserializer object.
+
:param metadata: information needed to deserialize xml into
a dictionary.
"""
diff --git a/openstack/common/exception.py b/openstack/common/exception.py
index a645588..cdf40f3 100644
--- a/openstack/common/exception.py
+++ b/openstack/common/exception.py
@@ -110,8 +110,7 @@ def wrap_exception(f):
class OpenstackException(Exception):
- """
- Base Exception
+ """Base Exception class.
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
diff --git a/openstack/common/gettextutils.py b/openstack/common/gettextutils.py
index d6b5f10..0e27425 100644
--- a/openstack/common/gettextutils.py
+++ b/openstack/common/gettextutils.py
@@ -75,9 +75,10 @@ def get_lazy_gettext(domain):
"""
def _lazy_gettext(msg):
- """
- Create and return a Message object encapsulating a string
- so that we can translate it later when needed.
+ """Create and return a Message object.
+
+ Message encapsulates a string so that we can translate it later when
+ needed.
"""
return Message(msg, domain)
@@ -207,8 +208,7 @@ class LocaleHandler(logging.Handler):
"""
def __init__(self, locale, target):
- """
- Initialize a LocaleHandler
+ """Initialize a LocaleHandler
:param locale: locale to use for translating messages
:param target: logging.Handler object to forward
diff --git a/openstack/common/importutils.py b/openstack/common/importutils.py
index dbee325..7a303f9 100644
--- a/openstack/common/importutils.py
+++ b/openstack/common/importutils.py
@@ -41,8 +41,9 @@ def import_object(import_str, *args, **kwargs):
def import_object_ns(name_space, import_str, *args, **kwargs):
- """
- Import a class and return an instance of it, first by trying
+ """Tries to import object from default namespace.
+
+ Imports a class and return an instance of it, first by trying
to find the class in a default namespace, then failing back to
a full path if not found in the default namespace.
"""
diff --git a/openstack/common/log.py b/openstack/common/log.py
index d125d90..8097b23 100644
--- a/openstack/common/log.py
+++ b/openstack/common/log.py
@@ -459,10 +459,11 @@ def getLogger(name='unknown', version='unknown'):
def getLazyLogger(name='unknown', version='unknown'):
- """
- create a pass-through logger that does not create the real logger
+ """Returns lazy logger.
+
+ Creates a pass-through logger that does not create the real logger
until it is really needed and delegates all calls to the real logger
- once it is created
+ once it is created.
"""
return LazyAdapter(name, version)
diff --git a/openstack/common/middleware/base.py b/openstack/common/middleware/base.py
index 624a391..7236731 100644
--- a/openstack/common/middleware/base.py
+++ b/openstack/common/middleware/base.py
@@ -18,18 +18,16 @@ import webob.dec
class Middleware(object):
- """
- Base WSGI middleware wrapper. These classes require an application to be
- initialized that will be called next. By default the middleware will
- simply call its wrapped app, or you can override __call__ to customize its
- behavior.
+ """Base WSGI middleware wrapper.
+
+ These classes require an application to be initialized that will be called
+ next. By default the middleware will simply call its wrapped app, or you
+ can override __call__ to customize its behavior.
"""
@classmethod
def factory(cls, global_conf, **local_conf):
- """
- Factory method for paste.deploy
- """
+ """Factory method for paste.deploy."""
def filter(app):
return cls(app)
@@ -40,8 +38,7 @@ class Middleware(object):
self.application = application
def process_request(self, req):
- """
- Called on each request.
+ """Called on each request.
If this returns None, the next application down the stack will be
executed. If it returns a response then that response will be returned
diff --git a/openstack/common/middleware/context.py b/openstack/common/middleware/context.py
index 2636e8e..542e71e 100644
--- a/openstack/common/middleware/context.py
+++ b/openstack/common/middleware/context.py
@@ -30,9 +30,7 @@ class ContextMiddleware(base.Middleware):
super(ContextMiddleware, self).__init__(app)
def make_context(self, *args, **kwargs):
- """
- Create a context with the given arguments.
- """
+ """Create a context with the given arguments."""
# Determine the context class to use
ctxcls = context.RequestContext
@@ -42,7 +40,8 @@ class ContextMiddleware(base.Middleware):
return ctxcls(*args, **kwargs)
def process_request(self, req):
- """
+ """Process the request.
+
Extract any authentication information in the request and
construct an appropriate context from it.
"""
@@ -52,9 +51,7 @@ class ContextMiddleware(base.Middleware):
def filter_factory(global_conf, **local_conf):
- """
- Factory method for paste.deploy
- """
+ """Factory method for paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
diff --git a/openstack/common/middleware/debug.py b/openstack/common/middleware/debug.py
index b92af11..f0f6b90 100644
--- a/openstack/common/middleware/debug.py
+++ b/openstack/common/middleware/debug.py
@@ -24,9 +24,10 @@ from openstack.common.middleware import base
class Debug(base.Middleware):
- """
- Helper class that can be inserted into any WSGI application chain
- to get information about the request and response.
+ """Helper class that returns debug information.
+
+ Can be inserted into any WSGI application chain to get information about
+ the request and response.
"""
@webob.dec.wsgify
@@ -48,10 +49,7 @@ class Debug(base.Middleware):
@staticmethod
def print_generator(app_iter):
- """
- Iterator that prints the contents of a wrapper string iterator
- when iterated.
- """
+ """Prints the contents of a wrapper string iterator when iterated."""
print(("*" * 40) + " BODY")
for part in app_iter:
sys.stdout.write(part)
diff --git a/openstack/common/middleware/sizelimit.py b/openstack/common/middleware/sizelimit.py
index 1128b8a..96a1fbf 100644
--- a/openstack/common/middleware/sizelimit.py
+++ b/openstack/common/middleware/sizelimit.py
@@ -41,7 +41,8 @@ CONF.register_opt(max_req_body_size)
class LimitingReader(object):
"""Reader to limit the size of an incoming request."""
def __init__(self, data, limit):
- """
+ """Initiates LimitingReader object.
+
:param data: Underlying data object
:param limit: maximum number of bytes the reader should allow
"""
diff --git a/openstack/common/network_utils.py b/openstack/common/network_utils.py
index fc7b9a8..0527ab9 100644
--- a/openstack/common/network_utils.py
+++ b/openstack/common/network_utils.py
@@ -26,8 +26,8 @@ LOG = logging.getLogger(__name__)
def parse_host_port(address, default_port=None):
- """
- Interpret a string as a host:port pair.
+ """Interpret a string as a host:port pair.
+
An IPv6 address MUST be escaped if accompanied by a port,
because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
means both [2001:db8:85a3::8a2e:370:7334] and
diff --git a/openstack/common/policy.py b/openstack/common/policy.py
index f3e62ba..40e5a6e 100644
--- a/openstack/common/policy.py
+++ b/openstack/common/policy.py
@@ -94,15 +94,11 @@ class PolicyNotAuthorized(Exception):
class Rules(dict):
- """
- A store for rules. Handles the default_rule setting directly.
- """
+ """A store for rules. Handles the default_rule setting directly."""
@classmethod
def load_json(cls, data, default_rule=None):
- """
- Allow loading of JSON rule data.
- """
+ """Allow loading of JSON rule data."""
# Suck in the JSON data and parse the rules
rules = dict((k, parse_rule(v)) for k, v in
@@ -143,8 +139,7 @@ class Rules(dict):
class Enforcer(object):
- """
- Responsible for loading and enforcing rules
+ """Responsible for loading and enforcing rules.
:param policy_file: Custom policy file to use, if none is
specified, `CONF.policy_file` will be
@@ -165,8 +160,7 @@ class Enforcer(object):
self.policy_file = policy_file or CONF.policy_file
def set_rules(self, rules, overwrite=True):
- """
- Create a new Rules object based on the provided dict of rules
+ """Create a new Rules object based on the provided dict of rules.
:param rules: New rules to use. It should be an instance of dict.
:param overwrite: Whether to overwrite current rules or update them
@@ -183,17 +177,14 @@ class Enforcer(object):
self.update(rules)
def clear(self):
- """
- Clears Enforcer rules, policy's cache
- and policy's path.
- """
+ """Clears Enforcer rules, policy's cache and policy's path."""
self.set_rules({})
self.policy_path = None
def load_rules(self, force_reload=False):
- """
- Loads policy_path's rules. Policy file is cached
- and will be reloaded if modified.
+ """Loads policy_path's rules.
+
+ Policy file is cached and will be reloaded if modified.
:param force_reload: Whether to overwrite current rules.
"""
@@ -210,8 +201,7 @@ class Enforcer(object):
LOG.debug(_("Rules successfully reloaded"))
def _get_policy_path(self):
- """
- Locate the policy json data file
+ """Locate the policy json data file.
:param policy_file: Custom policy file to locate.
@@ -229,8 +219,7 @@ class Enforcer(object):
def enforce(self, rule, target, creds, do_raise=False,
exc=None, *args, **kwargs):
- """
- Checks authorization of a rule against the target and credentials.
+ """Checks authorization of a rule against the target and credentials.
:param rule: A string or BaseCheck instance specifying the rule
to evaluate.
@@ -285,25 +274,21 @@ class Enforcer(object):
class BaseCheck(object):
- """
- Abstract base class for Check classes.
- """
+ """Abstract base class for Check classes."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __str__(self):
- """
- Retrieve a string representation of the Check tree rooted at
- this node.
- """
+ """String representation of the Check tree rooted at this node."""
pass
@abc.abstractmethod
def __call__(self, target, cred):
- """
- Perform the check. Returns False to reject the access or a
+ """Triggers if instance of the class is called.
+
+ Performs the check. Returns False to reject the access or a
true value (not necessary True) to accept the access.
"""
@@ -311,9 +296,7 @@ class BaseCheck(object):
class FalseCheck(BaseCheck):
- """
- A policy check that always returns False (disallow).
- """
+ """A policy check that always returns False (disallow)."""
def __str__(self):
"""Return a string representation of this check."""
@@ -327,9 +310,7 @@ class FalseCheck(BaseCheck):
class TrueCheck(BaseCheck):
- """
- A policy check that always returns True (allow).
- """
+ """A policy check that always returns True (allow)."""
def __str__(self):
"""Return a string representation of this check."""
@@ -343,12 +324,11 @@ class TrueCheck(BaseCheck):
class Check(BaseCheck):
- """
- A base class to allow for user-defined policy checks.
- """
+ """A base class to allow for user-defined policy checks."""
def __init__(self, kind, match):
- """
+ """Initiates Check instance.
+
:param kind: The kind of the check, i.e., the field before the
':'.
:param match: The match of the check, i.e., the field after
@@ -365,14 +345,13 @@ class Check(BaseCheck):
class NotCheck(BaseCheck):
- """
+ """Implements the "not" logical operator.
+
A policy check that inverts the result of another policy check.
- Implements the "not" operator.
"""
def __init__(self, rule):
- """
- Initialize the 'not' check.
+ """Initialize the 'not' check.
:param rule: The rule to negate. Must be a Check.
"""
@@ -385,23 +364,22 @@ class NotCheck(BaseCheck):
return "not %s" % self.rule
def __call__(self, target, cred):
- """
- Check the policy. Returns the logical inverse of the wrapped
- check.
+ """Check the policy.
+
+ Returns the logical inverse of the wrapped check.
"""
return not self.rule(target, cred)
class AndCheck(BaseCheck):
- """
- A policy check that requires that a list of other checks all
- return True. Implements the "and" operator.
+ """Implements the "and" logical operator.
+
+ A policy check that requires that a list of other checks all return True.
"""
def __init__(self, rules):
- """
- Initialize the 'and' check.
+ """Initialize the 'and' check.
:param rules: A list of rules that will be tested.
"""
@@ -414,9 +392,9 @@ class AndCheck(BaseCheck):
return "(%s)" % ' and '.join(str(r) for r in self.rules)
def __call__(self, target, cred):
- """
- Check the policy. Requires that all rules accept in order to
- return True.
+ """Check the policy.
+
+ Requires that all rules accept in order to return True.
"""
for rule in self.rules:
@@ -426,7 +404,8 @@ class AndCheck(BaseCheck):
return True
def add_check(self, rule):
- """
+ """Adds rule to be tested.
+
Allows addition of another rule to the list of rules that will
be tested. Returns the AndCheck object for convenience.
"""
@@ -436,14 +415,14 @@ class AndCheck(BaseCheck):
class OrCheck(BaseCheck):
- """
+ """Implements the "or" operator.
+
A policy check that requires that at least one of a list of other
- checks returns True. Implements the "or" operator.
+ checks returns True.
"""
def __init__(self, rules):
- """
- Initialize the 'or' check.
+ """Initialize the 'or' check.
:param rules: A list of rules that will be tested.
"""
@@ -456,9 +435,9 @@ class OrCheck(BaseCheck):
return "(%s)" % ' or '.join(str(r) for r in self.rules)
def __call__(self, target, cred):
- """
- Check the policy. Requires that at least one rule accept in
- order to return True.
+ """Check the policy.
+
+ Requires that at least one rule accept in order to return True.
"""
for rule in self.rules:
@@ -468,7 +447,8 @@ class OrCheck(BaseCheck):
return False
def add_check(self, rule):
- """
+ """Adds rule to be tested.
+
Allows addition of another rule to the list of rules that will
be tested. Returns the OrCheck object for convenience.
"""
@@ -478,9 +458,7 @@ class OrCheck(BaseCheck):
def _parse_check(rule):
- """
- Parse a single base check rule into an appropriate Check object.
- """
+ """Parse a single base check rule into an appropriate Check object."""
# Handle the special checks
if rule == '!':
@@ -506,9 +484,9 @@ def _parse_check(rule):
def _parse_list_rule(rule):
- """
- Provided for backwards compatibility. Translates the old
- list-of-lists syntax into a tree of Check objects.
+ """Translates the old list-of-lists syntax into a tree of Check objects.
+
+ Provided for backwards compatibility.
"""
# Empty rule defaults to True
@@ -549,8 +527,7 @@ _tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
- """
- Tokenizer for the policy language.
+ """Tokenizer for the policy language.
Most of the single-character tokens are specified in the
_tokenize_re; however, parentheses need to be handled specially,
@@ -599,16 +576,16 @@ def _parse_tokenize(rule):
class ParseStateMeta(type):
- """
- Metaclass for the ParseState class. Facilitates identifying
- reduction methods.
+ """Metaclass for the ParseState class.
+
+ Facilitates identifying reduction methods.
"""
def __new__(mcs, name, bases, cls_dict):
- """
- Create the class. Injects the 'reducers' list, a list of
- tuples matching token sequences to the names of the
- corresponding reduction methods.
+ """Create the class.
+
+ Injects the 'reducers' list, a list of tuples matching token sequences
+ to the names of the corresponding reduction methods.
"""
reducers = []
@@ -625,10 +602,10 @@ class ParseStateMeta(type):
def reducer(*tokens):
- """
- Decorator for reduction methods. Arguments are a sequence of
- tokens, in order, which should trigger running this reduction
- method.
+ """Decorator for reduction methods.
+
+ Arguments are a sequence of tokens, in order, which should trigger running
+ this reduction method.
"""
def decorator(func):
@@ -645,10 +622,10 @@ def reducer(*tokens):
class ParseState(object):
- """
- Implement the core of parsing the policy language. Uses a greedy
- reduction algorithm to reduce a sequence of tokens into a single
- terminal, the value of which will be the root of the Check tree.
+ """Implement the core of parsing the policy language.
+
+ Uses a greedy reduction algorithm to reduce a sequence of tokens into
+ a single terminal, the value of which will be the root of the Check tree.
Note: error reporting is rather lacking. The best we can get with
this parser formulation is an overall "parse failed" error.
@@ -665,11 +642,11 @@ class ParseState(object):
self.values = []
def reduce(self):
- """
- Perform a greedy reduction of the token stream. If a reducer
- method matches, it will be executed, then the reduce() method
- will be called recursively to search for any more possible
- reductions.
+ """Perform a greedy reduction of the token stream.
+
+ If a reducer method matches, it will be executed, then the
+ reduce() method will be called recursively to search for any more
+ possible reductions.
"""
for reduction, methname in self.reducers:
@@ -699,9 +676,9 @@ class ParseState(object):
@property
def result(self):
- """
- Obtain the final result of the parse. Raises ValueError if
- the parse failed to reduce to a single result.
+ """Obtain the final result of the parse.
+
+ Raises ValueError if the parse failed to reduce to a single result.
"""
if len(self.values) != 1:
@@ -718,35 +695,31 @@ class ParseState(object):
@reducer('check', 'and', 'check')
def _make_and_expr(self, check1, _and, check2):
- """
- Create an 'and_expr' from two checks joined by the 'and'
- operator.
+ """Create an 'and_expr'.
+
+ Join two checks by the 'and' operator.
"""
return [('and_expr', AndCheck([check1, check2]))]
@reducer('and_expr', 'and', 'check')
def _extend_and_expr(self, and_expr, _and, check):
- """
- Extend an 'and_expr' by adding one more check.
- """
+ """Extend an 'and_expr' by adding one more check."""
return [('and_expr', and_expr.add_check(check))]
@reducer('check', 'or', 'check')
def _make_or_expr(self, check1, _or, check2):
- """
- Create an 'or_expr' from two checks joined by the 'or'
- operator.
+ """Create an 'or_expr'.
+
+ Join two checks by the 'or' operator.
"""
return [('or_expr', OrCheck([check1, check2]))]
@reducer('or_expr', 'or', 'check')
def _extend_or_expr(self, or_expr, _or, check):
- """
- Extend an 'or_expr' by adding one more check.
- """
+ """Extend an 'or_expr' by adding one more check."""
return [('or_expr', or_expr.add_check(check))]
@@ -758,7 +731,8 @@ class ParseState(object):
def _parse_text_rule(rule):
- """
+ """Parses policy to the tree.
+
Translates a policy written in the policy language into a tree of
Check objects.
"""
@@ -783,9 +757,7 @@ def _parse_text_rule(rule):
def parse_rule(rule):
- """
- Parses a policy rule into a tree of Check objects.
- """
+ """Parses a policy rule into a tree of Check objects."""
# If the rule is a string, it's in the policy language
if isinstance(rule, basestring):
@@ -794,8 +766,7 @@ def parse_rule(rule):
def register(name, func=None):
- """
- Register a function or Check class as a policy check.
+ """Register a function or Check class as a policy check.
:param name: Gives the name of the check type, e.g., 'rule',
'role', etc. If name is None, a default check type
@@ -823,9 +794,7 @@ def register(name, func=None):
@register("rule")
class RuleCheck(Check):
def __call__(self, target, creds, enforcer):
- """
- Recursively checks credentials based on the defined rules.
- """
+ """Recursively checks credentials based on the defined rules."""
try:
return enforcer.rules[self.match](target, creds, enforcer)
@@ -845,8 +814,7 @@ class RoleCheck(Check):
@register('http')
class HttpCheck(Check):
def __call__(self, target, creds, enforcer):
- """
- Check http: rules by calling to a remote server.
+ """Check http: rules by calling to a remote server.
This example implementation simply verifies that the response
is exactly 'True'.
@@ -863,8 +831,7 @@ class HttpCheck(Check):
@register(None)
class GenericCheck(Check):
def __call__(self, target, creds, enforcer):
- """
- Check an individual match.
+ """Check an individual match.
Matches look like:
diff --git a/openstack/common/processutils.py b/openstack/common/processutils.py
index 02cfada..5417055 100644
--- a/openstack/common/processutils.py
+++ b/openstack/common/processutils.py
@@ -74,9 +74,9 @@ def _subprocess_setup():
def execute(*cmd, **kwargs):
- """
- Helper method to shell out and execute a command through subprocess with
- optional retry.
+ """Helper method to shell out and execute a command through subprocess.
+
+ Allows optional retry.
:param cmd: Passed to subprocess.Popen.
:type cmd: string
@@ -187,8 +187,7 @@ def execute(*cmd, **kwargs):
def trycmd(*args, **kwargs):
- """
- A wrapper around execute() to more easily handle warnings and errors.
+ """A wrapper around execute() to more easily handle warnings and errors.
Returns an (out, err) tuple of strings containing the output of
the command's stdout and stderr. If 'err' is not empty then the
diff --git a/openstack/common/rootwrap/wrapper.py b/openstack/common/rootwrap/wrapper.py
index d488ddd..5390c1b 100644
--- a/openstack/common/rootwrap/wrapper.py
+++ b/openstack/common/rootwrap/wrapper.py
@@ -31,10 +31,7 @@ class NoFilterMatched(Exception):
class FilterMatchNotExecutable(Exception):
- """
- This exception is raised when a filter matched but no executable was
- found.
- """
+ """Raised when a filter matched but no executable was found."""
def __init__(self, match=None, **kwargs):
self.match = match
@@ -122,9 +119,10 @@ def load_filters(filters_path):
def match_filter(filter_list, userargs, exec_dirs=[]):
- """
- Checks user command and arguments through command filters and
- returns the first matching filter.
+ """Checks user command and arguments through command filters.
+
+ Returns the first matching filter.
+
Raises NoFilterMatched if no filter matched.
Raises FilterMatchNotExecutable if no executable was found for the
best filter match.
diff --git a/openstack/common/rpc/amqp.py b/openstack/common/rpc/amqp.py
index f5b7cab..64ef582 100644
--- a/openstack/common/rpc/amqp.py
+++ b/openstack/common/rpc/amqp.py
@@ -102,15 +102,15 @@ def get_connection_pool(conf, connection_cls):
class ConnectionContext(rpc_common.Connection):
- """The class that is actually returned to the caller of
- create_connection(). This is essentially a wrapper around
- Connection that supports 'with'. It can also return a new
- Connection, or one from a pool. The function will also catch
- when an instance of this class is to be deleted. With that
- we can return Connections to the pool on exceptions and so
- forth without making the caller be responsible for catching
- them. If possible the function makes sure to return a
- connection to the pool.
+ """The class that is actually returned to the create_connection() caller.
+
+ This is essentially a wrapper around Connection that supports 'with'.
+ It can also return a new Connection, or one from a pool.
+
+ The function will also catch when an instance of this class is to be
+ deleted. With that we can return Connections to the pool on exceptions
+ and so forth without making the caller be responsible for catching them.
+ If possible the function makes sure to return a connection to the pool.
"""
def __init__(self, conf, connection_pool, pooled=True, server_params=None):
@@ -339,8 +339,9 @@ def _add_unique_id(msg):
class _ThreadPoolWithWait(object):
- """Base class for a delayed invocation manager used by
- the Connection class to start up green threads
+ """Base class for a delayed invocation manager.
+
+ Used by the Connection class to start up green threads
to handle incoming messages.
"""
@@ -355,12 +356,14 @@ class _ThreadPoolWithWait(object):
class CallbackWrapper(_ThreadPoolWithWait):
- """Wraps a straight callback to allow it to be invoked in a green
- thread.
+ """Wraps a straight callback.
+
+ Allows it to be invoked in a green thread.
"""
def __init__(self, conf, callback, connection_pool):
- """
+        """Initializes a CallbackWrapper object.
+
:param conf: cfg.CONF instance
:param callback: a callable (probably a function)
:param connection_pool: connection pool as returned by
diff --git a/openstack/common/rpc/common.py b/openstack/common/rpc/common.py
index 28dcacd..f880608 100644
--- a/openstack/common/rpc/common.py
+++ b/openstack/common/rpc/common.py
@@ -124,7 +124,8 @@ class Timeout(RPCException):
'info: "%(info)s"')
def __init__(self, info=None, topic=None, method=None):
- """
+        """Initializes a Timeout object.
+
:param info: Extra info to convey to the user
:param topic: The topic that the rpc call was sent to
:param rpc_method_name: The name of the rpc method being
@@ -221,9 +222,9 @@ class Connection(object):
raise NotImplementedError()
def join_consumer_pool(self, callback, pool_name, topic, exchange_name):
- """Register as a member of a group of consumers for a given topic from
- the specified exchange.
+ """Register as a member of a group of consumers.
+ Uses given topic from the specified exchange.
Exactly one member of a given pool will receive each message.
A message will be delivered to multiple pools, if more than
@@ -414,10 +415,10 @@ class CommonRpcContext(object):
class ClientException(Exception):
- """This encapsulates some actual exception that is expected to be
- hit by an RPC proxy object. Merely instantiating it records the
- current exception information, which will be passed back to the
- RPC client without exceptional logging.
+ """Encapsulates actual exception expected to be hit by a RPC proxy object.
+
+ Merely instantiating it records the current exception information, which
+ will be passed back to the RPC client without exceptional logging.
"""
def __init__(self):
self._exc_info = sys.exc_info()
@@ -435,6 +436,7 @@ def catch_client_exception(exceptions, func, *args, **kwargs):
def client_exceptions(*exceptions):
"""Decorator for manager methods that raise expected exceptions.
+
Marking a Manager method with this decorator allows the declaration
of expected exceptions that the RPC layer should not consider fatal,
and not log as if they were generated in a real error scenario. Note
diff --git a/openstack/common/rpc/impl_zmq.py b/openstack/common/rpc/impl_zmq.py
index 07b7b41..0bb5741 100644
--- a/openstack/common/rpc/impl_zmq.py
+++ b/openstack/common/rpc/impl_zmq.py
@@ -84,8 +84,8 @@ matchmaker = None # memoized matchmaker object
def _serialize(data):
- """
- Serialization wrapper
+ """Serialization wrapper.
+
We prefer using JSON, but it cannot encode all types.
Error if a developer passes us bad data.
"""
@@ -97,18 +97,15 @@ def _serialize(data):
def _deserialize(data):
- """
- Deserialization wrapper
- """
+ """Deserialization wrapper."""
LOG.debug(_("Deserializing: %s"), data)
return jsonutils.loads(data)
class ZmqSocket(object):
- """
- A tiny wrapper around ZeroMQ to simplify the send/recv protocol
- and connection management.
+ """A tiny wrapper around ZeroMQ.
+ Simplifies the send/recv protocol and connection management.
Can be used as a Context (supports the 'with' statement).
"""
@@ -355,10 +352,9 @@ class ConsumerBase(object):
class ZmqBaseReactor(ConsumerBase):
- """
- A consumer class implementing a
- centralized casting broker (PULL-PUSH)
- for RoundRobin requests.
+ """A consumer class implementing a centralized casting broker (PULL-PUSH).
+
+ Used for RoundRobin requests.
"""
def __init__(self, conf):
@@ -429,10 +425,9 @@ class ZmqBaseReactor(ConsumerBase):
class ZmqProxy(ZmqBaseReactor):
- """
- A consumer class implementing a
- topic-based proxy, forwarding to
- IPC sockets.
+ """A consumer class implementing a topic-based proxy.
+
+ Forwards to IPC sockets.
"""
def __init__(self, conf):
@@ -539,8 +534,9 @@ class ZmqProxy(ZmqBaseReactor):
def unflatten_envelope(packenv):
"""Unflattens the RPC envelope.
- Takes a list and returns a dictionary.
- i.e. [1,2,3,4] => {1: 2, 3: 4}
+
+ Takes a list and returns a dictionary.
+ i.e. [1,2,3,4] => {1: 2, 3: 4}
"""
i = iter(packenv)
h = {}
@@ -553,10 +549,9 @@ def unflatten_envelope(packenv):
class ZmqReactor(ZmqBaseReactor):
- """
- A consumer class implementing a
- consumer for messages. Can also be
- used as a 1:1 proxy
+ """A consumer class implementing a consumer for messages.
+
+ Can also be used as a 1:1 proxy
"""
def __init__(self, conf):
@@ -743,10 +738,9 @@ def _call(addr, context, topic, msg, timeout=None,
def _multi_send(method, context, topic, msg, timeout=None,
envelope=False, _msg_id=None):
- """
- Wraps the sending of messages,
- dispatches to the matchmaker and sends
- message to all relevant hosts.
+ """Wraps the sending of messages.
+
+ Dispatches to the matchmaker and sends message to all relevant hosts.
"""
conf = CONF
LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))})
@@ -803,8 +797,8 @@ def fanout_cast(conf, context, topic, msg, **kwargs):
def notify(conf, context, topic, msg, envelope):
- """
- Send notification event.
+ """Send notification event.
+
Notifications are sent to topic-priority.
This differs from the AMQP drivers which send to topic.priority.
"""
diff --git a/openstack/common/rpc/matchmaker.py b/openstack/common/rpc/matchmaker.py
index 98a8f43..e51636d 100644
--- a/openstack/common/rpc/matchmaker.py
+++ b/openstack/common/rpc/matchmaker.py
@@ -48,8 +48,8 @@ class MatchMakerException(Exception):
class Exchange(object):
- """
- Implements lookups.
+ """Implements lookups.
+
Subclass this to support hashtables, dns, etc.
"""
def __init__(self):
@@ -60,9 +60,7 @@ class Exchange(object):
class Binding(object):
- """
- A binding on which to perform a lookup.
- """
+ """A binding on which to perform a lookup."""
def __init__(self):
pass
@@ -71,10 +69,10 @@ class Binding(object):
class MatchMakerBase(object):
- """
- Match Maker Base Class.
- Build off HeartbeatMatchMakerBase if building a
- heartbeat-capable MatchMaker.
+ """Match Maker Base Class.
+
+ Build off HeartbeatMatchMakerBase if building a heartbeat-capable
+ MatchMaker.
"""
def __init__(self):
# Array of tuples. Index [2] toggles negation, [3] is last-if-true
@@ -84,58 +82,47 @@ class MatchMakerBase(object):
'registration or heartbeat.')
def register(self, key, host):
- """
- Register a host on a backend.
+ """Register a host on a backend.
+
Heartbeats, if applicable, may keepalive registration.
"""
pass
def ack_alive(self, key, host):
- """
- Acknowledge that a key.host is alive.
- Used internally for updating heartbeats,
- but may also be used publically to acknowledge
- a system is alive (i.e. rpc message successfully
- sent to host)
+ """Acknowledge that a key.host is alive.
+
+ Used internally for updating heartbeats, but may also be used
+        publicly to acknowledge a system is alive (i.e. rpc message
+ successfully sent to host)
"""
pass
def is_alive(self, topic, host):
- """
- Checks if a host is alive.
- """
+ """Checks if a host is alive."""
pass
def expire(self, topic, host):
- """
- Explicitly expire a host's registration.
- """
+ """Explicitly expire a host's registration."""
pass
def send_heartbeats(self):
- """
- Send all heartbeats.
+ """Send all heartbeats.
+
Use start_heartbeat to spawn a heartbeat greenthread,
which loops this method.
"""
pass
def unregister(self, key, host):
- """
- Unregister a topic.
- """
+ """Unregister a topic."""
pass
def start_heartbeat(self):
- """
- Spawn heartbeat greenthread.
- """
+ """Spawn heartbeat greenthread."""
pass
def stop_heartbeat(self):
- """
- Destroys the heartbeat greenthread.
- """
+ """Destroys the heartbeat greenthread."""
pass
def add_binding(self, binding, rule, last=True):
@@ -162,10 +149,10 @@ class MatchMakerBase(object):
class HeartbeatMatchMakerBase(MatchMakerBase):
- """
- Base for a heart-beat capable MatchMaker.
- Provides common methods for registering,
- unregistering, and maintaining heartbeats.
+ """Base for a heart-beat capable MatchMaker.
+
+ Provides common methods for registering, unregistering, and maintaining
+ heartbeats.
"""
def __init__(self):
self.hosts = set()
@@ -175,8 +162,8 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
super(HeartbeatMatchMakerBase, self).__init__()
def send_heartbeats(self):
- """
- Send all heartbeats.
+ """Send all heartbeats.
+
Use start_heartbeat to spawn a heartbeat greenthread,
which loops this method.
"""
@@ -184,32 +171,31 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
self.ack_alive(key, host)
def ack_alive(self, key, host):
- """
- Acknowledge that a host.topic is alive.
- Used internally for updating heartbeats,
- but may also be used publically to acknowledge
- a system is alive (i.e. rpc message successfully
- sent to host)
+ """Acknowledge that a host.topic is alive.
+
+ Used internally for updating heartbeats, but may also be used
+        publicly to acknowledge a system is alive (i.e. rpc message
+ successfully sent to host)
"""
raise NotImplementedError("Must implement ack_alive")
def backend_register(self, key, host):
- """
- Implements registration logic.
+ """Implements registration logic.
+
Called by register(self,key,host)
"""
raise NotImplementedError("Must implement backend_register")
def backend_unregister(self, key, key_host):
- """
- Implements de-registration logic.
+ """Implements de-registration logic.
+
Called by unregister(self,key,host)
"""
raise NotImplementedError("Must implement backend_unregister")
def register(self, key, host):
- """
- Register a host on a backend.
+ """Register a host on a backend.
+
Heartbeats, if applicable, may keepalive registration.
"""
self.hosts.add(host)
@@ -221,9 +207,7 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
self.ack_alive(key, host)
def unregister(self, key, host):
- """
- Unregister a topic.
- """
+ """Unregister a topic."""
if (key, host) in self.host_topic:
del self.host_topic[(key, host)]
@@ -234,8 +218,8 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
{'key': key, 'host': host})
def start_heartbeat(self):
- """
- Implementation of MatchMakerBase.start_heartbeat
+ """Implementation of MatchMakerBase.start_heartbeat.
+
Launches greenthread looping send_heartbeats(),
yielding for CONF.matchmaker_heartbeat_freq seconds
between iterations.
@@ -252,16 +236,14 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
self._heart = eventlet.spawn(do_heartbeat)
def stop_heartbeat(self):
- """
- Destroys the heartbeat greenthread.
- """
+ """Destroys the heartbeat greenthread."""
if self._heart:
self._heart.kill()
class DirectBinding(Binding):
- """
- Specifies a host in the key via a '.' character
+ """Specifies a host in the key via a '.' character.
+
Although dots are used in the key, the behavior here is
that it maps directly to a host, thus direct.
"""
@@ -272,8 +254,8 @@ class DirectBinding(Binding):
class TopicBinding(Binding):
- """
- Where a 'bare' key without dots.
+    """Where a 'bare' key without dots is used.
+
AMQP generally considers topic exchanges to be those *with* dots,
but we deviate here in terminology as the behavior here matches
that of a topic exchange (whereas where there are dots, behavior
@@ -310,8 +292,8 @@ class LocalhostExchange(Exchange):
class DirectExchange(Exchange):
- """
- Exchange where all topic keys are split, sending to second half.
+ """Exchange where all topic keys are split, sending to second half.
+
i.e. "compute.host" sends a message to "compute.host" running on "host"
"""
def __init__(self):
@@ -323,8 +305,8 @@ class DirectExchange(Exchange):
class MatchMakerLocalhost(MatchMakerBase):
- """
- Match Maker where all bare topics resolve to localhost.
+ """Match Maker where all bare topics resolve to localhost.
+
Useful for testing.
"""
def __init__(self, host='localhost'):
@@ -335,8 +317,8 @@ class MatchMakerLocalhost(MatchMakerBase):
class MatchMakerStub(MatchMakerBase):
- """
- Match Maker where topics are untouched.
+ """Match Maker where topics are untouched.
+
Useful for testing, or for AMQP/brokered queues.
Will not work where knowledge of hosts is known (i.e. zeromq)
"""
diff --git a/openstack/common/rpc/matchmaker_redis.py b/openstack/common/rpc/matchmaker_redis.py
index 367c2c9..8bab9f9 100644
--- a/openstack/common/rpc/matchmaker_redis.py
+++ b/openstack/common/rpc/matchmaker_redis.py
@@ -55,8 +55,8 @@ class RedisExchange(mm_common.Exchange):
class RedisTopicExchange(RedisExchange):
- """
- Exchange where all topic keys are split, sending to second half.
+ """Exchange where all topic keys are split, sending to second half.
+
i.e. "compute.host" sends a message to "compute" running on "host"
"""
def run(self, topic):
@@ -77,9 +77,7 @@ class RedisTopicExchange(RedisExchange):
class RedisFanoutExchange(RedisExchange):
- """
- Return a list of all hosts.
- """
+ """Return a list of all hosts."""
def run(self, topic):
topic = topic.split('~', 1)[1]
hosts = self.redis.smembers(topic)
@@ -90,9 +88,7 @@ class RedisFanoutExchange(RedisExchange):
class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase):
- """
- MatchMaker registering and looking-up hosts with a Redis server.
- """
+ """MatchMaker registering and looking-up hosts with a Redis server."""
def __init__(self):
super(MatchMakerRedis, self).__init__()
diff --git a/openstack/common/rpc/matchmaker_ring.py b/openstack/common/rpc/matchmaker_ring.py
index dbfb36f..2f1f79b 100644
--- a/openstack/common/rpc/matchmaker_ring.py
+++ b/openstack/common/rpc/matchmaker_ring.py
@@ -43,9 +43,7 @@ LOG = logging.getLogger(__name__)
class RingExchange(mm.Exchange):
- """
- Match Maker where hosts are loaded from a static file containing
- a hashmap (JSON formatted).
+ """Match Maker where hosts are loaded from a static JSON formatted file.
__init__ takes optional ring dictionary argument, otherwise
    loads the ringfile from CONF.matchmaker_ringfile.
@@ -104,9 +102,7 @@ class FanoutRingExchange(RingExchange):
class MatchMakerRing(mm.MatchMakerBase):
- """
- Match Maker where hosts are loaded from a static hashmap.
- """
+ """Match Maker where hosts are loaded from a static hashmap."""
def __init__(self, ring=None):
super(MatchMakerRing, self).__init__()
self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring))
diff --git a/openstack/common/strutils.py b/openstack/common/strutils.py
index 8a5367b..bbe2c92 100644
--- a/openstack/common/strutils.py
+++ b/openstack/common/strutils.py
@@ -40,8 +40,7 @@ FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
def int_from_bool_as_string(subject):
- """
- Interpret a string as a boolean and return either 1 or 0.
+ """Interpret a string as a boolean and return either 1 or 0.
Any string value in:
@@ -55,8 +54,7 @@ def int_from_bool_as_string(subject):
def bool_from_string(subject, strict=False):
- """
- Interpret a string as a boolean.
+ """Interpret a string as a boolean.
A case-insensitive match is performed such that strings matching 't',
'true', 'on', 'y', 'yes', or '1' are considered True and, when
@@ -89,9 +87,7 @@ def bool_from_string(subject, strict=False):
def safe_decode(text, incoming=None, errors='strict'):
- """
- Decodes incoming str using `incoming` if they're
- not already unicode.
+ """Decodes incoming str using `incoming` if they're not already unicode.
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
@@ -130,11 +126,10 @@ def safe_decode(text, incoming=None, errors='strict'):
def safe_encode(text, incoming=None,
encoding='utf-8', errors='strict'):
- """
- Encodes incoming str/unicode using `encoding`. If
- incoming is not specified, text is expected to
- be encoded with current python's default encoding.
- (`sys.getdefaultencoding`)
+ """Encodes incoming str/unicode using `encoding`.
+
+ If incoming is not specified, text is expected to be encoded with
+ current python's default encoding. (`sys.getdefaultencoding`)
:param incoming: Text's current encoding
:param encoding: Expected encoding for text (Default UTF-8)
diff --git a/openstack/common/timeutils.py b/openstack/common/timeutils.py
index 008e9c8..ac2441b 100644
--- a/openstack/common/timeutils.py
+++ b/openstack/common/timeutils.py
@@ -111,9 +111,9 @@ utcnow.override_time = None
def set_time_override(override_time=datetime.datetime.utcnow()):
- """
- Override utils.utcnow to return a constant time or a list thereof,
- one at a time.
+ """Overrides utils.utcnow.
+
+ Make it return a constant time or a list thereof, one at a time.
"""
utcnow.override_time = override_time
@@ -162,7 +162,8 @@ def unmarshall_time(tyme):
def delta_seconds(before, after):
- """
+ """Return the difference between two timing objects.
+
Compute the difference in seconds between two date, time, or
datetime objects (as a float, to microsecond resolution).
"""
@@ -175,8 +176,7 @@ def delta_seconds(before, after):
def is_soon(dt, window):
- """
- Determines if time is going to happen in the next window seconds.
+ """Determines if time is going to happen in the next window seconds.
:params dt: the time
:params window: minimum seconds to remain to consider the time not soon
diff --git a/tests/unit/rpc/matchmaker_common.py b/tests/unit/rpc/matchmaker_common.py
index 154162d..490c329 100644
--- a/tests/unit/rpc/matchmaker_common.py
+++ b/tests/unit/rpc/matchmaker_common.py
@@ -50,20 +50,17 @@ class _MatchMakerTestCase(_MatchMakerDirectedTopicTestCase):
class _MatchMakerDynRegTestCase(object):
def test_registers_host(self):
- """
- Registers a host, ensures it is registered.
- """
+ """Registers a host, ensures it is registered."""
self.driver.register(self.topic, self.hosts[0])
match = self.driver.queues(self.topic)
self.assertEqual(match[0][1], self.hosts[0])
def test_unregister(self):
- """
- Tests that hosts unregister cleanly.
- Registers a host, ensures it is registered,
- then unregisters and ensures is no
- longer registered.
+ """Tests that hosts unregister cleanly.
+
+ Registers a host, ensures it is registered, then unregisters and
+ ensures is no longer registered.
"""
# Can only unregister if registrations work.
self.test_registers_host()
diff --git a/tests/unit/rpc/test_common.py b/tests/unit/rpc/test_common.py
index 73ca63b..471048c 100644
--- a/tests/unit/rpc/test_common.py
+++ b/tests/unit/rpc/test_common.py
@@ -144,7 +144,8 @@ class RpcCommonTestCase(test_utils.BaseTestCase):
six.text_type(after_exc))
def test_deserialize_remote_exception_args_and_kwargs(self):
- """
+ """Test user exception deserialization.
+
Ensure a user defined exception will be supplied the correct args and
kwargs while being deserialized.
"""
diff --git a/tests/unit/rpc/test_matchmaker_redis.py b/tests/unit/rpc/test_matchmaker_redis.py
index a1eb0a0..446908a 100644
--- a/tests/unit/rpc/test_matchmaker_redis.py
+++ b/tests/unit/rpc/test_matchmaker_redis.py
@@ -95,20 +95,17 @@ class MatchMakerRedisHeartbeatTestCase(utils.BaseTestCase,
self.driver.unregister(self.topic, host)
def test_expires_set(self):
- """
- Test that expirations are set.
- """
+ """Test that expirations are set."""
self.driver.register(self.topic, self.hosts[0])
ttl = self.driver.redis.ttl('.'.join((self.topic, self.hosts[0])))
self.assertTrue(ttl > -1)
def test_expires_hosts(self):
- """
- Tests that hosts expire.
- Registers a host, ensures it is registered,
- then waits for it to expire. Ensures is no
- longer registered.
+ """Tests that hosts expire.
+
+ Registers a host, ensures it is registered, then waits for it to
+ expire. Ensures is no longer registered.
"""
self.driver.register(self.topic, self.hosts[0])
@@ -122,9 +119,7 @@ class MatchMakerRedisHeartbeatTestCase(utils.BaseTestCase,
self.assertEqual(ttl2, -1)
def test_expired_hosts_removed(self):
- """
- Test that expired hosts are removed from results.
- """
+ """Test that expired hosts are removed from results."""
self.test_expires_hosts()
self.assertEqual(self.driver.queues(self.topic), [])
@@ -132,9 +127,7 @@ class MatchMakerRedisHeartbeatTestCase(utils.BaseTestCase,
class MatchMakerRedisTestCase(utils.BaseTestCase):
"""Generic tests that do not require a Redis server."""
def test_redis_import_exception(self):
- """
- Try initializing an object without redis.
- """
+ """Try initializing an object without redis."""
matchmaker.redis = None
self.assertRaises(ImportError, matchmaker.MatchMakerRedis)
reload(matchmaker)
diff --git a/tests/unit/rpc/test_qpid.py b/tests/unit/rpc/test_qpid.py
index 02e8e20..42a6e6b 100644
--- a/tests/unit/rpc/test_qpid.py
+++ b/tests/unit/rpc/test_qpid.py
@@ -45,8 +45,7 @@ FLAGS = cfg.CONF
class RpcQpidTestCase(utils.BaseTestCase):
- """
- Exercise the public API of impl_qpid utilizing mox.
+ """Exercise the public API of impl_qpid utilizing mox.
This set of tests utilizes mox to replace the Qpid objects and ensures
that the right operations happen on them when the various public rpc API
@@ -479,10 +478,11 @@ class RpcQpidTestCase(utils.BaseTestCase):
impl_qpid.Connection.pool.get()
def test_call_with_timeout(self):
- """A little more indepth for a timeout test. Specifically we are
- looking to simulate the event sent to qpid dying on the vine due
- to a TTL. A string test that actually involved qpid would be
- excellent, but this at least verifies that the exceptions flow
+        """A little more in-depth for a timeout test.
+
+ Specifically we are looking to simulate the event sent to qpid dying
+ on the vine due to a TTL. A string test that actually involved qpid
+ would be excellent, but this at least verifies that the exceptions flow
like they should. TODO(beagles): is this really necessary or is
        the case that for qpid at least the basic timeout test is
sufficient.
diff --git a/tests/unit/rpc/test_zmq.py b/tests/unit/rpc/test_zmq.py
index 564f15e..b0f0262 100644
--- a/tests/unit/rpc/test_zmq.py
+++ b/tests/unit/rpc/test_zmq.py
@@ -42,9 +42,7 @@ FLAGS = cfg.CONF
def get_unused_port():
- """
- Returns an unused port on localhost.
- """
+ """Returns an unused port on localhost."""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('localhost', 0))
addr, port = s.getsockname()
@@ -117,7 +115,8 @@ class _RpcZmqBaseTestCase(common.BaseRpcTestCase):
class RpcZmqBaseTopicTestCase(_RpcZmqBaseTestCase):
- """
+ """Base topic RPC ZMQ test case.
+
This tests with topics such as 'test' and 'nested',
without any .host appended. Stresses the matchmaker.
"""
@@ -125,10 +124,7 @@ class RpcZmqBaseTopicTestCase(_RpcZmqBaseTestCase):
class RpcZmqDirectTopicTestCase(_RpcZmqBaseTestCase):
- """
- Test communication directly to a host,
- tests use 'localhost'.
- """
+ """Test communication directly to a host, tests use 'localhost'."""
def setUp(self):
super(RpcZmqDirectTopicTestCase, self).setUp(
topic='test.127.0.0.1',
diff --git a/tests/unit/scheduler/fake_hosts.py b/tests/unit/scheduler/fake_hosts.py
index b02aca4..baaa148 100644
--- a/tests/unit/scheduler/fake_hosts.py
+++ b/tests/unit/scheduler/fake_hosts.py
@@ -18,7 +18,8 @@ Fakes For filters tests.
class FakeHostManager(object):
- """
+ """Defines fake hosts.
+
host1: free_ram_mb=1024-512-512=0, free_disk_gb=1024-512-512=0
host2: free_ram_mb=2048-512=1536 free_disk_gb=2048-512=1536
host3: free_ram_mb=4096-1024=3072 free_disk_gb=4096-1024=3072
diff --git a/tests/unit/scheduler/test_base_filter.py b/tests/unit/scheduler/test_base_filter.py
index d66d84c..3d7e345 100644
--- a/tests/unit/scheduler/test_base_filter.py
+++ b/tests/unit/scheduler/test_base_filter.py
@@ -51,49 +51,42 @@ class BaseFakeFilter(base_filter.BaseFilter):
class FakeFilter1(BaseFakeFilter):
- """
- * Should be included in the output of all_classes
- * It derives from BaseFakeFilter
- * AND
- * It has a fake entry point defined (is returned by fake ExtensionManager)
+ """Derives from BaseFakeFilter and has a fake entry point defined.
+
+ Entry point is returned by fake ExtensionManager.
+ Should be included in the output of all_classes.
"""
pass
class FakeFilter2(BaseFakeFilter):
- """
- * Should be NOT included in all_classes
- * Derives from BaseFakeFilter
- * BUT
- * It has no entry point
+ """Derives from BaseFakeFilter but has no entry point.
+
+ Should be not included in all_classes.
"""
pass
class FakeFilter3(base_filter.BaseFilter):
- """
- * Should NOT be included
- * Does NOT derive from BaseFakeFilter
+ """Does not derive from BaseFakeFilter.
+
+ Should not be included.
"""
pass
class FakeFilter4(BaseFakeFilter):
- """
- Should be included
- * Derives from BaseFakeFilter
- * AND
- * It has an entrypoint
+ """Derives from BaseFakeFilter and has an entry point.
+
+ Should be included.
"""
pass
class FakeFilter5(BaseFakeFilter):
- """
- Should NOT be included
- * Derives from BaseFakeFilter
- * BUT
- * It has NO entrypoint
+ """Derives from BaseFakeFilter but has no entry point.
+
+ Should not be included.
"""
pass
diff --git a/tests/unit/test_service.py b/tests/unit/test_service.py
index 4a2827e..7e07f28 100644
--- a/tests/unit/test_service.py
+++ b/tests/unit/test_service.py
@@ -62,9 +62,7 @@ class ServiceWithTimer(service.Service):
class ServiceLauncherTest(utils.BaseTestCase):
- """
- Originally from nova/tests/integrated/test_multiprocess_api.py
- """
+ """Originally from nova/tests/integrated/test_multiprocess_api.py."""
def _spawn(self):
self.workers = 2
diff --git a/tests/utils.py b/tests/utils.py
index 7d0cc85..794a3d2 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -66,8 +66,7 @@ class BaseTestCase(testtools.TestCase):
return tempfiles
def config(self, **kw):
- """
- Override some configuration values.
+ """Override some configuration values.
The keyword arguments are the names of configuration options to
override and their values.
diff --git a/tox.ini b/tox.ini
index 1570db5..88ee6d1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,7 +17,7 @@ commands =
[flake8]
show-source = True
-ignore = H202,H302,H304,H404
+ignore = H202,H302,H304
exclude = .venv,.tox,dist,doc,*.egg,.update-venv
[testenv:pep8]