author     Sergey Lukjanov <slukjanov@mirantis.com>   2013-06-02 20:41:20 +0400
committer  Sergey Lukjanov <slukjanov@mirantis.com>   2013-06-03 07:53:21 +0400
commit     e3545f828dabe165dc08b2f1670e5f1f19919d0d (patch)
tree       ead970c643632ed624c6bda25e902f47cf13b49e
parent     15d8d698b7c67c43dc7a2b0c2c6952734bd2ba66 (diff)
Enable hacking H402 test
H402: one line docstring needs punctuation

Change-Id: Ie848453cace318d8310cdf0234c512f4c1121119
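For context, a minimal sketch of the style this check enforces (illustrative only, not part of the commit; the function names are hypothetical):

    def get_answer():
        """Return the answer"""   # flagged by H402: one-line docstring has no ending punctuation
        return 42


    def get_answer_ok():
        """Return the answer."""  # passes H402: one-line docstring ends with a period
        return 42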
-rw-r--r--  openstack/common/importutils.py                      2
-rw-r--r--  openstack/common/notifier/no_op_notifier.py          2
-rw-r--r--  openstack/common/notifier/rpc_notifier.py            2
-rw-r--r--  openstack/common/notifier/rpc_notifier2.py           2
-rw-r--r--  openstack/common/rpc/amqp.py                         12
-rw-r--r--  openstack/common/rpc/impl_fake.py                    4
-rw-r--r--  openstack/common/rpc/impl_kombu.py                   56
-rw-r--r--  openstack/common/rpc/impl_qpid.py                    56
-rw-r--r--  openstack/common/rpc/impl_zmq.py                     2
-rw-r--r--  openstack/common/rpc/serializer.py                   4
-rw-r--r--  openstack/common/scheduler/filters/json_filter.py    2
-rw-r--r--  openstack/common/service.py                          2
-rw-r--r--  openstack/common/timeutils.py                        8
-rw-r--r--  openstack/common/wsgi.py                             16
-rw-r--r--  tests/unit/plugin/test_callback_plugin.py            2
-rw-r--r--  tests/unit/rpc/amqp.py                               2
-rw-r--r--  tests/unit/rpc/test_kombu.py                         16
-rw-r--r--  tests/unit/rpc/test_service.py                       4
-rw-r--r--  tests/unit/scheduler/test_host_filters.py            2
-rw-r--r--  tests/unit/test_lockutils.py                         6
-rw-r--r--  tests/unit/test_log.py                               2
-rw-r--r--  tests/unit/test_loopingcall.py                       2
-rw-r--r--  tests/unit/test_notifier.py                          4
-rw-r--r--  tests/unit/test_periodic.py                          2
-rw-r--r--  tests/unit/test_plugin.py                            4
-rw-r--r--  tests/unit/test_rootwrap.py                          6
-rw-r--r--  tests/unit/test_service.py                           2
-rw-r--r--  tests/unit/test_threadgroup.py                       2
-rw-r--r--  tox.ini                                              2
29 files changed, 114 insertions(+), 114 deletions(-)
diff --git a/openstack/common/importutils.py b/openstack/common/importutils.py
index 3bd277f..dbee325 100644
--- a/openstack/common/importutils.py
+++ b/openstack/common/importutils.py
@@ -24,7 +24,7 @@ import traceback
def import_class(import_str):
- """Returns a class from a string including module and class"""
+ """Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
try:
__import__(mod_str)
diff --git a/openstack/common/notifier/no_op_notifier.py b/openstack/common/notifier/no_op_notifier.py
index bc7a56c..13d946e 100644
--- a/openstack/common/notifier/no_op_notifier.py
+++ b/openstack/common/notifier/no_op_notifier.py
@@ -15,5 +15,5 @@
def notify(_context, message):
- """Notifies the recipient of the desired event given the model"""
+ """Notifies the recipient of the desired event given the model."""
pass
diff --git a/openstack/common/notifier/rpc_notifier.py b/openstack/common/notifier/rpc_notifier.py
index 52677fe..17bbc9a 100644
--- a/openstack/common/notifier/rpc_notifier.py
+++ b/openstack/common/notifier/rpc_notifier.py
@@ -31,7 +31,7 @@ CONF.register_opt(notification_topic_opt)
def notify(context, message):
- """Sends a notification via RPC"""
+ """Sends a notification via RPC."""
if not context:
context = req_context.get_admin_context()
priority = message.get('priority',
diff --git a/openstack/common/notifier/rpc_notifier2.py b/openstack/common/notifier/rpc_notifier2.py
index 6ccc9c5..38fe33b 100644
--- a/openstack/common/notifier/rpc_notifier2.py
+++ b/openstack/common/notifier/rpc_notifier2.py
@@ -37,7 +37,7 @@ CONF.register_opt(notification_topic_opt, opt_group)
def notify(context, message):
- """Sends a notification via RPC"""
+ """Sends a notification via RPC."""
if not context:
context = req_context.get_admin_context()
priority = message.get('priority',
diff --git a/openstack/common/rpc/amqp.py b/openstack/common/rpc/amqp.py
index 6ac254e..f5b7cab 100644
--- a/openstack/common/rpc/amqp.py
+++ b/openstack/common/rpc/amqp.py
@@ -114,7 +114,7 @@ class ConnectionContext(rpc_common.Connection):
"""
def __init__(self, conf, connection_pool, pooled=True, server_params=None):
- """Create a new connection, or get one from the pool"""
+ """Create a new connection, or get one from the pool."""
self.connection = None
self.conf = conf
self.connection_pool = connection_pool
@@ -127,7 +127,7 @@ class ConnectionContext(rpc_common.Connection):
self.pooled = pooled
def __enter__(self):
- """When with ConnectionContext() is used, return self"""
+ """When with ConnectionContext() is used, return self."""
return self
def _done(self):
@@ -175,7 +175,7 @@ class ConnectionContext(rpc_common.Connection):
self.connection.consume_in_thread()
def __getattr__(self, key):
- """Proxy all other calls to the Connection instance"""
+ """Proxy all other calls to the Connection instance."""
if self.connection:
return getattr(self.connection, key)
else:
@@ -252,7 +252,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
class RpcContext(rpc_common.CommonRpcContext):
- """Context that supports replying to a rpc.call"""
+ """Context that supports replying to a rpc.call."""
def __init__(self, **kwargs):
self.msg_id = kwargs.pop('msg_id', None)
self.reply_q = kwargs.pop('reply_q', None)
@@ -491,7 +491,7 @@ class MulticallProxyWaiter(object):
return result
def __iter__(self):
- """Return a result until we get a reply with an 'ending" flag"""
+ """Return a result until we get a reply with an 'ending' flag."""
if self._done:
raise StopIteration
while True:
@@ -567,7 +567,7 @@ class MulticallWaiter(object):
def create_connection(conf, new, connection_pool):
- """Create a connection"""
+ """Create a connection."""
return ConnectionContext(conf, connection_pool, pooled=not new)
diff --git a/openstack/common/rpc/impl_fake.py b/openstack/common/rpc/impl_fake.py
index 815570d..7719697 100644
--- a/openstack/common/rpc/impl_fake.py
+++ b/openstack/common/rpc/impl_fake.py
@@ -122,7 +122,7 @@ class Connection(object):
def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
return Connection()
@@ -179,7 +179,7 @@ def cleanup():
def fanout_cast(conf, context, topic, msg):
- """Cast to all consumers of a topic"""
+ """Cast to all consumers of a topic."""
check_serialize(msg)
method = msg.get('method')
if not method:
diff --git a/openstack/common/rpc/impl_kombu.py b/openstack/common/rpc/impl_kombu.py
index 0960b9a..c062d9a 100644
--- a/openstack/common/rpc/impl_kombu.py
+++ b/openstack/common/rpc/impl_kombu.py
@@ -132,7 +132,7 @@ class ConsumerBase(object):
self.reconnect(channel)
def reconnect(self, channel):
- """Re-declare the queue after a rabbit reconnect"""
+ """Re-declare the queue after a rabbit reconnect."""
self.channel = channel
self.kwargs['channel'] = channel
self.queue = kombu.entity.Queue(**self.kwargs)
@@ -173,7 +173,7 @@ class ConsumerBase(object):
self.queue.consume(*args, callback=_callback, **options)
def cancel(self):
- """Cancel the consuming from the queue, if it has started"""
+ """Cancel the consuming from the queue, if it has started."""
try:
self.queue.cancel(self.tag)
except KeyError as e:
@@ -184,7 +184,7 @@ class ConsumerBase(object):
class DirectConsumer(ConsumerBase):
- """Queue/consumer class for 'direct'"""
+ """Queue/consumer class for 'direct'."""
def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
"""Init a 'direct' queue.
@@ -216,7 +216,7 @@ class DirectConsumer(ConsumerBase):
class TopicConsumer(ConsumerBase):
- """Consumer class for 'topic'"""
+ """Consumer class for 'topic'."""
def __init__(self, conf, channel, topic, callback, tag, name=None,
exchange_name=None, **kwargs):
@@ -253,7 +253,7 @@ class TopicConsumer(ConsumerBase):
class FanoutConsumer(ConsumerBase):
- """Consumer class for 'fanout'"""
+ """Consumer class for 'fanout'."""
def __init__(self, conf, channel, topic, callback, tag, **kwargs):
"""Init a 'fanout' queue.
@@ -286,7 +286,7 @@ class FanoutConsumer(ConsumerBase):
class Publisher(object):
- """Base Publisher class"""
+ """Base Publisher class."""
def __init__(self, channel, exchange_name, routing_key, **kwargs):
"""Init the Publisher class with the exchange_name, routing_key,
@@ -298,7 +298,7 @@ class Publisher(object):
self.reconnect(channel)
def reconnect(self, channel):
- """Re-establish the Producer after a rabbit reconnection"""
+ """Re-establish the Producer after a rabbit reconnection."""
self.exchange = kombu.entity.Exchange(name=self.exchange_name,
**self.kwargs)
self.producer = kombu.messaging.Producer(exchange=self.exchange,
@@ -306,7 +306,7 @@ class Publisher(object):
routing_key=self.routing_key)
def send(self, msg, timeout=None):
- """Send a message"""
+ """Send a message."""
if timeout:
#
# AMQP TTL is in milliseconds when set in the header.
@@ -317,7 +317,7 @@ class Publisher(object):
class DirectPublisher(Publisher):
- """Publisher class for 'direct'"""
+ """Publisher class for 'direct'."""
def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher.
@@ -333,7 +333,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher):
- """Publisher class for 'topic'"""
+ """Publisher class for 'topic'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher.
@@ -352,7 +352,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher):
- """Publisher class for 'fanout'"""
+ """Publisher class for 'fanout'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher.
@@ -367,7 +367,7 @@ class FanoutPublisher(Publisher):
class NotifyPublisher(TopicPublisher):
- """Publisher class for 'notify'"""
+ """Publisher class for 'notify'."""
def __init__(self, conf, channel, topic, **kwargs):
self.durable = kwargs.pop('durable', conf.rabbit_durable_queues)
@@ -579,18 +579,18 @@ class Connection(object):
self.reconnect()
def get_channel(self):
- """Convenience call for bin/clear_rabbit_queues"""
+ """Convenience call for bin/clear_rabbit_queues."""
return self.channel
def close(self):
- """Close/release this connection"""
+ """Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.connection.release()
self.connection = None
def reset(self):
- """Reset a connection so it can be used again"""
+ """Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.channel.close()
@@ -619,7 +619,7 @@ class Connection(object):
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
- """Return an iterator that will consume from all queues/consumers"""
+ """Return an iterator that will consume from all queues/consumers."""
info = {'do_consume': True}
@@ -649,7 +649,7 @@ class Connection(object):
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
- """Cancel a consumer thread"""
+ """Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
@@ -664,7 +664,7 @@ class Connection(object):
proxy_cb.wait()
def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
- """Send to a publisher based on the publisher class"""
+ """Send to a publisher based on the publisher class."""
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
@@ -694,27 +694,27 @@ class Connection(object):
topic, callback)
def declare_fanout_consumer(self, topic, callback):
- """Create a 'fanout' consumer"""
+ """Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
- """Send a 'direct' message"""
+ """Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
- """Send a 'topic' message"""
+ """Send a 'topic' message."""
self.publisher_send(TopicPublisher, topic, msg, timeout)
def fanout_send(self, topic, msg):
- """Send a 'fanout' message"""
+ """Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
- """Send a notify message on a topic"""
+ """Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs)
def consume(self, limit=None):
- """Consume from all queues/consumers"""
+ """Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
@@ -723,7 +723,7 @@ class Connection(object):
return
def consume_in_thread(self):
- """Consumer from all queues/consumers in a greenthread"""
+ """Consumer from all queues/consumers in a greenthread."""
def _consumer_thread():
try:
self.consume()
@@ -734,7 +734,7 @@ class Connection(object):
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
- """Create a consumer that calls a method in a proxy object"""
+ """Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -746,7 +746,7 @@ class Connection(object):
self.declare_topic_consumer(topic, proxy_cb)
def create_worker(self, topic, proxy, pool_name):
- """Create a worker that calls a method in a proxy object"""
+ """Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -779,7 +779,7 @@ class Connection(object):
def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
diff --git a/openstack/common/rpc/impl_qpid.py b/openstack/common/rpc/impl_qpid.py
index e7ac016..32680e1 100644
--- a/openstack/common/rpc/impl_qpid.py
+++ b/openstack/common/rpc/impl_qpid.py
@@ -118,13 +118,13 @@ class ConsumerBase(object):
self.reconnect(session)
def reconnect(self, session):
- """Re-declare the receiver after a qpid reconnect"""
+ """Re-declare the receiver after a qpid reconnect."""
self.session = session
self.receiver = session.receiver(self.address)
self.receiver.capacity = 1
def consume(self):
- """Fetch the message and pass it to the callback object"""
+ """Fetch the message and pass it to the callback object."""
message = self.receiver.fetch()
try:
msg = rpc_common.deserialize_msg(message.content)
@@ -139,7 +139,7 @@ class ConsumerBase(object):
class DirectConsumer(ConsumerBase):
- """Queue/consumer class for 'direct'"""
+ """Queue/consumer class for 'direct'."""
def __init__(self, conf, session, msg_id, callback):
"""Init a 'direct' queue.
@@ -157,7 +157,7 @@ class DirectConsumer(ConsumerBase):
class TopicConsumer(ConsumerBase):
- """Consumer class for 'topic'"""
+ """Consumer class for 'topic'."""
def __init__(self, conf, session, topic, callback, name=None,
exchange_name=None):
@@ -177,7 +177,7 @@ class TopicConsumer(ConsumerBase):
class FanoutConsumer(ConsumerBase):
- """Consumer class for 'fanout'"""
+ """Consumer class for 'fanout'."""
def __init__(self, conf, session, topic, callback):
"""Init a 'fanout' queue.
@@ -196,7 +196,7 @@ class FanoutConsumer(ConsumerBase):
class Publisher(object):
- """Base Publisher class"""
+ """Base Publisher class."""
def __init__(self, session, node_name, node_opts=None):
"""Init the Publisher class with the exchange_name, routing_key,
@@ -225,16 +225,16 @@ class Publisher(object):
self.reconnect(session)
def reconnect(self, session):
- """Re-establish the Sender after a reconnection"""
+ """Re-establish the Sender after a reconnection."""
self.sender = session.sender(self.address)
def send(self, msg):
- """Send a message"""
+ """Send a message."""
self.sender.send(msg)
class DirectPublisher(Publisher):
- """Publisher class for 'direct'"""
+ """Publisher class for 'direct'."""
def __init__(self, conf, session, msg_id):
"""Init a 'direct' publisher."""
super(DirectPublisher, self).__init__(session, msg_id,
@@ -242,7 +242,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher):
- """Publisher class for 'topic'"""
+ """Publisher class for 'topic'."""
def __init__(self, conf, session, topic):
"""init a 'topic' publisher.
"""
@@ -252,7 +252,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher):
- """Publisher class for 'fanout'"""
+ """Publisher class for 'fanout'."""
def __init__(self, conf, session, topic):
"""init a 'fanout' publisher.
"""
@@ -262,7 +262,7 @@ class FanoutPublisher(Publisher):
class NotifyPublisher(Publisher):
- """Publisher class for notifications"""
+ """Publisher class for notifications."""
def __init__(self, conf, session, topic):
"""init a 'topic' publisher.
"""
@@ -330,7 +330,7 @@ class Connection(object):
return self.consumers[str(receiver)]
def reconnect(self):
- """Handles reconnecting and re-establishing sessions and queues"""
+ """Handles reconnecting and re-establishing sessions and queues."""
attempt = 0
delay = 1
while True:
@@ -381,7 +381,7 @@ class Connection(object):
self.reconnect()
def close(self):
- """Close/release this connection"""
+ """Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
try:
@@ -394,7 +394,7 @@ class Connection(object):
self.connection = None
def reset(self):
- """Reset a connection so it can be used again"""
+ """Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.session.close()
@@ -418,7 +418,7 @@ class Connection(object):
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
- """Return an iterator that will consume from all queues/consumers"""
+ """Return an iterator that will consume from all queues/consumers."""
def _error_callback(exc):
if isinstance(exc, qpid_exceptions.Empty):
@@ -442,7 +442,7 @@ class Connection(object):
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
- """Cancel a consumer thread"""
+ """Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
@@ -457,7 +457,7 @@ class Connection(object):
proxy_cb.wait()
def publisher_send(self, cls, topic, msg):
- """Send to a publisher based on the publisher class"""
+ """Send to a publisher based on the publisher class."""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
@@ -487,15 +487,15 @@ class Connection(object):
topic, callback)
def declare_fanout_consumer(self, topic, callback):
- """Create a 'fanout' consumer"""
+ """Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
- """Send a 'direct' message"""
+ """Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
- """Send a 'topic' message"""
+ """Send a 'topic' message."""
#
# We want to create a message with attributes, e.g. a TTL. We
# don't really need to keep 'msg' in its JSON format any longer
@@ -510,15 +510,15 @@ class Connection(object):
self.publisher_send(TopicPublisher, topic, qpid_message)
def fanout_send(self, topic, msg):
- """Send a 'fanout' message"""
+ """Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
- """Send a notify message on a topic"""
+ """Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg)
def consume(self, limit=None):
- """Consume from all queues/consumers"""
+ """Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
@@ -527,7 +527,7 @@ class Connection(object):
return
def consume_in_thread(self):
- """Consumer from all queues/consumers in a greenthread"""
+ """Consumer from all queues/consumers in a greenthread."""
def _consumer_thread():
try:
self.consume()
@@ -538,7 +538,7 @@ class Connection(object):
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
- """Create a consumer that calls a method in a proxy object"""
+ """Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -554,7 +554,7 @@ class Connection(object):
return consumer
def create_worker(self, topic, proxy, pool_name):
- """Create a worker that calls a method in a proxy object"""
+ """Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -597,7 +597,7 @@ class Connection(object):
def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
diff --git a/openstack/common/rpc/impl_zmq.py b/openstack/common/rpc/impl_zmq.py
index 726bb52..07b7b41 100644
--- a/openstack/common/rpc/impl_zmq.py
+++ b/openstack/common/rpc/impl_zmq.py
@@ -506,7 +506,7 @@ class ZmqProxy(ZmqBaseReactor):
"%(topic)s. Dropping message.") % {'topic': topic})
def consume_in_thread(self):
- """Runs the ZmqProxy service"""
+ """Runs the ZmqProxy service."""
ipc_dir = CONF.rpc_zmq_ipc_dir
consume_in = "tcp://%s:%s" % \
(CONF.rpc_zmq_bind_address,
diff --git a/openstack/common/rpc/serializer.py b/openstack/common/rpc/serializer.py
index 0a2c9c4..76c6831 100644
--- a/openstack/common/rpc/serializer.py
+++ b/openstack/common/rpc/serializer.py
@@ -18,7 +18,7 @@ import abc
class Serializer(object):
- """Generic (de-)serialization definition base class"""
+ """Generic (de-)serialization definition base class."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
@@ -43,7 +43,7 @@ class Serializer(object):
class NoOpSerializer(Serializer):
- """A serializer that does nothing"""
+ """A serializer that does nothing."""
def serialize_entity(self, context, entity):
return entity
diff --git a/openstack/common/scheduler/filters/json_filter.py b/openstack/common/scheduler/filters/json_filter.py
index 7035947..bc4b4fd 100644
--- a/openstack/common/scheduler/filters/json_filter.py
+++ b/openstack/common/scheduler/filters/json_filter.py
@@ -51,7 +51,7 @@ class JsonFilter(filters.BaseHostFilter):
return self._op_compare(args, operator.gt)
def _in(self, args):
- """First term is in set of remaining terms"""
+ """First term is in set of remaining terms."""
return self._op_compare(args, operator.contains)
def _less_than_equal(self, args):
diff --git a/openstack/common/service.py b/openstack/common/service.py
index eb46164..55e23ed 100644
--- a/openstack/common/service.py
+++ b/openstack/common/service.py
@@ -271,7 +271,7 @@ class ProcessLauncher(object):
return wrap
def wait(self):
- """Loop waiting on children to die and respawning as necessary"""
+ """Loop waiting on children to die and respawning as necessary."""
LOG.debug(_('Full set of CONF:'))
CONF.log_opt_values(LOG, std_logging.DEBUG)
diff --git a/openstack/common/timeutils.py b/openstack/common/timeutils.py
index cb17487..008e9c8 100644
--- a/openstack/common/timeutils.py
+++ b/openstack/common/timeutils.py
@@ -32,7 +32,7 @@ PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
def isotime(at=None, subsecond=False):
- """Stringify time in ISO 8601 format"""
+ """Stringify time in ISO 8601 format."""
if not at:
at = utcnow()
st = at.strftime(_ISO8601_TIME_FORMAT
@@ -44,7 +44,7 @@ def isotime(at=None, subsecond=False):
def parse_isotime(timestr):
- """Parse time from ISO 8601 format"""
+ """Parse time from ISO 8601 format."""
try:
return iso8601.parse_date(timestr)
except iso8601.ParseError as e:
@@ -66,7 +66,7 @@ def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
def normalize_time(timestamp):
- """Normalize time in arbitrary timezone to UTC naive object"""
+ """Normalize time in arbitrary timezone to UTC naive object."""
offset = timestamp.utcoffset()
if offset is None:
return timestamp
@@ -103,7 +103,7 @@ def utcnow():
def iso8601_from_timestamp(timestamp):
- """Returns a iso8601 formated date from timestamp"""
+ """Returns a iso8601 formated date from timestamp."""
return isotime(datetime.datetime.utcfromtimestamp(timestamp))
diff --git a/openstack/common/wsgi.py b/openstack/common/wsgi.py
index 80d4b9b..b5a7157 100644
--- a/openstack/common/wsgi.py
+++ b/openstack/common/wsgi.py
@@ -448,7 +448,7 @@ class ActionDispatcher(object):
class DictSerializer(ActionDispatcher):
- """Default request body serialization"""
+ """Default request body serialization."""
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
@@ -458,7 +458,7 @@ class DictSerializer(ActionDispatcher):
class JSONDictSerializer(DictSerializer):
- """Default JSON request body serialization"""
+ """Default JSON request body serialization."""
def default(self, data):
def sanitizer(obj):
@@ -570,7 +570,7 @@ class XMLDictSerializer(DictSerializer):
class ResponseHeadersSerializer(ActionDispatcher):
- """Default response headers serialization"""
+ """Default response headers serialization."""
def serialize(self, response, data, action):
self.dispatch(response, data, action=action)
@@ -580,7 +580,7 @@ class ResponseHeadersSerializer(ActionDispatcher):
class ResponseSerializer(object):
- """Encode the necessary pieces into a response object"""
+ """Encode the necessary pieces into a response object."""
def __init__(self, body_serializers=None, headers_serializer=None):
self.body_serializers = {
@@ -722,7 +722,7 @@ class RequestDeserializer(object):
class TextDeserializer(ActionDispatcher):
- """Default request body deserialization"""
+ """Default request body deserialization."""
def deserialize(self, datastring, action='default'):
return self.dispatch(datastring, action=action)
@@ -787,20 +787,20 @@ class XMLDeserializer(TextDeserializer):
return result
def find_first_child_named(self, parent, name):
- """Search a nodes children for the first child with a given name"""
+ """Search a nodes children for the first child with a given name."""
for node in parent.childNodes:
if node.nodeName == name:
return node
return None
def find_children_named(self, parent, name):
- """Return all of a nodes children who have the given name"""
+ """Return all of a nodes children who have the given name."""
for node in parent.childNodes:
if node.nodeName == name:
yield node
def extract_text(self, node):
- """Get the text field contained by the given node"""
+ """Get the text field contained by the given node."""
if len(node.childNodes) == 1:
child = node.childNodes[0]
if child.nodeType == child.TEXT_NODE:
diff --git a/tests/unit/plugin/test_callback_plugin.py b/tests/unit/plugin/test_callback_plugin.py
index cbe2601..3f3fd63 100644
--- a/tests/unit/plugin/test_callback_plugin.py
+++ b/tests/unit/plugin/test_callback_plugin.py
@@ -47,7 +47,7 @@ class TestCBP(callbackplugin.CallbackPlugin):
class CallbackTestCase(test_utils.BaseTestCase):
- """Tests for the callback plugin convenience class"""
+ """Tests for the callback plugin convenience class."""
def test_callback_plugin_subclass(self):
diff --git a/tests/unit/rpc/amqp.py b/tests/unit/rpc/amqp.py
index 69d647a..432dd35 100644
--- a/tests/unit/rpc/amqp.py
+++ b/tests/unit/rpc/amqp.py
@@ -223,7 +223,7 @@ class BaseRpcAMQPTestCase(common.BaseRpcTestCase):
self.config(amqp_rpc_single_reply_queue=False)
def test_duplicate_message_check(self):
- """Test sending *not-dict* to a topic exchange/queue"""
+ """Test sending *not-dict* to a topic exchange/queue."""
conn = self.rpc.create_connection(FLAGS)
message = {'args': 'topic test message', '_unique_id': 'aaaabbbbcccc'}
diff --git a/tests/unit/rpc/test_kombu.py b/tests/unit/rpc/test_kombu.py
index a524c73..159fefb 100644
--- a/tests/unit/rpc/test_kombu.py
+++ b/tests/unit/rpc/test_kombu.py
@@ -95,7 +95,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
self.assertEqual(conn1, conn2)
def test_topic_send_receive(self):
- """Test sending to a topic exchange/queue"""
+ """Test sending to a topic exchange/queue."""
conn = self.rpc.create_connection(FLAGS)
message = 'topic test message'
@@ -132,7 +132,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
conn.close()
def test_topic_send_receive_exchange_name(self):
- """Test sending to a topic exchange/queue with an exchange name"""
+ """Test sending to a topic exchange/queue with an exchange name."""
conn = self.rpc.create_connection(FLAGS)
message = 'topic test message'
@@ -151,7 +151,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
self.assertEqual(self.received_message, message)
def test_topic_multiple_queues(self):
- """Test sending to a topic exchange with multiple queues"""
+ """Test sending to a topic exchange with multiple queues."""
conn = self.rpc.create_connection(FLAGS)
message = 'topic test message'
@@ -233,7 +233,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
self.assertEqual(self.received_message_2, message)
def test_direct_send_receive(self):
- """Test sending to a direct exchange/queue"""
+ """Test sending to a direct exchange/queue."""
conn = self.rpc.create_connection(FLAGS)
message = 'direct test message'
@@ -250,7 +250,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
self.assertEqual(self.received_message, message)
def test_cast_interface_uses_default_options(self):
- """Test kombu rpc.cast"""
+ """Test kombu rpc.cast."""
ctxt = rpc_common.CommonRpcContext(user='fake_user',
project='fake_project')
@@ -276,7 +276,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
impl_kombu.cast(FLAGS, ctxt, 'fake_topic', {'msg': 'fake'})
def test_cast_to_server_uses_server_params(self):
- """Test kombu rpc.cast"""
+ """Test kombu rpc.cast."""
ctxt = rpc_common.CommonRpcContext(user='fake_user',
project='fake_project')
@@ -309,7 +309,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
'fake_topic', {'msg': 'fake'})
def test_fanout_send_receive(self):
- """Test sending to a fanout exchange and consuming from 2 queues"""
+ """Test sending to a fanout exchange and consuming from 2 queues."""
self.skipTest("kombu memory transport seems buggy with "
"fanout queues as this test passes when "
@@ -366,7 +366,7 @@ class RpcKombuTestCase(amqp.BaseRpcAMQPTestCase):
self.assertTrue(isinstance(result, self.rpc.DirectConsumer))
def test_declare_consumer_ioerrors_will_reconnect(self):
- """Test that an IOError exception causes a reconnection"""
+ """Test that an IOError exception causes a reconnection."""
info = _raise_exc_stub(self.stubs, 2, self.rpc.DirectConsumer,
'__init__', 'Socket closed', exc_class=IOError)
diff --git a/tests/unit/rpc/test_service.py b/tests/unit/rpc/test_service.py
index 9293d3e..e9f8313 100644
--- a/tests/unit/rpc/test_service.py
+++ b/tests/unit/rpc/test_service.py
@@ -21,7 +21,7 @@ from tests import utils
class FakeService(service.Service):
- """Fake manager for tests"""
+ """Fake manager for tests."""
def __init__(self, host, topic):
super(FakeService, self).__init__(host, topic, None)
self.method_result = 'manager'
@@ -43,7 +43,7 @@ class FakeHookService(FakeService):
class RpcServiceManagerTestCase(utils.BaseTestCase):
- """Test cases for Services"""
+ """Test cases for Services."""
def setUp(self):
super(RpcServiceManagerTestCase, self).setUp()
self.config(fake_rabbit=True)
diff --git a/tests/unit/scheduler/test_host_filters.py b/tests/unit/scheduler/test_host_filters.py
index c36021c..b452955 100644
--- a/tests/unit/scheduler/test_host_filters.py
+++ b/tests/unit/scheduler/test_host_filters.py
@@ -425,7 +425,7 @@ class HostFiltersTestCase(utils.BaseTestCase):
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_happy_day(self):
- """Test json filter more thoroughly"""
+ """Test json filter more thoroughly."""
filt_cls = self.class_map['JsonFilter']()
raw = ['and',
'$capabilities.enabled',
diff --git a/tests/unit/test_lockutils.py b/tests/unit/test_lockutils.py
index c37b030..84afa2d 100644
--- a/tests/unit/test_lockutils.py
+++ b/tests/unit/test_lockutils.py
@@ -75,7 +75,7 @@ class LockTestCase(utils.BaseTestCase):
"got mangled")
def test_synchronized_internally(self):
- """We can lock across multiple green threads"""
+ """We can lock across multiple green threads."""
saved_sem_num = len(lockutils._semaphores)
seen_threads = list()
@@ -105,7 +105,7 @@ class LockTestCase(utils.BaseTestCase):
"Semaphore leak detected")
def test_nested_external_works(self):
- """We can nest external syncs"""
+ """We can nest external syncs."""
tempdir = tempfile.mkdtemp()
try:
self.config(lock_path=tempdir)
@@ -126,7 +126,7 @@ class LockTestCase(utils.BaseTestCase):
shutil.rmtree(tempdir)
def _do_test_synchronized_externally(self):
- """We can lock across multiple processes"""
+ """We can lock across multiple processes."""
@lockutils.synchronized('external', 'test-', external=True)
def lock_files(handles_dir):
diff --git a/tests/unit/test_log.py b/tests/unit/test_log.py
index f87a1da..1641497 100644
--- a/tests/unit/test_log.py
+++ b/tests/unit/test_log.py
@@ -238,7 +238,7 @@ class ContextFormatterTestCase(test_utils.BaseTestCase):
class ExceptionLoggingTestCase(test_utils.BaseTestCase):
- """Test that Exceptions are logged"""
+ """Test that Exceptions are logged."""
def test_excepthook_logs_exception(self):
product_name = 'somename'
diff --git a/tests/unit/test_loopingcall.py b/tests/unit/test_loopingcall.py
index f7d21b3..89cf336 100644
--- a/tests/unit/test_loopingcall.py
+++ b/tests/unit/test_loopingcall.py
@@ -58,7 +58,7 @@ class LoopingCallTestCase(utils.BaseTestCase):
self.assertFalse(timer.start(interval=0.5).wait())
def test_interval_adjustment(self):
- """Ensure the interval is adjusted to account for task duration"""
+ """Ensure the interval is adjusted to account for task duration."""
self.num_runs = 3
now = datetime.datetime.utcnow()
diff --git a/tests/unit/test_notifier.py b/tests/unit/test_notifier.py
index b762d5e..6c3b886 100644
--- a/tests/unit/test_notifier.py
+++ b/tests/unit/test_notifier.py
@@ -29,7 +29,7 @@ ctxt2 = context.get_admin_context()
class NotifierTestCase(test_utils.BaseTestCase):
- """Test case for notifications"""
+ """Test case for notifications."""
def setUp(self):
super(NotifierTestCase, self).setUp()
notification_driver = [
@@ -209,7 +209,7 @@ class SimpleNotifier(object):
class MultiNotifierTestCase(test_utils.BaseTestCase):
- """Test case for notifications"""
+ """Test case for notifications."""
def setUp(self):
super(MultiNotifierTestCase, self).setUp()
diff --git a/tests/unit/test_periodic.py b/tests/unit/test_periodic.py
index 1fb1574..d663f8b 100644
--- a/tests/unit/test_periodic.py
+++ b/tests/unit/test_periodic.py
@@ -48,7 +48,7 @@ class AService(periodic_task.PeriodicTasks):
class PeriodicTasksTestCase(utils.BaseTestCase):
- """Test cases for PeriodicTasks"""
+ """Test cases for PeriodicTasks."""
def test_is_called(self):
serv = AService()
diff --git a/tests/unit/test_plugin.py b/tests/unit/test_plugin.py
index 8ee405a..fd653d7 100644
--- a/tests/unit/test_plugin.py
+++ b/tests/unit/test_plugin.py
@@ -37,7 +37,7 @@ class ManagerTestCase(utils.BaseTestCase):
class NotifyTestCase(utils.BaseTestCase):
- """Test case for the plugin notification interface"""
+ """Test case for the plugin notification interface."""
def test_add_notifier(self):
notifier1 = SimpleNotifier()
@@ -99,7 +99,7 @@ class MockExtManager():
class APITestCase(utils.BaseTestCase):
- """Test case for the plugin api extension interface"""
+ """Test case for the plugin api extension interface."""
def test_add_extension(self):
def mock_load(_s):
return TestPluginClass()
diff --git a/tests/unit/test_rootwrap.py b/tests/unit/test_rootwrap.py
index 5a5d9ca..25b2051 100644
--- a/tests/unit/test_rootwrap.py
+++ b/tests/unit/test_rootwrap.py
@@ -113,7 +113,7 @@ class RootwrapTestCase(utils.BaseTestCase):
p.wait()
def test_KillFilter_no_raise(self):
- """Makes sure ValueError from bug 926412 is gone"""
+ """Makes sure ValueError from bug 926412 is gone."""
f = filters.KillFilter("root", "")
# Providing anything other than kill should be False
usercmd = ['notkill', 999999]
@@ -123,7 +123,7 @@ class RootwrapTestCase(utils.BaseTestCase):
self.assertFalse(f.match(usercmd))
def test_KillFilter_deleted_exe(self):
- """Makes sure deleted exe's are killed correctly"""
+ """Makes sure deleted exe's are killed correctly."""
# See bug #967931.
def fake_readlink(blah):
return '/bin/commandddddd (deleted)'
@@ -135,7 +135,7 @@ class RootwrapTestCase(utils.BaseTestCase):
self.assertTrue(f.match(usercmd))
def test_KillFilter_upgraded_exe(self):
- """Makes sure upgraded exe's are killed correctly"""
+ """Makes sure upgraded exe's are killed correctly."""
# See bug #1179793.
def fake_readlink(blah):
return '/bin/commandddddd\0\05190bfb2 (deleted)'
diff --git a/tests/unit/test_service.py b/tests/unit/test_service.py
index b7ba4f7..4a2827e 100644
--- a/tests/unit/test_service.py
+++ b/tests/unit/test_service.py
@@ -44,7 +44,7 @@ class ExtendedService(service.Service):
class ServiceManagerTestCase(utils.BaseTestCase):
- """Test cases for Services"""
+ """Test cases for Services."""
def test_override_manager_method(self):
serv = ExtendedService()
serv.start()
diff --git a/tests/unit/test_threadgroup.py b/tests/unit/test_threadgroup.py
index f627215..5af6653 100644
--- a/tests/unit/test_threadgroup.py
+++ b/tests/unit/test_threadgroup.py
@@ -27,7 +27,7 @@ LOG = logging.getLogger(__name__)
class ThreadGroupTestCase(utils.BaseTestCase):
- """Test cases for thread group"""
+ """Test cases for thread group."""
def setUp(self):
super(ThreadGroupTestCase, self).setUp()
self.tg = threadgroup.ThreadGroup()
diff --git a/tox.ini b/tox.ini
index 93ccb1e..73af61c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,7 +17,7 @@ commands =
[flake8]
show-source = True
-ignore = H202,H302,H304,H306,H402,H404
+ignore = H202,H302,H304,H306,H404
exclude = .venv,.tox,dist,doc,*.egg,.update-venv
[testenv:pep8]