| field | value | date |
|---|---|---|
| author | Sergey Lukjanov <slukjanov@mirantis.com> | 2013-06-02 20:41:20 +0400 |
| committer | Sergey Lukjanov <slukjanov@mirantis.com> | 2013-06-03 07:53:21 +0400 |
| commit | e3545f828dabe165dc08b2f1670e5f1f19919d0d (patch) | |
| tree | ead970c643632ed624c6bda25e902f47cf13b49e /openstack/common | |
| parent | 15d8d698b7c67c43dc7a2b0c2c6952734bd2ba66 (diff) | |
Enable hacking H402 test
H402 one line docstring needs punctuation
Change-Id: Ie848453cace318d8310cdf0234c512f4c1121119
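H402 flags one-line docstrings that do not end with punctuation; every hunk in the diff below applies the same one-character fix. A minimal before/after sketch (the `before`/`after` names are illustrative only; the docstring text is taken from rpc_notifier.py in this change):

```python
def before(context, message):
    """Sends a notification via RPC"""  # flagged: H402 one line docstring needs punctuation
    pass


def after(context, message):
    """Sends a notification via RPC."""  # passes H402: the one-line docstring ends with a period
    pass
```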
Diffstat (limited to 'openstack/common')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | openstack/common/importutils.py | 2 |
| -rw-r--r-- | openstack/common/notifier/no_op_notifier.py | 2 |
| -rw-r--r-- | openstack/common/notifier/rpc_notifier.py | 2 |
| -rw-r--r-- | openstack/common/notifier/rpc_notifier2.py | 2 |
| -rw-r--r-- | openstack/common/rpc/amqp.py | 12 |
| -rw-r--r-- | openstack/common/rpc/impl_fake.py | 4 |
| -rw-r--r-- | openstack/common/rpc/impl_kombu.py | 56 |
| -rw-r--r-- | openstack/common/rpc/impl_qpid.py | 56 |
| -rw-r--r-- | openstack/common/rpc/impl_zmq.py | 2 |
| -rw-r--r-- | openstack/common/rpc/serializer.py | 4 |
| -rw-r--r-- | openstack/common/scheduler/filters/json_filter.py | 2 |
| -rw-r--r-- | openstack/common/service.py | 2 |
| -rw-r--r-- | openstack/common/timeutils.py | 8 |
| -rw-r--r-- | openstack/common/wsgi.py | 16 |
14 files changed, 85 insertions, 85 deletions
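The H402 check itself lives in the hacking flake8 plugin rather than in this tree, so it does not appear in the diff below. As a rough, standalone sketch of the rule such a check enforces (hypothetical code, not the actual hacking implementation, which hooks into flake8 and handles more cases):

```python
import ast


def one_line_docstring_violations(source):
    """Yield (lineno, message) pairs for one-line docstrings lacking end punctuation."""
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, (ast.FunctionDef, ast.ClassDef)):
            doc = ast.get_docstring(node)
            # A one-line docstring has no newline; H402 wants it to end in punctuation.
            if doc and "\n" not in doc and not doc.rstrip().endswith((".", "?", "!")):
                yield node.lineno, "H402 one line docstring needs punctuation"


if __name__ == "__main__":
    sample = 'def notify(context, message):\n    """Sends a notification via RPC"""\n'
    for lineno, message in one_line_docstring_violations(sample):
        print(lineno, message)
```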
diff --git a/openstack/common/importutils.py b/openstack/common/importutils.py
index 3bd277f..dbee325 100644
--- a/openstack/common/importutils.py
+++ b/openstack/common/importutils.py
@@ -24,7 +24,7 @@ import traceback def import_class(import_str):
- """Returns a class from a string including module and class"""
+ """Returns a class from a string including module and class."""
 mod_str, _sep, class_str = import_str.rpartition('.') try: __import__(mod_str)
diff --git a/openstack/common/notifier/no_op_notifier.py b/openstack/common/notifier/no_op_notifier.py
index bc7a56c..13d946e 100644
--- a/openstack/common/notifier/no_op_notifier.py
+++ b/openstack/common/notifier/no_op_notifier.py
@@ -15,5 +15,5 @@ def notify(_context, message):
- """Notifies the recipient of the desired event given the model"""
+ """Notifies the recipient of the desired event given the model."""
 pass
diff --git a/openstack/common/notifier/rpc_notifier.py b/openstack/common/notifier/rpc_notifier.py
index 52677fe..17bbc9a 100644
--- a/openstack/common/notifier/rpc_notifier.py
+++ b/openstack/common/notifier/rpc_notifier.py
@@ -31,7 +31,7 @@ CONF.register_opt(notification_topic_opt) def notify(context, message):
- """Sends a notification via RPC"""
+ """Sends a notification via RPC."""
 if not context: context = req_context.get_admin_context() priority = message.get('priority',
diff --git a/openstack/common/notifier/rpc_notifier2.py b/openstack/common/notifier/rpc_notifier2.py
index 6ccc9c5..38fe33b 100644
--- a/openstack/common/notifier/rpc_notifier2.py
+++ b/openstack/common/notifier/rpc_notifier2.py
@@ -37,7 +37,7 @@ CONF.register_opt(notification_topic_opt, opt_group) def notify(context, message):
- """Sends a notification via RPC"""
+ """Sends a notification via RPC."""
 if not context: context = req_context.get_admin_context() priority = message.get('priority',
diff --git a/openstack/common/rpc/amqp.py b/openstack/common/rpc/amqp.py
index 6ac254e..f5b7cab 100644
--- a/openstack/common/rpc/amqp.py
+++ b/openstack/common/rpc/amqp.py
@@ -114,7 +114,7 @@ class ConnectionContext(rpc_common.Connection): """ def __init__(self, conf, connection_pool, pooled=True, server_params=None):
- """Create a new connection, or get one from the pool"""
+ """Create a new connection, or get one from the pool."""
 self.connection = None self.conf = conf self.connection_pool = connection_pool
@@ -127,7 +127,7 @@ class ConnectionContext(rpc_common.Connection): self.pooled = pooled def __enter__(self):
- """When with ConnectionContext() is used, return self"""
+ """When with ConnectionContext() is used, return self."""
 return self def _done(self):
@@ -175,7 +175,7 @@ class ConnectionContext(rpc_common.Connection): self.connection.consume_in_thread() def __getattr__(self, key):
- """Proxy all other calls to the Connection instance"""
+ """Proxy all other calls to the Connection instance."""
 if self.connection: return getattr(self.connection, key) else:
@@ -252,7 +252,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, class RpcContext(rpc_common.CommonRpcContext):
- """Context that supports replying to a rpc.call"""
+ """Context that supports replying to a rpc.call."""
 def __init__(self, **kwargs): self.msg_id = kwargs.pop('msg_id', None) self.reply_q = kwargs.pop('reply_q', None)
@@ -491,7 +491,7 @@ class MulticallProxyWaiter(object): return result def __iter__(self):
- """Return a result until we get a reply with an 'ending" flag"""
+ """Return a result until we get a reply with an 'ending' flag."""
 if self._done: raise StopIteration while True:
@@ -567,7 +567,7 @@ class MulticallWaiter(object): def create_connection(conf, new, connection_pool):
- """Create a connection"""
+ """Create a connection."""
 return ConnectionContext(conf, connection_pool, pooled=not new)
diff --git a/openstack/common/rpc/impl_fake.py b/openstack/common/rpc/impl_fake.py
index 815570d..7719697 100644
--- a/openstack/common/rpc/impl_fake.py
+++ b/openstack/common/rpc/impl_fake.py
@@ -122,7 +122,7 @@ class Connection(object): def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
 return Connection()
@@ -179,7 +179,7 @@ def cleanup(): def fanout_cast(conf, context, topic, msg):
- """Cast to all consumers of a topic"""
+ """Cast to all consumers of a topic."""
 check_serialize(msg) method = msg.get('method') if not method:
diff --git a/openstack/common/rpc/impl_kombu.py b/openstack/common/rpc/impl_kombu.py
index 0960b9a..c062d9a 100644
--- a/openstack/common/rpc/impl_kombu.py
+++ b/openstack/common/rpc/impl_kombu.py
@@ -132,7 +132,7 @@ class ConsumerBase(object): self.reconnect(channel) def reconnect(self, channel):
- """Re-declare the queue after a rabbit reconnect"""
+ """Re-declare the queue after a rabbit reconnect."""
 self.channel = channel self.kwargs['channel'] = channel self.queue = kombu.entity.Queue(**self.kwargs)
@@ -173,7 +173,7 @@ class ConsumerBase(object): self.queue.consume(*args, callback=_callback, **options) def cancel(self):
- """Cancel the consuming from the queue, if it has started"""
+ """Cancel the consuming from the queue, if it has started."""
 try: self.queue.cancel(self.tag) except KeyError as e:
@@ -184,7 +184,7 @@ class DirectConsumer(ConsumerBase):
- """Queue/consumer class for 'direct'"""
+ """Queue/consumer class for 'direct'."""
 def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): """Init a 'direct' queue.
@@ -216,7 +216,7 @@ class DirectConsumer(ConsumerBase): class TopicConsumer(ConsumerBase):
- """Consumer class for 'topic'"""
+ """Consumer class for 'topic'."""
 def __init__(self, conf, channel, topic, callback, tag, name=None, exchange_name=None, **kwargs):
@@ -253,7 +253,7 @@ class TopicConsumer(ConsumerBase): class FanoutConsumer(ConsumerBase):
- """Consumer class for 'fanout'"""
+ """Consumer class for 'fanout'."""
 def __init__(self, conf, channel, topic, callback, tag, **kwargs): """Init a 'fanout' queue.
@@ -286,7 +286,7 @@ class FanoutConsumer(ConsumerBase): class Publisher(object):
- """Base Publisher class"""
+ """Base Publisher class."""
 def __init__(self, channel, exchange_name, routing_key, **kwargs): """Init the Publisher class with the exchange_name, routing_key,
@@ -298,7 +298,7 @@ class Publisher(object): self.reconnect(channel) def reconnect(self, channel):
- """Re-establish the Producer after a rabbit reconnection"""
+ """Re-establish the Producer after a rabbit reconnection."""
 self.exchange = kombu.entity.Exchange(name=self.exchange_name, **self.kwargs) self.producer = kombu.messaging.Producer(exchange=self.exchange,
@@ -306,7 +306,7 @@ class Publisher(object): routing_key=self.routing_key) def send(self, msg, timeout=None):
- """Send a message"""
+ """Send a message."""
 if timeout: # # AMQP TTL is in milliseconds when set in the header.
@@ -317,7 +317,7 @@ class Publisher(object): class DirectPublisher(Publisher):
- """Publisher class for 'direct'"""
+ """Publisher class for 'direct'."""
 def __init__(self, conf, channel, msg_id, **kwargs): """init a 'direct' publisher.
@@ -333,7 +333,7 @@ class DirectPublisher(Publisher): class TopicPublisher(Publisher):
- """Publisher class for 'topic'"""
+ """Publisher class for 'topic'."""
 def __init__(self, conf, channel, topic, **kwargs): """init a 'topic' publisher.
@@ -352,7 +352,7 @@ class TopicPublisher(Publisher): class FanoutPublisher(Publisher):
- """Publisher class for 'fanout'"""
+ """Publisher class for 'fanout'."""
 def __init__(self, conf, channel, topic, **kwargs): """init a 'fanout' publisher.
@@ -367,7 +367,7 @@ class FanoutPublisher(Publisher): class NotifyPublisher(TopicPublisher):
- """Publisher class for 'notify'"""
+ """Publisher class for 'notify'."""
 def __init__(self, conf, channel, topic, **kwargs): self.durable = kwargs.pop('durable', conf.rabbit_durable_queues)
@@ -579,18 +579,18 @@ class Connection(object): self.reconnect() def get_channel(self):
- """Convenience call for bin/clear_rabbit_queues"""
+ """Convenience call for bin/clear_rabbit_queues."""
 return self.channel def close(self):
- """Close/release this connection"""
+ """Close/release this connection."""
 self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.connection.release() self.connection = None def reset(self):
- """Reset a connection so it can be used again"""
+ """Reset a connection so it can be used again."""
 self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.channel.close()
@@ -619,7 +619,7 @@ class Connection(object): return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None):
- """Return an iterator that will consume from all queues/consumers"""
+ """Return an iterator that will consume from all queues/consumers."""
 info = {'do_consume': True}
@@ -649,7 +649,7 @@ class Connection(object): yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self):
- """Cancel a consumer thread"""
+ """Cancel a consumer thread."""
 if self.consumer_thread is not None: self.consumer_thread.kill() try:
@@ -664,7 +664,7 @@ class Connection(object): proxy_cb.wait() def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
- """Send to a publisher based on the publisher class"""
+ """Send to a publisher based on the publisher class."""
 def _error_callback(exc): log_info = {'topic': topic, 'err_str': str(exc)}
@@ -694,27 +694,27 @@ class Connection(object): topic, callback) def declare_fanout_consumer(self, topic, callback):
- """Create a 'fanout' consumer"""
+ """Create a 'fanout' consumer."""
 self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg):
- """Send a 'direct' message"""
+ """Send a 'direct' message."""
 self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None):
- """Send a 'topic' message"""
+ """Send a 'topic' message."""
 self.publisher_send(TopicPublisher, topic, msg, timeout) def fanout_send(self, topic, msg):
- """Send a 'fanout' message"""
+ """Send a 'fanout' message."""
 self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs):
- """Send a notify message on a topic"""
+ """Send a notify message on a topic."""
 self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) def consume(self, limit=None):
- """Consume from all queues/consumers"""
+ """Consume from all queues/consumers."""
 it = self.iterconsume(limit=limit) while True: try:
@@ -723,7 +723,7 @@ class Connection(object): return def consume_in_thread(self):
- """Consumer from all queues/consumers in a greenthread"""
+ """Consumer from all queues/consumers in a greenthread."""
 def _consumer_thread(): try: self.consume()
@@ -734,7 +734,7 @@ class Connection(object): return self.consumer_thread def create_consumer(self, topic, proxy, fanout=False):
- """Create a consumer that calls a method in a proxy object"""
+ """Create a consumer that calls a method in a proxy object."""
 proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -746,7 +746,7 @@ class Connection(object): self.declare_topic_consumer(topic, proxy_cb) def create_worker(self, topic, proxy, pool_name):
- """Create a worker that calls a method in a proxy object"""
+ """Create a worker that calls a method in a proxy object."""
 proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -779,7 +779,7 @@ class Connection(object): def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
 return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection))
diff --git a/openstack/common/rpc/impl_qpid.py b/openstack/common/rpc/impl_qpid.py
index e7ac016..32680e1 100644
--- a/openstack/common/rpc/impl_qpid.py
+++ b/openstack/common/rpc/impl_qpid.py
@@ -118,13 +118,13 @@ class ConsumerBase(object): self.reconnect(session) def reconnect(self, session):
- """Re-declare the receiver after a qpid reconnect"""
+ """Re-declare the receiver after a qpid reconnect."""
 self.session = session self.receiver = session.receiver(self.address) self.receiver.capacity = 1 def consume(self):
- """Fetch the message and pass it to the callback object"""
+ """Fetch the message and pass it to the callback object."""
 message = self.receiver.fetch() try: msg = rpc_common.deserialize_msg(message.content)
@@ -139,7 +139,7 @@ class ConsumerBase(object): class DirectConsumer(ConsumerBase):
- """Queue/consumer class for 'direct'"""
+ """Queue/consumer class for 'direct'."""
 def __init__(self, conf, session, msg_id, callback): """Init a 'direct' queue.
@@ -157,7 +157,7 @@ class DirectConsumer(ConsumerBase): class TopicConsumer(ConsumerBase):
- """Consumer class for 'topic'"""
+ """Consumer class for 'topic'."""
 def __init__(self, conf, session, topic, callback, name=None, exchange_name=None):
@@ -177,7 +177,7 @@ class TopicConsumer(ConsumerBase): class FanoutConsumer(ConsumerBase):
- """Consumer class for 'fanout'"""
+ """Consumer class for 'fanout'."""
 def __init__(self, conf, session, topic, callback): """Init a 'fanout' queue.
@@ -196,7 +196,7 @@ class FanoutConsumer(ConsumerBase): class Publisher(object):
- """Base Publisher class"""
+ """Base Publisher class."""
 def __init__(self, session, node_name, node_opts=None): """Init the Publisher class with the exchange_name, routing_key,
@@ -225,16 +225,16 @@ class Publisher(object): self.reconnect(session) def reconnect(self, session):
- """Re-establish the Sender after a reconnection"""
+ """Re-establish the Sender after a reconnection."""
 self.sender = session.sender(self.address) def send(self, msg):
- """Send a message"""
+ """Send a message."""
 self.sender.send(msg) class DirectPublisher(Publisher):
- """Publisher class for 'direct'"""
+ """Publisher class for 'direct'."""
 def __init__(self, conf, session, msg_id): """Init a 'direct' publisher.""" super(DirectPublisher, self).__init__(session, msg_id,
@@ -242,7 +242,7 @@ class DirectPublisher(Publisher): class TopicPublisher(Publisher):
- """Publisher class for 'topic'"""
+ """Publisher class for 'topic'."""
 def __init__(self, conf, session, topic): """init a 'topic' publisher. """
@@ -252,7 +252,7 @@ class TopicPublisher(Publisher): class FanoutPublisher(Publisher):
- """Publisher class for 'fanout'"""
+ """Publisher class for 'fanout'."""
 def __init__(self, conf, session, topic): """init a 'fanout' publisher. """
@@ -262,7 +262,7 @@ class FanoutPublisher(Publisher): class NotifyPublisher(Publisher):
- """Publisher class for notifications"""
+ """Publisher class for notifications."""
 def __init__(self, conf, session, topic): """init a 'topic' publisher. """
@@ -330,7 +330,7 @@ class Connection(object): return self.consumers[str(receiver)] def reconnect(self):
- """Handles reconnecting and re-establishing sessions and queues"""
+ """Handles reconnecting and re-establishing sessions and queues."""
 attempt = 0 delay = 1 while True:
@@ -381,7 +381,7 @@ class Connection(object): self.reconnect() def close(self):
- """Close/release this connection"""
+ """Close/release this connection."""
 self.cancel_consumer_thread() self.wait_on_proxy_callbacks() try:
@@ -394,7 +394,7 @@ class Connection(object): self.connection = None def reset(self):
- """Reset a connection so it can be used again"""
+ """Reset a connection so it can be used again."""
 self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.session.close()
@@ -418,7 +418,7 @@ class Connection(object): return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None):
- """Return an iterator that will consume from all queues/consumers"""
+ """Return an iterator that will consume from all queues/consumers."""
 def _error_callback(exc): if isinstance(exc, qpid_exceptions.Empty):
@@ -442,7 +442,7 @@ class Connection(object): yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self):
- """Cancel a consumer thread"""
+ """Cancel a consumer thread."""
 if self.consumer_thread is not None: self.consumer_thread.kill() try:
@@ -457,7 +457,7 @@ class Connection(object): proxy_cb.wait() def publisher_send(self, cls, topic, msg):
- """Send to a publisher based on the publisher class"""
+ """Send to a publisher based on the publisher class."""
 def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)}
@@ -487,15 +487,15 @@ class Connection(object): topic, callback) def declare_fanout_consumer(self, topic, callback):
- """Create a 'fanout' consumer"""
+ """Create a 'fanout' consumer."""
 self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg):
- """Send a 'direct' message"""
+ """Send a 'direct' message."""
 self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None):
- """Send a 'topic' message"""
+ """Send a 'topic' message."""
 # # We want to create a message with attributes, e.g. a TTL. We # don't really need to keep 'msg' in its JSON format any longer
@@ -510,15 +510,15 @@ class Connection(object): self.publisher_send(TopicPublisher, topic, qpid_message) def fanout_send(self, topic, msg):
- """Send a 'fanout' message"""
+ """Send a 'fanout' message."""
 self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs):
- """Send a notify message on a topic"""
+ """Send a notify message on a topic."""
 self.publisher_send(NotifyPublisher, topic, msg) def consume(self, limit=None):
- """Consume from all queues/consumers"""
+ """Consume from all queues/consumers."""
 it = self.iterconsume(limit=limit) while True: try:
@@ -527,7 +527,7 @@ class Connection(object): return def consume_in_thread(self):
- """Consumer from all queues/consumers in a greenthread"""
+ """Consumer from all queues/consumers in a greenthread."""
 def _consumer_thread(): try: self.consume()
@@ -538,7 +538,7 @@ class Connection(object): return self.consumer_thread def create_consumer(self, topic, proxy, fanout=False):
- """Create a consumer that calls a method in a proxy object"""
+ """Create a consumer that calls a method in a proxy object."""
 proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -554,7 +554,7 @@ class Connection(object): return consumer def create_worker(self, topic, proxy, pool_name):
- """Create a worker that calls a method in a proxy object"""
+ """Create a worker that calls a method in a proxy object."""
 proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection))
@@ -597,7 +597,7 @@ class Connection(object): def create_connection(conf, new=True):
- """Create a connection"""
+ """Create a connection."""
 return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection))
diff --git a/openstack/common/rpc/impl_zmq.py b/openstack/common/rpc/impl_zmq.py
index 726bb52..07b7b41 100644
--- a/openstack/common/rpc/impl_zmq.py
+++ b/openstack/common/rpc/impl_zmq.py
@@ -506,7 +506,7 @@ class ZmqProxy(ZmqBaseReactor): "%(topic)s. Dropping message.") % {'topic': topic}) def consume_in_thread(self):
- """Runs the ZmqProxy service"""
+ """Runs the ZmqProxy service."""
 ipc_dir = CONF.rpc_zmq_ipc_dir consume_in = "tcp://%s:%s" % \ (CONF.rpc_zmq_bind_address,
diff --git a/openstack/common/rpc/serializer.py b/openstack/common/rpc/serializer.py
index 0a2c9c4..76c6831 100644
--- a/openstack/common/rpc/serializer.py
+++ b/openstack/common/rpc/serializer.py
@@ -18,7 +18,7 @@ import abc class Serializer(object):
- """Generic (de-)serialization definition base class"""
+ """Generic (de-)serialization definition base class."""
 __metaclass__ = abc.ABCMeta @abc.abstractmethod
@@ -43,7 +43,7 @@ class Serializer(object): class NoOpSerializer(Serializer):
- """A serializer that does nothing"""
+ """A serializer that does nothing."""
 def serialize_entity(self, context, entity): return entity
diff --git a/openstack/common/scheduler/filters/json_filter.py b/openstack/common/scheduler/filters/json_filter.py
index 7035947..bc4b4fd 100644
--- a/openstack/common/scheduler/filters/json_filter.py
+++ b/openstack/common/scheduler/filters/json_filter.py
@@ -51,7 +51,7 @@ class JsonFilter(filters.BaseHostFilter): return self._op_compare(args, operator.gt) def _in(self, args):
- """First term is in set of remaining terms"""
+ """First term is in set of remaining terms."""
 return self._op_compare(args, operator.contains) def _less_than_equal(self, args):
diff --git a/openstack/common/service.py b/openstack/common/service.py
index eb46164..55e23ed 100644
--- a/openstack/common/service.py
+++ b/openstack/common/service.py
@@ -271,7 +271,7 @@ class ProcessLauncher(object): return wrap def wait(self):
- """Loop waiting on children to die and respawning as necessary"""
+ """Loop waiting on children to die and respawning as necessary."""
 LOG.debug(_('Full set of CONF:')) CONF.log_opt_values(LOG, std_logging.DEBUG)
diff --git a/openstack/common/timeutils.py b/openstack/common/timeutils.py
index cb17487..008e9c8 100644
--- a/openstack/common/timeutils.py
+++ b/openstack/common/timeutils.py
@@ -32,7 +32,7 @@ PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND def isotime(at=None, subsecond=False):
- """Stringify time in ISO 8601 format"""
+ """Stringify time in ISO 8601 format."""
 if not at: at = utcnow() st = at.strftime(_ISO8601_TIME_FORMAT
@@ -44,7 +44,7 @@ def parse_isotime(timestr):
- """Parse time from ISO 8601 format"""
+ """Parse time from ISO 8601 format."""
 try: return iso8601.parse_date(timestr) except iso8601.ParseError as e:
@@ -66,7 +66,7 @@ def normalize_time(timestamp):
- """Normalize time in arbitrary timezone to UTC naive object"""
+ """Normalize time in arbitrary timezone to UTC naive object."""
 offset = timestamp.utcoffset() if offset is None: return timestamp
@@ -103,7 +103,7 @@ def utcnow(): def iso8601_from_timestamp(timestamp):
- """Returns a iso8601 formated date from timestamp"""
+ """Returns a iso8601 formated date from timestamp."""
 return isotime(datetime.datetime.utcfromtimestamp(timestamp))
diff --git a/openstack/common/wsgi.py b/openstack/common/wsgi.py
index 80d4b9b..b5a7157 100644
--- a/openstack/common/wsgi.py
+++ b/openstack/common/wsgi.py
@@ -448,7 +448,7 @@ class ActionDispatcher(object): class DictSerializer(ActionDispatcher):
- """Default request body serialization"""
+ """Default request body serialization."""
 def serialize(self, data, action='default'): return self.dispatch(data, action=action)
@@ -458,7 +458,7 @@ class DictSerializer(ActionDispatcher): class JSONDictSerializer(DictSerializer):
- """Default JSON request body serialization"""
+ """Default JSON request body serialization."""
 def default(self, data): def sanitizer(obj):
@@ -570,7 +570,7 @@ class XMLDictSerializer(DictSerializer): class ResponseHeadersSerializer(ActionDispatcher):
- """Default response headers serialization"""
+ """Default response headers serialization."""
 def serialize(self, response, data, action): self.dispatch(response, data, action=action)
@@ -580,7 +580,7 @@ class ResponseHeadersSerializer(ActionDispatcher): class ResponseSerializer(object):
- """Encode the necessary pieces into a response object"""
+ """Encode the necessary pieces into a response object."""
 def __init__(self, body_serializers=None, headers_serializer=None): self.body_serializers = {
@@ -722,7 +722,7 @@ class RequestDeserializer(object): class TextDeserializer(ActionDispatcher):
- """Default request body deserialization"""
+ """Default request body deserialization."""
 def deserialize(self, datastring, action='default'): return self.dispatch(datastring, action=action)
@@ -787,20 +787,20 @@ class XMLDeserializer(TextDeserializer): return result def find_first_child_named(self, parent, name):
- """Search a nodes children for the first child with a given name"""
+ """Search a nodes children for the first child with a given name."""
 for node in parent.childNodes: if node.nodeName == name: return node return None def find_children_named(self, parent, name):
- """Return all of a nodes children who have the given name"""
+ """Return all of a nodes children who have the given name."""
 for node in parent.childNodes: if node.nodeName == name: yield node def extract_text(self, node):
- """Get the text field contained by the given node"""
+ """Get the text field contained by the given node."""
 if len(node.childNodes) == 1: child = node.childNodes[0] if child.nodeType == child.TEXT_NODE:
