diff options
-rw-r--r-- | openstack/common/db/api.py | 19 | ||||
-rw-r--r-- | openstack/common/db/sqlalchemy/session.py | 84 | ||||
-rw-r--r-- | openstack/common/middleware/correlation_id.py | 29 | ||||
-rw-r--r-- | openstack/common/rpc/amqp.py | 5 | ||||
-rw-r--r-- | openstack/common/rpc/dispatcher.py | 29 | ||||
-rw-r--r-- | openstack/common/rpc/matchmaker.py | 3 | ||||
-rw-r--r-- | openstack/common/rpc/proxy.py | 46 | ||||
-rw-r--r-- | openstack/common/rpc/serializer.py | 52 | ||||
-rw-r--r-- | openstack/common/setup.py | 367 | ||||
-rw-r--r-- | openstack/common/version.py | 94 | ||||
-rw-r--r-- | openstack/common/wsgi.py | 11 | ||||
-rw-r--r-- | setup.cfg | 44 | ||||
-rw-r--r-- | setup.py | 63 | ||||
-rw-r--r-- | tests/unit/db/sqlalchemy/test_sqlalchemy.py | 45 | ||||
-rw-r--r-- | tests/unit/db/test_api.py | 35 | ||||
-rw-r--r-- | tests/unit/middleware/test_correlation_id.py | 51 | ||||
-rw-r--r-- | tests/unit/rpc/test_dispatcher.py | 27 | ||||
-rw-r--r-- | tests/unit/rpc/test_proxy.py | 50 | ||||
-rw-r--r-- | tests/unit/test_setup.py | 257 | ||||
-rw-r--r-- | tests/unit/test_version.py | 75 | ||||
-rw-r--r-- | tests/utils.py | 20 | ||||
-rw-r--r-- | tools/flakes.py | 24 | ||||
-rw-r--r-- | tools/pip-requires | 2 | ||||
-rw-r--r-- | tools/test-requires | 5 | ||||
-rw-r--r-- | tox.ini | 1 |
25 files changed, 507 insertions, 931 deletions
diff --git a/openstack/common/db/api.py b/openstack/common/db/api.py index 5603bb5..dcadecd 100644 --- a/openstack/common/db/api.py +++ b/openstack/common/db/api.py @@ -19,8 +19,9 @@ Supported configuration options: -`db_backend`: DB backend name or full module path to DB backend module. -`dbapi_use_tpool`: Enable thread pooling of DB API calls. +The following two parameters are in the 'database' group: +`backend`: DB backend name or full module path to DB backend module. +`use_tpool`: Enable thread pooling of DB API calls. A DB backend module should implement a method named 'get_backend' which takes no arguments. The method can return any object that implements DB @@ -44,17 +45,21 @@ from openstack.common import lockutils db_opts = [ - cfg.StrOpt('db_backend', + cfg.StrOpt('backend', default='sqlalchemy', + deprecated_name='db_backend', + deprecated_group='DEFAULT', help='The backend to use for db'), - cfg.BoolOpt('dbapi_use_tpool', + cfg.BoolOpt('use_tpool', default=False, + deprecated_name='dbapi_use_tpool', + deprecated_group='DEFAULT', help='Enable the experimental use of thread pooling for ' 'all DB API calls') ] CONF = cfg.CONF -CONF.register_opts(db_opts) +CONF.register_opts(db_opts, 'database') class DBAPI(object): @@ -75,8 +80,8 @@ class DBAPI(object): if self.__backend: # Another thread assigned it return self.__backend - backend_name = CONF.db_backend - self.__use_tpool = CONF.dbapi_use_tpool + backend_name = CONF.database.backend + self.__use_tpool = CONF.database.use_tpool if self.__use_tpool: from eventlet import tpool self.__tpool = tpool diff --git a/openstack/common/db/sqlalchemy/session.py b/openstack/common/db/sqlalchemy/session.py index a66ce9f..ccce7d3 100644 --- a/openstack/common/db/sqlalchemy/session.py +++ b/openstack/common/db/sqlalchemy/session.py @@ -260,53 +260,76 @@ from openstack.common import log as logging from openstack.common.gettextutils import _ from openstack.common import timeutils +DEFAULT = 'DEFAULT' -sql_opts = [ - 
cfg.StrOpt('sql_connection', +sqlite_db_opts = [ + cfg.StrOpt('sqlite_db', + default='oslo.sqlite', + help='the filename to use with sqlite'), + cfg.BoolOpt('sqlite_synchronous', + default=True, + help='If true, use synchronous mode for sqlite'), +] + +database_opts = [ + cfg.StrOpt('connection', default='sqlite:///' + os.path.abspath(os.path.join(os.path.dirname(__file__), '../', '$sqlite_db')), help='The SQLAlchemy connection string used to connect to the ' 'database', + deprecated_name='sql_connection', + deprecated_group=DEFAULT, secret=True), - cfg.StrOpt('sqlite_db', - default='oslo.sqlite', - help='the filename to use with sqlite'), - cfg.IntOpt('sql_idle_timeout', + cfg.IntOpt('idle_timeout', default=3600, + deprecated_name='sql_idle_timeout', + deprecated_group=DEFAULT, help='timeout before idle sql connections are reaped'), - cfg.BoolOpt('sqlite_synchronous', - default=True, - help='If passed, use synchronous mode for sqlite'), - cfg.IntOpt('sql_min_pool_size', + cfg.IntOpt('min_pool_size', default=1, + deprecated_name='sql_min_pool_size', + deprecated_group=DEFAULT, help='Minimum number of SQL connections to keep open in a ' 'pool'), - cfg.IntOpt('sql_max_pool_size', + cfg.IntOpt('max_pool_size', default=5, + deprecated_name='sql_max_pool_size', + deprecated_group=DEFAULT, help='Maximum number of SQL connections to keep open in a ' 'pool'), - cfg.IntOpt('sql_max_retries', + cfg.IntOpt('max_retries', default=10, + deprecated_name='sql_max_retries', + deprecated_group=DEFAULT, help='maximum db connection retries during startup. 
' '(setting -1 implies an infinite retry count)'), - cfg.IntOpt('sql_retry_interval', + cfg.IntOpt('retry_interval', default=10, + deprecated_name='sql_retry_interval', + deprecated_group=DEFAULT, help='interval between retries of opening a sql connection'), - cfg.IntOpt('sql_max_overflow', + cfg.IntOpt('max_overflow', default=None, + deprecated_name='sql_max_overflow', + deprecated_group=DEFAULT, help='If set, use this value for max_overflow with sqlalchemy'), - cfg.IntOpt('sql_connection_debug', + cfg.IntOpt('connection_debug', default=0, + deprecated_name='sql_connection_debug', + deprecated_group=DEFAULT, help='Verbosity of SQL debugging information. 0=None, ' '100=Everything'), - cfg.BoolOpt('sql_connection_trace', + cfg.BoolOpt('connection_trace', default=False, + deprecated_name='sql_connection_trace', + deprecated_group=DEFAULT, help='Add python stack traces to SQL as comment strings'), ] CONF = cfg.CONF -CONF.register_opts(sql_opts) +CONF.register_opts(sqlite_db_opts) +CONF.register_opts(database_opts, 'database') LOG = logging.getLogger(__name__) _ENGINE = None @@ -315,8 +338,9 @@ _MAKER = None def set_defaults(sql_connection, sqlite_db): """Set defaults for configuration variables.""" - cfg.set_defaults(sql_opts, - sql_connection=sql_connection, + cfg.set_defaults(database_opts, + connection=sql_connection) + cfg.set_defaults(sqlite_db_opts, sqlite_db=sqlite_db) @@ -470,7 +494,7 @@ def get_engine(sqlite_fk=False): """Return a SQLAlchemy engine.""" global _ENGINE if _ENGINE is None: - _ENGINE = create_engine(CONF.sql_connection, + _ENGINE = create_engine(CONF.database.connection, sqlite_fk=sqlite_fk) return _ENGINE @@ -533,15 +557,15 @@ def create_engine(sql_connection, sqlite_fk=False): connection_dict = sqlalchemy.engine.url.make_url(sql_connection) engine_args = { - "pool_recycle": CONF.sql_idle_timeout, + "pool_recycle": CONF.database.idle_timeout, "echo": False, 'convert_unicode': True, } # Map our SQL debug level to SQLAlchemy's options - if 
CONF.sql_connection_debug >= 100: + if CONF.database.connection_debug >= 100: engine_args['echo'] = 'debug' - elif CONF.sql_connection_debug >= 50: + elif CONF.database.connection_debug >= 50: engine_args['echo'] = True if "sqlite" in connection_dict.drivername: @@ -549,13 +573,13 @@ def create_engine(sql_connection, sqlite_fk=False): engine_args["listeners"] = [SqliteForeignKeysListener()] engine_args["poolclass"] = NullPool - if CONF.sql_connection == "sqlite://": + if CONF.database.connection == "sqlite://": engine_args["poolclass"] = StaticPool engine_args["connect_args"] = {'check_same_thread': False} else: - engine_args['pool_size'] = CONF.sql_max_pool_size - if CONF.sql_max_overflow is not None: - engine_args['max_overflow'] = CONF.sql_max_overflow + engine_args['pool_size'] = CONF.database.max_pool_size + if CONF.database.max_overflow is not None: + engine_args['max_overflow'] = CONF.database.max_overflow engine = sqlalchemy.create_engine(sql_connection, **engine_args) @@ -569,7 +593,7 @@ def create_engine(sql_connection, sqlite_fk=False): _synchronous_switch_listener) sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener) - if (CONF.sql_connection_trace and + if (CONF.database.connection_trace and engine.dialect.dbapi.__name__ == 'MySQLdb'): _patch_mysqldb_with_stacktrace_comments() @@ -579,7 +603,7 @@ def create_engine(sql_connection, sqlite_fk=False): if not _is_db_connection_error(e.args[0]): raise - remaining = CONF.sql_max_retries + remaining = CONF.database.max_retries if remaining == -1: remaining = 'infinite' while True: @@ -587,7 +611,7 @@ def create_engine(sql_connection, sqlite_fk=False): LOG.warn(msg % remaining) if remaining != 'infinite': remaining -= 1 - time.sleep(CONF.sql_retry_interval) + time.sleep(CONF.database.retry_interval) try: engine.connect() break diff --git a/openstack/common/middleware/correlation_id.py b/openstack/common/middleware/correlation_id.py new file mode 100644 index 0000000..a3efe34 --- /dev/null +++ 
b/openstack/common/middleware/correlation_id.py @@ -0,0 +1,29 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (c) 2013 Rackspace Hosting +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Middleware that attaches a correlation id to WSGI request""" + +from openstack.common import uuidutils +from openstack.common import wsgi + + +class CorrelationIdMiddleware(wsgi.Middleware): + + def process_request(self, req): + correlation_id = (req.headers.get("X_CORRELATION_ID") or + uuidutils.generate_uuid()) + req.headers['X_CORRELATION_ID'] = correlation_id diff --git a/openstack/common/rpc/amqp.py b/openstack/common/rpc/amqp.py index dc9aadb..946501b 100644 --- a/openstack/common/rpc/amqp.py +++ b/openstack/common/rpc/amqp.py @@ -197,8 +197,9 @@ class ReplyProxy(ConnectionContext): msg_id = message_data.pop('_msg_id', None) waiter = self._call_waiters.get(msg_id) if not waiter: - LOG.warn(_('no calling threads waiting for msg_id : %s' - ', message : %s') % (msg_id, message_data)) + LOG.warn(_('no calling threads waiting for msg_id : %(msg_id)s' + ', message : %(data)s'), {'msg_id': msg_id, + 'data': message_data}) else: waiter.put(message_data) diff --git a/openstack/common/rpc/dispatcher.py b/openstack/common/rpc/dispatcher.py index 2efa5f2..34eadff 100644 --- a/openstack/common/rpc/dispatcher.py +++ b/openstack/common/rpc/dispatcher.py @@ -84,6 +84,7 @@ minimum version that supports the new parameter should be specified. 
""" from openstack.common.rpc import common as rpc_common +from openstack.common.rpc import serializer as rpc_serializer class RpcDispatcher(object): @@ -93,16 +94,38 @@ class RpcDispatcher(object): contains a list of underlying managers that have an API_VERSION attribute. """ - def __init__(self, callbacks): + def __init__(self, callbacks, serializer=None): """Initialize the rpc dispatcher. :param callbacks: List of proxy objects that are an instance of a class with rpc methods exposed. Each proxy object should have an RPC_API_VERSION attribute. + :param serializer: The Serializer object that will be used to + deserialize arguments before the method call and + to serialize the result after it returns. """ self.callbacks = callbacks + if serializer is None: + serializer = rpc_serializer.NoOpSerializer() + self.serializer = serializer super(RpcDispatcher, self).__init__() + def _deserialize_args(self, context, kwargs): + """Helper method called to deserialize args before dispatch. + + This calls our serializer on each argument, returning a new set of + args that have been deserialized. + + :param context: The request context + :param kwargs: The arguments to be deserialized + :returns: A new set of deserialized args + """ + new_kwargs = dict() + for argname, arg in kwargs.iteritems(): + new_kwargs[argname] = self.serializer.deserialize_entity(context, + arg) + return new_kwargs + def dispatch(self, ctxt, version, method, namespace, **kwargs): """Dispatch a message based on a requested version. 
@@ -145,7 +168,9 @@ class RpcDispatcher(object): if not hasattr(proxyobj, method): continue if is_compatible: - return getattr(proxyobj, method)(ctxt, **kwargs) + kwargs = self._deserialize_args(ctxt, kwargs) + result = getattr(proxyobj, method)(ctxt, **kwargs) + return self.serializer.serialize_entity(ctxt, result) if had_compatible: raise AttributeError("No such RPC function '%s'" % method) diff --git a/openstack/common/rpc/matchmaker.py b/openstack/common/rpc/matchmaker.py index 8072023..fd10f27 100644 --- a/openstack/common/rpc/matchmaker.py +++ b/openstack/common/rpc/matchmaker.py @@ -230,7 +230,8 @@ class HeartbeatMatchMakerBase(MatchMakerBase): self.hosts.discard(host) self.backend_unregister(key, '.'.join((key, host))) - LOG.info(_("Matchmaker unregistered: %s, %s" % (key, host))) + LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), + {'key': key, 'host': host}) def start_heartbeat(self): """ diff --git a/openstack/common/rpc/proxy.py b/openstack/common/rpc/proxy.py index 0b311de..3b74fe1 100644 --- a/openstack/common/rpc/proxy.py +++ b/openstack/common/rpc/proxy.py @@ -24,6 +24,7 @@ For more information about rpc API version numbers, see: from openstack.common import rpc from openstack.common.rpc import common as rpc_common +from openstack.common.rpc import serializer as rpc_serializer class RpcProxy(object): @@ -35,7 +36,11 @@ class RpcProxy(object): rpc API. """ - def __init__(self, topic, default_version, version_cap=None): + # The default namespace, which can be overriden in a subclass. + RPC_API_NAMESPACE = None + + def __init__(self, topic, default_version, version_cap=None, + serializer=None): """Initialize an RpcProxy. :param topic: The topic to use for all messages. @@ -44,10 +49,15 @@ class RpcProxy(object): basis. :param version_cap: Optionally cap the maximum version used for sent messages. + :param serializer: Optionaly (de-)serialize entities with a + provided helper. 
""" self.topic = topic self.default_version = default_version self.version_cap = version_cap + if serializer is None: + serializer = rpc_serializer.NoOpSerializer() + self.serializer = serializer super(RpcProxy, self).__init__() def _set_version(self, msg, vers): @@ -70,9 +80,25 @@ class RpcProxy(object): def make_namespaced_msg(method, namespace, **kwargs): return {'method': method, 'namespace': namespace, 'args': kwargs} - @staticmethod - def make_msg(method, **kwargs): - return RpcProxy.make_namespaced_msg(method, None, **kwargs) + def make_msg(self, method, **kwargs): + return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE, + **kwargs) + + def _serialize_msg_args(self, context, kwargs): + """Helper method called to serialize message arguments. + + This calls our serializer on each argument, returning a new + set of args that have been serialized. + + :param context: The request context + :param kwargs: The arguments to serialize + :returns: A new set of serialized arguments + """ + new_kwargs = dict() + for argname, arg in kwargs.iteritems(): + new_kwargs[argname] = self.serializer.serialize_entity(context, + arg) + return new_kwargs def call(self, context, msg, topic=None, version=None, timeout=None): """rpc.call() a remote method. @@ -89,9 +115,11 @@ class RpcProxy(object): :returns: The return value from the remote method. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: - return rpc.call(context, real_topic, msg, timeout) + result = rpc.call(context, real_topic, msg, timeout) + return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) @@ -112,9 +140,11 @@ class RpcProxy(object): from the remote method as they arrive. 
""" self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: - return rpc.multicall(context, real_topic, msg, timeout) + result = rpc.multicall(context, real_topic, msg, timeout) + return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) @@ -132,6 +162,7 @@ class RpcProxy(object): remote method. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast(context, self._get_topic(topic), msg) def fanout_cast(self, context, msg, topic=None, version=None): @@ -147,6 +178,7 @@ class RpcProxy(object): from the remote method. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast(context, self._get_topic(topic), msg) def cast_to_server(self, context, server_params, msg, topic=None, @@ -165,6 +197,7 @@ class RpcProxy(object): return values. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast_to_server(context, server_params, self._get_topic(topic), msg) def fanout_cast_to_server(self, context, server_params, msg, topic=None, @@ -183,5 +216,6 @@ class RpcProxy(object): return values. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast_to_server(context, server_params, self._get_topic(topic), msg) diff --git a/openstack/common/rpc/serializer.py b/openstack/common/rpc/serializer.py new file mode 100644 index 0000000..0a2c9c4 --- /dev/null +++ b/openstack/common/rpc/serializer.py @@ -0,0 +1,52 @@ +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Provides the definition of an RPC serialization handler""" + +import abc + + +class Serializer(object): + """Generic (de-)serialization definition base class""" + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def serialize_entity(self, context, entity): + """Serialize something to primitive form. + + :param context: Security context + :param entity: Entity to be serialized + :returns: Serialized form of entity + """ + pass + + @abc.abstractmethod + def deserialize_entity(self, context, entity): + """Deserialize something from primitive form. + + :param context: Security context + :param entity: Primitive to be deserialized + :returns: Deserialized form of entity + """ + pass + + +class NoOpSerializer(Serializer): + """A serializer that does nothing""" + + def serialize_entity(self, context, entity): + return entity + + def deserialize_entity(self, context, entity): + return entity diff --git a/openstack/common/setup.py b/openstack/common/setup.py deleted file mode 100644 index 1b3a127..0000000 --- a/openstack/common/setup.py +++ /dev/null @@ -1,367 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Utilities with minimum-depends for use in setup.py -""" - -from __future__ import print_function - -import email -import os -import re -import subprocess -import sys - -from setuptools.command import sdist - - -def parse_mailmap(mailmap='.mailmap'): - mapping = {} - if os.path.exists(mailmap): - with open(mailmap, 'r') as fp: - for l in fp: - try: - canonical_email, alias = re.match( - r'[^#]*?(<.+>).*(<.+>).*', l).groups() - except AttributeError: - continue - mapping[alias] = canonical_email - return mapping - - -def _parse_git_mailmap(git_dir, mailmap='.mailmap'): - mailmap = os.path.join(os.path.dirname(git_dir), mailmap) - return parse_mailmap(mailmap) - - -def canonicalize_emails(changelog, mapping): - """Takes in a string and an email alias mapping and replaces all - instances of the aliases in the string with their real email. 
- """ - for alias, email_address in mapping.iteritems(): - changelog = changelog.replace(alias, email_address) - return changelog - - -# Get requirements from the first file that exists -def get_reqs_from_files(requirements_files): - for requirements_file in requirements_files: - if os.path.exists(requirements_file): - with open(requirements_file, 'r') as fil: - return fil.read().split('\n') - return [] - - -def parse_requirements(requirements_files=['requirements.txt', - 'tools/pip-requires']): - requirements = [] - for line in get_reqs_from_files(requirements_files): - # For the requirements list, we need to inject only the portion - # after egg= so that distutils knows the package it's looking for - # such as: - # -e git://github.com/openstack/nova/master#egg=nova - if re.match(r'\s*-e\s+', line): - requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', - line)) - # such as: - # http://github.com/openstack/nova/zipball/master#egg=nova - elif re.match(r'\s*https?:', line): - requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1', - line)) - # -f lines are for index locations, and don't get used here - elif re.match(r'\s*-f\s+', line): - pass - # argparse is part of the standard library starting with 2.7 - # adding it to the requirements list screws distro installs - elif line == 'argparse' and sys.version_info >= (2, 7): - pass - else: - requirements.append(line) - - return requirements - - -def parse_dependency_links(requirements_files=['requirements.txt', - 'tools/pip-requires']): - dependency_links = [] - # dependency_links inject alternate locations to find packages listed - # in requirements - for line in get_reqs_from_files(requirements_files): - # skip comments and blank lines - if re.match(r'(\s*#)|(\s*$)', line): - continue - # lines with -e or -f need the whole line, minus the flag - if re.match(r'\s*-[ef]\s+', line): - dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) - # lines that are only urls can go in unmolested - elif 
re.match(r'\s*https?:', line): - dependency_links.append(line) - return dependency_links - - -def _run_shell_command(cmd, throw_on_error=False): - if os.name == 'nt': - output = subprocess.Popen(["cmd.exe", "/C", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - else: - output = subprocess.Popen(["/bin/sh", "-c", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out = output.communicate() - if output.returncode and throw_on_error: - raise Exception("%s returned %d" % cmd, output.returncode) - if not out: - return None - return out[0].strip() or None - - -def _get_git_directory(): - parent_dir = os.path.dirname(__file__) - while True: - git_dir = os.path.join(parent_dir, '.git') - if os.path.exists(git_dir): - return git_dir - parent_dir, child = os.path.split(parent_dir) - if not child: # reached to root dir - return None - - -def write_git_changelog(): - """Write a changelog based on the git changelog.""" - new_changelog = 'ChangeLog' - git_dir = _get_git_directory() - if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'): - if git_dir: - git_log_cmd = 'git --git-dir=%s log' % git_dir - changelog = _run_shell_command(git_log_cmd) - mailmap = _parse_git_mailmap(git_dir) - with open(new_changelog, "w") as changelog_file: - changelog_file.write(canonicalize_emails(changelog, mailmap)) - else: - open(new_changelog, 'w').close() - - -def generate_authors(): - """Create AUTHORS file using git commits.""" - jenkins_email = 'jenkins@review.(openstack|stackforge).org' - old_authors = 'AUTHORS.in' - new_authors = 'AUTHORS' - git_dir = _get_git_directory() - if not os.getenv('SKIP_GENERATE_AUTHORS'): - if git_dir: - # don't include jenkins email address in AUTHORS file - git_log_cmd = ("git --git-dir=" + git_dir + - " log --format='%aN <%aE>' | sort -u | " - "egrep -v '" + jenkins_email + "'") - changelog = _run_shell_command(git_log_cmd) - signed_cmd = ("git --git-dir=" + git_dir + - " log | grep -i Co-authored-by: | sort -u") - signed_entries = 
_run_shell_command(signed_cmd) - if signed_entries: - new_entries = "\n".join( - [signed.split(":", 1)[1].strip() - for signed in signed_entries.split("\n") if signed]) - changelog = "\n".join((changelog, new_entries)) - mailmap = _parse_git_mailmap(git_dir) - with open(new_authors, 'w') as new_authors_fh: - new_authors_fh.write(canonicalize_emails(changelog, mailmap)) - if os.path.exists(old_authors): - with open(old_authors, "r") as old_authors_fh: - new_authors_fh.write('\n' + old_authors_fh.read()) - else: - open(new_authors, 'w').close() - - -_rst_template = """%(heading)s -%(underline)s - -.. automodule:: %(module)s - :members: - :undoc-members: - :show-inheritance: -""" - - -def get_cmdclass(): - """Return dict of commands to run from setup.py.""" - - cmdclass = dict() - - def _find_modules(arg, dirname, files): - for filename in files: - if filename.endswith('.py') and filename != '__init__.py': - arg["%s.%s" % (dirname.replace('/', '.'), - filename[:-3])] = True - - class LocalSDist(sdist.sdist): - """Builds the ChangeLog and Authors files from VC first.""" - - def run(self): - write_git_changelog() - generate_authors() - # sdist.sdist is an old style class, can't use super() - sdist.sdist.run(self) - - cmdclass['sdist'] = LocalSDist - - # If Sphinx is installed on the box running setup.py, - # enable setup.py to build the documentation, otherwise, - # just ignore it - try: - from sphinx.setup_command import BuildDoc - - class LocalBuildDoc(BuildDoc): - - builders = ['html', 'man'] - - def generate_autoindex(self): - print("**Autodocumenting from %s" % os.path.abspath(os.curdir)) - modules = {} - option_dict = self.distribution.get_option_dict('build_sphinx') - source_dir = os.path.join(option_dict['source_dir'][1], 'api') - if not os.path.exists(source_dir): - os.makedirs(source_dir) - for pkg in self.distribution.packages: - if '.' 
not in pkg: - os.path.walk(pkg, _find_modules, modules) - module_list = modules.keys() - module_list.sort() - autoindex_filename = os.path.join(source_dir, 'autoindex.rst') - with open(autoindex_filename, 'w') as autoindex: - autoindex.write(""".. toctree:: - :maxdepth: 1 - -""") - for module in module_list: - output_filename = os.path.join(source_dir, - "%s.rst" % module) - heading = "The :mod:`%s` Module" % module - underline = "=" * len(heading) - values = dict(module=module, heading=heading, - underline=underline) - - print("Generating %s" % output_filename) - with open(output_filename, 'w') as output_file: - output_file.write(_rst_template % values) - autoindex.write(" %s.rst\n" % module) - - def run(self): - if not os.getenv('SPHINX_DEBUG'): - self.generate_autoindex() - - for builder in self.builders: - self.builder = builder - self.finalize_options() - self.project = self.distribution.get_name() - self.version = self.distribution.get_version() - self.release = self.distribution.get_version() - BuildDoc.run(self) - - class LocalBuildLatex(LocalBuildDoc): - builders = ['latex'] - - cmdclass['build_sphinx'] = LocalBuildDoc - cmdclass['build_sphinx_latex'] = LocalBuildLatex - except ImportError: - pass - - return cmdclass - - -def _get_revno(git_dir): - """Return the number of commits since the most recent tag. - - We use git-describe to find this out, but if there are no - tags then we fall back to counting commits since the beginning - of time. 
- """ - describe = _run_shell_command( - "git --git-dir=%s describe --always" % git_dir) - if "-" in describe: - return describe.rsplit("-", 2)[-2] - - # no tags found - revlist = _run_shell_command( - "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir) - return len(revlist.splitlines()) - - -def _get_version_from_git(pre_version): - """Return a version which is equal to the tag that's on the current - revision if there is one, or tag plus number of additional revisions - if the current revision has no tag.""" - - git_dir = _get_git_directory() - if git_dir: - if pre_version: - try: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --exact-match", - throw_on_error=True).replace('-', '.') - except Exception: - sha = _run_shell_command( - "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h") - return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha) - else: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --always").replace( - '-', '.') - return None - - -def _get_version_from_pkg_info(package_name): - """Get the version from PKG-INFO file if we can.""" - try: - pkg_info_file = open('PKG-INFO', 'r') - except (IOError, OSError): - return None - try: - pkg_info = email.message_from_file(pkg_info_file) - except email.MessageError: - return None - # Check to make sure we're in our own dir - if pkg_info.get('Name', None) != package_name: - return None - return pkg_info.get('Version', None) - - -def get_version(package_name, pre_version=None): - """Get the version of the project. First, try getting it from PKG-INFO, if - it exists. If it does, that means we're in a distribution tarball or that - install has happened. Otherwise, if there is no PKG-INFO file, pull the - version from git. - - We do not support setup.py version sanity in git archive tarballs, nor do - we support packagers directly sucking our git repo into theirs. 
We expect - that a source tarball be made from our git repo - or that if someone wants - to make a source tarball from a fork of our repo with additional tags in it - that they understand and desire the results of doing that. - """ - version = os.environ.get("OSLO_PACKAGE_VERSION", None) - if version: - return version - version = _get_version_from_pkg_info(package_name) - if version: - return version - version = _get_version_from_git(pre_version) - if version: - return version - raise Exception("Versioning for this project requires either an sdist" - " tarball, or access to an upstream git repository.") diff --git a/openstack/common/version.py b/openstack/common/version.py deleted file mode 100644 index a94548f..0000000 --- a/openstack/common/version.py +++ /dev/null @@ -1,94 +0,0 @@ - -# Copyright 2012 OpenStack Foundation -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Utilities for consuming the version from pkg_resources. 
-""" - -import pkg_resources - - -class VersionInfo(object): - - def __init__(self, package): - """Object that understands versioning for a package - :param package: name of the python package, such as glance, or - python-glanceclient - """ - self.package = package - self.release = None - self.version = None - self._cached_version = None - - def __str__(self): - """Make the VersionInfo object behave like a string.""" - return self.version_string() - - def __repr__(self): - """Include the name.""" - return "VersionInfo(%s:%s)" % (self.package, self.version_string()) - - def _get_version_from_pkg_resources(self): - """Get the version of the package from the pkg_resources record - associated with the package.""" - try: - requirement = pkg_resources.Requirement.parse(self.package) - provider = pkg_resources.get_provider(requirement) - return provider.version - except pkg_resources.DistributionNotFound: - # The most likely cause for this is running tests in a tree - # produced from a tarball where the package itself has not been - # installed into anything. Revert to setup-time logic. - from openstack.common import setup - return setup.get_version(self.package) - - def release_string(self): - """Return the full version of the package including suffixes indicating - VCS status. - """ - if self.release is None: - self.release = self._get_version_from_pkg_resources() - - return self.release - - def version_string(self): - """Return the short version minus any alpha/beta tags.""" - if self.version is None: - parts = [] - for part in self.release_string().split('.'): - if part[0].isdigit(): - parts.append(part) - else: - break - self.version = ".".join(parts) - - return self.version - - # Compatibility functions - canonical_version_string = version_string - version_string_with_vcs = release_string - - def cached_version_string(self, prefix=""): - """Generate an object which will expand in a string context to - the results of version_string(). 
We do this so that don't - call into pkg_resources every time we start up a program when - passing version information into the CONF constructor, but - rather only do the calculation when and if a version is requested - """ - if not self._cached_version: - self._cached_version = "%s%s" % (prefix, - self.version_string()) - return self._cached_version diff --git a/openstack/common/wsgi.py b/openstack/common/wsgi.py index d184963..80d4b9b 100644 --- a/openstack/common/wsgi.py +++ b/openstack/common/wsgi.py @@ -172,6 +172,17 @@ class Middleware(object): behavior. """ + @classmethod + def factory(cls, global_conf, **local_conf): + """ + Factory method for paste.deploy + """ + + def filter(app): + return cls(app) + + return filter + def __init__(self, application): self.application = application @@ -1,3 +1,47 @@ +[metadata] +name = openstack.common +version = 2013.2 +summary = OpenStack Common Libraries +description-file = + README.rst +author = OpenStack +author-email = openstack-dev@lists.openstack.org +home-page = http://www.openstack.org/ +classifier = + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 2.6 + +[files] +packages = + openstack +namespace_packages = + openstack + +[global] +setup-hooks = + pbr.hooks.setup_hook + +[entry_points] +console_scripts = + oslo-rootwrap = openstack.common.rootwrap.cmd:main + oslo-rpc-zmq-receiver = openstack.common.rpc.zmq_receiver:main + +openstack.common.scheduler.filters = + AvailabilityZoneFilter = openstack.common.scheduler.filters.availability_zone_filter:AvailabilityZoneFilter + CapabilitiesFilter = openstack.common.scheduler.filters.capabilities_filter:CapabilitiesFilter + JsonFilter = 
openstack.common.scheduler.filters.json_filter:JsonFilter + +openstack.common.tests.fakes.weights = + FakeWeigher1 = tests.unit.fakes:FakeWeigher1 + FakeWeigher2 = tests.unit.fakes:FakeWeigher2 + [nosetests] # NOTE(jkoelker) To run the test suite under nose install the following # coverage http://pypi.python.org/pypi/coverage @@ -1,7 +1,5 @@ -#!/usr/bin/python -# -*- encoding: utf-8 -*- -# Copyright (c) 2012 OpenStack Foundation. -# Copyright (c) 2012-2013 Hewlett-Packard Development Company, L.P. +#!/usr/bin/env python +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,59 +16,6 @@ import setuptools -from openstack.common import setup - -requires = setup.parse_requirements() -depend_links = setup.parse_dependency_links() -package = 'openstack.common' - -filters = [ - "AvailabilityZoneFilter = " - "openstack.common.scheduler.filters." - "availability_zone_filter:AvailabilityZoneFilter", - "CapabilitiesFilter = " - "openstack.common.scheduler.filters." 
- "capabilities_filter:CapabilitiesFilter", - "JsonFilter = " - "openstack.common.scheduler.filters.json_filter:JsonFilter", -] - -weights = [ - "FakeWeigher1 = tests.unit.fakes:FakeWeigher1", - "FakeWeigher2 = tests.unit.fakes:FakeWeigher2", -] - setuptools.setup( - name=package, - version=setup.get_version(package, '2013.1'), - description="Common components for Openstack", - long_description="Common components for Openstack " - "including paster templates.", - classifiers=[ - 'Development Status :: 4 - Beta', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 2.6', - 'Environment :: No Input/Output (Daemon)', ], - keywords='openstack', - author='OpenStack', - author_email='openstack@lists.launchpad.net', - url='http://www.openstack.org/', - license='Apache Software License', - packages=setuptools.find_packages(exclude=['ez_setup', - 'examples', 'tests']), - include_package_data=True, - cmdclass=setup.get_cmdclass(), - zip_safe=True, - install_requires=requires, - dependency_links=depend_links, - entry_points={ - "openstack.common.scheduler.filters": filters, - "openstack.common.tests.fakes.weights": weights, - "console_scripts": [ - 'oslo-rootwrap = openstack.common.rootwrap.cmd:main', - 'oslo-rpc-zmq-receiver = openstack.common.rpc.zmq_receiver:main', - ] - }, - namespace_packages=['openstack'], -) + setup_requires=['d2to1>=0.2.10,<0.3', 'pbr>=0.5,<0.6'], + d2to1=True) diff --git a/tests/unit/db/sqlalchemy/test_sqlalchemy.py b/tests/unit/db/sqlalchemy/test_sqlalchemy.py index 7cbe31d..b18825a 100644 --- a/tests/unit/db/sqlalchemy/test_sqlalchemy.py +++ b/tests/unit/db/sqlalchemy/test_sqlalchemy.py @@ -37,6 +37,51 @@ class TmpTable(BASE, models.ModelBase): foo = Column(Integer) +class SessionParametersTestCase(test_utils.BaseTestCase): + + def test_deprecated_session_parameters(self): + paths = self.create_tempfiles([('test', """[DEFAULT] +sql_connection=x://y.z 
+sql_min_pool_size=10 +sql_max_pool_size=20 +sql_max_retries=30 +sql_retry_interval=40 +sql_max_overflow=50 +sql_connection_debug=60 +sql_connection_trace=True +""")]) + test_utils.CONF(['--config-file', paths[0]]) + self.assertEquals(test_utils.CONF.database.connection, 'x://y.z') + self.assertEquals(test_utils.CONF.database.min_pool_size, 10) + self.assertEquals(test_utils.CONF.database.max_pool_size, 20) + self.assertEquals(test_utils.CONF.database.max_retries, 30) + self.assertEquals(test_utils.CONF.database.retry_interval, 40) + self.assertEquals(test_utils.CONF.database.max_overflow, 50) + self.assertEquals(test_utils.CONF.database.connection_debug, 60) + self.assertEquals(test_utils.CONF.database.connection_trace, True) + + def test_session_parameters(self): + paths = self.create_tempfiles([('test', """[database] +connection=x://y.z +min_pool_size=10 +max_pool_size=20 +max_retries=30 +retry_interval=40 +max_overflow=50 +connection_debug=60 +connection_trace=True +""")]) + test_utils.CONF(['--config-file', paths[0]]) + self.assertEquals(test_utils.CONF.database.connection, 'x://y.z') + self.assertEquals(test_utils.CONF.database.min_pool_size, 10) + self.assertEquals(test_utils.CONF.database.max_pool_size, 20) + self.assertEquals(test_utils.CONF.database.max_retries, 30) + self.assertEquals(test_utils.CONF.database.retry_interval, 40) + self.assertEquals(test_utils.CONF.database.max_overflow, 50) + self.assertEquals(test_utils.CONF.database.connection_debug, 60) + self.assertEquals(test_utils.CONF.database.connection_trace, True) + + class SessionErrorWrapperTestCase(test_utils.BaseTestCase): def setUp(self): super(SessionErrorWrapperTestCase, self).setUp() diff --git a/tests/unit/db/test_api.py b/tests/unit/db/test_api.py index a31ffd0..f6e0d4c 100644 --- a/tests/unit/db/test_api.py +++ b/tests/unit/db/test_api.py @@ -32,9 +32,33 @@ class DBAPI(object): class DBAPITestCase(test_utils.BaseTestCase): + + def test_deprecated_dbapi_parameters(self): + paths = 
self.create_tempfiles([('test', + '[DEFAULT]\n' + 'db_backend=test_123\n' + 'dbapi_use_tpool=True\n' + )]) + + test_utils.CONF(['--config-file', paths[0]]) + self.assertEquals(test_utils.CONF.database.backend, 'test_123') + self.assertEquals(test_utils.CONF.database.use_tpool, True) + + def test_dbapi_parameters(self): + paths = self.create_tempfiles([('test', + '[database]\n' + 'backend=test_123\n' + 'use_tpool=True\n' + )]) + + test_utils.CONF(['--config-file', paths[0]]) + self.assertEquals(test_utils.CONF.database.backend, 'test_123') + self.assertEquals(test_utils.CONF.database.use_tpool, True) + def test_dbapi_api_class_method_and_tpool_false(self): backend_mapping = {'test_known': 'tests.unit.db.test_api'} - self.config(db_backend='test_known', dbapi_use_tpool=False) + self.config(backend='test_known', use_tpool=False, + group='database') info = dict(tpool=False) orig_execute = tpool.execute @@ -53,7 +77,8 @@ class DBAPITestCase(test_utils.BaseTestCase): def test_dbapi_api_class_method_and_tpool_true(self): backend_mapping = {'test_known': 'tests.unit.db.test_api'} - self.config(db_backend='test_known', dbapi_use_tpool=True) + self.config(backend='test_known', use_tpool=True, + group='database') info = dict(tpool=False) orig_execute = tpool.execute @@ -71,14 +96,16 @@ class DBAPITestCase(test_utils.BaseTestCase): self.assertTrue(info['tpool']) def test_dbapi_full_path_module_method(self): - self.config(db_backend='tests.unit.db.test_api') + self.config(backend='tests.unit.db.test_api', + group='database') dbapi = api.DBAPI() result = dbapi.api_class_call1(1, 2, kwarg1='meow') expected = ((1, 2), {'kwarg1': 'meow'}) self.assertEqual(expected, result) def test_dbapi_unknown_invalid_backend(self): - self.config(db_backend='tests.unit.db.not_existant') + self.config(backend='tests.unit.db.not_existent', + group='database') dbapi = api.DBAPI() def call_it(): diff --git a/tests/unit/middleware/test_correlation_id.py b/tests/unit/middleware/test_correlation_id.py 
new file mode 100644 index 0000000..070c23e --- /dev/null +++ b/tests/unit/middleware/test_correlation_id.py @@ -0,0 +1,51 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (c) 2013 Rackspace Hosting +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock + +from openstack.common.middleware import correlation_id +from openstack.common import uuidutils +from tests import utils + + +class CorrelationIdMiddlewareTest(utils.BaseTestCase): + + def test_process_request(self): + app = mock.Mock() + req = mock.Mock() + req.headers = {} + original_method = uuidutils.generate_uuid + mock_generate_uuid = mock.Mock() + mock_generate_uuid.return_value = "fake_uuid" + uuidutils.generate_uuid = mock_generate_uuid + + middleware = correlation_id.CorrelationIdMiddleware(app) + middleware(req) + uuidutils.generate_uuid = original_method + + self.assertEquals(req.headers.get("X_CORRELATION_ID"), "fake_uuid") + + def test_process_request_should_not_regenerate_correlation_id(self): + app = mock.Mock() + req = mock.Mock() + req.headers = {"X_CORRELATION_ID": "correlation_id"} + + middleware = correlation_id.CorrelationIdMiddleware(app) + middleware(req) + + self.assertEquals(req.headers.get("X_CORRELATION_ID"), + "correlation_id") diff --git a/tests/unit/rpc/test_dispatcher.py b/tests/unit/rpc/test_dispatcher.py index 8998a90..a09d358 100644 --- a/tests/unit/rpc/test_dispatcher.py +++ b/tests/unit/rpc/test_dispatcher.py @@ -18,9 +18,12 @@ Unit 
Tests for rpc.dispatcher """ +import mox + from openstack.common import context from openstack.common.rpc import common as rpc_common from openstack.common.rpc import dispatcher +from openstack.common.rpc import serializer as rpc_serializer from tests import utils @@ -35,6 +38,7 @@ class RpcDispatcherTestCase(utils.BaseTestCase): def test_method(self, ctxt, arg1): self.test_method_ctxt = ctxt self.test_method_arg1 = arg1 + return 'fake-result' class API2(object): RPC_API_VERSION = '2.1' @@ -73,6 +77,11 @@ class RpcDispatcherTestCase(utils.BaseTestCase): def setUp(self): super(RpcDispatcherTestCase, self).setUp() self.ctxt = context.RequestContext('fake_user', 'fake_project') + self.mox = mox.Mox() + + def cleanUp(self): + super(RpcDispatcherTestCase, self).cleanUp() + self.mox.VerifyAll() def _test_dispatch(self, version, expectations): v2 = self.API2() @@ -158,3 +167,21 @@ self.assertEqual(v1.test_method_arg1, None) self.assertEqual(v4.test_method_ctxt, self.ctxt) self.assertEqual(v4.test_method_arg1, 1) + + def test_serializer(self): + api = self.API1() + serializer = rpc_serializer.NoOpSerializer() + + self.mox.StubOutWithMock(serializer, 'serialize_entity') + self.mox.StubOutWithMock(serializer, 'deserialize_entity') + + serializer.deserialize_entity(self.ctxt, 1).AndReturn(1) + serializer.serialize_entity(self.ctxt, 'fake-result').AndReturn( + 'worked!') + + self.mox.ReplayAll() + + disp = dispatcher.RpcDispatcher([api], serializer) + result = disp.dispatch(self.ctxt, '1.0', 'test_method', + None, arg1=1) + self.assertEqual(result, 'worked!') diff --git a/tests/unit/rpc/test_proxy.py b/tests/unit/rpc/test_proxy.py index 63360ba..9427cbe 100644 --- a/tests/unit/rpc/test_proxy.py +++ b/tests/unit/rpc/test_proxy.py @@ -19,6 +19,7 @@ Unit Tests for rpc.proxy """ import copy +import mox import six @@ -27,11 +28,20 @@ from openstack.common import lockutils from openstack.common import rpc from openstack.common.rpc 
import common as rpc_common from openstack.common.rpc import proxy +from openstack.common.rpc import serializer as rpc_serializer from tests import utils class RpcProxyTestCase(utils.BaseTestCase): + def setUp(self): + super(RpcProxyTestCase, self).setUp() + self.mox = mox.Mox() + + def cleanUp(self): + super(RpcProxyTestCase, self).cleanUp() + self.mox.VerifyAll() + def _test_rpc_method(self, rpc_method, has_timeout=False, has_retval=False, server_params=None, supports_topic_override=True): topic = 'fake_topic' @@ -158,8 +168,44 @@ class RpcProxyTestCase(utils.BaseTestCase): 'args': {'a': 1, 'b': 2}} self.assertEqual(msg, expected) - def test_make_msg(self): - msg = proxy.RpcProxy.make_msg('test_method', a=1, b=2) + def test_make_msg_with_no_namespace(self): + proxy_obj = proxy.RpcProxy('fake', '1.0') + msg = proxy_obj.make_msg('test_method', a=1, b=2) expected = {'method': 'test_method', 'namespace': None, 'args': {'a': 1, 'b': 2}} self.assertEqual(msg, expected) + + def test_make_msg_with_namespace(self): + class TestProxy(proxy.RpcProxy): + RPC_API_NAMESPACE = 'meow' + + proxy_obj = TestProxy('fake', '1.0') + msg = proxy_obj.make_msg('test_method', a=1, b=2) + expected = {'method': 'test_method', 'namespace': 'meow', + 'args': {'a': 1, 'b': 2}} + self.assertEqual(msg, expected) + + def test_serializer(self): + ctxt = context.RequestContext('fake', 'fake') + serializer = rpc_serializer.NoOpSerializer() + + self.mox.StubOutWithMock(serializer, 'serialize_entity') + self.mox.StubOutWithMock(serializer, 'deserialize_entity') + self.mox.StubOutWithMock(rpc, 'call') + + serializer.serialize_entity(ctxt, 1).AndReturn(1) + serializer.serialize_entity(ctxt, 2).AndReturn(2) + rpc.call(ctxt, 'fake', + {'args': {'a': 1, 'b': 2}, + 'namespace': None, + 'method': 'foo', + 'version': '1.0'}, + None).AndReturn('foo') + serializer.deserialize_entity(ctxt, 'foo').AndReturn('worked!') + + self.mox.ReplayAll() + + rpc_proxy = proxy.RpcProxy('fake', '1.0', serializer=serializer) + 
msg = rpc_proxy.make_msg('foo', a=1, b=2) + result = rpc_proxy.call(ctxt, msg) + self.assertEqual(result, 'worked!') diff --git a/tests/unit/test_setup.py b/tests/unit/test_setup.py deleted file mode 100644 index 626d71b..0000000 --- a/tests/unit/test_setup.py +++ /dev/null @@ -1,257 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import io -import os -import StringIO -import sys -from tempfile import mkstemp - -import fixtures - -from openstack.common import setup -from tests import utils - - -class DiveDir(fixtures.Fixture): - """Dive into given directory and return back on cleanup. - - :ivar path: The target directory. - """ - - def __init__(self, path): - self.path = path - - def setUp(self): - super(DiveDir, self).setUp() - self.old_path = os.getcwd() - os.chdir(self.path) - self.addCleanup(os.chdir, self.old_path) - - -class EmailTestCase(utils.BaseTestCase): - - def test_str_dict_replace(self): - string = 'Johnnie T. 
Hozer' - mapping = {'T.': 'The'} - self.assertEqual('Johnnie The Hozer', - setup.canonicalize_emails(string, mapping)) - - -class MailmapTestCase(utils.BaseTestCase): - - def setUp(self): - super(MailmapTestCase, self).setUp() - self.useFixture(fixtures.NestedTempfile()) - (fd, self.mailmap) = mkstemp(prefix='openstack', suffix='.setup') - - def test_mailmap_with_fullname(self): - with open(self.mailmap, 'w') as mm_fh: - mm_fh.write("Foo Bar <email@foo.com> Foo Bar <email@bar.com>\n") - self.assertEqual({'<email@bar.com>': '<email@foo.com>'}, - setup.parse_mailmap(self.mailmap)) - - def test_mailmap_with_firstname(self): - with open(self.mailmap, 'w') as mm_fh: - mm_fh.write("Foo <email@foo.com> Foo <email@bar.com>\n") - self.assertEqual({'<email@bar.com>': '<email@foo.com>'}, - setup.parse_mailmap(self.mailmap)) - - def test_mailmap_with_noname(self): - with open(self.mailmap, 'w') as mm_fh: - mm_fh.write("<email@foo.com> <email@bar.com>\n") - self.assertEqual({'<email@bar.com>': '<email@foo.com>'}, - setup.parse_mailmap(self.mailmap)) - - -class GitLogsTest(utils.BaseTestCase): - - def setUp(self): - super(GitLogsTest, self).setUp() - temp_path = self.useFixture(fixtures.TempDir()).path - self.useFixture(DiveDir(temp_path)) - - @staticmethod - def _root_dir(): - # NOTE(yamahata): get root directory of repository - # NOTE(aababilov): use openstack.common.setup.__file__ - # because openstack/common/setup.py uses this - # variable to find the root. - # Do not use test_setup.__file__ variable because - # openstack package can be installed somewhere and - # its location will differ from tests' one. 
- import openstack.common.setup - return os.path.dirname(os.path.dirname(os.path.dirname( - openstack.common.setup.__file__))) - - def test_write_git_changelog(self): - root_dir = self._root_dir() - exist_files = [os.path.join(root_dir, f) for f in ".git", ".mailmap"] - self.useFixture(fixtures.MonkeyPatch( - "os.path.exists", - lambda path: os.path.abspath(path) in exist_files)) - self.useFixture(fixtures.FakePopen(lambda _: { - "stdout": StringIO.StringIO("Author: Foo Bar <email@bar.com>\n") - })) - - builtin_open = open - - def _fake_open(name, mode): - if name.endswith('.mailmap'): - # StringIO.StringIO doesn't have __exit__ (at least python 2.6) - return io.BytesIO("Foo Bar <email@foo.com> <email@bar.com>\n") - return builtin_open(name, mode) - self.useFixture(fixtures.MonkeyPatch("__builtin__.open", _fake_open)) - - setup.write_git_changelog() - - with open("ChangeLog", "r") as ch_fh: - self.assertTrue("email@foo.com" in ch_fh.read()) - - def _fake_log_output(self, cmd, mapping): - for (k, v) in mapping.items(): - if cmd.startswith(k): - return v - return "" - - def test_generate_authors(self): - author_old = "Foo Foo <email@foo.com>" - author_new = "Bar Bar <email@bar.com>" - co_author = "Foo Bar <foo@bar.com>" - co_author_by = "Co-authored-by: " + co_author - - root_dir = self._root_dir() - - git_log_cmd = ("git --git-dir=%s log --format" % - os.path.join(root_dir, '.git')) - git_co_log_cmd = ("git --git-dir=%s log" % - os.path.join(root_dir, '.git')) - cmd_map = { - git_log_cmd: author_new, - git_co_log_cmd: co_author_by, - } - - exist_files = [os.path.join(root_dir, ".git"), - os.path.abspath("AUTHORS.in")] - self.useFixture(fixtures.MonkeyPatch( - "os.path.exists", - lambda path: os.path.abspath(path) in exist_files)) - - self.useFixture(fixtures.FakePopen(lambda proc_args: { - "stdout": StringIO.StringIO( - self._fake_log_output(proc_args["args"][2], cmd_map)) - })) - - with open("AUTHORS.in", "w") as auth_fh: - auth_fh.write(author_old) - - 
setup.generate_authors() - - with open("AUTHORS", "r") as auth_fh: - authors = auth_fh.read() - self.assertTrue(author_old in authors) - self.assertTrue(author_new in authors) - self.assertTrue(co_author in authors) - - -class GetCmdClassTest(utils.BaseTestCase): - - def test_get_cmdclass(self): - cmdclass = setup.get_cmdclass() - - self.assertTrue("sdist" in cmdclass) - build_sphinx = cmdclass.get("build_sphinx") - if build_sphinx: - self.useFixture(fixtures.MonkeyPatch( - "sphinx.setup_command.BuildDoc.run", lambda self: None)) - from distutils.dist import Distribution - distr = Distribution() - distr.packages = ("fake_package",) - distr.command_options["build_sphinx"] = {"source_dir": ["a", "."]} - pkg_fixture = fixtures.PythonPackage( - "fake_package", [("fake_module.py", "")]) - self.useFixture(pkg_fixture) - self.useFixture(DiveDir(pkg_fixture.base)) - - build_doc = build_sphinx(distr) - build_doc.run() - - self.assertTrue( - os.path.exists("api/autoindex.rst")) - self.assertTrue( - os.path.exists("api/fake_package.fake_module.rst")) - - -class ParseRequirementsTest(utils.BaseTestCase): - - def setUp(self): - super(ParseRequirementsTest, self).setUp() - self.useFixture(fixtures.NestedTempfile()) - (fd, self.tmp_file) = mkstemp(prefix='openstack', suffix='.setup') - - def test_parse_requirements_normal(self): - with open(self.tmp_file, 'w') as fh: - fh.write("foo\nbar") - self.assertEqual(['foo', 'bar'], - setup.parse_requirements([self.tmp_file])) - - def test_parse_requirements_with_git_egg_url(self): - with open(self.tmp_file, 'w') as fh: - fh.write("-e git://foo.com/zipball#egg=bar") - self.assertEqual(['bar'], setup.parse_requirements([self.tmp_file])) - - def test_parse_requirements_with_http_egg_url(self): - with open(self.tmp_file, 'w') as fh: - fh.write("https://foo.com/zipball#egg=bar") - self.assertEqual(['bar'], setup.parse_requirements([self.tmp_file])) - - def test_parse_requirements_removes_index_lines(self): - with open(self.tmp_file, 'w') as 
fh: - fh.write("-f foobar") - self.assertEqual([], setup.parse_requirements([self.tmp_file])) - - def test_parse_requirements_removes_argparse(self): - with open(self.tmp_file, 'w') as fh: - fh.write("argparse") - if sys.version_info >= (2, 7): - self.assertEqual([], setup.parse_requirements([self.tmp_file])) - - def test_get_requirement_from_file_empty(self): - actual = setup.get_reqs_from_files([]) - self.assertEqual([], actual) - - -class ParseDependencyLinksTest(utils.BaseTestCase): - - def setUp(self): - super(ParseDependencyLinksTest, self).setUp() - self.useFixture(fixtures.NestedTempfile()) - (fd, self.tmp_file) = mkstemp(prefix="openstack", suffix=".setup") - - def test_parse_dependency_normal(self): - with open(self.tmp_file, "w") as fh: - fh.write("http://test.com\n") - self.assertEqual( - ["http://test.com"], - setup.parse_dependency_links([self.tmp_file])) - - def test_parse_dependency_with_git_egg_url(self): - with open(self.tmp_file, "w") as fh: - fh.write("-e git://foo.com/zipball#egg=bar") - self.assertEqual( - ["git://foo.com/zipball#egg=bar"], - setup.parse_dependency_links([self.tmp_file])) diff --git a/tests/unit/test_version.py b/tests/unit/test_version.py deleted file mode 100644 index 7f80de3..0000000 --- a/tests/unit/test_version.py +++ /dev/null @@ -1,75 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import StringIO -import sys - -from oslo.config import cfg - -from openstack.common import version -from tests import utils - - -class DeferredVersionTestCase(utils.BaseTestCase): - - def setUp(self): - super(DeferredVersionTestCase, self).setUp() - self.conf = cfg.ConfigOpts() - - def test_cached_version(self): - class MyVersionInfo(version.VersionInfo): - def _get_version_from_pkg_resources(self): - return "5.5.5.5" - - deferred_string = MyVersionInfo("openstack").\ - cached_version_string() - self.conf([], project="project", prog="prog", version=deferred_string) - self.assertEquals("5.5.5.5", str(self.conf.version)) - - def test_print_cached_version(self): - class MyVersionInfo(version.VersionInfo): - def _get_version_from_pkg_resources(self): - return "5.5.5.5" - - deferred_string = MyVersionInfo("openstack")\ - .cached_version_string() - self.stubs.Set(sys, 'stderr', StringIO.StringIO()) - self.assertRaises(SystemExit, - self.conf, ['--version'], - project="project", - prog="prog", - version=deferred_string) - self.assertEquals("5.5.5.5", sys.stderr.getvalue().strip()) - - def test_print_cached_version_with_long_string(self): - my_version = "11111222223333344444555556666677777888889999900000" - - class MyVersionInfo(version.VersionInfo): - def _get_version_from_pkg_resources(self): - return my_version - - deferred_string = MyVersionInfo("openstack")\ - .cached_version_string() - - for i in range(50): - self.stubs.Set(sys, 'stderr', StringIO.StringIO()) - self.assertRaises(SystemExit, - self.conf, ['--version'], - project="project", - prog="prog", - version=deferred_string) - self.assertEquals(my_version, sys.stderr.getvalue().strip()) diff --git a/tests/utils.py b/tests/utils.py index c5c7c00..4682428 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -17,6 +17,9 @@ """Common utilities used in testing""" +import os +import tempfile + import fixtures 
from oslo.config import cfg import testtools @@ -36,6 +39,8 @@ class BaseTestCase(testtools.TestCase): self.useFixture(fixtures.FakeLogger('openstack.common')) self.useFixture(fixtures.Timeout(30, True)) self.stubs.Set(exception, '_FATAL_EXCEPTION_FORMAT_ERRORS', True) + self.useFixture(fixtures.NestedTempfile()) + self.tempdirs = [] def tearDown(self): super(BaseTestCase, self).tearDown() @@ -43,6 +48,21 @@ class BaseTestCase(testtools.TestCase): self.stubs.UnsetAll() self.stubs.SmartUnsetAll() + def create_tempfiles(self, files, ext='.conf'): + tempfiles = [] + for (basename, contents) in files: + if not os.path.isabs(basename): + (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) + else: + path = basename + ext + fd = os.open(path, os.O_CREAT | os.O_WRONLY) + tempfiles.append(path) + try: + os.write(fd, contents) + finally: + os.close(fd) + return tempfiles + def config(self, **kw): """ Override some configuration values. diff --git a/tools/flakes.py b/tools/flakes.py deleted file mode 100644 index 191bd6e..0000000 --- a/tools/flakes.py +++ /dev/null @@ -1,24 +0,0 @@ -""" - wrapper for pyflakes to ignore gettext based warning: - "undefined name '_'" - - Synced in from openstack-common -""" - -__all__ = ['main'] - -import __builtin__ as builtins -import sys - -import pyflakes.api -from pyflakes import checker - - -def main(): - checker.Checker.builtIns = (set(dir(builtins)) | - set(['_']) | - set(checker._MAGIC_GLOBALS)) - sys.exit(pyflakes.api.main()) - -if __name__ == "__main__": - main() diff --git a/tools/pip-requires b/tools/pip-requires index b31640b..067af58 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,3 +1,5 @@ +d2to1>=0.2.10,<0.3 +pbr>=0.5,<0.6 PasteDeploy==1.5.0 WebOb==1.2.3 eventlet>=0.12.0 diff --git a/tools/test-requires b/tools/test-requires index 93f93e0..62c0eea 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -3,7 +3,8 @@ distribute>=0.6.24 coverage fixtures>=0.3.12 -flake8 +flake8==2.0 +hacking>=0.5.3,<0.6 
mock mox==0.5.3 mysql-python @@ -12,6 +13,8 @@ nose-exclude nosexcover openstack.nose_plugin nosehtmloutput +pep8==1.4.5 +pyflakes==0.7.2 pylint pyzmq==2.2.0.1 redis @@ -17,6 +17,7 @@ commands = [flake8] show-source = True +ignore = H201,H202,H302,H304,H306,H401,H402,H403,H404 exclude = .venv,.tox,dist,doc,*.egg,.update-venv [testenv:pep8] |