author     Davanum Srinivas <dims@linux.vnet.ibm.com>   2013-04-23 09:54:22 -0400
committer  Davanum Srinivas <dims@linux.vnet.ibm.com>   2013-05-20 21:05:22 -0400
commit     4ff33b0390fff7e623be7c2242002e32a47eb855 (patch)
tree       5b126191bc62466fa1f55768b2dc1f345172a39a
parent     97bb81ddbcc47343c78e0a6efe724878fcb35ecb (diff)
Specify database group instead of DEFAULT
At the request of the Quantum folks, switch the database-related options
from the DEFAULT group to the 'database' group. This will help with
migration etc.

DocImpact
Fixes LP# 1171837
Change-Id: If602a6a7cc0f2a202632dd14574fea60dce4b589
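For deployers, the practical effect is that database settings move from the
[DEFAULT] section to a [database] section of the configuration file, while the
old names keep working as deprecated aliases. A sketch only (the values are
examples, and the exact option set a project exposes depends on which oslo
code it has synced):

    # before this change
    [DEFAULT]
    db_backend = sqlalchemy
    sql_connection = mysql://user:pass@host/mydb

    # after this change (old spellings still honoured as deprecated aliases)
    [database]
    backend = sqlalchemy
    connection = mysql://user:pass@host/mydb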
-rw-r--r--  openstack/common/db/api.py                   19
-rw-r--r--  openstack/common/db/sqlalchemy/session.py    84
-rw-r--r--  tests/unit/db/sqlalchemy/test_sqlalchemy.py  45
-rw-r--r--  tests/unit/db/test_api.py                    35
-rw-r--r--  tests/utils.py                               20
5 files changed, 162 insertions(+), 41 deletions(-)
diff --git a/openstack/common/db/api.py b/openstack/common/db/api.py
index 5603bb5..dcadecd 100644
--- a/openstack/common/db/api.py
+++ b/openstack/common/db/api.py
@@ -19,8 +19,9 @@
Supported configuration options:
-`db_backend`: DB backend name or full module path to DB backend module.
-`dbapi_use_tpool`: Enable thread pooling of DB API calls.
+The following two parameters are in the 'database' group:
+`backend`: DB backend name or full module path to DB backend module.
+`use_tpool`: Enable thread pooling of DB API calls.
A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
@@ -44,17 +45,21 @@ from openstack.common import lockutils
db_opts = [
- cfg.StrOpt('db_backend',
+ cfg.StrOpt('backend',
default='sqlalchemy',
+ deprecated_name='db_backend',
+ deprecated_group='DEFAULT',
help='The backend to use for db'),
- cfg.BoolOpt('dbapi_use_tpool',
+ cfg.BoolOpt('use_tpool',
default=False,
+ deprecated_name='dbapi_use_tpool',
+ deprecated_group='DEFAULT',
help='Enable the experimental use of thread pooling for '
'all DB API calls')
]
CONF = cfg.CONF
-CONF.register_opts(db_opts)
+CONF.register_opts(db_opts, 'database')
class DBAPI(object):
@@ -75,8 +80,8 @@ class DBAPI(object):
if self.__backend:
# Another thread assigned it
return self.__backend
- backend_name = CONF.db_backend
- self.__use_tpool = CONF.dbapi_use_tpool
+ backend_name = CONF.database.backend
+ self.__use_tpool = CONF.database.use_tpool
if self.__use_tpool:
from eventlet import tpool
self.__tpool = tpool
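As the updated api.py docstring notes, a DB backend module only has to expose
a get_backend() callable that takes no arguments and returns any object
implementing the DB API methods. A minimal sketch of such a module (module and
method names here are illustrative, not part of this commit):

    # my_backend.py -- hypothetical backend module
    import sys


    def do_something(context, value):
        # Stand-in for a real DB API method.
        return value


    def get_backend():
        # Any object exposing the DB API methods will do; the module itself works.
        return sys.modules[__name__]

With this module's import path configured as backend in the [database] group,
api.DBAPI() resolves it on first use and proxies calls such as
dbapi.do_something(ctx, 1) to it, optionally through eventlet's tpool when
use_tpool is enabled.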
diff --git a/openstack/common/db/sqlalchemy/session.py b/openstack/common/db/sqlalchemy/session.py
index a66ce9f..ccce7d3 100644
--- a/openstack/common/db/sqlalchemy/session.py
+++ b/openstack/common/db/sqlalchemy/session.py
@@ -260,53 +260,76 @@ from openstack.common import log as logging
from openstack.common.gettextutils import _
from openstack.common import timeutils
+DEFAULT = 'DEFAULT'
-sql_opts = [
- cfg.StrOpt('sql_connection',
+sqlite_db_opts = [
+ cfg.StrOpt('sqlite_db',
+ default='oslo.sqlite',
+ help='the filename to use with sqlite'),
+ cfg.BoolOpt('sqlite_synchronous',
+ default=True,
+ help='If true, use synchronous mode for sqlite'),
+]
+
+database_opts = [
+ cfg.StrOpt('connection',
default='sqlite:///' +
os.path.abspath(os.path.join(os.path.dirname(__file__),
'../', '$sqlite_db')),
help='The SQLAlchemy connection string used to connect to the '
'database',
+ deprecated_name='sql_connection',
+ deprecated_group=DEFAULT,
secret=True),
- cfg.StrOpt('sqlite_db',
- default='oslo.sqlite',
- help='the filename to use with sqlite'),
- cfg.IntOpt('sql_idle_timeout',
+ cfg.IntOpt('idle_timeout',
default=3600,
+ deprecated_name='sql_idle_timeout',
+ deprecated_group=DEFAULT,
help='timeout before idle sql connections are reaped'),
- cfg.BoolOpt('sqlite_synchronous',
- default=True,
- help='If passed, use synchronous mode for sqlite'),
- cfg.IntOpt('sql_min_pool_size',
+ cfg.IntOpt('min_pool_size',
default=1,
+ deprecated_name='sql_min_pool_size',
+ deprecated_group=DEFAULT,
help='Minimum number of SQL connections to keep open in a '
'pool'),
- cfg.IntOpt('sql_max_pool_size',
+ cfg.IntOpt('max_pool_size',
default=5,
+ deprecated_name='sql_max_pool_size',
+ deprecated_group=DEFAULT,
help='Maximum number of SQL connections to keep open in a '
'pool'),
- cfg.IntOpt('sql_max_retries',
+ cfg.IntOpt('max_retries',
default=10,
+ deprecated_name='sql_max_retries',
+ deprecated_group=DEFAULT,
help='maximum db connection retries during startup. '
'(setting -1 implies an infinite retry count)'),
- cfg.IntOpt('sql_retry_interval',
+ cfg.IntOpt('retry_interval',
default=10,
+ deprecated_name='sql_retry_interval',
+ deprecated_group=DEFAULT,
help='interval between retries of opening a sql connection'),
- cfg.IntOpt('sql_max_overflow',
+ cfg.IntOpt('max_overflow',
default=None,
+ deprecated_name='sql_max_overflow',
+ deprecated_group=DEFAULT,
help='If set, use this value for max_overflow with sqlalchemy'),
- cfg.IntOpt('sql_connection_debug',
+ cfg.IntOpt('connection_debug',
default=0,
+ deprecated_name='sql_connection_debug',
+ deprecated_group=DEFAULT,
help='Verbosity of SQL debugging information. 0=None, '
'100=Everything'),
- cfg.BoolOpt('sql_connection_trace',
+ cfg.BoolOpt('connection_trace',
default=False,
+ deprecated_name='sql_connection_trace',
+ deprecated_group=DEFAULT,
help='Add python stack traces to SQL as comment strings'),
]
CONF = cfg.CONF
-CONF.register_opts(sql_opts)
+CONF.register_opts(sqlite_db_opts)
+CONF.register_opts(database_opts, 'database')
LOG = logging.getLogger(__name__)
_ENGINE = None
@@ -315,8 +338,9 @@ _MAKER = None
def set_defaults(sql_connection, sqlite_db):
"""Set defaults for configuration variables."""
- cfg.set_defaults(sql_opts,
- sql_connection=sql_connection,
+ cfg.set_defaults(database_opts,
+ connection=sql_connection)
+ cfg.set_defaults(sqlite_db_opts,
sqlite_db=sqlite_db)
@@ -470,7 +494,7 @@ def get_engine(sqlite_fk=False):
"""Return a SQLAlchemy engine."""
global _ENGINE
if _ENGINE is None:
- _ENGINE = create_engine(CONF.sql_connection,
+ _ENGINE = create_engine(CONF.database.connection,
sqlite_fk=sqlite_fk)
return _ENGINE
@@ -533,15 +557,15 @@ def create_engine(sql_connection, sqlite_fk=False):
connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
engine_args = {
- "pool_recycle": CONF.sql_idle_timeout,
+ "pool_recycle": CONF.database.idle_timeout,
"echo": False,
'convert_unicode': True,
}
# Map our SQL debug level to SQLAlchemy's options
- if CONF.sql_connection_debug >= 100:
+ if CONF.database.connection_debug >= 100:
engine_args['echo'] = 'debug'
- elif CONF.sql_connection_debug >= 50:
+ elif CONF.database.connection_debug >= 50:
engine_args['echo'] = True
if "sqlite" in connection_dict.drivername:
@@ -549,13 +573,13 @@ def create_engine(sql_connection, sqlite_fk=False):
engine_args["listeners"] = [SqliteForeignKeysListener()]
engine_args["poolclass"] = NullPool
- if CONF.sql_connection == "sqlite://":
+ if CONF.database.connection == "sqlite://":
engine_args["poolclass"] = StaticPool
engine_args["connect_args"] = {'check_same_thread': False}
else:
- engine_args['pool_size'] = CONF.sql_max_pool_size
- if CONF.sql_max_overflow is not None:
- engine_args['max_overflow'] = CONF.sql_max_overflow
+ engine_args['pool_size'] = CONF.database.max_pool_size
+ if CONF.database.max_overflow is not None:
+ engine_args['max_overflow'] = CONF.database.max_overflow
engine = sqlalchemy.create_engine(sql_connection, **engine_args)
@@ -569,7 +593,7 @@ def create_engine(sql_connection, sqlite_fk=False):
_synchronous_switch_listener)
sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener)
- if (CONF.sql_connection_trace and
+ if (CONF.database.connection_trace and
engine.dialect.dbapi.__name__ == 'MySQLdb'):
_patch_mysqldb_with_stacktrace_comments()
@@ -579,7 +603,7 @@ def create_engine(sql_connection, sqlite_fk=False):
if not _is_db_connection_error(e.args[0]):
raise
- remaining = CONF.sql_max_retries
+ remaining = CONF.database.max_retries
if remaining == -1:
remaining = 'infinite'
while True:
@@ -587,7 +611,7 @@ def create_engine(sql_connection, sqlite_fk=False):
LOG.warn(msg % remaining)
if remaining != 'infinite':
remaining -= 1
- time.sleep(CONF.sql_retry_interval)
+ time.sleep(CONF.database.retry_interval)
try:
engine.connect()
break
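The backward compatibility in the session.py hunks above relies entirely on
oslo.config's deprecated_name/deprecated_group handling: an option registered
under the 'database' group still picks up a value set under its old [DEFAULT]
name. A self-contained sketch of that mechanism (option value and file
contents are illustrative):

    from oslo.config import cfg

    opts = [
        cfg.StrOpt('connection',
                   default='sqlite://',
                   deprecated_name='sql_connection',
                   deprecated_group='DEFAULT',
                   help='The SQLAlchemy connection string'),
    ]

    conf = cfg.ConfigOpts()
    conf.register_opts(opts, 'database')
    # Given a config file that still only sets the old name:
    #   [DEFAULT]
    #   sql_connection = mysql://host/mydb
    # parsing it with conf(['--config-file', path]) still yields
    # conf.database.connection == 'mysql://host/mydb'.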
diff --git a/tests/unit/db/sqlalchemy/test_sqlalchemy.py b/tests/unit/db/sqlalchemy/test_sqlalchemy.py
index 7cbe31d..b18825a 100644
--- a/tests/unit/db/sqlalchemy/test_sqlalchemy.py
+++ b/tests/unit/db/sqlalchemy/test_sqlalchemy.py
@@ -37,6 +37,51 @@ class TmpTable(BASE, models.ModelBase):
foo = Column(Integer)
+class SessionParametersTestCase(test_utils.BaseTestCase):
+
+ def test_deprecated_session_parameters(self):
+ paths = self.create_tempfiles([('test', """[DEFAULT]
+sql_connection=x://y.z
+sql_min_pool_size=10
+sql_max_pool_size=20
+sql_max_retries=30
+sql_retry_interval=40
+sql_max_overflow=50
+sql_connection_debug=60
+sql_connection_trace=True
+""")])
+ test_utils.CONF(['--config-file', paths[0]])
+ self.assertEquals(test_utils.CONF.database.connection, 'x://y.z')
+ self.assertEquals(test_utils.CONF.database.min_pool_size, 10)
+ self.assertEquals(test_utils.CONF.database.max_pool_size, 20)
+ self.assertEquals(test_utils.CONF.database.max_retries, 30)
+ self.assertEquals(test_utils.CONF.database.retry_interval, 40)
+ self.assertEquals(test_utils.CONF.database.max_overflow, 50)
+ self.assertEquals(test_utils.CONF.database.connection_debug, 60)
+ self.assertEquals(test_utils.CONF.database.connection_trace, True)
+
+ def test_session_parameters(self):
+ paths = self.create_tempfiles([('test', """[database]
+connection=x://y.z
+min_pool_size=10
+max_pool_size=20
+max_retries=30
+retry_interval=40
+max_overflow=50
+connection_debug=60
+connection_trace=True
+""")])
+ test_utils.CONF(['--config-file', paths[0]])
+ self.assertEquals(test_utils.CONF.database.connection, 'x://y.z')
+ self.assertEquals(test_utils.CONF.database.min_pool_size, 10)
+ self.assertEquals(test_utils.CONF.database.max_pool_size, 20)
+ self.assertEquals(test_utils.CONF.database.max_retries, 30)
+ self.assertEquals(test_utils.CONF.database.retry_interval, 40)
+ self.assertEquals(test_utils.CONF.database.max_overflow, 50)
+ self.assertEquals(test_utils.CONF.database.connection_debug, 60)
+ self.assertEquals(test_utils.CONF.database.connection_trace, True)
+
+
class SessionErrorWrapperTestCase(test_utils.BaseTestCase):
def setUp(self):
super(SessionErrorWrapperTestCase, self).setUp()
diff --git a/tests/unit/db/test_api.py b/tests/unit/db/test_api.py
index a31ffd0..f6e0d4c 100644
--- a/tests/unit/db/test_api.py
+++ b/tests/unit/db/test_api.py
@@ -32,9 +32,33 @@ class DBAPI(object):
class DBAPITestCase(test_utils.BaseTestCase):
+
+ def test_deprecated_dbapi_parameters(self):
+ paths = self.create_tempfiles([('test',
+ '[DEFAULT]\n'
+ 'db_backend=test_123\n'
+ 'dbapi_use_tpool=True\n'
+ )])
+
+ test_utils.CONF(['--config-file', paths[0]])
+ self.assertEquals(test_utils.CONF.database.backend, 'test_123')
+ self.assertEquals(test_utils.CONF.database.use_tpool, True)
+
+ def test_dbapi_parameters(self):
+ paths = self.create_tempfiles([('test',
+ '[database]\n'
+ 'backend=test_123\n'
+ 'use_tpool=True\n'
+ )])
+
+ test_utils.CONF(['--config-file', paths[0]])
+ self.assertEquals(test_utils.CONF.database.backend, 'test_123')
+ self.assertEquals(test_utils.CONF.database.use_tpool, True)
+
def test_dbapi_api_class_method_and_tpool_false(self):
backend_mapping = {'test_known': 'tests.unit.db.test_api'}
- self.config(db_backend='test_known', dbapi_use_tpool=False)
+ self.config(backend='test_known', use_tpool=False,
+ group='database')
info = dict(tpool=False)
orig_execute = tpool.execute
@@ -53,7 +77,8 @@ class DBAPITestCase(test_utils.BaseTestCase):
def test_dbapi_api_class_method_and_tpool_true(self):
backend_mapping = {'test_known': 'tests.unit.db.test_api'}
- self.config(db_backend='test_known', dbapi_use_tpool=True)
+ self.config(backend='test_known', use_tpool=True,
+ group='database')
info = dict(tpool=False)
orig_execute = tpool.execute
@@ -71,14 +96,16 @@ class DBAPITestCase(test_utils.BaseTestCase):
self.assertTrue(info['tpool'])
def test_dbapi_full_path_module_method(self):
- self.config(db_backend='tests.unit.db.test_api')
+ self.config(backend='tests.unit.db.test_api',
+ group='database')
dbapi = api.DBAPI()
result = dbapi.api_class_call1(1, 2, kwarg1='meow')
expected = ((1, 2), {'kwarg1': 'meow'})
self.assertEqual(expected, result)
def test_dbapi_unknown_invalid_backend(self):
- self.config(db_backend='tests.unit.db.not_existant')
+ self.config(backend='tests.unit.db.not_existent',
+ group='database')
dbapi = api.DBAPI()
def call_it():
diff --git a/tests/utils.py b/tests/utils.py
index c5c7c00..4682428 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -17,6 +17,9 @@
"""Common utilities used in testing"""
+import os
+import tempfile
+
import fixtures
from oslo.config import cfg
import testtools
@@ -36,6 +39,8 @@ class BaseTestCase(testtools.TestCase):
self.useFixture(fixtures.FakeLogger('openstack.common'))
self.useFixture(fixtures.Timeout(30, True))
self.stubs.Set(exception, '_FATAL_EXCEPTION_FORMAT_ERRORS', True)
+ self.useFixture(fixtures.NestedTempfile())
+ self.tempdirs = []
def tearDown(self):
super(BaseTestCase, self).tearDown()
@@ -43,6 +48,21 @@ class BaseTestCase(testtools.TestCase):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
+ def create_tempfiles(self, files, ext='.conf'):
+ tempfiles = []
+ for (basename, contents) in files:
+ if not os.path.isabs(basename):
+ (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext)
+ else:
+ path = basename + ext
+ fd = os.open(path, os.O_CREAT | os.O_WRONLY)
+ tempfiles.append(path)
+ try:
+ os.write(fd, contents)
+ finally:
+ os.close(fd)
+ return tempfiles
+
def config(self, **kw):
"""
Override some configuration values.