Diffstat (limited to 'keystone')
-rw-r--r--  keystone/tests/__init__.py  0
-rw-r--r--  keystone/tests/_ldap_livetest.py  160
-rw-r--r--  keystone/tests/_ldap_tls_livetest.py  113
-rw-r--r--  keystone/tests/_sql_livetest.py  45
-rw-r--r--  keystone/tests/_test_import_auth_token.py  25
-rw-r--r--  keystone/tests/auth_plugin_external_disabled.conf  2
-rw-r--r--  keystone/tests/auth_plugin_external_domain.conf  3
-rw-r--r--  keystone/tests/backend_db2.conf  4
-rw-r--r--  keystone/tests/backend_ldap.conf  9
-rw-r--r--  keystone/tests/backend_ldap_sql.conf  37
-rw-r--r--  keystone/tests/backend_liveldap.conf  17
-rw-r--r--  keystone/tests/backend_mysql.conf  4
-rw-r--r--  keystone/tests/backend_pam.conf  6
-rw-r--r--  keystone/tests/backend_postgresql.conf  4
-rw-r--r--  keystone/tests/backend_sql.conf  27
-rw-r--r--  keystone/tests/backend_sql_disk.conf  2
-rw-r--r--  keystone/tests/backend_tls_liveldap.conf  21
-rw-r--r--  keystone/tests/core.py (renamed from keystone/test.py)  4
-rw-r--r--  keystone/tests/default_catalog.templates  14
-rw-r--r--  keystone/tests/default_fixtures.py  124
-rw-r--r--  keystone/tests/legacy_d5.mysql  281
-rw-r--r--  keystone/tests/legacy_d5.sqlite  277
-rw-r--r--  keystone/tests/legacy_diablo.mysql  281
-rw-r--r--  keystone/tests/legacy_diablo.sqlite  283
-rw-r--r--  keystone/tests/legacy_essex.mysql  309
-rw-r--r--  keystone/tests/legacy_essex.sqlite  313
-rw-r--r--  keystone/tests/test_auth.py  851
-rw-r--r--  keystone/tests/test_auth_plugin.conf  3
-rw-r--r--  keystone/tests/test_auth_plugin.py  106
-rw-r--r--  keystone/tests/test_backend.py  2892
-rw-r--r--  keystone/tests/test_backend_kvs.py  119
-rw-r--r--  keystone/tests/test_backend_ldap.py  745
-rw-r--r--  keystone/tests/test_backend_memcache.py  186
-rw-r--r--  keystone/tests/test_backend_pam.py  68
-rw-r--r--  keystone/tests/test_backend_sql.py  415
-rw-r--r--  keystone/tests/test_backend_templated.py  67
-rw-r--r--  keystone/tests/test_catalog.py  77
-rw-r--r--  keystone/tests/test_cert_setup.py  101
-rw-r--r--  keystone/tests/test_config.py  19
-rw-r--r--  keystone/tests/test_content_types.py  1104
-rw-r--r--  keystone/tests/test_contrib_s3_core.py  61
-rw-r--r--  keystone/tests/test_contrib_stats_core.py  45
-rw-r--r--  keystone/tests/test_drivers.py  57
-rw-r--r--  keystone/tests/test_exception.py  163
-rw-r--r--  keystone/tests/test_import_legacy.py  120
-rw-r--r--  keystone/tests/test_injection.py  211
-rw-r--r--  keystone/tests/test_ipv6.py  51
-rw-r--r--  keystone/tests/test_keystoneclient.py  1174
-rw-r--r--  keystone/tests/test_keystoneclient_sql.py  175
-rw-r--r--  keystone/tests/test_middleware.py  163
-rw-r--r--  keystone/tests/test_no_admin_token_auth.py  47
-rw-r--r--  keystone/tests/test_overrides.conf  20
-rw-r--r--  keystone/tests/test_pki_token_provider.conf  2
-rw-r--r--  keystone/tests/test_policy.py  191
-rw-r--r--  keystone/tests/test_s3_token_middleware.py  233
-rw-r--r--  keystone/tests/test_serializer.py  297
-rw-r--r--  keystone/tests/test_singular_plural.py  52
-rw-r--r--  keystone/tests/test_sizelimit.py  57
-rw-r--r--  keystone/tests/test_sql_core.py  182
-rw-r--r--  keystone/tests/test_sql_migrate_extensions.py  47
-rw-r--r--  keystone/tests/test_sql_upgrade.py  1378
-rw-r--r--  keystone/tests/test_ssl.py  154
-rw-r--r--  keystone/tests/test_token_bind.py  182
-rw-r--r--  keystone/tests/test_token_provider.py  439
-rw-r--r--  keystone/tests/test_url_middleware.py  56
-rw-r--r--  keystone/tests/test_utils.py  66
-rw-r--r--  keystone/tests/test_uuid_token_provider.conf  2
-rw-r--r--  keystone/tests/test_v3.py  971
-rw-r--r--  keystone/tests/test_v3_auth.py  1860
-rw-r--r--  keystone/tests/test_v3_catalog.py  165
-rw-r--r--  keystone/tests/test_v3_credential.py  78
-rw-r--r--  keystone/tests/test_v3_identity.py  1557
-rw-r--r--  keystone/tests/test_v3_policy.py  59
-rw-r--r--  keystone/tests/test_v3_protection.py  308
-rw-r--r--  keystone/tests/test_versions.py  422
-rw-r--r--  keystone/tests/test_wsgi.py  213
-rw-r--r--  keystone/tests/tmp/.gitkeep  0
77 files changed, 20374 insertions, 2 deletions
diff --git a/keystone/tests/__init__.py b/keystone/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone/tests/__init__.py
diff --git a/keystone/tests/_ldap_livetest.py b/keystone/tests/_ldap_livetest.py
new file mode 100644
index 00000000..59da4e66
--- /dev/null
+++ b/keystone/tests/_ldap_livetest.py
@@ -0,0 +1,160 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ldap
+import ldap.modlist
+import subprocess
+
+from keystone.common import ldap as ldap_common
+from keystone import config
+from keystone import exception
+from keystone.identity.backends import ldap as identity_ldap
+from keystone.tests import core as test
+
+import test_backend_ldap
+
+
+CONF = config.CONF
+
+
+def create_object(dn, attrs):
+ conn = ldap.initialize(CONF.ldap.url)
+ conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
+ ldif = ldap.modlist.addModlist(attrs)
+ conn.add_s(dn, ldif)
+ conn.unbind_s()
+
+
+class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
+
+ def clear_database(self):
+ devnull = open('/dev/null', 'w')
+ subprocess.call(['ldapdelete',
+ '-x',
+ '-D', CONF.ldap.user,
+ '-H', CONF.ldap.url,
+ '-w', CONF.ldap.password,
+ '-r', CONF.ldap.suffix],
+ stderr=devnull)
+
+ if CONF.ldap.suffix.startswith('ou='):
+ tree_dn_attrs = {'objectclass': 'organizationalUnit',
+ 'ou': 'openstack'}
+ else:
+ tree_dn_attrs = {'objectclass': ['dcObject', 'organizationalUnit'],
+ 'dc': 'openstack',
+ 'ou': 'openstack'}
+ create_object(CONF.ldap.suffix, tree_dn_attrs)
+ create_object(CONF.ldap.user_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Users'})
+ create_object(CONF.ldap.role_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Roles'})
+ create_object(CONF.ldap.tenant_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'Projects'})
+ create_object(CONF.ldap.group_tree_dn,
+ {'objectclass': 'organizationalUnit',
+ 'ou': 'UserGroups'})
+
+ def _set_config(self):
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_liveldap.conf')])
+
+ def test_build_tree(self):
+ """Regression test for building the tree names
+ """
+ #logic is different from the fake backend.
+ user_api = identity_ldap.UserApi(CONF)
+ self.assertTrue(user_api)
+ self.assertEquals(user_api.tree_dn, CONF.ldap.user_tree_dn)
+
+ def tearDown(self):
+ test.TestCase.tearDown(self)
+
+ def test_user_enable_attribute_mask(self):
+ self.skipTest('Test is for Active Directory Only')
+
+ def test_ldap_dereferencing(self):
+ alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'],
+ 'ou': 'alt_users'}
+ alt_fake_user_ldif = {'objectclass': ['person', 'inetOrgPerson'],
+ 'cn': 'alt_fake1',
+ 'sn': 'alt_fake1'}
+ aliased_users_ldif = {'objectclass': ['alias', 'extensibleObject'],
+ 'aliasedobjectname': "ou=alt_users,%s" %
+ CONF.ldap.suffix}
+ create_object("ou=alt_users,%s" % CONF.ldap.suffix, alt_users_ldif)
+ create_object("%s=alt_fake1,ou=alt_users,%s" %
+ (CONF.ldap.user_id_attribute, CONF.ldap.suffix),
+ alt_fake_user_ldif)
+ create_object("ou=alt_users,%s" % CONF.ldap.user_tree_dn,
+ aliased_users_ldif)
+
+ CONF.ldap.query_scope = 'sub'
+ CONF.ldap.alias_dereferencing = 'never'
+ self.identity_api = identity_ldap.Identity()
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'alt_fake1')
+
+ CONF.ldap.alias_dereferencing = 'searching'
+ self.identity_api = identity_ldap.Identity()
+ user_ref = self.identity_api.get_user('alt_fake1')
+ self.assertEqual(user_ref['id'], 'alt_fake1')
+
+ CONF.ldap.alias_dereferencing = 'always'
+ self.identity_api = identity_ldap.Identity()
+ user_ref = self.identity_api.get_user('alt_fake1')
+ self.assertEqual(user_ref['id'], 'alt_fake1')
+
+ def test_base_ldap_connection_deref_option(self):
+ deref = ldap_common.parse_deref('default')
+ ldap_wrapper = ldap_common.LdapWrapper(CONF.ldap.url,
+ CONF.ldap.page_size,
+ alias_dereferencing=deref)
+ self.assertEqual(ldap.get_option(ldap.OPT_DEREF),
+ ldap_wrapper.conn.get_option(ldap.OPT_DEREF))
+
+ deref = ldap_common.parse_deref('always')
+ ldap_wrapper = ldap_common.LdapWrapper(CONF.ldap.url,
+ CONF.ldap.page_size,
+ alias_dereferencing=deref)
+ self.assertEqual(ldap.DEREF_ALWAYS,
+ ldap_wrapper.conn.get_option(ldap.OPT_DEREF))
+
+ deref = ldap_common.parse_deref('finding')
+ ldap_wrapper = ldap_common.LdapWrapper(CONF.ldap.url,
+ CONF.ldap.page_size,
+ alias_dereferencing=deref)
+ self.assertEqual(ldap.DEREF_FINDING,
+ ldap_wrapper.conn.get_option(ldap.OPT_DEREF))
+
+ deref = ldap_common.parse_deref('never')
+ ldap_wrapper = ldap_common.LdapWrapper(CONF.ldap.url,
+ CONF.ldap.page_size,
+ alias_dereferencing=deref)
+ self.assertEqual(ldap.DEREF_NEVER,
+ ldap_wrapper.conn.get_option(ldap.OPT_DEREF))
+
+ deref = ldap_common.parse_deref('searching')
+ ldap_wrapper = ldap_common.LdapWrapper(CONF.ldap.url,
+ CONF.ldap.page_size,
+ alias_dereferencing=deref)
+ self.assertEqual(ldap.DEREF_SEARCHING,
+ ldap_wrapper.conn.get_option(ldap.OPT_DEREF))
diff --git a/keystone/tests/_ldap_tls_livetest.py b/keystone/tests/_ldap_tls_livetest.py
new file mode 100644
index 00000000..f1c43453
--- /dev/null
+++ b/keystone/tests/_ldap_tls_livetest.py
@@ -0,0 +1,113 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack LLC
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ldap
+import ldap.modlist
+
+from keystone import config
+from keystone import exception
+from keystone import identity
+from keystone.tests import core as test
+
+import _ldap_livetest
+
+
+CONF = config.CONF
+
+
+def create_object(dn, attrs):
+ conn = ldap.initialize(CONF.ldap.url)
+ conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
+ ldif = ldap.modlist.addModlist(attrs)
+ conn.add_s(dn, ldif)
+ conn.unbind_s()
+
+
+class LiveTLSLDAPIdentity(_ldap_livetest.LiveLDAPIdentity):
+
+ def _set_config(self):
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_tls_liveldap.conf')])
+
+ def test_tls_certfile_demand_option(self):
+ CONF.ldap.use_tls = True
+ CONF.ldap.tls_cacertdir = None
+ CONF.ldap.tls_req_cert = 'demand'
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['id'], 'fake1')
+
+ user['password'] = 'fakepass2'
+ self.identity_api.update_user('fake1', user)
+
+ self.identity_api.delete_user('fake1')
+ self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
+ 'fake1')
+
+ def test_tls_certdir_demand_option(self):
+ CONF.ldap.use_tls = True
+ CONF.ldap.tls_cacertfile = None
+ CONF.ldap.tls_req_cert = 'demand'
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['id'], 'fake1')
+
+ user['password'] = 'fakepass2'
+ self.identity_api.update_user('fake1', user)
+
+ self.identity_api.delete_user('fake1')
+ self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
+ 'fake1')
+
+ def test_tls_bad_certfile(self):
+ CONF.ldap.use_tls = True
+ CONF.ldap.tls_req_cert = 'demand'
+ CONF.ldap.tls_cacertfile = '/etc/keystone/ssl/certs/mythicalcert.pem'
+ CONF.ldap.tls_cacertdir = None
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.assertRaises(IOError, self.identity_api.create_user, 'fake', user)
+
+ def test_tls_bad_certdir(self):
+ CONF.ldap.use_tls = True
+ CONF.ldap.tls_cacertfile = None
+ CONF.ldap.tls_req_cert = 'demand'
+ CONF.ldap.tls_cacertdir = '/etc/keystone/ssl/mythicalcertdir'
+ self.identity_api = identity.backends.ldap.Identity()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.assertRaises(IOError, self.identity_api.create_user, 'fake', user)
diff --git a/keystone/tests/_sql_livetest.py b/keystone/tests/_sql_livetest.py
new file mode 100644
index 00000000..a271ce7c
--- /dev/null
+++ b/keystone/tests/_sql_livetest.py
@@ -0,0 +1,45 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 Red Hat, Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import test_sql_upgrade
+
+from keystone import config
+
+CONF = config.CONF
+
+
+class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def config_files(self):
+ files = (test_sql_upgrade.SqlUpgradeTests.
+ _config_file_list[:])
+ files.append("backend_postgresql.conf")
+ return files
+
+
+class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def config_files(self):
+ files = (test_sql_upgrade.SqlUpgradeTests.
+ _config_file_list[:])
+ files.append("backend_mysql.conf")
+ return files
+
+
+class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests):
+ def config_files(self):
+ files = (test_sql_upgrade.SqlUpgradeTests.
+ _config_file_list[:])
+ files.append("backend_db2.conf")
+ return files
diff --git a/keystone/tests/_test_import_auth_token.py b/keystone/tests/_test_import_auth_token.py
new file mode 100644
index 00000000..4e16f9a4
--- /dev/null
+++ b/keystone/tests/_test_import_auth_token.py
@@ -0,0 +1,25 @@
+"""This is an isolated test to prevent unexpected imports.
+
+This module must be run in isolation, e.g.:
+
+ $ ./run_tests.sh _test_import_auth_token.py
+
+This module can be removed when keystone.middleware.auth_token is removed.
+
+"""
+
+import unittest
+
+
+class TestAuthToken(unittest.TestCase):
+ def test_import(self):
+ # a consuming service like nova would import oslo.config first
+ from oslo.config import cfg
+ conf = cfg.CONF
+
+ # define some config options
+ conf.register_opt(cfg.BoolOpt('debug', default=False))
+
+ # and then import auth_token as a filter
+ from keystone.middleware import auth_token
+ self.assertTrue(auth_token)
diff --git a/keystone/tests/auth_plugin_external_disabled.conf b/keystone/tests/auth_plugin_external_disabled.conf
new file mode 100644
index 00000000..fed281d4
--- /dev/null
+++ b/keystone/tests/auth_plugin_external_disabled.conf
@@ -0,0 +1,2 @@
+[auth]
+methods = password, token
diff --git a/keystone/tests/auth_plugin_external_domain.conf b/keystone/tests/auth_plugin_external_domain.conf
new file mode 100644
index 00000000..b7be122f
--- /dev/null
+++ b/keystone/tests/auth_plugin_external_domain.conf
@@ -0,0 +1,3 @@
+[auth]
+methods = external, password, token
+external = keystone.auth.plugins.external.ExternalDomain
diff --git a/keystone/tests/backend_db2.conf b/keystone/tests/backend_db2.conf
new file mode 100644
index 00000000..44032255
--- /dev/null
+++ b/keystone/tests/backend_db2.conf
@@ -0,0 +1,4 @@
+#Used for running the Migrate tests against a live DB2 Server
+#See _sql_livetest.py
+[sql]
+connection = ibm_db_sa://keystone:keystone@/staktest?charset=utf8
diff --git a/keystone/tests/backend_ldap.conf b/keystone/tests/backend_ldap.conf
new file mode 100644
index 00000000..6b3f8a75
--- /dev/null
+++ b/keystone/tests/backend_ldap.conf
@@ -0,0 +1,9 @@
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+backend_entities = ['Tenant', 'User', 'UserRoleAssociation', 'Role', 'Group', 'Domain']
+suffix = cn=example,cn=com
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity
diff --git a/keystone/tests/backend_ldap_sql.conf b/keystone/tests/backend_ldap_sql.conf
new file mode 100644
index 00000000..5579e75d
--- /dev/null
+++ b/keystone/tests/backend_ldap_sql.conf
@@ -0,0 +1,37 @@
+[sql]
+connection = sqlite://
+#For a file based sqlite use
+#connection = sqlite:////tmp/keystone.db
+#To Test MySQL:
+#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+#To Test PostgreSQL:
+#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
+idle_timeout = 200
+
+[ldap]
+url = fake://memory
+user = cn=Admin
+password = password
+suffix = cn=example,cn=com
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity
+
+[assignment]
+driver = keystone.assignment.backends.sql.Assignment
+
+[token]
+driver = keystone.token.backends.sql.Token
+
+[ec2]
+driver = keystone.contrib.ec2.backends.sql.Ec2
+
+[catalog]
+driver = keystone.catalog.backends.sql.Catalog
+
+[policy]
+driver = keystone.policy.backends.sql.Policy
+
+[trust]
+driver = keystone.trust.backends.sql.Trust
+
diff --git a/keystone/tests/backend_liveldap.conf b/keystone/tests/backend_liveldap.conf
new file mode 100644
index 00000000..297d96d6
--- /dev/null
+++ b/keystone/tests/backend_liveldap.conf
@@ -0,0 +1,17 @@
+[ldap]
+url = ldap://localhost
+user = dc=Manager,dc=openstack,dc=org
+password = test
+suffix = dc=openstack,dc=org
+group_tree_dn = ou=UserGroups,dc=openstack,dc=org
+role_tree_dn = ou=Roles,dc=openstack,dc=org
+tenant_tree_dn = ou=Projects,dc=openstack,dc=org
+user_tree_dn = ou=Users,dc=openstack,dc=org
+tenant_enabled_emulation = True
+user_enabled_emulation = True
+user_mail_attribute = mail
+use_dumb_member = True
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity
+
diff --git a/keystone/tests/backend_mysql.conf b/keystone/tests/backend_mysql.conf
new file mode 100644
index 00000000..ee3b276e
--- /dev/null
+++ b/keystone/tests/backend_mysql.conf
@@ -0,0 +1,4 @@
+#Used for running the Migrate tests against a live Mysql Server
+#See _sql_livetest.py
+[sql]
+connection = mysql://keystone:keystone@localhost/keystone_test?charset=utf8
diff --git a/keystone/tests/backend_pam.conf b/keystone/tests/backend_pam.conf
new file mode 100644
index 00000000..41f868c7
--- /dev/null
+++ b/keystone/tests/backend_pam.conf
@@ -0,0 +1,6 @@
+[pam]
+userid = fakeuser
+password = fakepass
+
+[identity]
+driver = keystone.identity.backends.pam.PamIdentity
diff --git a/keystone/tests/backend_postgresql.conf b/keystone/tests/backend_postgresql.conf
new file mode 100644
index 00000000..8468ad33
--- /dev/null
+++ b/keystone/tests/backend_postgresql.conf
@@ -0,0 +1,4 @@
+#Used for running the Migrate tests against a live Postgresql Server
+#See _sql_livetest.py
+[sql]
+connection = postgresql://keystone:keystone@localhost/keystone_test?client_encoding=utf8
diff --git a/keystone/tests/backend_sql.conf b/keystone/tests/backend_sql.conf
new file mode 100644
index 00000000..0baf610c
--- /dev/null
+++ b/keystone/tests/backend_sql.conf
@@ -0,0 +1,27 @@
+[sql]
+connection = sqlite://
+#For a file based sqlite use
+#connection = sqlite:////tmp/keystone.db
+#To Test MySQL:
+#connection = mysql://keystone:keystone@localhost/keystone?charset=utf8
+#To Test PostgreSQL:
+#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
+idle_timeout = 200
+
+[identity]
+driver = keystone.identity.backends.sql.Identity
+
+[token]
+driver = keystone.token.backends.sql.Token
+
+[ec2]
+driver = keystone.contrib.ec2.backends.sql.Ec2
+
+[catalog]
+driver = keystone.catalog.backends.sql.Catalog
+
+[policy]
+driver = keystone.policy.backends.sql.Policy
+
+[trust]
+driver = keystone.trust.backends.sql.Trust
diff --git a/keystone/tests/backend_sql_disk.conf b/keystone/tests/backend_sql_disk.conf
new file mode 100644
index 00000000..0f8dfea7
--- /dev/null
+++ b/keystone/tests/backend_sql_disk.conf
@@ -0,0 +1,2 @@
+[sql]
+connection = sqlite:///tmp/test.db
diff --git a/keystone/tests/backend_tls_liveldap.conf b/keystone/tests/backend_tls_liveldap.conf
new file mode 100644
index 00000000..409af674
--- /dev/null
+++ b/keystone/tests/backend_tls_liveldap.conf
@@ -0,0 +1,21 @@
+[ldap]
+url = ldap://
+user = dc=Manager,dc=openstack,dc=org
+password = test
+suffix = dc=openstack,dc=org
+group_tree_dn = ou=UserGroups,dc=openstack,dc=org
+role_tree_dn = ou=Roles,dc=openstack,dc=org
+tenant_tree_dn = ou=Projects,dc=openstack,dc=org
+user_tree_dn = ou=Users,dc=openstack,dc=org
+tenant_enabled_emulation = True
+user_enabled_emulation = True
+user_mail_attribute = mail
+use_dumb_member = True
+use_tls = True
+tls_cacertfile = /etc/keystone/ssl/certs/cacert.pem
+tls_cacertdir = /etc/keystone/ssl/certs/
+tls_req_cert = demand
+
+[identity]
+driver = keystone.identity.backends.ldap.Identity
+
diff --git a/keystone/test.py b/keystone/tests/core.py
index 9118b2ea..21dc61dc 100644
--- a/keystone/test.py
+++ b/keystone/tests/core.py
@@ -57,9 +57,9 @@ from keystone import trust
 LOG = logging.getLogger(__name__)
-ROOTDIR = os.path.dirname(os.path.abspath(os.curdir))
+ROOTDIR = os.path.dirname(os.path.abspath('..'))
 VENDOR = os.path.join(ROOTDIR, 'vendor')
-TESTSDIR = os.path.join(ROOTDIR, 'tests')
+TESTSDIR = os.path.join(ROOTDIR, 'keystone', 'tests')
 ETCDIR = os.path.join(ROOTDIR, 'etc')
 TMPDIR = os.path.join(TESTSDIR, 'tmp')
diff --git a/keystone/tests/default_catalog.templates b/keystone/tests/default_catalog.templates
new file mode 100644
index 00000000..f26c949a
--- /dev/null
+++ b/keystone/tests/default_catalog.templates
@@ -0,0 +1,14 @@
+# config for TemplatedCatalog, using camelCase because I don't want to do
+# translations for keystone compat
+catalog.RegionOne.identity.publicURL = http://localhost:$(public_port)s/v2.0
+catalog.RegionOne.identity.adminURL = http://localhost:$(admin_port)s/v2.0
+catalog.RegionOne.identity.internalURL = http://localhost:$(admin_port)s/v2.0
+catalog.RegionOne.identity.name = 'Identity Service'
+catalog.RegionOne.identity.id = 1
+
+# fake compute service for now to help novaclient tests work
+catalog.RegionOne.compute.publicURL = http://localhost:$(compute_port)s/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.adminURL = http://localhost:$(compute_port)s/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.internalURL = http://localhost:$(compute_port)s/v1.1/$(tenant_id)s
+catalog.RegionOne.compute.name = 'Compute Service'
+catalog.RegionOne.compute.id = 2
diff --git a/keystone/tests/default_fixtures.py b/keystone/tests/default_fixtures.py
new file mode 100644
index 00000000..2695da88
--- /dev/null
+++ b/keystone/tests/default_fixtures.py
@@ -0,0 +1,124 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(dolph): please try to avoid additional fixtures if possible; test suite
+# performance may be negatively affected.
+
+from keystone import assignment
+from keystone import config
+
+
+CONF = config.CONF
+
+
+DEFAULT_DOMAIN_ID = config.CONF.identity.default_domain_id
+
+
+TENANTS = [
+ {
+ 'id': 'bar',
+ 'name': 'BAR',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'description': 'description',
+ 'enabled': True,
+ }, {
+ 'id': 'baz',
+ 'name': 'BAZ',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'description': 'description',
+ 'enabled': True,
+ }, {
+ 'id': 'mtu',
+ 'name': 'MTU',
+ 'description': 'description',
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }, {
+ 'id': 'service',
+ 'name': 'service',
+ 'description': 'description',
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+]
+
+# NOTE(ja): a role of keystone_admin is done in setUp
+USERS = [
+ {
+ 'id': 'foo',
+ 'name': 'FOO',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'foo2',
+ 'tenants': ['bar'],
+ 'enabled': True,
+ 'email': 'foo@bar.com',
+ }, {
+ 'id': 'two',
+ 'name': 'TWO',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'two2',
+ 'email': 'two@example.com',
+ 'enabled': True,
+ 'tenant_id': 'baz',
+ 'tenants': ['baz'],
+ 'email': 'two@three.com',
+ }, {
+ 'id': 'badguy',
+ 'name': 'BadGuy',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'bad',
+ 'email': 'bad@guy.com',
+ 'enabled': False,
+ 'tenant_id': 'baz',
+ 'tenants': ['baz'],
+ 'email': 'badguy@goodguy.com',
+ }, {
+ 'id': 'sna',
+ 'name': 'SNA',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'snafu',
+ 'enabled': True,
+ 'tenants': ['bar'],
+ 'email': 'sna@snl.coom',
+ }
+]
+
+ROLES = [
+ {
+ 'id': 'admin',
+ 'name': 'admin',
+ }, {
+ 'id': 'member',
+ 'name': 'Member',
+ }, {
+ 'id': CONF.member_role_id,
+ 'name': CONF.member_role_name,
+ }, {
+ 'id': 'other',
+ 'name': 'Other',
+ }, {
+ 'id': 'browser',
+ 'name': 'Browser',
+ }, {
+ 'id': 'writer',
+ 'name': 'Writer',
+ }, {
+ 'id': 'service',
+ 'name': 'Service',
+ }
+]
+
+DOMAINS = [assignment.DEFAULT_DOMAIN]
diff --git a/keystone/tests/legacy_d5.mysql b/keystone/tests/legacy_d5.mysql
new file mode 100644
index 00000000..57b31feb
--- /dev/null
+++ b/keystone/tests/legacy_d5.mysql
@@ -0,0 +1,281 @@
+-- MySQL dump 10.13 Distrib 5.1.54, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.54-1ubuntu4
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `user_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `type` varchar(20) DEFAULT NULL,
+ `key` varchar(255) DEFAULT NULL,
+ `secret` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `tenant_id` (`tenant_id`),
+ KEY `user_id` (`user_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+LOCK TABLES `credentials` WRITE;
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+INSERT INTO `credentials` VALUES (1,1,1,'EC2','admin','secrete'),(2,2,2,'EC2','demo','secrete');
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `region` varchar(255) DEFAULT NULL,
+ `service_id` int(11) DEFAULT NULL,
+ `public_url` varchar(2000) DEFAULT NULL,
+ `admin_url` varchar(2000) DEFAULT NULL,
+ `internal_url` varchar(2000) DEFAULT NULL,
+ `enabled` tinyint(1) DEFAULT NULL,
+ `is_global` tinyint(1) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `service_id` (`service_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+LOCK TABLES `endpoint_templates` WRITE;
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne',1,'http://10.4.128.10:8774/v1.1/%tenant_id%','http://10.4.128.10:8774/v1.1/%tenant_id%','http://10.4.128.10:8774/v1.1/%tenant_id%',1,1),(2,'RegionOne',2,'http://10.4.128.10:9292/v1.1/%tenant_id%','http://10.4.128.10:9292/v1.1/%tenant_id%','http://10.4.128.10:9292/v1.1/%tenant_id%',1,1),(3,'RegionOne',3,'http://10.4.128.10:5000/v2.0','http://10.4.128.10:35357/v2.0','http://10.4.128.10:5000/v2.0',1,1);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `tenant_id` int(11) DEFAULT NULL,
+ `endpoint_template_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `endpoint_template_id` (`endpoint_template_id`,`tenant_id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+LOCK TABLES `endpoints` WRITE;
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ `service_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`,`service_id`),
+ KEY `service_id` (`service_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+LOCK TABLES `roles` WRITE;
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (1,'Admin',NULL,NULL),(2,'Member',NULL,NULL),(3,'KeystoneAdmin',NULL,NULL),(4,'KeystoneServiceAdmin',NULL,NULL);
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `type` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+LOCK TABLES `services` WRITE;
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES (1,'nova','compute','Nova Compute Service'),(2,'glance','image','Glance Image Service'),(3,'keystone','identity','Keystone Identity Service');
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ `enabled` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+LOCK TABLES `tenants` WRITE;
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (1,'admin',NULL,1),(2,'demo',NULL,1),(3,'invisible_to_admin',NULL,1);
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `token`
+--
+
+DROP TABLE IF EXISTS `token`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `token` (
+ `id` varchar(255) NOT NULL,
+ `user_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `expires` datetime DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id` (`id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `token`
+--
+
+LOCK TABLES `token` WRITE;
+/*!40000 ALTER TABLE `token` DISABLE KEYS */;
+INSERT INTO `token` VALUES ('secrete',1,1,'2015-02-05 00:00:00');
+/*!40000 ALTER TABLE `token` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `user_id` int(11) DEFAULT NULL,
+ `role_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `user_id` (`user_id`,`role_id`,`tenant_id`),
+ KEY `tenant_id` (`tenant_id`),
+ KEY `role_id` (`role_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=8 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+LOCK TABLES `user_roles` WRITE;
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1),(2,2,2,2),(3,2,2,3),(4,1,1,2),(5,1,1,NULL),(6,1,3,NULL),(7,1,4,NULL);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `password` varchar(255) DEFAULT NULL,
+ `email` varchar(255) DEFAULT NULL,
+ `enabled` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`),
+ KEY `tenant_id` (`tenant_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+LOCK TABLES `users` WRITE;
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES (1,'admin','secrete',NULL,1,NULL),(2,'demo','secrete',NULL,1,NULL);
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-14 0:16:40
diff --git a/keystone/tests/legacy_d5.sqlite b/keystone/tests/legacy_d5.sqlite
new file mode 100644
index 00000000..d96dbf40
--- /dev/null
+++ b/keystone/tests/legacy_d5.sqlite
@@ -0,0 +1,277 @@
+begin;
+-- MySQL dump 10.13 Distrib 5.1.54, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.54-1ubuntu4
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `id` integer NOT NULL primary key autoincrement,
+ `user_id` integer NULL,
+ `tenant_id` integer NULL,
+ `type` varchar(20) NULL,
+ `key` varchar(255) NULL,
+ `secret` varchar(255) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+INSERT INTO `credentials` VALUES (1,1,1,'EC2','admin','secrete');
+INSERT INTO `credentials` VALUES (2,2,2,'EC2','demo','secrete');
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `id` integer NOT NULL primary key autoincrement,
+ `region` varchar(255) NULL,
+ `service_id` integer NULL,
+ `public_url` varchar(2000) NULL,
+ `admin_url` varchar(2000) NULL,
+ `internal_url` varchar(2000) NULL,
+ `enabled` integer NULL,
+ `is_global` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne',1,'http://10.4.128.10:8774/v1.1/%tenant_id%','http://10.4.128.10:8774/v1.1/%tenant_id%','http://10.4.128.10:8774/v1.1/%tenant_id%',1,1);
+INSERT INTO `endpoint_templates` VALUES (2,'RegionOne',2,'http://10.4.128.10:9292/v1.1/%tenant_id%','http://10.4.128.10:9292/v1.1/%tenant_id%','http://10.4.128.10:9292/v1.1/%tenant_id%',1,1);
+INSERT INTO `endpoint_templates` VALUES (3,'RegionOne',3,'http://10.4.128.10:5000/v2.0','http://10.4.128.10:35357/v2.0','http://10.4.128.10:5000/v2.0',1,1);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `id` integer NOT NULL primary key autoincrement,
+ `tenant_id` integer NULL,
+ `endpoint_template_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `desc` varchar(255) NULL,
+ `service_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (1,'Admin',NULL,NULL);
+INSERT INTO `roles` VALUES (2,'Member',NULL,NULL);
+INSERT INTO `roles` VALUES (3,'KeystoneAdmin',NULL,NULL);
+INSERT INTO `roles` VALUES (4,'KeystoneServiceAdmin',NULL,NULL);
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `type` varchar(255) NULL,
+ `desc` varchar(255) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES (1,'nova','compute','Nova Compute Service');
+INSERT INTO `services` VALUES (2,'glance','image','Glance Image Service');
+INSERT INTO `services` VALUES (3,'keystone','identity','Keystone Identity Service');
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `desc` varchar(255) NULL,
+ `enabled` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (1,'admin',NULL,1);
+INSERT INTO `tenants` VALUES (2,'demo',NULL,1);
+INSERT INTO `tenants` VALUES (3,'invisible_to_admin',NULL,1);
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `token`
+--
+
+DROP TABLE IF EXISTS `token`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `token` (
+ `id` varchar(255) NOT NULL,
+ `user_id` integer NULL,
+ `tenant_id` integer NULL,
+ `expires` datetime NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `token`
+--
+
+
+/*!40000 ALTER TABLE `token` DISABLE KEYS */;
+INSERT INTO `token` VALUES ('secrete',1,1,'2015-02-05 00:00:00');
+/*!40000 ALTER TABLE `token` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `id` integer NOT NULL primary key autoincrement,
+ `user_id` integer NULL,
+ `role_id` integer NULL,
+ `tenant_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1);
+INSERT INTO `user_roles` VALUES (2,2,2,2);
+INSERT INTO `user_roles` VALUES (3,2,2,3);
+INSERT INTO `user_roles` VALUES (4,1,1,2);
+INSERT INTO `user_roles` VALUES (5,1,1,NULL);
+INSERT INTO `user_roles` VALUES (6,1,3,NULL);
+INSERT INTO `user_roles` VALUES (7,1,4,NULL);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `password` varchar(255) NULL,
+ `email` varchar(255) NULL,
+ `enabled` integer NULL,
+ `tenant_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES (1,'admin','secrete',NULL,1,NULL);
+INSERT INTO `users` VALUES (2,'demo','secrete',NULL,1,NULL);
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-14 0:16:40
+commit;
diff --git a/keystone/tests/legacy_diablo.mysql b/keystone/tests/legacy_diablo.mysql
new file mode 100644
index 00000000..543f439f
--- /dev/null
+++ b/keystone/tests/legacy_diablo.mysql
@@ -0,0 +1,281 @@
+-- MySQL dump 10.13 Distrib 5.1.58, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.58-1ubuntu1
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `user_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `type` varchar(20) DEFAULT NULL,
+ `key` varchar(255) DEFAULT NULL,
+ `secret` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `tenant_id` (`tenant_id`),
+ KEY `user_id` (`user_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+LOCK TABLES `credentials` WRITE;
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+INSERT INTO `credentials` VALUES (1,1,1,'EC2','admin','secrete'),(2,2,2,'EC2','demo','secrete');
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `region` varchar(255) DEFAULT NULL,
+ `service_id` int(11) DEFAULT NULL,
+ `public_url` varchar(2000) DEFAULT NULL,
+ `admin_url` varchar(2000) DEFAULT NULL,
+ `internal_url` varchar(2000) DEFAULT NULL,
+ `enabled` tinyint(1) DEFAULT NULL,
+ `is_global` tinyint(1) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `service_id` (`service_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+LOCK TABLES `endpoint_templates` WRITE;
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne',1,'http://192.168.2.10:8774/v1.1/%tenant_id%','http://192.168.2.10:8774/v1.1/%tenant_id%','http://192.168.2.10:8774/v1.1/%tenant_id%',1,1),(2,'RegionOne',2,'http://192.168.2.10:9292/v1','http://192.168.2.10:9292/v1','http://192.168.2.10:9292/v1',1,1),(3,'RegionOne',3,'http://192.168.2.10:5000/v2.0','http://192.168.2.10:35357/v2.0','http://192.168.2.10:5000/v2.0',1,1),(4,'RegionOne',4,'http://192.168.2.10:8080/v1/AUTH_%tenant_id%','http://192.168.2.10:8080/','http://192.168.2.10:8080/v1/AUTH_%tenant_id%',1,1);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `tenant_id` int(11) DEFAULT NULL,
+ `endpoint_template_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `endpoint_template_id` (`endpoint_template_id`,`tenant_id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+LOCK TABLES `endpoints` WRITE;
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ `service_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`,`service_id`),
+ KEY `service_id` (`service_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+LOCK TABLES `roles` WRITE;
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (1,'Admin',NULL,NULL),(2,'Member',NULL,NULL),(3,'KeystoneAdmin',NULL,NULL),(4,'KeystoneServiceAdmin',NULL,NULL),(5,'sysadmin',NULL,NULL),(6,'netadmin',NULL,NULL);
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `type` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+LOCK TABLES `services` WRITE;
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES (1,'nova','compute','Nova Compute Service'),(2,'glance','image','Glance Image Service'),(3,'keystone','identity','Keystone Identity Service'),(4,'swift','object-store','Swift Service');
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ `enabled` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+LOCK TABLES `tenants` WRITE;
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (1,'admin',NULL,1),(2,'demo',NULL,1),(3,'invisible_to_admin',NULL,1);
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `token`
+--
+
+DROP TABLE IF EXISTS `token`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `token` (
+ `id` varchar(255) NOT NULL,
+ `user_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `expires` datetime DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id` (`id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `token`
+--
+
+LOCK TABLES `token` WRITE;
+/*!40000 ALTER TABLE `token` DISABLE KEYS */;
+INSERT INTO `token` VALUES ('secrete',1,1,'2015-02-05 00:00:00');
+/*!40000 ALTER TABLE `token` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `user_id` int(11) DEFAULT NULL,
+ `role_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `user_id` (`user_id`,`role_id`,`tenant_id`),
+ KEY `tenant_id` (`tenant_id`),
+ KEY `role_id` (`role_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+LOCK TABLES `user_roles` WRITE;
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1),(2,2,2,2),(3,2,5,2),(4,2,6,2),(5,2,2,3),(6,1,1,2),(7,1,1,NULL),(8,1,3,NULL),(9,1,4,NULL);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `password` varchar(255) DEFAULT NULL,
+ `email` varchar(255) DEFAULT NULL,
+ `enabled` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`),
+ KEY `tenant_id` (`tenant_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+LOCK TABLES `users` WRITE;
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES (1,'admin','secrete',NULL,1,NULL),(2,'demo','secrete',NULL,1,NULL);
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-13 17:30:03
diff --git a/keystone/tests/legacy_diablo.sqlite b/keystone/tests/legacy_diablo.sqlite
new file mode 100644
index 00000000..edf15be4
--- /dev/null
+++ b/keystone/tests/legacy_diablo.sqlite
@@ -0,0 +1,283 @@
+begin;
+-- MySQL dump 10.13 Distrib 5.1.58, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.58-1ubuntu1
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `id` integer NOT NULL primary key autoincrement,
+ `user_id` integer NULL,
+ `tenant_id` integer NULL,
+ `type` varchar(20) NULL,
+ `key` varchar(255) NULL,
+ `secret` varchar(255) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+INSERT INTO `credentials` VALUES (1,1,1,'EC2','admin','secrete');
+INSERT INTO `credentials` VALUES (2,2,2,'EC2','demo','secrete');
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `id` integer NOT NULL primary key autoincrement,
+ `region` varchar(255) NULL,
+ `service_id` integer NULL,
+ `public_url` varchar(2000) NULL,
+ `admin_url` varchar(2000) NULL,
+ `internal_url` varchar(2000) NULL,
+ `enabled` integer NULL,
+ `is_global` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne',1,'http://192.168.2.10:8774/v1.1/%tenant_id%','http://192.168.2.10:8774/v1.1/%tenant_id%','http://192.168.2.10:8774/v1.1/%tenant_id%',1,1);
+INSERT INTO `endpoint_templates` VALUES (2,'RegionOne',2,'http://192.168.2.10:9292/v1','http://192.168.2.10:9292/v1','http://192.168.2.10:9292/v1',1,1);
+INSERT INTO `endpoint_templates` VALUES (3,'RegionOne',3,'http://192.168.2.10:5000/v2.0','http://192.168.2.10:35357/v2.0','http://192.168.2.10:5000/v2.0',1,1);
+INSERT INTO `endpoint_templates` VALUES (4,'RegionOne',4,'http://192.168.2.10:8080/v1/AUTH_%tenant_id%','http://192.168.2.10:8080/','http://192.168.2.10:8080/v1/AUTH_%tenant_id%',1,1);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `id` integer NOT NULL primary key autoincrement,
+ `tenant_id` integer NULL,
+ `endpoint_template_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `desc` varchar(255) NULL,
+ `service_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (1,'Admin',NULL,NULL);
+INSERT INTO `roles` VALUES (2,'Member',NULL,NULL);
+INSERT INTO `roles` VALUES (3,'KeystoneAdmin',NULL,NULL);
+INSERT INTO `roles` VALUES (4,'KeystoneServiceAdmin',NULL,NULL);
+INSERT INTO `roles` VALUES (5,'sysadmin',NULL,NULL);
+INSERT INTO `roles` VALUES (6,'netadmin',NULL,NULL);
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `type` varchar(255) NULL,
+ `desc` varchar(255) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES (1,'nova','compute','Nova Compute Service');
+INSERT INTO `services` VALUES (2,'glance','image','Glance Image Service');
+INSERT INTO `services` VALUES (3,'keystone','identity','Keystone Identity Service');
+INSERT INTO `services` VALUES (4,'swift','object-store','Swift Service');
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `desc` varchar(255) NULL,
+ `enabled` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (1,'admin',NULL,1);
+INSERT INTO `tenants` VALUES (2,'demo',NULL,1);
+INSERT INTO `tenants` VALUES (3,'invisible_to_admin',NULL,1);
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `token`
+--
+
+DROP TABLE IF EXISTS `token`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `token` (
+ `id` varchar(255) NOT NULL,
+ `user_id` integer NULL,
+ `tenant_id` integer NULL,
+ `expires` datetime NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `token`
+--
+
+
+/*!40000 ALTER TABLE `token` DISABLE KEYS */;
+INSERT INTO `token` VALUES ('secrete',1,1,'2015-02-05 00:00:00');
+/*!40000 ALTER TABLE `token` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `id` integer NOT NULL primary key autoincrement,
+ `user_id` integer NULL,
+ `role_id` integer NULL,
+ `tenant_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1);
+INSERT INTO `user_roles` VALUES (2,2,2,2);
+INSERT INTO `user_roles` VALUES (3,2,5,2);
+INSERT INTO `user_roles` VALUES (4,2,6,2);
+INSERT INTO `user_roles` VALUES (5,2,2,3);
+INSERT INTO `user_roles` VALUES (6,1,1,2);
+INSERT INTO `user_roles` VALUES (7,1,1,NULL);
+INSERT INTO `user_roles` VALUES (8,1,3,NULL);
+INSERT INTO `user_roles` VALUES (9,1,4,NULL);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `password` varchar(255) NULL,
+ `email` varchar(255) NULL,
+ `enabled` integer NULL,
+ `tenant_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES (1,'admin','secrete',NULL,1,NULL);
+INSERT INTO `users` VALUES (2,'demo','secrete',NULL,1,NULL);
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-13 17:30:03
+commit;
diff --git a/keystone/tests/legacy_essex.mysql b/keystone/tests/legacy_essex.mysql
new file mode 100644
index 00000000..eade2cbf
--- /dev/null
+++ b/keystone/tests/legacy_essex.mysql
@@ -0,0 +1,309 @@
+-- MySQL dump 10.13 Distrib 5.1.58, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.58-1ubuntu1
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `user_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `secret` varchar(255) DEFAULT NULL,
+ `key` varchar(255) DEFAULT NULL,
+ `type` varchar(20) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ PRIMARY KEY (`id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+LOCK TABLES `credentials` WRITE;
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `is_global` tinyint(1) DEFAULT NULL,
+ `region` varchar(255) DEFAULT NULL,
+ `public_url` varchar(2000) DEFAULT NULL,
+ `enabled` tinyint(1) DEFAULT NULL,
+ `internal_url` varchar(2000) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `service_id` int(11) DEFAULT NULL,
+ `admin_url` varchar(2000) DEFAULT NULL,
+ `version_id` varchar(20) DEFAULT NULL,
+ `version_list` varchar(2000) DEFAULT NULL,
+ `version_info` varchar(500) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=MyISAM AUTO_INCREMENT=6 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+LOCK TABLES `endpoint_templates` WRITE;
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:8774/v1.1/%tenant_id%',1,'http://4.2.2.1:8774/v1.1/%tenant_id%',1,1,'http://4.2.2.1:8774/v1.1/%tenant_id%',NULL,NULL,NULL),(1,'RegionOne','http://4.2.2.1:8773/services/Cloud',1,'http://4.2.2.1:8773/services/Cloud',2,2,'http://4.2.2.1:8773/services/Admin',NULL,NULL,NULL),(1,'RegionOne','http://4.2.2.1:9292/v1',1,'http://4.2.2.1:9292/v1',3,3,'http://4.2.2.1:9292/v1',NULL,NULL,NULL),(1,'RegionOne','http://4.2.2.1:5000/v2.0',1,'http://4.2.2.1:5000/v2.0',4,4,'http://4.2.2.1:35357/v2.0',NULL,NULL,NULL),(1,'RegionOne','http://4.2.2.1:8080/v1/AUTH_%tenant_id%',1,'http://4.2.2.1:8080/v1/AUTH_%tenant_id%',5,5,'http://4.2.2.1:8080/',NULL,NULL,NULL);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `endpoint_template_id` int(11) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `endpoint_template_id` (`endpoint_template_id`,`tenant_id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+LOCK TABLES `endpoints` WRITE;
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `migrate_version`
+--
+
+DROP TABLE IF EXISTS `migrate_version`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `migrate_version` (
+ `repository_id` varchar(250) NOT NULL,
+ `repository_path` text,
+ `version` int(11) DEFAULT NULL,
+ PRIMARY KEY (`repository_id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `migrate_version`
+--
+
+LOCK TABLES `migrate_version` WRITE;
+/*!40000 ALTER TABLE `migrate_version` DISABLE KEYS */;
+INSERT INTO `migrate_version` VALUES ('Keystone','/opt/stack/keystone/keystone/backends/sqlalchemy/migrate_repo',11);
+/*!40000 ALTER TABLE `migrate_version` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `service_id` int(11) DEFAULT NULL,
+ `desc` varchar(255) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`,`service_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+LOCK TABLES `roles` WRITE;
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (NULL,NULL,1,'admin'),(NULL,NULL,2,'Member'),(NULL,NULL,3,'KeystoneAdmin'),(NULL,NULL,4,'KeystoneServiceAdmin'),(NULL,NULL,5,'sysadmin'),(NULL,NULL,6,'netadmin');
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `desc` varchar(255) DEFAULT NULL,
+ `type` varchar(255) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `owner_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`),
+ UNIQUE KEY `name_2` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=6 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+LOCK TABLES `services` WRITE;
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES ('Nova Compute Service','compute',1,'nova',NULL),('EC2 Compatibility Layer','ec2',2,'ec2',NULL),('Glance Image Service','image',3,'glance',NULL),('Keystone Identity Service','identity',4,'keystone',NULL),('Swift Service','object-store',5,'swift',NULL);
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `desc` varchar(255) DEFAULT NULL,
+ `enabled` tinyint(1) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `uid` varchar(255) NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `tenants_uid_key` (`uid`),
+ UNIQUE KEY `name` (`name`),
+ UNIQUE KEY `name_2` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+LOCK TABLES `tenants` WRITE;
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (NULL,1,1,'admin','182c1fbf7eef44eda162ff3fd30c0a76'),(NULL,1,2,'demo','b1a7ea3a884f4d0685a98cd6e682a5da'),(NULL,1,3,'invisible_to_admin','f4d1eed9bb5d4d35a5f37af934f87574');
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tokens`
+--
+
+DROP TABLE IF EXISTS `tokens`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tokens` (
+ `tenant_id` int(11) DEFAULT NULL,
+ `expires` datetime DEFAULT NULL,
+ `user_id` int(11) DEFAULT NULL,
+ `id` varchar(255) NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id` (`id`)
+) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tokens`
+--
+
+LOCK TABLES `tokens` WRITE;
+/*!40000 ALTER TABLE `tokens` DISABLE KEYS */;
+INSERT INTO `tokens` VALUES (1,'2015-02-05 00:00:00',1,'123123123123123123123');
+/*!40000 ALTER TABLE `tokens` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `tenant_id` int(11) DEFAULT NULL,
+ `user_id` int(11) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `role_id` int(11) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `user_id` (`user_id`,`role_id`,`tenant_id`)
+) ENGINE=MyISAM AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+LOCK TABLES `user_roles` WRITE;
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1),(2,2,2,2),(2,2,3,5),(2,2,4,6),(3,2,5,2),(2,1,6,1),(NULL,1,7,1),(NULL,1,8,3),(NULL,1,9,4);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `name` varchar(255) DEFAULT NULL,
+ `tenant_id` int(11) DEFAULT NULL,
+ `enabled` tinyint(1) DEFAULT NULL,
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `password` varchar(255) DEFAULT NULL,
+ `email` varchar(255) DEFAULT NULL,
+ `uid` varchar(255) NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `users_uid_key` (`uid`),
+ UNIQUE KEY `name` (`name`),
+ UNIQUE KEY `name_2` (`name`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+LOCK TABLES `users` WRITE;
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES ('admin',NULL,1,1,'$6$rounds=40000$hFXlgBSMi599197d$tmGKBpoGHNRsLB3ruK9f1wPvvtfWWuMEUzdqUAynsmmYXBK6eekyNHTzzhwXTM3mWpnaMHCI4mHPOycqmPJJc0',NULL,'c93b19ea3fa94484824213db8ac0afce'),('demo',NULL,1,2,'$6$rounds=40000$RBsX2ja9fdj2uTNQ$/wJOn510AYKW9BPFAJneVQAjm6TM0Ty11LG.u4.k4RhmoUcXNSjGKmQT6KO0SsvypMM7A.doWgt73V5rNnv5h.',NULL,'04c6697e88ff4667820903fcce05d904');
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-13 19:23:51
diff --git a/keystone/tests/legacy_essex.sqlite b/keystone/tests/legacy_essex.sqlite
new file mode 100644
index 00000000..72326d76
--- /dev/null
+++ b/keystone/tests/legacy_essex.sqlite
@@ -0,0 +1,313 @@
+begin;
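+-- NOTE: as with the Diablo fixture, this appears to be the Essex MySQL dump
+-- translated by hand into SQLite syntax for the legacy-import tests.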
+-- MySQL dump 10.13 Distrib 5.1.58, for debian-linux-gnu (x86_64)
+--
+-- Host: localhost Database: keystone
+-- ------------------------------------------------------
+-- Server version 5.1.58-1ubuntu1
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `credentials`
+--
+
+DROP TABLE IF EXISTS `credentials`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `credentials` (
+ `user_id` integer NULL,
+ `tenant_id` integer NULL,
+ `secret` varchar(255) NULL,
+ `key` varchar(255) NULL,
+ `type` varchar(20) NULL,
+ `id` integer NOT NULL primary key autoincrement
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `credentials`
+--
+
+
+/*!40000 ALTER TABLE `credentials` DISABLE KEYS */;
+/*!40000 ALTER TABLE `credentials` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoint_templates`
+--
+
+DROP TABLE IF EXISTS `endpoint_templates`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoint_templates` (
+ `is_global` integer NULL,
+ `region` varchar(255) NULL,
+ `public_url` varchar(2000) NULL,
+ `enabled` integer NULL,
+ `internal_url` varchar(2000) NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `service_id` integer NULL,
+ `admin_url` varchar(2000) NULL,
+ `version_id` varchar(20) NULL,
+ `version_list` varchar(2000) NULL,
+ `version_info` varchar(500) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoint_templates`
+--
+
+
+/*!40000 ALTER TABLE `endpoint_templates` DISABLE KEYS */;
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:8774/v1.1/%tenant_id%',1,'http://4.2.2.1:8774/v1.1/%tenant_id%',1,1,'http://4.2.2.1:8774/v1.1/%tenant_id%',NULL,NULL,NULL);
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:8773/services/Cloud',1,'http://4.2.2.1:8773/services/Cloud',2,2,'http://4.2.2.1:8773/services/Admin',NULL,NULL,NULL);
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:9292/v1',1,'http://4.2.2.1:9292/v1',3,3,'http://4.2.2.1:9292/v1',NULL,NULL,NULL);
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:5000/v2.0',1,'http://4.2.2.1:5000/v2.0',4,4,'http://4.2.2.1:35357/v2.0',NULL,NULL,NULL);
+INSERT INTO `endpoint_templates` VALUES (1,'RegionOne','http://4.2.2.1:8080/v1/AUTH_%tenant_id%',1,'http://4.2.2.1:8080/v1/AUTH_%tenant_id%',5,5,'http://4.2.2.1:8080/',NULL,NULL,NULL);
+/*!40000 ALTER TABLE `endpoint_templates` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `endpoints`
+--
+
+DROP TABLE IF EXISTS `endpoints`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `endpoints` (
+ `endpoint_template_id` integer NULL,
+ `tenant_id` integer NULL,
+ `id` integer NOT NULL primary key autoincrement
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `endpoints`
+--
+
+
+/*!40000 ALTER TABLE `endpoints` DISABLE KEYS */;
+/*!40000 ALTER TABLE `endpoints` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `migrate_version`
+--
+
+DROP TABLE IF EXISTS `migrate_version`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `migrate_version` (
+ `repository_id` varchar(250) NOT NULL,
+ `repository_path` text,
+ `version` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `migrate_version`
+--
+
+
+/*!40000 ALTER TABLE `migrate_version` DISABLE KEYS */;
+INSERT INTO `migrate_version` VALUES ('Keystone','/opt/stack/keystone/keystone/backends/sqlalchemy/migrate_repo',11);
+/*!40000 ALTER TABLE `migrate_version` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+ `service_id` integer NULL,
+ `desc` varchar(255) NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES (NULL,NULL,1,'admin');
+INSERT INTO `roles` VALUES (NULL,NULL,2,'Member');
+INSERT INTO `roles` VALUES (NULL,NULL,3,'KeystoneAdmin');
+INSERT INTO `roles` VALUES (NULL,NULL,4,'KeystoneServiceAdmin');
+INSERT INTO `roles` VALUES (NULL,NULL,5,'sysadmin');
+INSERT INTO `roles` VALUES (NULL,NULL,6,'netadmin');
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `services`
+--
+
+DROP TABLE IF EXISTS `services`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `services` (
+ `desc` varchar(255) NULL,
+ `type` varchar(255) NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `owner_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `services`
+--
+
+
+/*!40000 ALTER TABLE `services` DISABLE KEYS */;
+INSERT INTO `services` VALUES ('Nova Compute Service','compute',1,'nova',NULL);
+INSERT INTO `services` VALUES ('EC2 Compatibility Layer','ec2',2,'ec2',NULL);
+INSERT INTO `services` VALUES ('Glance Image Service','image',3,'glance',NULL);
+INSERT INTO `services` VALUES ('Keystone Identity Service','identity',4,'keystone',NULL);
+INSERT INTO `services` VALUES ('Swift Service','object-store',5,'swift',NULL);
+/*!40000 ALTER TABLE `services` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `tenants`
+--
+
+DROP TABLE IF EXISTS `tenants`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tenants` (
+ `desc` varchar(255) NULL,
+ `enabled` integer NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `name` varchar(255) NULL,
+ `uid` varchar(255) NOT NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tenants`
+--
+
+
+/*!40000 ALTER TABLE `tenants` DISABLE KEYS */;
+INSERT INTO `tenants` VALUES (NULL,1,1,'admin','182c1fbf7eef44eda162ff3fd30c0a76');
+INSERT INTO `tenants` VALUES (NULL,1,2,'demo','b1a7ea3a884f4d0685a98cd6e682a5da');
+INSERT INTO `tenants` VALUES (NULL,1,3,'invisible_to_admin','f4d1eed9bb5d4d35a5f37af934f87574');
+/*!40000 ALTER TABLE `tenants` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `tokens`
+--
+
+DROP TABLE IF EXISTS `tokens`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tokens` (
+ `tenant_id` integer NULL,
+ `expires` datetime NULL,
+ `user_id` integer NULL,
+ `id` varchar(255) NOT NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tokens`
+--
+
+
+/*!40000 ALTER TABLE `tokens` DISABLE KEYS */;
+INSERT INTO `tokens` VALUES (1,'2015-02-05 00:00:00',1,'123123123123123123123');
+/*!40000 ALTER TABLE `tokens` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+ `tenant_id` integer NULL,
+ `user_id` integer NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `role_id` integer NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+INSERT INTO `user_roles` VALUES (1,1,1,1);
+INSERT INTO `user_roles` VALUES (2,2,2,2);
+INSERT INTO `user_roles` VALUES (2,2,3,5);
+INSERT INTO `user_roles` VALUES (2,2,4,6);
+INSERT INTO `user_roles` VALUES (3,2,5,2);
+INSERT INTO `user_roles` VALUES (2,1,6,1);
+INSERT INTO `user_roles` VALUES (NULL,1,7,1);
+INSERT INTO `user_roles` VALUES (NULL,1,8,3);
+INSERT INTO `user_roles` VALUES (NULL,1,9,4);
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+
+
+--
+-- Table structure for table `users`
+--
+
+DROP TABLE IF EXISTS `users`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `users` (
+ `name` varchar(255) NULL,
+ `tenant_id` integer NULL,
+ `enabled` integer NULL,
+ `id` integer NOT NULL primary key autoincrement,
+ `password` varchar(255) NULL,
+ `email` varchar(255) NULL,
+ `uid` varchar(255) NOT NULL
+) ;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `users`
+--
+
+
+/*!40000 ALTER TABLE `users` DISABLE KEYS */;
+INSERT INTO `users` VALUES ('admin',NULL,1,1,'$6$rounds=40000$hFXlgBSMi599197d$tmGKBpoGHNRsLB3ruK9f1wPvvtfWWuMEUzdqUAynsmmYXBK6eekyNHTzzhwXTM3mWpnaMHCI4mHPOycqmPJJc0',NULL,'c93b19ea3fa94484824213db8ac0afce');
+INSERT INTO `users` VALUES ('demo',NULL,1,2,'$6$rounds=40000$RBsX2ja9fdj2uTNQ$/wJOn510AYKW9BPFAJneVQAjm6TM0Ty11LG.u4.k4RhmoUcXNSjGKmQT6KO0SsvypMM7A.doWgt73V5rNnv5h.',NULL,'04c6697e88ff4667820903fcce05d904');
+/*!40000 ALTER TABLE `users` ENABLE KEYS */;
+
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2012-02-13 19:23:51
+commit;
diff --git a/keystone/tests/test_auth.py b/keystone/tests/test_auth.py
new file mode 100644
index 00000000..598b11d3
--- /dev/null
+++ b/keystone/tests/test_auth.py
@@ -0,0 +1,851 @@
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import uuid
+
+from keystone.tests import core as test
+
+from keystone import auth
+from keystone import config
+from keystone import exception
+from keystone.openstack.common import timeutils
+from keystone import token
+from keystone import trust
+
+import default_fixtures
+
+
+CONF = config.CONF
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+
+def _build_user_auth(token=None, user_id=None, username=None,
+ password=None, tenant_id=None, tenant_name=None,
+ trust_id=None):
+ """Build auth dictionary.
+
+ It will create an auth dictionary based on all the arguments
+ that it receives.
+ """
+ auth_json = {}
+ if token is not None:
+ auth_json['token'] = token
+ if username or password:
+ auth_json['passwordCredentials'] = {}
+ if username is not None:
+ auth_json['passwordCredentials']['username'] = username
+ if user_id is not None:
+ auth_json['passwordCredentials']['userId'] = user_id
+ if password is not None:
+ auth_json['passwordCredentials']['password'] = password
+ if tenant_name is not None:
+ auth_json['tenantName'] = tenant_name
+ if tenant_id is not None:
+ auth_json['tenantId'] = tenant_id
+ if trust_id is not None:
+ auth_json['trust_id'] = trust_id
+ return auth_json
+
+
+class AuthTest(test.TestCase):
+ def setUp(self):
+ super(AuthTest, self).setUp()
+
+ CONF.identity.driver = 'keystone.identity.backends.kvs.Identity'
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ # need to register the token provider first because auth controller
+ # depends on it
+ token.provider.Manager()
+
+ self.controller = token.controllers.Auth()
+
+ def assertEqualTokens(self, a, b):
+ """Assert that two tokens are equal.
+
+ Compare two tokens except for their ids. This also truncates
+ the time in the comparison.
+ """
+ def normalize(token):
+ token['access']['token']['id'] = 'dummy'
+ del token['access']['token']['expires']
+ del token['access']['token']['issued_at']
+ return token
+
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(a['access']['token']['expires']),
+ timeutils.parse_isotime(b['access']['token']['expires']))
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(a['access']['token']['issued_at']),
+ timeutils.parse_isotime(b['access']['token']['issued_at']))
+ return self.assertDictEqual(normalize(a), normalize(b))
+
+
+class AuthBadRequests(AuthTest):
+ def setUp(self):
+ super(AuthBadRequests, self).setUp()
+
+ def test_no_external_auth(self):
+ """Verify that _authenticate_external() raises exception if N/A."""
+ self.assertRaises(
+ token.controllers.ExternalAuthNotApplicable,
+ self.controller._authenticate_external,
+ {}, {})
+
+ def test_no_token_in_auth(self):
+ """Verify that _authenticate_token() raises exception if no token."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller._authenticate_token,
+ None, {})
+
+ def test_no_credentials_in_auth(self):
+ """Verify that _authenticate_local() raises exception if no creds."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller._authenticate_local,
+ None, {})
+
+ def test_authenticate_blank_request_body(self):
+ """Verify sending empty json dict raises the right exception."""
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, {})
+
+ def test_authenticate_blank_auth(self):
+ """Verify sending blank 'auth' raises the right exception."""
+ body_dict = _build_user_auth()
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_invalid_auth_content(self):
+ """Verify sending invalid 'auth' raises the right exception."""
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, {'auth': 'abcd'})
+
+ def test_authenticate_user_id_too_large(self):
+ """Verify sending large 'userId' raises the right exception."""
+ body_dict = _build_user_auth(user_id='0' * 65, username='FOO',
+ password='foo2')
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_username_too_large(self):
+ """Verify sending large 'username' raises the right exception."""
+ body_dict = _build_user_auth(username='0' * 65, password='foo2')
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_tenant_id_too_large(self):
+ """Verify sending large 'tenantId' raises the right exception."""
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_id='0' * 65)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_tenant_name_too_large(self):
+ """Verify sending large 'tenantName' raises the right exception."""
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_name='0' * 65)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_token_too_large(self):
+ """Verify sending large 'token' raises the right exception."""
+ body_dict = _build_user_auth(token={'id': '0' * 8193})
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_password_too_large(self):
+ """Verify sending large 'password' raises the right exception."""
+ length = CONF.identity.max_password_length + 1
+ body_dict = _build_user_auth(username='FOO', password='0' * length)
+ self.assertRaises(exception.ValidationSizeError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+
+class AuthWithToken(AuthTest):
+ def setUp(self):
+ super(AuthWithToken, self).setUp()
+
+ def test_unscoped_token(self):
+ """Verify getting an unscoped token with password creds."""
+ body_dict = _build_user_auth(username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+ tenant = unscoped_token["access"]["token"].get("tenant", None)
+ self.assertEqual(tenant, None)
+
+ def test_auth_invalid_token(self):
+ """Verify exception is raised if invalid token."""
+ body_dict = _build_user_auth(token={"id": uuid.uuid4().hex})
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_bad_formatted_token(self):
+ """Verify exception is raised if invalid token."""
+ body_dict = _build_user_auth(token={})
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_unscoped_token_no_project(self):
+ """Verify getting an unscoped token with an unscoped token."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+
+ body_dict = _build_user_auth(
+ token=unscoped_token["access"]["token"])
+ unscoped_token_2 = self.controller.authenticate({}, body_dict)
+
+ self.assertEqualTokens(unscoped_token, unscoped_token_2)
+
+ def test_auth_unscoped_token_project(self):
+ """Verify getting a token in a tenant with an unscoped token."""
+ # Assign a role so we can check it comes back in the token
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ # Get an unscoped token
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ unscoped_token = self.controller.authenticate({}, body_dict)
+ # Get a token on the BAR tenant using the unscoped token
+ body_dict = _build_user_auth(
+ token=unscoped_token["access"]["token"],
+ tenant_name="BAR")
+ scoped_token = self.controller.authenticate({}, body_dict)
+
+ tenant = scoped_token["access"]["token"]["tenant"]
+ roles = scoped_token["access"]["metadata"]["roles"]
+ self.assertEquals(tenant["id"], self.tenant_bar['id'])
+ self.assertEquals(roles[0], self.role_member['id'])
+
+ def test_auth_token_project_group_role(self):
+ """Verify getting a token in a tenant with group roles."""
+ # Add a v2-style role assignment so we can check it comes back in the token
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ # Now create a group role for this user as well
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ self.identity_api.add_user_to_group(self.user_foo['id'],
+ new_group['id'])
+ self.identity_api.create_grant(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_admin['id'])
+
+ # Get a scoped token for the tenant
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ scoped_token = self.controller.authenticate({}, body_dict)
+
+ tenant = scoped_token["access"]["token"]["tenant"]
+ roles = scoped_token["access"]["metadata"]["roles"]
+ self.assertEquals(tenant["id"], self.tenant_bar['id'])
+ self.assertIn(self.role_member['id'], roles)
+ self.assertIn(self.role_admin['id'], roles)
+
+ def test_auth_token_cross_domain_group_and_project(self):
+ """Verify getting a token in cross domain group/project roles."""
+ # create domain, project and group and grant roles to user
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ role_foo_domain1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role_foo_domain1['id'],
+ role_foo_domain1)
+ role_group_domain1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role_group_domain1['id'],
+ role_group_domain1)
+ self.identity_api.add_user_to_project(project1['id'],
+ self.user_foo['id'])
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ self.identity_api.add_user_to_group(self.user_foo['id'],
+ new_group['id'])
+ self.identity_api.create_grant(
+ user_id=self.user_foo['id'],
+ project_id=project1['id'],
+ role_id=self.role_member['id'])
+ self.identity_api.create_grant(
+ group_id=new_group['id'],
+ project_id=project1['id'],
+ role_id=self.role_admin['id'])
+ self.identity_api.create_grant(
+ user_id=self.user_foo['id'],
+ domain_id=domain1['id'],
+ role_id=role_foo_domain1['id'])
+ self.identity_api.create_grant(
+ group_id=new_group['id'],
+ domain_id=domain1['id'],
+ role_id=role_group_domain1['id'])
+
+ # Get a scoped token for the tenant
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_name=project1['name'])
+
+ scoped_token = self.controller.authenticate({}, body_dict)
+ tenant = scoped_token["access"]["token"]["tenant"]
+ roles = scoped_token["access"]["metadata"]["roles"]
+ self.assertEquals(tenant["id"], project1['id'])
+ self.assertIn(self.role_member['id'], roles)
+ self.assertIn(self.role_admin['id'], roles)
+ self.assertNotIn(role_foo_domain1['id'], roles)
+ self.assertNotIn(role_group_domain1['id'], roles)
+
+ def test_belongs_to_no_tenant(self):
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password']
+ }
+ })
+ unscoped_token_id = r['access']['token']['id']
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
+ token_id=unscoped_token_id)
+
+ def test_belongs_to(self):
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name="BAR")
+
+ scoped_token = self.controller.authenticate({}, body_dict)
+ scoped_token_id = scoped_token['access']['token']['id']
+
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'me'}),
+ token_id=scoped_token_id)
+
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.validate_token,
+ dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
+ token_id=scoped_token_id)
+
+ def test_token_auth_with_binding(self):
+ CONF.token.bind = ['kerberos']
+ body_dict = _build_user_auth()
+ context = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
+ unscoped_token = self.controller.authenticate(context, body_dict)
+
+ # the token should have bind information in it
+ bind = unscoped_token['access']['token']['bind']
+ self.assertEqual(bind['kerberos'], 'FOO')
+
+ body_dict = _build_user_auth(
+ token=unscoped_token['access']['token'],
+ tenant_name='BAR')
+
+ # using unscoped token without remote user context fails
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ # using token with remote user context succeeds
+ scoped_token = self.controller.authenticate(context, body_dict)
+
+ # the bind information should be carried over from the original token
+ bind = scoped_token['access']['token']['bind']
+ self.assertEqual(bind['kerberos'], 'FOO')
+
+
+class AuthWithPasswordCredentials(AuthTest):
+ def setUp(self):
+ super(AuthWithPasswordCredentials, self).setUp()
+
+ def test_auth_invalid_user(self):
+ """Verify exception is raised if invalid user."""
+ body_dict = _build_user_auth(
+ username=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_valid_user_invalid_password(self):
+ """Verify exception is raised if invalid password."""
+ body_dict = _build_user_auth(
+ username="FOO",
+ password=uuid.uuid4().hex)
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_empty_password(self):
+ """Verify exception is raised if empty password."""
+ body_dict = _build_user_auth(
+ username="FOO",
+ password="")
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_auth_no_password(self):
+ """Verify exception is raised if empty password."""
+ body_dict = _build_user_auth(username="FOO")
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_blank_password_credentials(self):
+ """Sending empty dict as passwordCredentials raises a 400 error."""
+ body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_authenticate_no_username(self):
+ """Verify skipping username raises the right exception."""
+ body_dict = _build_user_auth(password="pass",
+ tenant_name="demo")
+ self.assertRaises(exception.ValidationError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_bind_without_remote_user(self):
+ CONF.token.bind = ['kerberos']
+ body_dict = _build_user_auth(username='FOO', password='foo2',
+ tenant_name='BAR')
+ token = self.controller.authenticate({}, body_dict)
+ self.assertNotIn('bind', token['access']['token'])
+
+
+class AuthWithRemoteUser(AuthTest):
+ def setUp(self):
+ super(AuthWithRemoteUser, self).setUp()
+
+ def test_unscoped_remote_authn(self):
+ """Verify getting an unscoped token with external authn."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth()
+ remote_token = self.controller.authenticate(
+ {'REMOTE_USER': 'FOO'}, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token)
+
+ def test_unscoped_remote_authn_jsonless(self):
+ """Verify that external auth with invalid request fails."""
+ self.assertRaises(
+ exception.ValidationError,
+ self.controller.authenticate,
+ {'REMOTE_USER': 'FOO'},
+ None)
+
+ def test_scoped_remote_authn(self):
+ """Verify getting a token with external authn."""
+ body_dict = _build_user_auth(
+ username='FOO',
+ password='foo2',
+ tenant_name='BAR')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth(
+ tenant_name='BAR')
+ remote_token = self.controller.authenticate(
+ {'REMOTE_USER': 'FOO'}, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token)
+
+ def test_scoped_nometa_remote_authn(self):
+ """Verify getting a token with external authn and no metadata."""
+ body_dict = _build_user_auth(
+ username='TWO',
+ password='two2',
+ tenant_name='BAZ')
+ local_token = self.controller.authenticate(
+ {}, body_dict)
+
+ body_dict = _build_user_auth(tenant_name='BAZ')
+ remote_token = self.controller.authenticate(
+ {'REMOTE_USER': 'TWO'}, body_dict)
+
+ self.assertEqualTokens(local_token, remote_token)
+
+ def test_scoped_remote_authn_invalid_user(self):
+ """Verify that external auth with invalid user fails."""
+ body_dict = _build_user_auth(tenant_name="BAR")
+ self.assertRaises(
+ exception.Unauthorized,
+ self.controller.authenticate,
+ {'REMOTE_USER': uuid.uuid4().hex},
+ body_dict)
+
+ def test_bind_with_kerberos(self):
+ CONF.token.bind = ['kerberos']
+ kerb = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
+ body_dict = _build_user_auth(tenant_name="BAR")
+ token = self.controller.authenticate(kerb, body_dict)
+ self.assertEqual(token['access']['token']['bind']['kerberos'], 'FOO')
+
+ def test_bind_without_config_opt(self):
+ CONF.token.bind = ['x509']
+ kerb = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
+ body_dict = _build_user_auth(tenant_name='BAR')
+ token = self.controller.authenticate(kerb, body_dict)
+ self.assertNotIn('bind', token['access']['token'])
+
+
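+# Trust flow: user_foo (trustor) delegates two of its roles on tenant_bar to
+# user_two (trustee), who can then trade its own token for a trust-scoped one.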
+class AuthWithTrust(AuthTest):
+ def setUp(self):
+ super(AuthWithTrust, self).setUp()
+ self.opt_in_group('trust', enabled=True)
+
+ trust.Manager()
+ self.trust_controller = trust.controllers.TrustV3()
+ self.auth_v3_controller = auth.controllers.Auth()
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ self.assigned_roles = [self.role_member['id'],
+ self.role_browser['id']]
+ for assigned_role in self.assigned_roles:
+ self.identity_api.add_role_to_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+
+ self.sample_data = {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.trustee['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'impersonation': 'True',
+ 'roles': [{'id': self.role_browser['id']},
+ {'name': self.role_member['name']}]}
+ expires_at = timeutils.strtime(timeutils.utcnow() +
+ datetime.timedelta(minutes=10),
+ fmt=TIME_FORMAT)
+ self.create_trust(expires_at=expires_at)
+
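+ # Authenticate as the trustor, then create a trust from self.sample_data
+ # with the given expiry and impersonation setting.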
+ def create_trust(self, expires_at=None, impersonation='True'):
+ username = self.trustor['name']
+ password = 'foo2'
+ body_dict = _build_user_auth(username=username, password=password)
+ self.unscoped_token = self.controller.authenticate({}, body_dict)
+ context = {'token_id': self.unscoped_token['access']['token']['id']}
+ trust_data = copy.deepcopy(self.sample_data)
+ trust_data['expires_at'] = expires_at
+ trust_data['impersonation'] = impersonation
+
+ self.new_trust = self.trust_controller.create_trust(
+ context, trust=trust_data)['trust']
+
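+ # Build a v2 auth request that trades the user's unscoped token for a
+ # trust-scoped token by including trust_id and tenant_id.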
+ def build_v2_token_request(self, username, password):
+ body_dict = _build_user_auth(username=username, password=password)
+ self.unscoped_token = self.controller.authenticate({}, body_dict)
+ unscoped_token_id = self.unscoped_token['access']['token']['id']
+ request_body = _build_user_auth(token={'id': unscoped_token_id},
+ trust_id=self.new_trust['id'],
+ tenant_id=self.tenant_bar['id'])
+ return request_body
+
+ def test_create_trust_bad_data_fails(self):
+ context = {'token_id': self.unscoped_token['access']['token']['id']}
+ bad_sample_data = {'trustor_user_id': self.trustor['id']}
+
+ self.assertRaises(exception.ValidationError,
+ self.trust_controller.create_trust,
+ context, trust=bad_sample_data)
+
+ def test_create_trust_no_roles(self):
+ self.new_trust = None
+ self.sample_data['roles'] = []
+ self.create_trust()
+ self.assertEquals(self.new_trust['roles'], [])
+
+ def test_create_trust(self):
+ self.assertEquals(self.new_trust['trustor_user_id'],
+ self.trustor['id'])
+ self.assertEquals(self.new_trust['trustee_user_id'],
+ self.trustee['id'])
+ role_ids = [self.role_browser['id'], self.role_member['id']]
+ self.assertTrue(timeutils.parse_strtime(self.new_trust['expires_at'],
+ fmt=TIME_FORMAT))
+ self.assertIn('http://localhost:5000/v3/OS-TRUST/',
+ self.new_trust['links']['self'])
+ self.assertIn('http://localhost:5000/v3/OS-TRUST/',
+ self.new_trust['roles_links']['self'])
+
+ for role in self.new_trust['roles']:
+ self.assertIn(role['id'], role_ids)
+
+ def test_get_trust(self):
+ context = {'token_id': self.unscoped_token['access']['token']['id']}
+ trust = self.trust_controller.get_trust(context,
+ self.new_trust['id'])['trust']
+ self.assertEquals(trust['trustor_user_id'],
+ self.trustor['id'])
+ self.assertEquals(trust['trustee_user_id'],
+ self.trustee['id'])
+ role_ids = [self.role_browser['id'], self.role_member['id']]
+ for role in self.new_trust['roles']:
+ self.assertIn(role['id'], role_ids)
+
+ def test_create_trust_no_impersonation(self):
+ self.create_trust(expires_at=None, impersonation='False')
+ self.assertEquals(self.new_trust['trustor_user_id'],
+ self.trustor['id'])
+ self.assertEquals(self.new_trust['trustee_user_id'],
+ self.trustee['id'])
+ self.assertEquals(self.new_trust['impersonation'],
+ 'False')
+ auth_response = self.fetch_v2_token_from_trust()
+ token_user = auth_response['access']['user']
+ self.assertEquals(token_user['id'],
+ self.new_trust['trustee_user_id'])
+
+ # TODO(ayoung): Endpoints
+
+ def test_token_from_trust_wrong_user_fails(self):
+ request_body = self.build_v2_token_request('FOO', 'foo2')
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
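+ # Consume the trust as the trustee ('TWO') over the v2 API.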
+ def fetch_v2_token_from_trust(self):
+ request_body = self.build_v2_token_request('TWO', 'two2')
+ auth_response = self.controller.authenticate({}, request_body)
+ return auth_response
+
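+ # Same flow over the v3 API: authenticate the trustee with the password
+ # method, then re-authenticate with the token method scoped to the trust.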
+ def fetch_v3_token_from_trust(self):
+ v3_password_data = {
+ 'identity': {
+ "methods": ["password"],
+ "password": {
+ "user": {
+ "id": self.trustee["id"],
+ "password": self.trustee["password"]}}
+ },
+ 'scope': {
+ 'project': {
+ 'id': self.tenant_baz['id']}}}
+ auth_response = (self.auth_v3_controller.authenticate_for_token
+ ({'query_string': {}}, v3_password_data))
+ token = auth_response.headers['X-Subject-Token']
+
+ v3_req_with_trust = {
+ "identity": {
+ "methods": ["token"],
+ "token": {"id": token}},
+ "scope": {
+ "OS-TRUST:trust": {"id": self.new_trust['id']}}}
+ token_auth_response = (self.auth_v3_controller.authenticate_for_token
+ ({'query_string': {}}, v3_req_with_trust))
+ return token_auth_response
+
+ def test_create_v3_token_from_trust(self):
+ auth_response = self.fetch_v3_token_from_trust()
+
+ trust_token_user = auth_response.json['token']['user']
+ self.assertEquals(trust_token_user['id'], self.trustor['id'])
+
+ trust_token_trust = auth_response.json['token']['OS-TRUST:trust']
+ self.assertEquals(trust_token_trust['id'], self.new_trust['id'])
+ self.assertEquals(trust_token_trust['trustor_user']['id'],
+ self.trustor['id'])
+ self.assertEquals(trust_token_trust['trustee_user']['id'],
+ self.trustee['id'])
+
+ trust_token_roles = auth_response.json['token']['roles']
+ self.assertEquals(len(trust_token_roles), 2)
+
+ def test_v3_trust_token_get_token_fails(self):
+ auth_response = self.fetch_v3_token_from_trust()
+ trust_token = auth_response.headers['X-Subject-Token']
+ v3_token_data = {'identity': {
+ 'methods': ['token'],
+ 'token': {'id': trust_token}
+ }}
+ self.assertRaises(
+ exception.Forbidden,
+ self.auth_v3_controller.authenticate_for_token,
+ {'query_string': {}}, v3_token_data)
+
+ def test_token_from_trust(self):
+ auth_response = self.fetch_v2_token_from_trust()
+
+ self.assertIsNotNone(auth_response)
+ self.assertEquals(len(auth_response['access']['metadata']['roles']),
+ 2,
+ "user_foo has three roles, but the token should"
+ " only get the two roles specified in the trust.")
+
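+ # Count the tokens issued to the trustee under the current trust.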
+ def assert_token_count_for_trust(self, expected_value):
+ tokens = self.trust_controller.token_api.list_tokens(
+ self.trustee['id'], trust_id=self.new_trust['id'])
+ token_count = len(tokens)
+ self.assertEquals(token_count, expected_value)
+
+ def test_delete_tokens_for_user_invalidates_tokens_from_trust(self):
+ self.assert_token_count_for_trust(0)
+ self.fetch_v2_token_from_trust()
+ self.assert_token_count_for_trust(1)
+ self.trust_controller._delete_tokens_for_user(self.trustee['id'])
+ self.assert_token_count_for_trust(0)
+
+ def test_token_from_trust_cant_get_another_token(self):
+ auth_response = self.fetch_v2_token_from_trust()
+ trust_token_id = auth_response['access']['token']['id']
+ request_body = _build_user_auth(token={'id': trust_token_id},
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_delete_trust_revokes_token(self):
+ context = {'token_id': self.unscoped_token['access']['token']['id']}
+ self.fetch_v2_token_from_trust()
+ trust_id = self.new_trust['id']
+ tokens = self.token_api.list_tokens(self.trustor['id'],
+ trust_id=trust_id)
+ self.assertEquals(len(tokens), 1)
+ self.trust_controller.delete_trust(context, trust_id=trust_id)
+ tokens = self.token_api.list_tokens(self.trustor['id'],
+ trust_id=trust_id)
+ self.assertEquals(len(tokens), 0)
+
+ def test_token_from_trust_with_no_role_fails(self):
+ for assigned_role in self.assigned_roles:
+ self.identity_api.remove_role_from_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+ request_body = self.build_v2_token_request('TWO', 'two2')
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_expired_trust_get_token_fails(self):
+ expiry = "1999-02-18T10:10:00Z"
+ self.create_trust(expiry)
+ request_body = self.build_v2_token_request('TWO', 'two2')
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+ def test_token_from_trust_with_wrong_role_fails(self):
+ self.identity_api.add_role_to_user_and_project(
+ self.trustor['id'],
+ self.tenant_bar['id'],
+ self.role_other['id'])
+ for assigned_role in self.assigned_roles:
+ self.identity_api.remove_role_from_user_and_project(
+ self.trustor['id'], self.tenant_bar['id'], assigned_role)
+
+ request_body = self.build_v2_token_request('TWO', 'two2')
+
+ self.assertRaises(
+ exception.Forbidden,
+ self.controller.authenticate, {}, request_body)
+
+
+class TokenExpirationTest(AuthTest):
+ def _maintain_token_expiration(self):
+ """Token expiration should be maintained after re-auth & validation."""
+ timeutils.set_time_override()
+
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password']
+ }
+ })
+ unscoped_token_id = r['access']['token']['id']
+ original_expiration = r['access']['token']['expires']
+
+ timeutils.advance_time_seconds(1)
+
+ r = self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=unscoped_token_id)
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ timeutils.advance_time_seconds(1)
+
+ r = self.controller.authenticate(
+ {},
+ auth={
+ 'token': {
+ 'id': unscoped_token_id,
+ },
+ 'tenantId': self.tenant_bar['id'],
+ })
+ scoped_token_id = r['access']['token']['id']
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ timeutils.advance_time_seconds(1)
+
+ r = self.controller.validate_token(
+ dict(is_admin=True, query_string={}),
+ token_id=scoped_token_id)
+ self.assertEqual(original_expiration, r['access']['token']['expires'])
+
+ def test_maintain_uuid_token_expiration(self):
+ self.opt_in_group('signing', token_format='UUID')
+ self._maintain_token_expiration()
+
+
+class NonDefaultAuthTest(test.TestCase):
+
+ def test_add_non_default_auth_method(self):
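+ # Enabling an extra method ('custom') and re-running
+ # setup_authentication() should register a matching option on CONF.auth.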
+ self.opt_in_group('auth', methods=['password', 'token', 'custom'])
+ config.setup_authentication()
+ self.assertTrue(hasattr(CONF.auth, 'custom'))
diff --git a/keystone/tests/test_auth_plugin.conf b/keystone/tests/test_auth_plugin.conf
new file mode 100644
index 00000000..edec8f79
--- /dev/null
+++ b/keystone/tests/test_auth_plugin.conf
@@ -0,0 +1,3 @@
+[auth]
+methods = external,password,token,simple-challenge-response
+simple-challenge-response = challenge_response_method.SimpleChallengeResponse
diff --git a/keystone/tests/test_auth_plugin.py b/keystone/tests/test_auth_plugin.py
new file mode 100644
index 00000000..e3346cf1
--- /dev/null
+++ b/keystone/tests/test_auth_plugin.py
@@ -0,0 +1,106 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests import core as test
+
+from keystone import auth
+from keystone import exception
+from keystone import token
+
+
+# for testing purposes only
+METHOD_NAME = 'simple-challenge-response'
+EXPECTED_RESPONSE = uuid.uuid4().hex
+DEMO_USER_ID = uuid.uuid4().hex
+
+
+class SimpleChallengeResponse(auth.AuthMethodHandler):
+ def authenticate(self, context, auth_payload, user_context):
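+ # Two-step flow: a payload without 'response' gets the challenge back;
+ # a payload carrying EXPECTED_RESPONSE marks DEMO_USER_ID as the
+ # authenticated user in the shared user_context.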
+ if 'response' in auth_payload:
+ if auth_payload['response'] != EXPECTED_RESPONSE:
+ raise exception.Unauthorized('Wrong answer')
+ user_context['user_id'] = DEMO_USER_ID
+ else:
+ return {"challenge": "What's the name of your high school?"}
+
+
+class TestAuthPlugin(test.TestCase):
+ def setUp(self):
+ super(TestAuthPlugin, self).setUp()
+ self.config([
+ test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf'),
+ test.testsdir('backend_sql_disk.conf'),
+ test.testsdir('test_auth_plugin.conf')])
+ self.load_backends()
+ auth.controllers.AUTH_METHODS[METHOD_NAME] = SimpleChallengeResponse()
+
+ # need to register the token provider first because auth controller
+ # depends on it
+ token.provider.Manager()
+
+ self.api = auth.controllers.Auth()
+
+ def test_unsupported_auth_method(self):
+ method_name = uuid.uuid4().hex
+ auth_data = {'methods': [method_name]}
+ auth_data[method_name] = {'test': 'test'}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.AuthMethodNotSupported,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_addition_auth_steps(self):
+ auth_data = {'methods': ['simple-challenge-response']}
+ auth_data['simple-challenge-response'] = {
+ 'test': 'test'}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
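+ # The first request carries no 'response', so the plugin's challenge is
+ # surfaced to the caller as an AdditionalAuthRequired error.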
+ try:
+ self.api.authenticate({}, auth_info, auth_context)
+ except exception.AdditionalAuthRequired as e:
+ self.assertTrue('methods' in e.authentication)
+ self.assertTrue(METHOD_NAME in e.authentication['methods'])
+ self.assertTrue(METHOD_NAME in e.authentication)
+ self.assertTrue('challenge' in e.authentication[METHOD_NAME])
+
+ # test correct response
+ auth_data = {'methods': ['simple-challenge-response']}
+ auth_data['simple-challenge-response'] = {
+ 'response': EXPECTED_RESPONSE}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.api.authenticate({}, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], DEMO_USER_ID)
+
+ # test incorrect response
+ auth_data = {'methods': ['simple-challenge-response']}
+ auth_data['simple-challenge-response'] = {
+ 'response': uuid.uuid4().hex}
+ auth_data = {'identity': auth_data}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ self.api.authenticate,
+ {},
+ auth_info,
+ auth_context)
diff --git a/keystone/tests/test_backend.py b/keystone/tests/test_backend.py
new file mode 100644
index 00000000..8622b10d
--- /dev/null
+++ b/keystone/tests/test_backend.py
@@ -0,0 +1,2892 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from keystone.tests import core as test
+
+from keystone.catalog import core
+from keystone import config
+from keystone import exception
+from keystone.openstack.common import timeutils
+
+import default_fixtures
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+NULL_OBJECT = object()
+
+
+class IdentityTests(object):
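+ # Backend-agnostic identity tests: intended to be mixed into
+ # backend-specific test cases that provide self.identity_api and the
+ # default fixtures (tenant_bar, user_foo, role_member, ...).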
+ def _get_domain_fixture(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain['id'], domain)
+ return domain
+
+ def test_project_add_and_remove_user_role(self):
+ user_refs = self.identity_api.get_project_users(self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], [x['id'] for x in user_refs])
+
+ self.identity_api.add_role_to_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+ user_refs = self.identity_api.get_project_users(self.tenant_bar['id'])
+ self.assertIn(self.user_two['id'], [x['id'] for x in user_refs])
+
+ self.identity_api.remove_role_from_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+
+ user_refs = self.identity_api.get_project_users(self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], [x['id'] for x in user_refs])
+
+ def test_authenticate_bad_user(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ user_id=uuid.uuid4().hex,
+ password=self.user_foo['password'])
+
+ def test_authenticate_bad_password(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ user_id=self.user_foo['id'],
+ password=uuid.uuid4().hex)
+
+ def test_authenticate(self):
+ user_ref = self.identity_api.authenticate(
+ user_id=self.user_sna['id'],
+ password=self.user_sna['password'])
+ # NOTE(termie): the password field is left in user_sna to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_sna.pop('password')
+ self.user_sna['enabled'] = True
+ self.assertDictEqual(user_ref, self.user_sna)
+
+ def test_authenticate_and_get_roles_no_metadata(self):
+ user = {
+ 'id': 'no_meta',
+ 'name': 'NO_META',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'no_meta2',
+ }
+ self.identity_api.create_user(user['id'], user)
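+ # add_user_to_project() grants the default member role, which the role
+ # list check at the end of this test relies on.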
+ self.identity_api.add_user_to_project(self.tenant_baz['id'],
+ user['id'])
+ user_ref = self.identity_api.authenticate(
+ user_id=user['id'],
+ password=user['password'])
+ # NOTE(termie): the password field is left in the user dict to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ user.pop('password')
+ self.assertDictContainsSubset(user, user_ref)
+ role_list = self.identity_api.get_roles_for_user_and_project(
+ user['id'], self.tenant_baz['id'])
+ self.assertEqual(len(role_list), 1)
+ self.assertIn(CONF.member_role_id, role_list)
+
+ def test_password_hashed(self):
+ user_ref = self.identity_api._get_user(self.user_foo['id'])
+ self.assertNotEqual(user_ref['password'], self.user_foo['password'])
+
+ def test_create_unicode_user_name(self):
+ unicode_name = u'name \u540d\u5b57'
+ user = {'id': uuid.uuid4().hex,
+ 'name': unicode_name,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ ref = self.identity_api.create_user(user['id'], user)
+ self.assertEqual(unicode_name, ref['name'])
+
+ def test_get_project(self):
+ tenant_ref = self.identity_api.get_project(self.tenant_bar['id'])
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ def test_get_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ uuid.uuid4().hex)
+
+ def test_get_project_by_name(self):
+ tenant_ref = self.identity_api.get_project_by_name(
+ self.tenant_bar['name'],
+ DEFAULT_DOMAIN_ID)
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ def test_get_project_by_name_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project_by_name,
+ uuid.uuid4().hex,
+ DEFAULT_DOMAIN_ID)
+
+ def test_get_project_users(self):
+ tenant_ref = self.identity_api.get_project_users(self.tenant_baz['id'])
+ user_ids = []
+ for user in tenant_ref:
+ self.assertNotIn('password', user)
+ user_ids.append(user.get('id'))
+ self.assertEquals(len(user_ids), 2)
+ self.assertIn(self.user_two['id'], user_ids)
+ self.assertIn(self.user_badguy['id'], user_ids)
+
+ def test_get_project_users_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project_users,
+ uuid.uuid4().hex)
+
+ def test_get_user(self):
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ def test_get_user_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ uuid.uuid4().hex)
+
+ def test_get_user_by_name(self):
+ user_ref = self.identity_api.get_user_by_name(
+ self.user_foo['name'], DEFAULT_DOMAIN_ID)
+
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ def test_get_user_by_name_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ uuid.uuid4().hex,
+ DEFAULT_DOMAIN_ID)
+
+ def test_get_role(self):
+ role_ref = self.identity_api.get_role(self.role_admin['id'])
+ role_ref_dict = dict((x, role_ref[x]) for x in role_ref)
+ self.assertDictEqual(role_ref_dict, self.role_admin)
+
+ def test_get_role_404(self):
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.get_role,
+ uuid.uuid4().hex)
+
+ def test_create_duplicate_role_name_fails(self):
+ role = {'id': 'fake1',
+ 'name': 'fake1name'}
+ self.identity_api.create_role('fake1', role)
+ role['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_role,
+ 'fake2',
+ role)
+
+ def test_rename_duplicate_role_name_fails(self):
+ role1 = {
+ 'id': 'fake1',
+ 'name': 'fake1name'
+ }
+ role2 = {
+ 'id': 'fake2',
+ 'name': 'fake2name'
+ }
+ self.identity_api.create_role('fake1', role1)
+ self.identity_api.create_role('fake2', role2)
+ role1['name'] = 'fake2name'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_role,
+ 'fake1',
+ role1)
+
+ def test_create_duplicate_user_id_fails(self):
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user['name'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ def test_create_duplicate_user_name_fails(self):
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_user,
+ 'fake2',
+ user)
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ user1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ user2 = {'id': uuid.uuid4().hex,
+ 'name': user1['name'],
+ 'domain_id': new_domain['id'],
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user1['id'], user1)
+ self.identity_api.create_user(user2['id'], user2)
+
+ def test_move_user_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user['id'], user)
+ user['domain_id'] = domain2['id']
+ self.identity_api.update_user(user['id'], user)
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ # First, create a user in domain1
+ user1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user1['id'], user1)
+ # Now create a user in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ user2 = {'id': uuid.uuid4().hex,
+ 'name': user1['name'],
+ 'domain_id': domain2['id'],
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user2['id'], user2)
+ # Now try and move user1 into the 2nd domain - which should
+ # fail since the names clash
+ user1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ user1['id'],
+ user1)
+
+ def test_rename_duplicate_user_name_fails(self):
+ user1 = {'id': 'fake1',
+ 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ user2 = {'id': 'fake2',
+ 'name': 'fake2',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user1)
+ self.identity_api.create_user('fake2', user2)
+ user2['name'] = 'fake1'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ 'fake2',
+ user2)
+
+ def test_update_user_id_fails(self):
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'fakepass',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user['id'] = 'fake2'
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['id'], 'fake1')
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'fake2')
+
+ def test_create_duplicate_project_id_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['name'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_project,
+ 'fake1',
+ tenant)
+
+ def test_create_duplicate_project_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_project,
+ 'fake1',
+ tenant)
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ tenant1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ tenant2 = {'id': uuid.uuid4().hex, 'name': tenant1['name'],
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_project(tenant1['id'], tenant1)
+ self.identity_api.create_project(tenant2['id'], tenant2)
+
+ def test_move_project_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project['id'], project)
+ project['domain_id'] = domain2['id']
+ self.identity_api.update_project(project['id'], project)
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ # First, create a project in domain1
+ project1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ # Now create a project in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': project1['name'],
+ 'domain_id': domain2['id']}
+ self.identity_api.create_project(project2['id'], project2)
+ # Now try and move project1 into the 2nd domain - which should
+ # fail since the names clash
+ project1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_project,
+ project1['id'],
+ project1)
+
+ def test_rename_duplicate_project_name_fails(self):
+ tenant1 = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ tenant2 = {'id': 'fake2', 'name': 'fake2',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant1)
+ self.identity_api.create_project('fake2', tenant2)
+ tenant2['name'] = 'fake1'
+ self.assertRaises(exception.Error,
+ self.identity_api.update_project,
+ 'fake2',
+ tenant2)
+
+ def test_update_project_id_does_nothing(self):
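+ # Unlike update_user(), update_project() ignores an attempted id
+ # change; the project stays addressable only by its original id.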
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['id'] = 'fake2'
+ self.identity_api.update_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['id'], 'fake1')
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ 'fake2')
+
+ def test_list_role_assignments_unfiltered(self):
+ """Test for unfiltered listing role assignments.
+
+ Test Plan:
+ - Create a domain, with a user, group & project
+ - Find how many role assignments already exist (from default
+ fixtures)
+ - Create a grant of each type (user/group on project/domain)
+ - Check the number of assignments has gone up by 4 and that
+ the entries we added are in the list returned
+
+ """
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_user = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user['id'],
+ new_user)
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_project(new_project['id'], new_project)
+
+ # First check how many role grants already exist
+ existing_assignments = len(self.identity_api.list_role_assignments())
+
+ # Now create the grants (roles are defined in default_fixtures)
+ self.identity_api.create_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.identity_api.create_grant(user_id=new_user['id'],
+ project_id=new_project['id'],
+ role_id='other')
+ self.identity_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='admin')
+ self.identity_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id='admin')
+
+ # Read back the list of assignments - check it has gone up by 4
+ assignment_list = self.identity_api.list_role_assignments()
+ self.assertEquals(len(assignment_list), existing_assignments + 4)
+
+ # Now check that each of our four new entries are in the list
+ self.assertIn(
+ {'user_id': new_user['id'], 'domain_id': new_domain['id'],
+ 'role_id': 'member'},
+ assignment_list)
+ self.assertIn(
+ {'user_id': new_user['id'], 'project_id': new_project['id'],
+ 'role_id': 'other'},
+ assignment_list)
+ self.assertIn(
+ {'group_id': new_group['id'], 'domain_id': new_domain['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+ self.assertIn(
+ {'group_id': new_group['id'], 'project_id': new_project['id'],
+ 'role_id': 'admin'},
+ assignment_list)
+
+ def test_add_duplicate_role_grant(self):
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ self.assertRaises(exception.Conflict,
+ self.identity_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ def test_get_role_by_user_and_project(self):
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertNotIn('member', roles_ref)
+
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertIn('member', roles_ref)
+
+ def test_get_roles_for_user_and_domain(self):
+ """Test for getting roles for user on a domain.
+
+ Test Plan:
+ - Create a domain, with 2 users
+ - Check no roles yet exist
+ - Give user1 two roles on the domain, user2 one role
+ - Get roles on user1 and the domain - make sure we only
+ get back the 2 roles on user1
+ - Delete both roles from user1
+ - Check we get no roles back for user1 on domain
+
+ """
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user1['id'], new_user1)
+ new_user2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user2['id'], new_user2)
+ roles_ref = self.identity_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ # Now create the grants (roles are defined in default_fixtures)
+ self.identity_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.identity_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ self.identity_api.create_grant(user_id=new_user2['id'],
+ domain_id=new_domain['id'],
+ role_id='admin')
+ # Read back the roles for user1 on domain
+ roles_ids = self.identity_api.get_roles_for_user_and_domain(
+ new_user1['id'], new_domain['id'])
+ self.assertEqual(len(roles_ids), 2)
+ self.assertIn(self.role_member['id'], roles_ids)
+ self.assertIn(self.role_other['id'], roles_ids)
+
+ # Now delete both grants for user1
+ self.identity_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.identity_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ roles_ref = self.identity_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+
+ def test_get_roles_for_user_and_domain_404(self):
+ """Test errors raised when getting roles for user on a domain.
+
+ Test Plan:
+ - Check non-existing user gives UserNotFound
+ - Check non-existing domain gives DomainNotFound
+
+ """
+ new_domain = self._get_domain_fixture()
+ new_user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user1['id'], new_user1)
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_roles_for_user_and_domain,
+ uuid.uuid4().hex,
+ new_domain['id'])
+
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.get_roles_for_user_and_domain,
+ new_user1['id'],
+ uuid.uuid4().hex)
+
+ def test_get_roles_for_user_and_project_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_roles_for_user_and_project,
+ uuid.uuid4().hex,
+ self.tenant_bar['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_roles_for_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex)
+
+ def test_add_role_to_user_and_project_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.add_role_to_user_and_project,
+ uuid.uuid4().hex,
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex,
+ self.role_admin['id'])
+
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ def test_remove_role_from_user_and_project(self):
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ self.identity_api.remove_role_from_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn('member', roles_ref)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_role_from_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ 'member')
+
+ def test_get_role_grant_by_user_and_project(self):
+ roles_ref = self.identity_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.identity_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_admin['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'],
+ [role_ref['id'] for role_ref in roles_ref])
+
+ self.identity_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(self.role_admin['id'], roles_ref_ids)
+ self.assertIn('member', roles_ref_ids)
+
+ def test_get_role_grants_for_user_and_project_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.list_grants,
+ user_id=uuid.uuid4().hex,
+ project_id=self.tenant_bar['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.list_grants,
+ user_id=self.user_foo['id'],
+ project_id=uuid.uuid4().hex)
+
+ def test_add_role_grant_to_user_and_project_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.create_grant,
+ user_id=uuid.uuid4().hex,
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_admin['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.create_grant,
+ user_id=self.user_foo['id'],
+ project_id=uuid.uuid4().hex,
+ role_id=self.role_admin['id'])
+
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.create_grant,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=uuid.uuid4().hex)
+
+ def test_remove_role_grant_from_user_and_project(self):
+ self.identity_api.create_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.identity_api.delete_grant(user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ user_id=self.user_foo['id'],
+ project_id=self.tenant_baz['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_project(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': 'secret', 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.identity_api.delete_grant(group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ group_id=new_group['id'],
+ project_id=self.tenant_bar['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_domain(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+
+ self.identity_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.identity_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_correct_role_grant_from_a_mix(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_project(new_project['id'], new_project)
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_group2 = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group2['id'], new_group2)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ new_user2 = {'id': uuid.uuid4().hex, 'name': 'new_user2',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user2['id'], new_user2)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ # First check we have no grants
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ # Now add the grant we are going to test for, and some others as
+ # well just to make sure we get back the right one
+ self.identity_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ self.identity_api.create_grant(group_id=new_group2['id'],
+ domain_id=new_domain['id'],
+ role_id=self.role_admin['id'])
+ self.identity_api.create_grant(user_id=new_user2['id'],
+ domain_id=new_domain['id'],
+ role_id=self.role_admin['id'])
+ self.identity_api.create_grant(group_id=new_group['id'],
+ project_id=new_project['id'],
+ role_id=self.role_admin['id'])
+
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.identity_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_user_and_domain(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': 'secret', 'enabled': True,
+ 'domain_id': new_domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ roles_ref = self.identity_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(roles_ref[0], self.role_member)
+
+ self.identity_api.delete_grant(user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.identity_api.list_grants(
+ user_id=new_user['id'],
+ domain_id=new_domain['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ user_id=new_user['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
+ group1_domain1_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(group1_domain1_role['id'],
+ group1_domain1_role)
+ group1_domain2_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(group1_domain2_role['id'],
+ group1_domain2_role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ group1 = {'id': uuid.uuid4().hex, 'domain_id': domain1['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(group1['id'], group1)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=group1_domain1_role['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertDictEqual(roles_ref[0], group1_domain1_role)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertDictEqual(roles_ref[0], group1_domain2_role)
+
+ self.identity_api.delete_grant(group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain2['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ group_id=group1['id'],
+ domain_id=domain2['id'],
+ role_id=group1_domain2_role['id'])
+
+ def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
+ user1_domain1_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(user1_domain1_role['id'],
+ user1_domain1_role)
+ user1_domain2_role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(user1_domain2_role['id'],
+ user1_domain2_role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=user1_domain1_role['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertDictEqual(roles_ref[0], user1_domain1_role)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertDictEqual(roles_ref[0], user1_domain2_role)
+
+ self.identity_api.delete_grant(user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain2['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.delete_grant,
+ user_id=user1['id'],
+ domain_id=domain2['id'],
+ role_id=user1_domain2_role['id'])
+
+ def test_role_grant_by_group_and_cross_domain_project(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role1['id'], role1)
+ role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role2['id'], role2)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role2['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(role1['id'], roles_ref_ids)
+ self.assertIn(role2['id'], roles_ref_ids)
+
+ self.identity_api.delete_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.assertDictEqual(roles_ref[0], role2)
+
+ def test_role_grant_by_user_and_cross_domain_project(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role1['id'], role1)
+ role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role2['id'], role2)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain2['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role2['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+
+ roles_ref_ids = []
+ for ref in roles_ref:
+ roles_ref_ids.append(ref['id'])
+ self.assertIn(role1['id'], roles_ref_ids)
+ self.assertIn(role2['id'], roles_ref_ids)
+
+ self.identity_api.delete_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.assertDictEqual(roles_ref[0], role2)
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
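+ # Ten roles are spread across user and group grants on both the domain
+ # and the project; the per-grant listings and the combined role lookups
+ # below must each return the matching subset.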
+ role_list = []
+ for _ in range(10):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ group2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group2['id'], group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[3]['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[4]['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[5]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[6]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[7]['id'])
+ roles_ref = self.identity_api.list_grants(user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 2)
+ self.assertIn(role_list[0], roles_ref)
+ self.assertIn(role_list[1], roles_ref)
+ roles_ref = self.identity_api.list_grants(group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 2)
+ self.assertIn(role_list[2], roles_ref)
+ self.assertIn(role_list[3], roles_ref)
+ roles_ref = self.identity_api.list_grants(user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 2)
+ self.assertIn(role_list[4], roles_ref)
+ self.assertIn(role_list[5], roles_ref)
+ roles_ref = self.identity_api.list_grants(group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 2)
+ self.assertIn(role_list[6], roles_ref)
+ self.assertIn(role_list[7], roles_ref)
+
+ # Now test the alternate way of getting back lists of grants,
+ # where user and group roles are combined. These should match
+ # the above results.
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 4)
+ self.assertIn(role_list[4]['id'], combined_role_list)
+ self.assertIn(role_list[5]['id'], combined_role_list)
+ self.assertIn(role_list[6]['id'], combined_role_list)
+ self.assertIn(role_list[7]['id'], combined_role_list)
+
+ combined_role_list = self.identity_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEquals(len(combined_role_list), 4)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+ self.assertIn(role_list[3]['id'], combined_role_list)
+
+ def test_multi_group_grants_on_project_domain(self):
+ """Test multiple group roles for user on project and domain.
+
+ Test Plan:
+ - Create 6 roles
+ - Create a domain, with a project, user and two groups
+ - Make the user a member of both groups
+ - Check no roles yet exist
+ - Assign a role to the user and to both groups on both the
+ project and domain
+ - Get a list of effective roles for the user on both the
+ project and domain, checking we get back the correct three
+ roles
+
+ """
+ role_list = []
+ for _ in range(6):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ group2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group2['id'], group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[0]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+ self.identity_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[3]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role_list[4]['id'])
+ self.identity_api.create_grant(group_id=group2['id'],
+ project_id=project1['id'],
+ role_id=role_list[5]['id'])
+
+ # Read back the roles, ensuring we get the correct 3 roles for
+ # both project and domain
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 3)
+ self.assertIn(role_list[3]['id'], combined_role_list)
+ self.assertIn(role_list[4]['id'], combined_role_list)
+ self.assertIn(role_list[5]['id'], combined_role_list)
+
+ combined_role_list = self.identity_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEquals(len(combined_role_list), 3)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+
+ def test_delete_role_with_user_and_group_grants(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.identity_api.delete_role(role1['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 0)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 0)
+
+ def test_delete_user_with_group_project_domain_links(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_user(user1['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.list_grants,
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.list_grants,
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ user1['id'],
+ group1['id'])
+
+ def test_delete_group_with_user_project_domain_links(self):
+ role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role1['id'], role1)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ roles_ref = self.identity_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEquals(len(roles_ref), 1)
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_group(group1['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.list_grants,
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.list_grants,
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.identity_api.get_user(user1['id'])
+
+ def test_delete_domain_with_user_group_project_links(self):
+ # TODO(chungg): add test case once expected behaviour is defined
+ pass
+
+ def test_role_crud(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_ref = self.identity_api.get_role(role['id'])
+ role_ref_dict = dict((x, role_ref[x]) for x in role_ref)
+ self.assertDictEqual(role_ref_dict, role)
+
+ role['name'] = uuid.uuid4().hex
+ self.identity_api.update_role(role['id'], role)
+ role_ref = self.identity_api.get_role(role['id'])
+ role_ref_dict = dict((x, role_ref[x]) for x in role_ref)
+ self.assertDictEqual(role_ref_dict, role)
+
+ self.identity_api.delete_role(role['id'])
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.get_role,
+ role['id'])
+
+ def test_update_role_404(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.update_role,
+ role['id'],
+ role)
+
+ def test_add_user_to_project(self):
+ self.identity_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.identity_api.get_projects_for_user(self.user_foo['id'])
+ self.assertIn(self.tenant_baz['id'], tenants)
+
+ def test_add_user_to_project_missing_default_role(self):
+ self.assignment_api.delete_role(CONF.member_role_id)
+ self.assertRaises(exception.RoleNotFound,
+ self.assignment_api.get_role,
+ CONF.member_role_id)
+ self.identity_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.identity_api.get_projects_for_user(self.user_foo['id'])
+ self.assertIn(self.tenant_baz['id'], tenants)
+ default_role = self.assignment_api.get_role(CONF.member_role_id)
+ self.assertIsNotNone(default_role)
+
+ def test_add_user_to_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.add_user_to_project,
+ uuid.uuid4().hex,
+ self.user_foo['id'])
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.add_user_to_project,
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ def test_remove_user_from_project(self):
+ self.identity_api.add_user_to_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ self.identity_api.remove_user_from_project(self.tenant_baz['id'],
+ self.user_foo['id'])
+ tenants = self.identity_api.get_projects_for_user(self.user_foo['id'])
+ self.assertNotIn(self.tenant_baz['id'], tenants)
+
+ def test_remove_user_from_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.remove_user_from_project,
+ uuid.uuid4().hex,
+ self.user_foo['id'])
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.remove_user_from_project,
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_project,
+ self.tenant_baz['id'],
+ self.user_foo['id'])
+
+ def test_get_projects_for_user_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_projects_for_user,
+ uuid.uuid4().hex)
+
+ def test_update_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.update_project,
+ uuid.uuid4().hex,
+ dict())
+
+ def test_delete_project_404(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.delete_project,
+ uuid.uuid4().hex)
+
+ def test_update_user_404(self):
+ user_id = uuid.uuid4().hex
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.update_user,
+ user_id,
+ {'id': user_id})
+
+ def test_delete_user_with_project_association(self):
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user['id'], user)
+ self.identity_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_projects_for_user,
+ user['id'])
+
+ def test_delete_user_with_project_roles(self):
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user['id'], user)
+ self.identity_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ self.role_member['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_projects_for_user,
+ user['id'])
+
+ def test_delete_user_404(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.delete_user,
+ uuid.uuid4().hex)
+
+ def test_delete_role_404(self):
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.delete_role,
+ uuid.uuid4().hex)
+
+ def test_create_project_long_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'a' * 65,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_project_blank_name_fails(self):
+ tenant = {'id': 'fake1', 'name': '',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_project_invalid_name_fails(self):
+ tenant = {'id': 'fake1', 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ tenant['id'],
+ tenant)
+ tenant = {'id': 'fake1', 'name': 123,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_blank_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_long_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['name'] = 'a' * 65
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_update_project_invalid_name_fails(self):
+ tenant = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_project,
+ tenant['id'],
+ tenant)
+
+ tenant['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_project,
+ tenant['id'],
+ tenant)
+
+ def test_create_user_long_name_fails(self):
+ user = {'id': 'fake1', 'name': 'a' * 65,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ def test_create_user_blank_name_fails(self):
+ user = {'id': 'fake1', 'name': '',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ def test_create_user_invalid_name_fails(self):
+ user = {'id': 'fake1', 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ user = {'id': 'fake1', 'name': 123,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ def test_update_project_invalid_enabled_type_string(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+ self.assertEqual(project_ref['enabled'], True)
+
+ # Strings are not valid boolean values
+ project['enabled'] = "false"
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_project,
+ project['id'],
+ project)
+
+ def test_create_project_invalid_enabled_type_string(self):
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ # invalid string value
+ 'enabled': "true"}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_user_invalid_enabled_type_string(self):
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ # invalid string value
+ 'enabled': "true"}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user['id'],
+ user)
+
+ def test_update_user_long_name_fails(self):
+ user = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_user('fake1', user)
+ user['name'] = 'a' * 65
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+
+ def test_update_user_blank_name_fails(self):
+ user = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_user('fake1', user)
+ user['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+
+ def test_update_user_invalid_name_fails(self):
+ user = {'id': 'fake1', 'name': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_user('fake1', user)
+
+ user['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+
+ user['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+
+ def test_list_users(self):
+ users = self.identity_api.list_users()
+ for test_user in default_fixtures.USERS:
+ self.assertTrue(any(x['id'] == test_user['id'] for x in users))
+
+ def test_list_groups(self):
+ group1 = {
+ 'id': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex}
+ group2 = {
+ 'id': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.create_group(group2['id'], group2)
+ groups = self.identity_api.list_groups()
+ self.assertEquals(len(groups), 2)
+ group_ids = []
+ for group in groups:
+ group_ids.append(group.get('id'))
+ self.assertIn(group1['id'], group_ids)
+ self.assertIn(group2['id'], group_ids)
+
+ def test_list_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ self.identity_api.create_domain(domain2['id'], domain2)
+ domains = self.identity_api.list_domains()
+ self.assertEquals(len(domains), 3)
+ domain_ids = []
+ for domain in domains:
+ domain_ids.append(domain.get('id'))
+ self.assertIn(DEFAULT_DOMAIN_ID, domain_ids)
+ self.assertIn(domain1['id'], domain_ids)
+ self.assertIn(domain2['id'], domain_ids)
+
+ def test_list_projects(self):
+ projects = self.identity_api.list_projects()
+ self.assertEquals(len(projects), 4)
+ project_ids = []
+ for project in projects:
+ project_ids.append(project.get('id'))
+ self.assertIn(self.tenant_bar['id'], project_ids)
+ self.assertIn(self.tenant_baz['id'], project_ids)
+
+ def test_list_projects_for_domain(self):
+ project_ids = ([x['id'] for x in
+ self.assignment_api.list_projects(DEFAULT_DOMAIN_ID)])
+ self.assertEquals(len(project_ids), 4)
+ self.assertIn(self.tenant_bar['id'], project_ids)
+ self.assertIn(self.tenant_baz['id'], project_ids)
+ self.assertIn(self.tenant_mtu['id'], project_ids)
+ self.assertIn(self.tenant_service['id'], project_ids)
+
+ def test_list_projects_for_alternate_domain(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_domain(domain1['id'], domain1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.assignment_api.create_project(project1['id'], project1)
+ project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.assignment_api.create_project(project2['id'], project2)
+ project_ids = ([x['id'] for x in
+ self.assignment_api.list_projects(domain1['id'])])
+ self.assertEquals(len(project_ids), 2)
+ self.assertIn(project1['id'], project_ids)
+ self.assertIn(project2['id'], project_ids)
+
+ def test_list_roles(self):
+ roles = self.identity_api.list_roles()
+ for test_role in default_fixtures.ROLES:
+ self.assertTrue(any(x['id'] == test_role['id'] for x in roles))
+
+ def test_delete_project_with_role_assignments(self):
+ tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project(tenant['id'], tenant)
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], tenant['id'], 'member')
+ self.identity_api.delete_project(tenant['id'])
+ self.assertRaises(exception.NotFound,
+ self.identity_api.get_project,
+ tenant['id'])
+
+ def test_delete_role_check_role_grant(self):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ alt_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ self.identity_api.create_role(alt_role['id'], alt_role)
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], role['id'])
+ self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], alt_role['id'])
+ self.identity_api.delete_role(role['id'])
+ roles_ref = self.identity_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(role['id'], roles_ref)
+ self.assertIn(alt_role['id'], roles_ref)
+
+ def test_create_project_doesnt_modify_passed_in_dict(self):
+ new_project = {'id': 'tenant_id', 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ original_project = new_project.copy()
+ self.identity_api.create_project('tenant_id', new_project)
+ self.assertDictEqual(original_project, new_project)
+
+ def test_create_user_doesnt_modify_passed_in_dict(self):
+ new_user = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ original_user = new_user.copy()
+ self.identity_api.create_user('user_id', new_user)
+ self.assertDictEqual(original_user, new_user)
+
+ def test_update_user_enable(self):
+ user = {'id': 'fake1', 'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ user['enabled'] = False
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], user['enabled'])
+
+ # If not present, enabled field should not be updated
+ del user['enabled']
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], False)
+
+ user['enabled'] = True
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], user['enabled'])
+
+ del user['enabled']
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ # Integers are valid boolean values in Python, so test this explicitly.
+ user['enabled'] = 0
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], False)
+
+ # Any integer other than 0 is interpreted as True
+ user['enabled'] = -42
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ def test_update_user_enable_fails(self):
+ user = {'id': 'fake1', 'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ # Strings are not valid boolean values
+ user['enabled'] = "false"
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ 'fake1',
+ user)
+
+ def test_update_project_enable(self):
+ tenant = {'id': 'fake1', 'name': 'fake1', 'enabled': True,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['enabled'], True)
+
+ tenant['enabled'] = False
+ self.identity_api.update_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['enabled'], tenant['enabled'])
+
+ # If not present, enabled field should not be updated
+ del tenant['enabled']
+ self.identity_api.update_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['enabled'], False)
+
+ tenant['enabled'] = True
+ self.identity_api.update_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['enabled'], tenant['enabled'])
+
+ del tenant['enabled']
+ self.identity_api.update_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['enabled'], True)
+
+ def test_add_user_to_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+
+ self.assertIn(new_group['id'], [x['id'] for x in groups])
+
+ def test_add_user_to_group_404(self):
+ domain = self._get_domain_fixture()
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.add_user_to_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.add_user_to_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ def test_check_user_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ self.identity_api.check_user_in_group(new_user['id'], new_group['id'])
+
+ def test_check_user_not_in_group(self):
+ new_group = {
+ 'id': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.check_user_in_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ def test_list_users_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ self.assertIn(new_user['id'], [x['id'] for x in user_refs])
+
+ def test_remove_user_from_group(self):
+ domain = self._get_domain_fixture()
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertIn(new_group['id'], [x['id'] for x in groups])
+ self.identity_api.remove_user_from_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertNotIn(new_group['id'], [x['id'] for x in groups])
+
+ def test_remove_user_from_group_404(self):
+ domain = self._get_domain_fixture()
+ new_user = {'id': uuid.uuid4().hex, 'name': 'new_user',
+ 'password': uuid.uuid4().hex, 'enabled': True,
+ 'domain_id': domain['id']}
+ self.identity_api.create_user(new_user['id'], new_user)
+ new_group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(new_group['id'], new_group)
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_group_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain['id'], domain)
+ group = {'id': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ group['name'] = uuid.uuid4().hex
+ self.identity_api.update_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ self.identity_api.delete_group(group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group['id'])
+
+ def test_create_duplicate_group_name_fails(self):
+ group1 = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ group2 = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': group1['name']}
+ self.identity_api.create_group(group1['id'], group1)
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_group,
+ group2['id'], group2)
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(new_domain['id'], new_domain)
+ group1 = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
+ 'name': uuid.uuid4().hex}
+ group2 = {'id': uuid.uuid4().hex, 'domain_id': new_domain['id'],
+ 'name': group1['name']}
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.create_group(group2['id'], group2)
+
+ def test_move_group_between_domains(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ group = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_group(group['id'], group)
+ group['domain_id'] = domain2['id']
+ self.identity_api.update_group(group['id'], group)
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain2['id'], domain2)
+ # First, create a group in domain1
+ group1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_group(group1['id'], group1)
+ # Now create a group in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ group2 = {'id': uuid.uuid4().hex,
+ 'name': group1['name'],
+ 'domain_id': domain2['id']}
+ self.identity_api.create_group(group2['id'], group2)
+ # Now try to move group1 into the 2nd domain - which should
+ # fail since the names clash
+ group1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_group,
+ group1['id'],
+ group1)
+
+ def test_project_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_domain(domain['id'], domain)
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id']}
+ self.identity_api.create_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ project['name'] = uuid.uuid4().hex
+ self.identity_api.update_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ self.identity_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ project['id'])
+
+ def test_domain_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_domain(domain['id'], domain)
+ domain_ref = self.identity_api.get_domain(domain['id'])
+ self.assertDictEqual(domain_ref, domain)
+
+ domain['name'] = uuid.uuid4().hex
+ self.identity_api.update_domain(domain['id'], domain)
+ domain_ref = self.identity_api.get_domain(domain['id'])
+ self.assertDictEqual(domain_ref, domain)
+
+ self.identity_api.delete_domain(domain['id'])
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.get_domain,
+ domain['id'])
+
+ def test_user_crud(self):
+ user = {'domain_id': CONF.identity.default_domain_id,
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex, 'password': 'passw0rd'}
+ self.identity_api.create_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user['password']
+ user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
+ self.assertDictContainsSubset(user, user_ref_dict)
+
+ user['password'] = uuid.uuid4().hex
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user['password']
+ user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
+ self.assertDictContainsSubset(user, user_ref_dict)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_list_user_projects(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain['id'], domain)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex, 'domain_id': domain['id'],
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ user_projects = self.identity_api.list_user_projects(user1['id'])
+ self.assertEquals(len(user_projects), 0)
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_bar['id'],
+ role_id=self.role_member['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=self.tenant_baz['id'],
+ role_id=self.role_member['id'])
+ user_projects = self.identity_api.list_user_projects(user1['id'])
+ self.assertEquals(len(user_projects), 2)
+
+
+class TokenTests(object):
+ def _create_token_id(self):
+ # The token must start with 'MII' here, otherwise it fails the ASN.1
+ # test and is not hashed in a SQL backend.
+ token_id = "MII"
+ for i in range(1, 20):
+ token_id += uuid.uuid4().hex
+ return token_id
+
+ def test_token_crud(self):
+ token_id = self._create_token_id()
+ data = {'id': token_id, 'a': 'b',
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ expires = data_ref.pop('expires')
+ data_ref.pop('user_id')
+ self.assertTrue(isinstance(expires, datetime.datetime))
+ data_ref.pop('id')
+ data.pop('id')
+ self.assertDictEqual(data_ref, data)
+
+ new_data_ref = self.token_api.get_token(token_id)
+ expires = new_data_ref.pop('expires')
+ self.assertTrue(isinstance(expires, datetime.datetime))
+ new_data_ref.pop('user_id')
+ new_data_ref.pop('id')
+
+ self.assertEquals(new_data_ref, data)
+
+ self.token_api.delete_token(token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, token_id)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.delete_token, token_id)
+
+ def create_token_sample_data(self, tenant_id=None, trust_id=None,
+ user_id="testuserid"):
+ token_id = self._create_token_id()
+ data = {'id': token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ if tenant_id is not None:
+ data['tenant'] = {'id': tenant_id, 'name': tenant_id}
+ if tenant_id is NULL_OBJECT:
+ data['tenant'] = None
+ if trust_id is not None:
+ data['trust_id'] = trust_id
+ new_token = self.token_api.create_token(token_id, data)
+ return new_token['id']
+
+ def test_delete_tokens(self):
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 0)
+ token_id1 = self.create_token_sample_data('testtenantid')
+ token_id2 = self.create_token_sample_data('testtenantid')
+ token_id3 = self.create_token_sample_data(tenant_id='testtenantid',
+ user_id="testuserid1")
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 2)
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_api.delete_tokens(user_id='testuserid',
+ tenant_id='testtenantid')
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 0)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, token_id1)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, token_id2)
+
+ self.token_api.get_token(token_id3)
+
+ def test_delete_tokens_trust(self):
+ tokens = self.token_api.list_tokens(user_id='testuserid')
+ self.assertEquals(len(tokens), 0)
+ token_id1 = self.create_token_sample_data(tenant_id='testtenantid',
+ trust_id='testtrustid')
+ token_id2 = self.create_token_sample_data(tenant_id='testtenantid',
+ user_id="testuserid1",
+ trust_id="testtrustid1")
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 1)
+ self.assertIn(token_id1, tokens)
+ self.token_api.delete_tokens(user_id='testuserid',
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, token_id1)
+ self.token_api.get_token(token_id2)
+
+ def test_token_list(self):
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 0)
+ token_id1 = self.create_token_sample_data()
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 1)
+ self.assertIn(token_id1, tokens)
+ token_id2 = self.create_token_sample_data()
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 2)
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_api.delete_token(token_id1)
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+ self.token_api.delete_token(token_id2)
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+
+ # tenant-specific tokens
+ tenant1 = uuid.uuid4().hex
+ tenant2 = uuid.uuid4().hex
+ token_id3 = self.create_token_sample_data(tenant_id=tenant1)
+ token_id4 = self.create_token_sample_data(tenant_id=tenant2)
+ # test for existing but empty tenant (LP:1078497)
+ token_id5 = self.create_token_sample_data(tenant_id=NULL_OBJECT)
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEquals(len(tokens), 3)
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+ self.assertIn(token_id5, tokens)
+ tokens = self.token_api.list_tokens('testuserid', tenant2)
+ self.assertEquals(len(tokens), 1)
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+
+ def test_token_list_trust(self):
+ trust_id = uuid.uuid4().hex
+ token_id5 = self.create_token_sample_data(trust_id=trust_id)
+ tokens = self.token_api.list_tokens('testuserid', trust_id=trust_id)
+ self.assertEquals(len(tokens), 1)
+ self.assertIn(token_id5, tokens)
+
+ def test_get_token_404(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token,
+ uuid.uuid4().hex)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token,
+ None)
+
+ def test_delete_token_404(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.delete_token,
+ uuid.uuid4().hex)
+
+ def test_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_api.get_token, token_id)
+
+ def test_null_expires_token(self):
+ token_id = uuid.uuid4().hex
+ data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ self.assertIsNotNone(data_ref['expires'])
+ new_data_ref = self.token_api.get_token(token_id)
+
+ # MySQL doesn't store microseconds, so discard them before testing
+ data_ref['expires'] = data_ref['expires'].replace(microsecond=0)
+ new_data_ref['expires'] = new_data_ref['expires'].replace(
+ microsecond=0)
+
+ self.assertEqual(data_ref, new_data_ref)
+
+ def check_list_revoked_tokens(self, token_ids):
+ revoked_ids = [x['id'] for x in self.token_api.list_revoked_tokens()]
+ for token_id in token_ids:
+ self.assertIn(token_id, revoked_ids)
+
+ def delete_token(self):
+ token_id = uuid.uuid4().hex
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ self.token_api.delete_token(token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_api.get_token,
+ data_ref['id'])
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_api.delete_token,
+ data_ref['id'])
+ return token_id
+
+ def test_list_revoked_tokens_returns_empty_list(self):
+ revoked_ids = [x['id'] for x in self.token_api.list_revoked_tokens()]
+ self.assertEqual(revoked_ids, [])
+
+ def test_list_revoked_tokens_for_single_token(self):
+ self.check_list_revoked_tokens([self.delete_token()])
+
+ def test_list_revoked_tokens_for_multiple_tokens(self):
+ self.check_list_revoked_tokens([self.delete_token()
+ for x in xrange(2)])
+
+ def test_flush_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_api.create_token(token_id, data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data_ref, data)
+
+ self.token_api.flush_expired_tokens()
+ tokens = self.token_api.list_tokens('testuserid')
+ self.assertEqual(len(tokens), 1)
+ self.assertIn(token_id, tokens)
+
+
+class TrustTests(object):
+ def create_sample_trust(self, new_id):
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ trust_data = self.trust_api.create_trust(
+ new_id,
+ {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'expires_at': timeutils.parse_isotime('2031-02-18T18:10:00Z'),
+ 'impersonation': True},
+ roles=[{"id": "member"},
+ {"id": "other"},
+ {"id": "browser"}])
+ return trust_data
+
+ def test_delete_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEquals(new_id, trust_data['id'])
+ self.trust_api.delete_trust(trust_id)
+ self.assertIsNone(self.trust_api.get_trust(trust_id))
+
+ def test_delete_trust_not_found(self):
+ trust_id = uuid.uuid4().hex
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.delete_trust,
+ trust_id)
+
+ def test_get_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEquals(new_id, trust_data['id'])
+
+ def test_create_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+
+ self.assertEquals(new_id, trust_data['id'])
+ self.assertEquals(self.trustee['id'], trust_data['trustee_user_id'])
+ self.assertEquals(self.trustor['id'], trust_data['trustor_user_id'])
+ self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) >
+ timeutils.utcnow())
+
+ self.assertEquals([{'id': 'member'},
+ {'id': 'other'},
+ {'id': 'browser'}], trust_data['roles'])
+
+ def test_list_trust_by_trustee(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id'])
+ self.assertEqual(len(trusts), 3)
+ self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id'])
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id'])
+ self.assertEqual(len(trusts), 0)
+
+ def test_list_trust_by_trustor(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id'])
+ self.assertEqual(len(trusts), 3)
+ self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id'])
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id'])
+ self.assertEqual(len(trusts), 0)
+
+ def test_list_trusts(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts()
+ self.assertEqual(len(trusts), 3)
+
+
+class CommonHelperTests(test.TestCase):
+ def test_format_helper_raises_malformed_on_missing_key(self):
+ with self.assertRaises(exception.MalformedEndpoint):
+ core.format_url("http://%(foo)s/%(bar)s", {"foo": "1"})
+
+ def test_format_helper_raises_malformed_on_wrong_type(self):
+ with self.assertRaises(exception.MalformedEndpoint):
+ core.format_url("http://%foo%s", {"foo": "1"})
+
+ def test_format_helper_raises_malformed_on_incomplete_format(self):
+ with self.assertRaises(exception.MalformedEndpoint):
+ core.format_url("http://%(foo)", {"foo": "1"})
+
+
+class CatalogTests(object):
+ def test_service_crud(self):
+ # create
+ service_id = uuid.uuid4().hex
+ new_service = {
+ 'id': service_id,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ res = self.catalog_api.create_service(
+ service_id,
+ new_service.copy())
+ self.assertDictEqual(res, new_service)
+
+ # list
+ services = self.catalog_api.list_services()
+ self.assertIn(service_id, [x['id'] for x in services])
+
+ # delete
+ self.catalog_api.delete_service(service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ service_id)
+
+ def test_delete_service_with_endpoint(self):
+ # create a service
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # deleting the service should also delete the endpoint
+ self.catalog_api.delete_service(service['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+
+ def test_get_service_404(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ uuid.uuid4().hex)
+
+ def test_delete_service_404(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint_404(self):
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'service_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint)
+
+ def test_get_endpoint_404(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ uuid.uuid4().hex)
+
+ def test_delete_endpoint_404(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': "0" * 255,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+
+class PolicyTests(object):
+ def _new_policy_ref(self):
+ return {
+ 'id': uuid.uuid4().hex,
+ 'policy': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'endpoint_id': uuid.uuid4().hex,
+ }
+
+ def assertEqualPolicies(self, a, b):
+ self.assertEqual(a['id'], b['id'])
+ self.assertEqual(a['endpoint_id'], b['endpoint_id'])
+ self.assertEqual(a['policy'], b['policy'])
+ self.assertEqual(a['type'], b['type'])
+
+ def test_create(self):
+ ref = self._new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+ self.assertEqualPolicies(ref, res)
+
+ def test_get(self):
+ ref = self._new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.get_policy(ref['id'])
+ self.assertEqualPolicies(ref, res)
+
+ def test_list(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.list_policies()
+ res = [x for x in res if x['id'] == ref['id']][0]
+ self.assertEqualPolicies(ref, res)
+
+ def test_update(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+ orig = ref
+
+ ref = self._new_policy_ref()
+
+ # (cannot change policy ID)
+ self.assertRaises(exception.ValidationError,
+ self.policy_api.update_policy,
+ orig['id'],
+ ref)
+
+ ref['id'] = orig['id']
+ res = self.policy_api.update_policy(orig['id'], ref)
+ self.assertEqualPolicies(ref, res)
+
+ def test_delete(self):
+ ref = self._new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ self.policy_api.delete_policy(ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ ref['id'])
+ res = self.policy_api.list_policies()
+ self.assertFalse(len([x for x in res if x['id'] == ref['id']]))
+
+ def test_get_policy_404(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ uuid.uuid4().hex)
+
+ def test_update_policy_404(self):
+ ref = self._new_policy_ref()
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.update_policy,
+ ref['id'],
+ ref)
+
+ def test_delete_policy_404(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ uuid.uuid4().hex)
+
+
+class InheritanceTests(object):
+
+ def test_inherited_role_grants_for_user(self):
+ """Test inherited user roles.
+
+ Test Plan:
+ - Enable OS-INHERIT extension
+ - Create 3 roles
+ - Create a domain, with a project and a user
+ - Check no roles yet exist
+ - Assign a direct user role to the project and a (non-inherited)
+ user role to the domain
+ - Get a list of effective roles - should only get the one direct role
+ - Now add an inherited user role to the domain
+ - Get a list of effective roles - should have two roles, one
+ direct and one by virtue of the inherited user role
+ - Also get effective roles for the domain - the role marked as
+ inherited should not show up
+
+ """
+ self.opt_in_group('os_inherit', enabled=True)
+ role_list = []
+ for _ in range(3):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+
+ # Create the first two roles - the domain one is not inherited
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+
+ # Now get the effective roles for the user and project; this
+ # should only include the direct role assignment on the project
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 1)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+
+ # Now add an inherited role on the domain
+ self.identity_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'],
+ inherited_to_projects=True)
+
+ # Now get the effective roles for the user and project again; this
+ # should now include the inherited role on the domain
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 2)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+
+ # Finally, check that the inherited role does not appear as a valid
+ # directly assigned role on the domain itself
+ combined_role_list = self.identity_api.get_roles_for_user_and_domain(
+ user1['id'], domain1['id'])
+ self.assertEquals(len(combined_role_list), 1)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+
+ def test_inherited_role_grants_for_group(self):
+ """Test inherited group roles.
+
+ Test Plan:
+ - Enable OS-INHERIT extension
+ - Create 4 roles
+ - Create a domain, with a project, user and two groups
+ - Make the user a member of both groups
+ - Check no roles yet exist
+ - Assign a direct user role to the project and a (non-inherited)
+ group role on the domain
+ - Get a list of effective roles - should only get the one direct role
+ - Now add two inherited group roles to the domain
+ - Get a list of effective roles - should have three roles, one
+ direct and two by virtue of inherited group roles
+
+ """
+ self.opt_in_group('os_inherit', enabled=True)
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+ domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_domain(domain1['id'], domain1)
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group1['id'], group1)
+ group2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'], 'enabled': True}
+ self.identity_api.create_group(group2['id'], group2)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id']}
+ self.identity_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+
+ roles_ref = self.identity_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEquals(len(roles_ref), 0)
+
+ # Create two roles - the domain one is not inherited
+ self.identity_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.identity_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[1]['id'])
+
+ # Now get the effective roles for the user and project; this
+ # should only include the direct role assignment on the project
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 1)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+
+ # Now add two more group roles, both inherited, to the domain
+ self.identity_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[2]['id'],
+ inherited_to_projects=True)
+ self.identity_api.create_grant(group_id=group2['id'],
+ domain_id=domain1['id'],
+ role_id=role_list[3]['id'],
+ inherited_to_projects=True)
+
+ # Now get the effective roles for the user and project again; this
+ # should now include the inherited roles on the domain
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 3)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[2]['id'], combined_role_list)
+ self.assertIn(role_list[3]['id'], combined_role_list)
diff --git a/keystone/tests/test_backend_kvs.py b/keystone/tests/test_backend_kvs.py
new file mode 100644
index 00000000..34b87c60
--- /dev/null
+++ b/keystone/tests/test_backend_kvs.py
@@ -0,0 +1,119 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import uuid
+
+from keystone import exception
+from keystone import identity
+from keystone.tests import core as test
+
+import default_fixtures
+import test_backend
+
+
+class KvsIdentity(test.TestCase, test_backend.IdentityTests):
+ def setUp(self):
+ super(KvsIdentity, self).setUp()
+ identity.CONF.identity.driver = (
+ 'keystone.identity.backends.kvs.Identity')
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ def test_list_user_projects(self):
+ # NOTE(chungg): not implemented
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_user_between_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_group_between_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_project_between_domains(self):
+ self.skipTest('Blocked by bug 1119770')
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1119770')
+
+
+class KvsToken(test.TestCase, test_backend.TokenTests):
+ def setUp(self):
+ super(KvsToken, self).setUp()
+ identity.CONF.identity.driver = (
+ 'keystone.identity.backends.kvs.Identity')
+ self.load_backends()
+
+
+class KvsTrust(test.TestCase, test_backend.TrustTests):
+ def setUp(self):
+ super(KvsTrust, self).setUp()
+ identity.CONF.identity.driver = (
+ 'keystone.identity.backends.kvs.Identity')
+ identity.CONF.trust.driver = (
+ 'keystone.trust.backends.kvs.Trust')
+ identity.CONF.catalog.driver = (
+ 'keystone.catalog.backends.kvs.Catalog')
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+
+class KvsCatalog(test.TestCase, test_backend.CatalogTests):
+ def setUp(self):
+ super(KvsCatalog, self).setUp()
+ identity.CONF.identity.driver = (
+ 'keystone.identity.backends.kvs.Identity')
+ identity.CONF.trust.driver = (
+ 'keystone.trust.backends.kvs.Trust')
+ identity.CONF.catalog.driver = (
+ 'keystone.catalog.backends.kvs.Catalog')
+ self.load_backends()
+ self._load_fake_catalog()
+
+ def _load_fake_catalog(self):
+ self.catalog_foobar = self.catalog_api.driver._create_catalog(
+ 'foo', 'bar',
+ {'RegionFoo': {'service_bar': {'foo': 'bar'}}})
+
+ def test_get_catalog_404(self):
+ # FIXME(dolph): this test should be moved up to test_backend
+ # FIXME(dolph): exceptions should be UserNotFound and ProjectNotFound
+ self.assertRaises(exception.NotFound,
+ self.catalog_api.get_catalog,
+ uuid.uuid4().hex,
+ 'bar')
+
+ self.assertRaises(exception.NotFound,
+ self.catalog_api.get_catalog,
+ 'foo',
+ uuid.uuid4().hex)
+
+ def test_get_catalog(self):
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertDictEqual(catalog_ref, self.catalog_foobar)
diff --git a/keystone/tests/test_backend_ldap.py b/keystone/tests/test_backend_ldap.py
new file mode 100644
index 00000000..9c1c98d5
--- /dev/null
+++ b/keystone/tests/test_backend_ldap.py
@@ -0,0 +1,745 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone import assignment
+from keystone.common.ldap import fakeldap
+from keystone.common import sql
+from keystone import config
+from keystone import exception
+from keystone import identity
+from keystone.tests import core as test
+
+import default_fixtures
+import test_backend
+
+
+CONF = config.CONF
+
+
+class BaseLDAPIdentity(test_backend.IdentityTests):
+ def _get_domain_fixture(self):
+ """Domains in LDAP are read-only, so just return the static one."""
+ return self.identity_api.get_domain(CONF.identity.default_domain_id)
+
+ def clear_database(self):
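+        # fakeldap keeps its data in a shared in-memory shelve, so wipe it
+        # between tests to avoid state leaking across test cases.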
+ db = fakeldap.FakeShelve().get_instance()
+ db.clear()
+
+ def _set_config(self):
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_ldap.conf')])
+
+ def test_build_tree(self):
+ """Regression test for building the tree names
+ """
+ user_api = identity.backends.ldap.UserApi(CONF)
+ self.assertTrue(user_api)
+ self.assertEquals(user_api.tree_dn, "ou=Users,%s" % CONF.ldap.suffix)
+
+ def test_configurable_allowed_user_actions(self):
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['id'], 'fake1')
+
+ user['password'] = 'fakepass2'
+ self.identity_api.update_user('fake1', user)
+
+ self.identity_api.delete_user('fake1')
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'fake1')
+
+ def test_configurable_forbidden_user_actions(self):
+ CONF.ldap.user_allow_create = False
+ CONF.ldap.user_allow_update = False
+ CONF.ldap.user_allow_delete = False
+ self.load_backends()
+
+ user = {'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fakepass1',
+ 'tenants': ['bar']}
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.create_user,
+ 'fake1',
+ user)
+
+ self.user_foo['password'] = 'fakepass2'
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_user,
+ self.user_foo['id'],
+ self.user_foo)
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.delete_user,
+ self.user_foo['id'])
+
+ def test_user_filter(self):
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ self.user_foo.pop('password')
+ self.assertDictEqual(user_ref, self.user_foo)
+
+ CONF.ldap.user_filter = '(CN=DOES_NOT_MATCH)'
+ self.load_backends()
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ self.user_foo['id'])
+
+ def test_get_role_grant_by_user_and_project(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_role_grants_for_user_and_project_404(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_add_role_grant_to_user_and_project_404(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_remove_role_grant_from_user_and_project(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_and_remove_role_grant_by_group_and_project(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_and_remove_role_grant_by_group_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_role_grant_by_user_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_correct_role_grant_from_a_mix(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_role_grant_by_group_and_cross_domain_project(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_role_grant_by_user_and_cross_domain_project(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_delete_role_with_user_and_group_grants(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_delete_user_with_group_project_domain_links(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_delete_group_with_user_project_domain_links(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_list_user_projects(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+ def test_move_user_between_domains(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_move_group_between_domains(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_move_project_between_domains(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ self.skipTest('Blocked by bug 1101276')
+
+ def test_get_roles_for_user_and_domain(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
+ def test_list_role_assignments_unfiltered(self):
+ self.skipTest('Blocked by bug 1195019')
+
+ def test_multi_group_grants_on_project_domain(self):
+ self.skipTest('Blocked by bug 1101287')
+
+ def test_list_group_members_missing_entry(self):
+ """List group members with deleted user.
+
+ If a group has a deleted entry for a member, the non-deleted members
+ are returned.
+
+ """
+
+ # Create a group
+ group_id = None
+ group = dict(name=uuid.uuid4().hex)
+ group_id = self.identity_api.create_group(group_id, group)['id']
+
+ # Create a couple of users and add them to the group.
+ user_id = None
+ user = dict(name=uuid.uuid4().hex, id=uuid.uuid4().hex)
+ user_1_id = self.identity_api.create_user(user_id, user)['id']
+
+ self.identity_api.add_user_to_group(user_1_id, group_id)
+
+ user_id = None
+ user = dict(name=uuid.uuid4().hex, id=uuid.uuid4().hex)
+ user_2_id = self.identity_api.create_user(user_id, user)['id']
+
+ self.identity_api.add_user_to_group(user_2_id, group_id)
+
+ # Delete user 2
+ # NOTE(blk-u): need to go directly to user interface to keep from
+ # updating the group.
+ self.identity_api.driver.user.delete(user_2_id)
+
+ # List group users and verify only user 1.
+ res = self.identity_api.list_users_in_group(group_id)
+
+ self.assertEqual(len(res), 1, "Expected 1 entry (user_1)")
+ self.assertEqual(res[0]['id'], user_1_id, "Expected user 1 id")
+
+ def test_list_domains(self):
+ domains = self.identity_api.list_domains()
+ self.assertEquals(
+ domains,
+ [assignment.DEFAULT_DOMAIN])
+
+ def test_authenticate_requires_simple_bind(self):
+ user = {
+ 'id': 'no_meta',
+ 'name': 'NO_META',
+ 'domain_id': test_backend.DEFAULT_DOMAIN_ID,
+ 'password': 'no_meta2',
+ 'enabled': True,
+ }
+ self.identity_api.create_user(user['id'], user)
+ self.identity_api.add_user_to_project(self.tenant_baz['id'],
+ user['id'])
+ self.identity_api.driver.user.LDAP_USER = None
+ self.identity_api.driver.user.LDAP_PASSWORD = None
+
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ user_id=user['id'],
+ password=None)
+
+    # (spzala) The group and domain CRUD tests below override the standard
+    # ones in test_backend.py so that we can exclude the update-name test,
+    # since we do not yet support updating group or domain names with LDAP.
+    # In the tests below, the update is demonstrated by updating the
+    # description. Refer to bug 1136403 for more detail.
+ def test_group_crud(self):
+ group = {
+ 'id': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex}
+ self.identity_api.create_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictEqual(group_ref, group)
+ group['description'] = uuid.uuid4().hex
+ self.identity_api.update_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictEqual(group_ref, group)
+
+ self.identity_api.delete_group(group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group['id'])
+
+
+class LDAPIdentity(test.TestCase, BaseLDAPIdentity):
+ def setUp(self):
+ super(LDAPIdentity, self).setUp()
+ self._set_config()
+ self.clear_database()
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ def test_configurable_allowed_project_actions(self):
+ tenant = {'id': 'fake1', 'name': 'fake1', 'enabled': True}
+ self.identity_api.create_project('fake1', tenant)
+ tenant_ref = self.identity_api.get_project('fake1')
+ self.assertEqual(tenant_ref['id'], 'fake1')
+
+ tenant['enabled'] = False
+ self.identity_api.update_project('fake1', tenant)
+
+ self.identity_api.delete_project('fake1')
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ 'fake1')
+
+ def test_configurable_forbidden_project_actions(self):
+ CONF.ldap.tenant_allow_create = False
+ CONF.ldap.tenant_allow_update = False
+ CONF.ldap.tenant_allow_delete = False
+ self.load_backends()
+
+ tenant = {'id': 'fake1', 'name': 'fake1'}
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.create_project,
+ 'fake1',
+ tenant)
+
+ self.tenant_bar['enabled'] = False
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_project,
+ self.tenant_bar['id'],
+ self.tenant_bar)
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.delete_project,
+ self.tenant_bar['id'])
+
+ def test_configurable_allowed_role_actions(self):
+ role = {'id': 'fake1', 'name': 'fake1'}
+ self.identity_api.create_role('fake1', role)
+ role_ref = self.identity_api.get_role('fake1')
+ self.assertEqual(role_ref['id'], 'fake1')
+
+ role['name'] = 'fake2'
+ self.identity_api.update_role('fake1', role)
+
+ self.identity_api.delete_role('fake1')
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.get_role,
+ 'fake1')
+
+ def test_configurable_forbidden_role_actions(self):
+ CONF.ldap.role_allow_create = False
+ CONF.ldap.role_allow_update = False
+ CONF.ldap.role_allow_delete = False
+ self.load_backends()
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.create_role,
+ role['id'],
+ role)
+
+ self.role_member['name'] = uuid.uuid4().hex
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.update_role,
+ self.role_member['id'],
+ self.role_member)
+
+ self.assertRaises(exception.ForbiddenAction,
+ self.identity_api.delete_role,
+ self.role_member['id'])
+
+ def test_project_filter(self):
+ tenant_ref = self.identity_api.get_project(self.tenant_bar['id'])
+ self.assertDictEqual(tenant_ref, self.tenant_bar)
+
+ CONF.ldap.tenant_filter = '(CN=DOES_NOT_MATCH)'
+ self.load_backends()
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ self.tenant_bar['id'])
+
+ def test_role_filter(self):
+ role_ref = self.identity_api.get_role(self.role_member['id'])
+ self.assertDictEqual(role_ref, self.role_member)
+
+ CONF.ldap.role_filter = '(CN=DOES_NOT_MATCH)'
+ self.load_backends()
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.get_role,
+ self.role_member['id'])
+
+ def test_dumb_member(self):
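+        # The 'dumb member' is a placeholder DN used to satisfy LDAP schemas
+        # that require groups to be non-empty; it must never surface as a
+        # real user, hence the UserNotFound assertion below.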
+ CONF.ldap.use_dumb_member = True
+ CONF.ldap.dumb_member = 'cn=dumb,cn=example,cn=com'
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'dumb')
+
+ def test_project_attribute_mapping(self):
+ CONF.ldap.tenant_name_attribute = 'ou'
+ CONF.ldap.tenant_desc_attribute = 'description'
+ CONF.ldap.tenant_enabled_attribute = 'enabled'
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ tenant_ref = self.identity_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(tenant_ref['id'], self.tenant_baz['id'])
+ self.assertEqual(tenant_ref['name'], self.tenant_baz['name'])
+ self.assertEqual(
+ tenant_ref['description'],
+ self.tenant_baz['description'])
+ self.assertEqual(tenant_ref['enabled'], self.tenant_baz['enabled'])
+
+ CONF.ldap.tenant_name_attribute = 'description'
+ CONF.ldap.tenant_desc_attribute = 'ou'
+ self.load_backends()
+ tenant_ref = self.identity_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(tenant_ref['id'], self.tenant_baz['id'])
+ self.assertEqual(tenant_ref['name'], self.tenant_baz['description'])
+ self.assertEqual(tenant_ref['description'], self.tenant_baz['name'])
+ self.assertEqual(tenant_ref['enabled'], self.tenant_baz['enabled'])
+
+ def test_project_attribute_ignore(self):
+ CONF.ldap.tenant_attribute_ignore = ['name',
+ 'description',
+ 'enabled']
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ tenant_ref = self.identity_api.get_project(self.tenant_baz['id'])
+ self.assertEqual(tenant_ref['id'], self.tenant_baz['id'])
+ self.assertNotIn('name', tenant_ref)
+ self.assertNotIn('description', tenant_ref)
+ self.assertNotIn('enabled', tenant_ref)
+
+ def test_role_attribute_mapping(self):
+ CONF.ldap.role_name_attribute = 'ou'
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ role_ref = self.identity_api.get_role(self.role_member['id'])
+ self.assertEqual(role_ref['id'], self.role_member['id'])
+ self.assertEqual(role_ref['name'], self.role_member['name'])
+
+ CONF.ldap.role_name_attribute = 'sn'
+ self.load_backends()
+ role_ref = self.identity_api.get_role(self.role_member['id'])
+ self.assertEqual(role_ref['id'], self.role_member['id'])
+ self.assertNotIn('name', role_ref)
+
+ def test_role_attribute_ignore(self):
+ CONF.ldap.role_attribute_ignore = ['name']
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ role_ref = self.identity_api.get_role(self.role_member['id'])
+ self.assertEqual(role_ref['id'], self.role_member['id'])
+ self.assertNotIn('name', role_ref)
+
+ def test_user_enable_attribute_mask(self):
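+        # With a mask configured, 'enabled' is stored as a bit within an
+        # integer attribute (as in AD's userAccountControl), and should
+        # still round-trip as a boolean through the identity API.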
+ CONF.ldap.user_enabled_attribute = 'enabled'
+ CONF.ldap.user_enabled_mask = 2
+ CONF.ldap.user_enabled_default = 512
+ self.clear_database()
+ user = {'id': 'fake1', 'name': 'fake1', 'enabled': True}
+ self.identity_api.create_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ user['enabled'] = False
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], False)
+
+ user['enabled'] = True
+ self.identity_api.update_user('fake1', user)
+ user_ref = self.identity_api.get_user('fake1')
+ self.assertEqual(user_ref['enabled'], True)
+
+ def test_user_api_get_connection_no_user_password(self):
+ """Don't bind in case the user and password are blank."""
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf')])
+ CONF.ldap.url = "fake://memory"
+ user_api = identity.backends.ldap.UserApi(CONF)
+ self.stubs.Set(fakeldap, 'FakeLdap',
+ self.mox.CreateMock(fakeldap.FakeLdap))
+ # we have to track all calls on 'conn' to make sure that
+ # conn.simple_bind_s is not called
+ conn = self.mox.CreateMockAnything()
+ conn = fakeldap.FakeLdap(CONF.ldap.url).AndReturn(conn)
+ self.mox.ReplayAll()
+
+ user_api.get_connection(user=None, password=None)
+
+ def test_wrong_ldap_scope(self):
+ CONF.ldap.query_scope = uuid.uuid4().hex
+ self.assertRaisesRegexp(
+ ValueError,
+ 'Invalid LDAP scope: %s. *' % CONF.ldap.query_scope,
+ identity.backends.ldap.Identity)
+
+ def test_wrong_alias_dereferencing(self):
+ CONF.ldap.alias_dereferencing = uuid.uuid4().hex
+ self.assertRaisesRegexp(
+ ValueError,
+ 'Invalid LDAP deref option: %s\.' % CONF.ldap.alias_dereferencing,
+ identity.backends.ldap.Identity)
+
+ def test_user_extra_attribute_mapping(self):
+ CONF.ldap.user_additional_attribute_mapping = ['description:name']
+ self.load_backends()
+ user = {
+ 'id': 'extra_attributes',
+ 'name': 'EXTRA_ATTRIBUTES',
+ 'password': 'extra',
+ }
+ self.identity_api.create_user(user['id'], user)
+ dn, attrs = self.identity_api.driver.user._ldap_get(user['id'])
+ self.assertTrue(user['name'] in attrs['description'])
+
+ def test_parse_extra_attribute_mapping(self):
+ option_list = ['description:name', 'gecos:password',
+ 'fake:invalid', 'invalid1', 'invalid2:',
+ 'description:name:something']
+ mapping = self.identity_api.driver.user._parse_extra_attrs(option_list)
+ expected_dict = {'description': 'name', 'gecos': 'password'}
+ self.assertDictEqual(expected_dict, mapping)
+
+# TODO(henry-nash): These need to be removed when the full LDAP implementation
+# is submitted - see Bugs 1092187, 1101287, 1101276, 1101289
+
+ def test_domain_crud(self):
+ domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'enabled': True, 'description': uuid.uuid4().hex}
+ with self.assertRaises(exception.Forbidden):
+ self.identity_api.create_domain(domain['id'], domain)
+ with self.assertRaises(exception.Conflict):
+ self.identity_api.create_domain(
+ CONF.identity.default_domain_id, domain)
+ with self.assertRaises(exception.DomainNotFound):
+ self.identity_api.get_domain(domain['id'])
+ with self.assertRaises(exception.DomainNotFound):
+ domain['description'] = uuid.uuid4().hex
+ self.identity_api.update_domain(domain['id'], domain)
+ with self.assertRaises(exception.Forbidden):
+ self.identity_api.update_domain(
+ CONF.identity.default_domain_id, domain)
+ with self.assertRaises(exception.DomainNotFound):
+ self.identity_api.get_domain(domain['id'])
+ with self.assertRaises(exception.DomainNotFound):
+ self.identity_api.delete_domain(domain['id'])
+ with self.assertRaises(exception.Forbidden):
+ self.identity_api.delete_domain(CONF.identity.default_domain_id)
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.get_domain,
+ domain['id'])
+
+ def test_project_crud(self):
+        # NOTE(topol): The LDAP implementation does not currently support
+        #              updating a project's name, so this method override
+        #              provides a different update test.
+ project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex
+ }
+ self.assignment_api.create_project(project['id'], project)
+ project_ref = self.assignment_api.get_project(project['id'])
+
+ # NOTE(crazed): If running live test with emulation, there will be
+ # an enabled key in the project_ref.
+ if self.assignment_api.driver.project.enabled_emulation:
+ project['enabled'] = True
+ self.assertDictEqual(project_ref, project)
+
+ project['description'] = uuid.uuid4().hex
+ self.identity_api.update_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ self.identity_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ project['id'])
+
+ def test_multi_role_grant_by_user_group_on_project_domain(self):
+ # This is a partial implementation of the standard test that
+ # is defined in test_backend.py. It omits both domain and
+        # group grants, since neither of these is yet supported by
+        # the LDAP backend.
+
+ role_list = []
+ for _ in range(2):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'password': uuid.uuid4().hex,
+ 'enabled': True}
+ self.identity_api.create_user(user1['id'], user1)
+ project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id}
+ self.identity_api.create_project(project1['id'], project1)
+
+ self.identity_api.add_role_to_user_and_project(
+ user_id=user1['id'],
+ tenant_id=project1['id'],
+ role_id=role_list[0]['id'])
+ self.identity_api.add_role_to_user_and_project(
+ user_id=user1['id'],
+ tenant_id=project1['id'],
+ role_id=role_list[1]['id'])
+
+        # Although list_grants is not yet supported, we can test the
+        # alternate way of getting back lists of grants, where user
+        # and group roles are combined.  Only directly assigned user
+        # roles are available, since group grants are not yet supported.
+
+ combined_role_list = self.identity_api.get_roles_for_user_and_project(
+ user1['id'], project1['id'])
+ self.assertEquals(len(combined_role_list), 2)
+ self.assertIn(role_list[0]['id'], combined_role_list)
+ self.assertIn(role_list[1]['id'], combined_role_list)
+
+ # Finally, although domain roles are not implemented, check we can
+        # issue the combined get roles call with benign results, since this is
+        # used in token generation.
+
+ combined_role_list = self.identity_api.get_roles_for_user_and_domain(
+ user1['id'], CONF.identity.default_domain_id)
+ self.assertEquals(len(combined_role_list), 0)
+
+ def test_list_projects_for_alternate_domain(self):
+ self.skipTest(
+ 'N/A: LDAP does not support multiple domains')
+
+
+class LDAPIdentityEnabledEmulation(LDAPIdentity):
+ def setUp(self):
+ super(LDAPIdentityEnabledEmulation, self).setUp()
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_ldap.conf')])
+ CONF.ldap.user_enabled_emulation = True
+ CONF.ldap.tenant_enabled_emulation = True
+ self.clear_database()
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
+ self.user_two, self.user_badguy]:
+ obj.setdefault('enabled', True)
+
+ def test_project_crud(self):
+        # NOTE(topol): LDAPIdentityEnabledEmulation will create an
+        #              'enabled' key in the project dictionary, so this
+        #              method override handles that side effect.
+ project = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'description': uuid.uuid4().hex}
+
+ self.identity_api.create_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+
+ # self.identity_api.create_project adds an enabled
+ # key with a value of True when LDAPIdentityEnabledEmulation
+ # is used so we now add this expected key to the project dictionary
+ project['enabled'] = True
+ self.assertDictEqual(project_ref, project)
+
+ project['description'] = uuid.uuid4().hex
+ self.identity_api.update_project(project['id'], project)
+ project_ref = self.identity_api.get_project(project['id'])
+ self.assertDictEqual(project_ref, project)
+
+ self.identity_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ project['id'])
+
+ def test_user_crud(self):
+ user = {
+ 'id': uuid.uuid4().hex,
+ 'domain_id': CONF.identity.default_domain_id,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user['id'], user)
+ user['enabled'] = True
+ user_ref = self.identity_api.get_user(user['id'])
+ del user['password']
+ user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
+ self.assertDictEqual(user_ref_dict, user)
+
+ user['password'] = uuid.uuid4().hex
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user['password']
+ user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
+ self.assertDictEqual(user_ref_dict, user)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_user_enable_attribute_mask(self):
+ self.skipTest(
+ "Enabled emulation conflicts with enabled mask")
+
+
+class LdapIdentitySqlAssignment(sql.Base, test.TestCase, BaseLDAPIdentity):
+
+ def _set_config(self):
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_ldap_sql.conf')])
+
+ def setUp(self):
+ self._set_config()
+ self.clear_database()
+ self.load_backends()
+ self.engine = self.get_engine()
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+ self.load_fixtures(default_fixtures)
+        # Defaulted by the data load.
+ self.user_foo['enabled'] = True
+
+ def tearDown(self):
+ sql.ModelBase.metadata.drop_all(bind=self.engine)
+ self.engine.dispose()
+ sql.set_global_engine(None)
+
+ def test_domain_crud(self):
+ pass
+
+ def test_list_domains(self):
+ domains = self.identity_api.list_domains()
+ self.assertEquals(domains, [assignment.DEFAULT_DOMAIN])
+
+ def test_project_filter(self):
+ self.skipTest(
+ 'N/A: Not part of SQL backend')
+
+ def test_role_filter(self):
+ self.skipTest(
+ 'N/A: Not part of SQL backend')
diff --git a/keystone/tests/test_backend_memcache.py b/keystone/tests/test_backend_memcache.py
new file mode 100644
index 00000000..0377c0e6
--- /dev/null
+++ b/keystone/tests/test_backend_memcache.py
@@ -0,0 +1,186 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import uuid
+
+import memcache
+
+from keystone.tests import core as test
+
+from keystone.common import utils
+from keystone import exception
+from keystone.openstack.common import jsonutils
+from keystone.openstack.common import timeutils
+from keystone import token
+from keystone.token.backends import memcache as token_memcache
+
+import test_backend
+
+
+class MemcacheClient(object):
+ """Replicates a tiny subset of memcached client interface."""
+
+ def __init__(self, *args, **kwargs):
+ """Ignores the passed in args."""
+ self.cache = {}
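+        # Test hook: when set to True, cas() reports failure to simulate a
+        # lost compare-and-set race.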
+ self.reject_cas = False
+
+ def add(self, key, value):
+ if self.get(key):
+ return False
+ return self.set(key, value)
+
+ def append(self, key, value):
+ existing_value = self.get(key)
+ if existing_value:
+ self.set(key, existing_value + value)
+ return True
+ return False
+
+ def check_key(self, key):
+ if not isinstance(key, str):
+ raise memcache.Client.MemcachedStringEncodingError()
+
+ def gets(self, key):
+        # Call self.get() since we don't really do 'cas' here.
+ return self.get(key)
+
+ def get(self, key):
+ """Retrieves the value for a key or None."""
+ self.check_key(key)
+ obj = self.cache.get(key)
+ now = utils.unixtime(timeutils.utcnow())
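+        # Cache entries are (value, expiry) tuples; an expiry of 0 means the
+        # entry never expires.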
+ if obj and (obj[1] == 0 or obj[1] > now):
+ # NOTE(morganfainberg): This behaves more like memcache
+ # actually does and prevents modification of the passed in
+ # reference from affecting the cached back-end data. This makes
+ # tests a little easier to write.
+ #
+ # The back-end store should only change with an explicit
+ # set/delete/append/etc
+ data_copy = copy.deepcopy(obj[0])
+ return data_copy
+
+ def set(self, key, value, time=0):
+ """Sets the value for a key."""
+ self.check_key(key)
+ # NOTE(morganfainberg): This behaves more like memcache
+ # actually does and prevents modification of the passed in
+ # reference from affecting the cached back-end data. This makes
+ # tests a little easier to write.
+ #
+ # The back-end store should only change with an explicit
+ # set/delete/append/etc
+ data_copy = copy.deepcopy(value)
+ self.cache[key] = (data_copy, time)
+ return True
+
+ def cas(self, key, value, time=0, min_compress_len=0):
+ # Call self.set() since we don't really do 'cas' here.
+ if self.reject_cas:
+ return False
+ return self.set(key, value, time=time)
+
+ def reset_cas(self):
+        # This is a stub for the memcache client reset_cas function.
+ pass
+
+ def delete(self, key):
+ self.check_key(key)
+ try:
+ del self.cache[key]
+ except KeyError:
+            # NOTE(bcwaldon): python-memcached always returns the same value
+ pass
+
+
+class MemcacheToken(test.TestCase, test_backend.TokenTests):
+ def setUp(self):
+ super(MemcacheToken, self).setUp()
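+        # Swap in the in-memory fake client so the memcache token driver can
+        # be exercised without a live memcached server.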
+ fake_client = MemcacheClient()
+ self.token_man = token.Manager()
+ self.token_man.driver = token_memcache.Token(client=fake_client)
+ self.token_api = self.token_man
+
+ def test_create_unicode_token_id(self):
+ token_id = unicode(self._create_token_id())
+ data = {'id': token_id, 'a': 'b',
+ 'user': {'id': 'testuserid'}}
+ self.token_api.create_token(token_id, data)
+ self.token_api.get_token(token_id)
+
+ def test_create_unicode_user_id(self):
+ token_id = self._create_token_id()
+ user_id = unicode(uuid.uuid4().hex)
+ data = {'id': token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ self.token_api.create_token(token_id, data)
+ self.token_api.get_token(token_id)
+
+ def test_list_tokens_unicode_user_id(self):
+ user_id = unicode(uuid.uuid4().hex)
+ self.token_api.list_tokens(user_id)
+
+ def test_flush_expired_token(self):
+ with self.assertRaises(exception.NotImplemented):
+ self.token_api.flush_expired_tokens()
+
+ def test_cleanup_user_index_on_create(self):
+ valid_token_id = uuid.uuid4().hex
+ second_valid_token_id = uuid.uuid4().hex
+ expired_token_id = uuid.uuid4().hex
+ user_id = unicode(uuid.uuid4().hex)
+
+ expire_delta = datetime.timedelta(seconds=86400)
+
+ valid_data = {'id': valid_token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ second_valid_data = {'id': second_valid_token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ expired_data = {'id': expired_token_id, 'a': 'b',
+ 'user': {'id': user_id}}
+ self.token_api.create_token(valid_token_id, valid_data)
+ self.token_api.create_token(expired_token_id, expired_data)
+ # NOTE(morganfainberg): Directly access the data cache since we need to
+ # get expired tokens as well as valid tokens. token_api.list_tokens()
+ # will not return any expired tokens in the list.
+ user_key = self.token_api.driver._prefix_user_id(user_id)
+ user_record = self.token_api.driver.client.get(user_key)
+ user_token_list = jsonutils.loads('[%s]' % user_record)
+ self.assertEquals(len(user_token_list), 2)
+ expired_token_ptk = self.token_api.driver._prefix_token_id(
+ expired_token_id)
+ expired_token = self.token_api.driver.client.get(expired_token_ptk)
+ expired_token['expires'] = (timeutils.utcnow() - expire_delta)
+ self.token_api.driver.client.set(expired_token_ptk, expired_token)
+
+ self.token_api.create_token(second_valid_token_id, second_valid_data)
+ user_record = self.token_api.driver.client.get(user_key)
+ user_token_list = jsonutils.loads('[%s]' % user_record)
+ self.assertEquals(len(user_token_list), 2)
+
+ def test_cas_failure(self):
+ self.token_api.driver.client.reject_cas = True
+ token_id = uuid.uuid4().hex
+ user_id = unicode(uuid.uuid4().hex)
+ user_key = self.token_api.driver._prefix_user_id(user_id)
+ token_data = jsonutils.dumps(token_id)
+ self.assertRaises(
+ exception.UnexpectedError,
+ self.token_api.driver._update_user_list_with_cas,
+ user_key, token_data)
diff --git a/keystone/tests/test_backend_pam.py b/keystone/tests/test_backend_pam.py
new file mode 100644
index 00000000..65817837
--- /dev/null
+++ b/keystone/tests/test_backend_pam.py
@@ -0,0 +1,68 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests import core as test
+
+from keystone import config
+from keystone.identity.backends import pam as identity_pam
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class PamIdentity(test.TestCase):
+ def setUp(self):
+ super(PamIdentity, self).setUp()
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_pam.conf')])
+ self.identity_api = identity_pam.PamIdentity()
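+        # The PAM backend returns refs whose name mirrors the id, so the
+        # expected fixtures below are built the same way.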
+ id = uuid.uuid4().hex
+ self.tenant_in = {'id': id, 'name': id}
+ self.user_in = {'id': CONF.pam.userid, 'name': CONF.pam.userid}
+
+ def test_get_project(self):
+ tenant_out = self.identity_api.get_project(self.tenant_in['id'])
+ self.assertDictEqual(self.tenant_in, tenant_out)
+
+ def test_get_project_by_name(self):
+ tenant_in_name = self.tenant_in['name']
+ tenant_out = self.identity_api.get_project_by_name(
+ tenant_in_name, DEFAULT_DOMAIN_ID)
+ self.assertDictEqual(self.tenant_in, tenant_out)
+
+ def test_get_user(self):
+ user_out = self.identity_api.get_user(self.user_in['id'])
+ self.assertDictEqual(self.user_in, user_out)
+
+ def test_get_user_by_name(self):
+ user_out = self.identity_api.get_user_by_name(
+ self.user_in['name'], DEFAULT_DOMAIN_ID)
+ self.assertDictEqual(self.user_in, user_out)
+
+ def test_get_metadata_for_non_root(self):
+ metadata_out = self.identity_api._get_metadata(self.user_in['id'],
+ self.tenant_in['id'])
+ self.assertDictEqual({}, metadata_out)
+
+ def test_get_metadata_for_root(self):
+ metadata = {'is_admin': True}
+ metadata_out = self.identity_api._get_metadata('root',
+ self.tenant_in['id'])
+ self.assertDictEqual(metadata, metadata_out)
diff --git a/keystone/tests/test_backend_sql.py b/keystone/tests/test_backend_sql.py
new file mode 100644
index 00000000..773ae862
--- /dev/null
+++ b/keystone/tests/test_backend_sql.py
@@ -0,0 +1,415 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import sqlalchemy
+
+from keystone.common import sql
+from keystone import config
+from keystone import exception
+from keystone.tests import core as test
+
+import default_fixtures
+import test_backend
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class SqlTests(test.TestCase, sql.Base):
+
+ def setUp(self):
+ super(SqlTests, self).setUp()
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf')])
+
+ self.load_backends()
+
+ # create tables and keep an engine reference for cleanup.
+ # this must be done after the models are loaded by the managers.
+ self.engine = self.get_engine()
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+
+ # populate the engine with tables & fixtures
+ self.load_fixtures(default_fixtures)
+        # Defaulted by the data load.
+ self.user_foo['enabled'] = True
+
+ def tearDown(self):
+ sql.ModelBase.metadata.drop_all(bind=self.engine)
+ self.engine.dispose()
+ sql.set_global_engine(None)
+ super(SqlTests, self).tearDown()
+
+
+class SqlModels(SqlTests):
+ def setUp(self):
+ super(SqlModels, self).setUp()
+
+ self.metadata = sql.ModelBase.metadata
+ self.metadata.bind = self.engine
+
+ def select_table(self, name):
+ table = sqlalchemy.Table(name,
+ self.metadata,
+ autoload=True)
+ s = sqlalchemy.select([table])
+ return s
+
+ def assertExpectedSchema(self, table, cols):
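+        # 'cols' is a sequence of (column_name, type_class, length) tuples;
+        # a length of None skips the length check.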
+ table = self.select_table(table)
+ for col, type_, length in cols:
+ self.assertIsInstance(table.c[col].type, type_)
+ if length:
+ self.assertEquals(table.c[col].type.length, length)
+
+ def test_user_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('password', sql.String, 128),
+ ('domain_id', sql.String, 64),
+ ('enabled', sql.Boolean, None),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('user', cols)
+
+ def test_group_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('description', sql.Text, None),
+ ('domain_id', sql.String, 64),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('group', cols)
+
+ def test_domain_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('enabled', sql.Boolean, None))
+ self.assertExpectedSchema('domain', cols)
+
+ def test_project_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 64),
+ ('description', sql.Text, None),
+ ('domain_id', sql.String, 64),
+ ('enabled', sql.Boolean, None),
+ ('extra', sql.JsonBlob, None))
+ self.assertExpectedSchema('project', cols)
+
+ def test_role_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 255))
+ self.assertExpectedSchema('role', cols)
+
+ def test_user_project_metadata_model(self):
+ cols = (('user_id', sql.String, 64),
+ ('project_id', sql.String, 64),
+ ('data', sql.JsonBlob, None))
+ self.assertExpectedSchema('user_project_metadata', cols)
+
+ def test_user_domain_metadata_model(self):
+ cols = (('user_id', sql.String, 64),
+ ('domain_id', sql.String, 64),
+ ('data', sql.JsonBlob, None))
+ self.assertExpectedSchema('user_domain_metadata', cols)
+
+ def test_group_project_metadata_model(self):
+ cols = (('group_id', sql.String, 64),
+ ('project_id', sql.String, 64),
+ ('data', sql.JsonBlob, None))
+ self.assertExpectedSchema('group_project_metadata', cols)
+
+ def test_group_domain_metadata_model(self):
+ cols = (('group_id', sql.String, 64),
+ ('domain_id', sql.String, 64),
+ ('data', sql.JsonBlob, None))
+ self.assertExpectedSchema('group_domain_metadata', cols)
+
+ def test_user_group_membership(self):
+ cols = (('group_id', sql.String, 64),
+ ('user_id', sql.String, 64))
+ self.assertExpectedSchema('user_group_membership', cols)
+
+
+class SqlIdentity(SqlTests, test_backend.IdentityTests):
+ def test_password_hashed(self):
+ session = self.identity_api.get_session()
+ user_ref = self.identity_api._get_user(session, self.user_foo['id'])
+ self.assertNotEqual(user_ref['password'], self.user_foo['password'])
+
+ def test_delete_user_with_project_association(self):
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ self.identity_api.create_user(user['id'], user)
+ self.identity_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_projects_for_user,
+ user['id'])
+
+ def test_create_null_user_name(self):
+ user = {'id': uuid.uuid4().hex,
+ 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user['id'],
+ user)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ user['name'],
+ DEFAULT_DOMAIN_ID)
+
+ def test_create_null_project_name(self):
+ tenant = {'id': uuid.uuid4().hex,
+ 'name': None,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_project,
+ tenant['id'],
+ tenant)
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ tenant['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project_by_name,
+ tenant['name'],
+ DEFAULT_DOMAIN_ID)
+
+ def test_create_null_role_name(self):
+ role = {'id': uuid.uuid4().hex,
+ 'name': None}
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_role,
+ role['id'],
+ role)
+ self.assertRaises(exception.RoleNotFound,
+ self.identity_api.get_role,
+ role['id'])
+
+ def test_delete_project_with_user_association(self):
+ user = {'id': 'fake',
+ 'name': 'fakeuser',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ self.identity_api.create_user('fake', user)
+ self.identity_api.add_user_to_project(self.tenant_bar['id'],
+ user['id'])
+ self.identity_api.delete_project(self.tenant_bar['id'])
+ tenants = self.identity_api.get_projects_for_user(user['id'])
+ self.assertEquals(tenants, [])
+
+ def test_metadata_removed_on_delete_user(self):
+ # A test to check that the internal representation
+        # of roles is correctly updated when a user is deleted.
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ self.identity_api.create_user(user['id'], user)
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ self.identity_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ role['id'])
+ self.identity_api.delete_user(user['id'])
+
+ # Now check whether the internal representation of roles
+ # has been deleted
+ self.assertRaises(exception.MetadataNotFound,
+ self.assignment_api._get_metadata,
+ user['id'],
+ self.tenant_bar['id'])
+
+ def test_metadata_removed_on_delete_project(self):
+ # A test to check that the internal representation
+        # of roles is correctly updated when a project is deleted.
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'passwd'}
+ self.identity_api.create_user(user['id'], user)
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.identity_api.create_role(role['id'], role)
+ self.identity_api.add_role_to_user_and_project(
+ user['id'],
+ self.tenant_bar['id'],
+ role['id'])
+ self.identity_api.delete_project(self.tenant_bar['id'])
+
+ # Now check whether the internal representation of roles
+ # has been deleted
+ self.assertRaises(exception.MetadataNotFound,
+ self.assignment_api._get_metadata,
+ user['id'],
+ self.tenant_bar['id'])
+
+ def test_update_project_returns_extra(self):
+ """This tests for backwards-compatibility with an essex/folsom bug.
+
+ Non-indexed attributes were returned in an 'extra' attribute, instead
+ of on the entity itself; for consistency and backwards compatibility,
+ those attributes should be included twice.
+
+ This behavior is specific to the SQL driver.
+
+ """
+ tenant_id = uuid.uuid4().hex
+ arbitrary_key = uuid.uuid4().hex
+ arbitrary_value = uuid.uuid4().hex
+ tenant = {
+ 'id': tenant_id,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ arbitrary_key: arbitrary_value}
+ ref = self.identity_api.create_project(tenant_id, tenant)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertIsNone(ref.get('extra'))
+
+ tenant['name'] = uuid.uuid4().hex
+ ref = self.identity_api.update_project(tenant_id, tenant)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
+
+ def test_update_user_returns_extra(self):
+ """This tests for backwards-compatibility with an essex/folsom bug.
+
+ Non-indexed attributes were returned in an 'extra' attribute, instead
+ of on the entity itself; for consistency and backwards compatibility,
+ those attributes should be included twice.
+
+ This behavior is specific to the SQL driver.
+
+ """
+ user_id = uuid.uuid4().hex
+ arbitrary_key = uuid.uuid4().hex
+ arbitrary_value = uuid.uuid4().hex
+ user = {
+ 'id': user_id,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': uuid.uuid4().hex,
+ arbitrary_key: arbitrary_value}
+ ref = self.identity_api.create_user(user_id, user)
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertIsNone(ref.get('password'))
+ self.assertIsNone(ref.get('extra'))
+
+ user['name'] = uuid.uuid4().hex
+ user['password'] = uuid.uuid4().hex
+ ref = self.identity_api.update_user(user_id, user)
+ self.assertIsNone(ref.get('password'))
+ self.assertIsNone(ref['extra'].get('password'))
+ self.assertEqual(arbitrary_value, ref[arbitrary_key])
+ self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
+
+
+class SqlTrust(SqlTests, test_backend.TrustTests):
+ pass
+
+
+class SqlToken(SqlTests, test_backend.TokenTests):
+ pass
+
+
+class SqlCatalog(SqlTests, test_backend.CatalogTests):
+ def test_malformed_catalog_throws_error(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ malformed_url = "http://192.168.1.104:$(compute_port)s/v2/$(tenant)s"
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': malformed_url,
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ with self.assertRaises(exception.MalformedEndpoint):
+ self.catalog_api.get_catalog('fake-user', 'fake-tenant')
+
+ def test_get_catalog_with_empty_public_url(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': '',
+ 'service_id': service['id'],
+ }
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ catalog = self.catalog_api.get_catalog('user', 'tenant')
+ catalog_endpoint = catalog[endpoint['region']][service['type']]
+ self.assertEqual(catalog_endpoint['name'], service['name'])
+ self.assertEqual(catalog_endpoint['id'], endpoint['id'])
+ self.assertEqual(catalog_endpoint['publicURL'], '')
+ self.assertIsNone(catalog_endpoint.get('adminURL'))
+ self.assertIsNone(catalog_endpoint.get('internalURL'))
+
+ def test_create_endpoint_400(self):
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ }
+ self.catalog_api.create_service(service['id'], service.copy())
+
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': "0" * 256,
+ 'service_id': service['id'],
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ }
+
+ with self.assertRaises(exception.StringLengthExceeded):
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+
+class SqlPolicy(SqlTests, test_backend.PolicyTests):
+ pass
+
+
+class SqlInheritance(SqlTests, test_backend.InheritanceTests):
+ pass
diff --git a/keystone/tests/test_backend_templated.py b/keystone/tests/test_backend_templated.py
new file mode 100644
index 00000000..603ad82a
--- /dev/null
+++ b/keystone/tests/test_backend_templated.py
@@ -0,0 +1,67 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+from keystone.tests import core as test
+
+from keystone import exception
+
+import default_fixtures
+import test_backend
+
+DEFAULT_CATALOG_TEMPLATES = os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ 'default_catalog.templates'))
+
+
+class TestTemplatedCatalog(test.TestCase, test_backend.CatalogTests):
+
+ DEFAULT_FIXTURE = {
+ 'RegionOne': {
+ 'compute': {
+ 'adminURL': 'http://localhost:8774/v1.1/bar',
+ 'publicURL': 'http://localhost:8774/v1.1/bar',
+ 'internalURL': 'http://localhost:8774/v1.1/bar',
+ 'name': "'Compute Service'",
+ 'id': '2'
+ },
+ 'identity': {
+ 'adminURL': 'http://localhost:35357/v2.0',
+ 'publicURL': 'http://localhost:5000/v2.0',
+ 'internalURL': 'http://localhost:35357/v2.0',
+ 'name': "'Identity Service'",
+ 'id': '1'
+ }
+ }
+ }
+
+ def setUp(self):
+ super(TestTemplatedCatalog, self).setUp()
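+        # Point the templated catalog driver at the bundled
+        # default_catalog.templates fixture file.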
+ self.opt_in_group('catalog', template_file=DEFAULT_CATALOG_TEMPLATES)
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ def test_get_catalog(self):
+ catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
+ self.assertDictEqual(catalog_ref, self.DEFAULT_FIXTURE)
+
+ def test_malformed_catalog_throws_error(self):
+ (self.catalog_api.driver.templates
+ ['RegionOne']['compute']['adminURL']) = \
+ 'http://localhost:$(compute_port)s/v1.1/$(tenant)s'
+ with self.assertRaises(exception.MalformedEndpoint):
+ self.catalog_api.get_catalog('fake-user', 'fake-tenant')
diff --git a/keystone/tests/test_catalog.py b/keystone/tests/test_catalog.py
new file mode 100644
index 00000000..3c00b1e8
--- /dev/null
+++ b/keystone/tests/test_catalog.py
@@ -0,0 +1,77 @@
+import uuid
+
+import test_content_types
+
+
+BASE_URL = 'http://127.0.0.1:35357/v2'
+
+
+class V2CatalogTestCase(test_content_types.RestfulTestCase):
+ def setUp(self):
+ super(V2CatalogTestCase, self).setUp()
+ self.service_id = uuid.uuid4().hex
+ self.service = self.new_service_ref()
+ self.service['id'] = self.service_id
+ self.catalog_api.create_service(
+ self.service_id,
+ self.service.copy())
+
+ def new_ref(self):
+ """Populates a ref with attributes common to all API entities."""
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ def new_service_ref(self):
+ ref = self.new_ref()
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def _get_token_id(self, r):
+ """Applicable only to JSON."""
+ return r.result['access']['token']['id']
+
+ def assertValidErrorResponse(self, response):
+ self.assertEqual(response.status_code, 400)
+
+ def _endpoint_create(self, expected_status=200, missing_param=None):
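+        # Helper for the endpoint-create tests: builds a v2.0 endpoint
+        # request, optionally nulling out one field to exercise validation.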
+ path = '/v2.0/endpoints'
+ body = {
+ "endpoint": {
+ "adminurl": "http://localhost:8080",
+ "service_id": self.service_id,
+ "region": "regionOne",
+ "internalurl": "http://localhost:8080",
+ "publicurl": "http://localhost:8080"
+ }
+ }
+ if missing_param:
+ body['endpoint'][missing_param] = None
+ r = self.admin_request(method='POST', token=self.get_scoped_token(),
+ path=path, expected_status=expected_status,
+ body=body)
+ return body, r
+
+ def test_endpoint_create(self):
+ req_body, response = self._endpoint_create(expected_status=200)
+ self.assertTrue('endpoint' in response.result)
+ self.assertTrue('id' in response.result['endpoint'])
+ for field, value in req_body['endpoint'].iteritems():
+ self.assertEqual(response.result['endpoint'][field], value)
+
+ def test_endpoint_create_with_missing_adminurl(self):
+ req_body, response = self._endpoint_create(expected_status=200,
+ missing_param='adminurl')
+ self.assertEqual(response.status_code, 200)
+
+ def test_endpoint_create_with_missing_internalurl(self):
+ req_body, response = self._endpoint_create(expected_status=200,
+ missing_param='internalurl')
+ self.assertEqual(response.status_code, 200)
+
+ def test_endpoint_create_with_missing_publicurl(self):
+ req_body, response = self._endpoint_create(expected_status=400,
+ missing_param='publicurl')
+ self.assertValidErrorResponse(response)
diff --git a/keystone/tests/test_cert_setup.py b/keystone/tests/test_cert_setup.py
new file mode 100644
index 00000000..88fa6d75
--- /dev/null
+++ b/keystone/tests/test_cert_setup.py
@@ -0,0 +1,101 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+
+from keystone.tests import core as test
+
+from keystone.common import openssl
+from keystone import exception
+from keystone import token
+
+import default_fixtures
+
+
+ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+SSLDIR = "%s/tests/ssl/" % ROOTDIR
+CONF = test.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+def rootdir(*p):
+ return os.path.join(SSLDIR, *p)
+
+
+CERTDIR = rootdir("certs")
+KEYDIR = rootdir("private")
+
+
+class CertSetupTestCase(test.TestCase):
+
+ def setUp(self):
+ super(CertSetupTestCase, self).setUp()
+ CONF.signing.certfile = os.path.join(CERTDIR, 'signing_cert.pem')
+ CONF.signing.ca_certs = os.path.join(CERTDIR, "ca.pem")
+ CONF.signing.ca_key = os.path.join(CERTDIR, "cakey.pem")
+ CONF.signing.keyfile = os.path.join(KEYDIR, "signing_key.pem")
+
+ CONF.ssl.ca_certs = CONF.signing.ca_certs
+ CONF.ssl.ca_key = CONF.signing.ca_key
+
+ CONF.ssl.certfile = os.path.join(CERTDIR, 'keystone.pem')
+ CONF.ssl.keyfile = os.path.join(KEYDIR, 'keystonekey.pem')
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.controller = token.controllers.Auth()
+
+ def test_can_handle_missing_certs(self):
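+        # Point the signing certfile at a nonexistent path so that token
+        # signing fails and authenticate surfaces an UnexpectedError.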
+ self.opt_in_group('signing', certfile='invalid')
+ user = {
+ 'id': 'fake1',
+ 'name': 'fake1',
+ 'password': 'fake1',
+ 'domain_id': DEFAULT_DOMAIN_ID
+ }
+ body_dict = {
+ 'passwordCredentials': {
+ 'userId': user['id'],
+ 'password': user['password'],
+ },
+ }
+ self.identity_api.create_user(user['id'], user)
+ self.assertRaises(exception.UnexpectedError,
+ self.controller.authenticate,
+ {}, body_dict)
+
+ def test_create_pki_certs(self):
+ pki = openssl.ConfigurePKI(None, None)
+ pki.run()
+ self.assertTrue(os.path.exists(CONF.signing.certfile))
+ self.assertTrue(os.path.exists(CONF.signing.ca_certs))
+ self.assertTrue(os.path.exists(CONF.signing.keyfile))
+
+ def test_create_ssl_certs(self):
+ ssl = openssl.ConfigureSSL(None, None)
+ ssl.run()
+ self.assertTrue(os.path.exists(CONF.ssl.ca_certs))
+ self.assertTrue(os.path.exists(CONF.ssl.certfile))
+ self.assertTrue(os.path.exists(CONF.ssl.keyfile))
+
+ def tearDown(self):
+ try:
+ shutil.rmtree(rootdir(SSLDIR))
+ except OSError:
+ pass
+ super(CertSetupTestCase, self).tearDown()
diff --git a/keystone/tests/test_config.py b/keystone/tests/test_config.py
new file mode 100644
index 00000000..28b372a6
--- /dev/null
+++ b/keystone/tests/test_config.py
@@ -0,0 +1,19 @@
+from keystone.tests import core as test
+
+from keystone import config
+from keystone import exception
+
+
+CONF = config.CONF
+
+
+class ConfigTestCase(test.TestCase):
+ def test_paste_config(self):
+ self.assertEqual(config.find_paste_config(),
+ test.etcdir('keystone-paste.ini'))
+ self.opt_in_group('paste_deploy', config_file='XYZ')
+ self.assertRaises(exception.PasteConfigNotFound,
+ config.find_paste_config)
+ self.opt_in_group('paste_deploy', config_file='')
+ self.assertEqual(config.find_paste_config(),
+ test.etcdir('keystone.conf.sample'))
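A hedged reading of the fallback order this test asserts: find_paste_config() prefers the deployed keystone-paste.ini, raises PasteConfigNotFound when [paste_deploy] config_file names a missing file, and falls back to keystone.conf.sample when the option is empty. A minimal sketch of calling it defensively, under that assumption:

    from keystone import config
    from keystone import exception

    try:
        paste_ini = config.find_paste_config()
    except exception.PasteConfigNotFound:
        # only expected when config_file points at a missing file
        paste_ini = None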
diff --git a/keystone/tests/test_content_types.py b/keystone/tests/test_content_types.py
new file mode 100644
index 00000000..7c874732
--- /dev/null
+++ b/keystone/tests/test_content_types.py
@@ -0,0 +1,1104 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import io
+import uuid
+
+from lxml import etree
+import webtest
+
+from keystone.tests import core as test
+
+from keystone.common import extension
+from keystone.common import serializer
+from keystone.openstack.common import jsonutils
+
+import default_fixtures
+
+
+class RestfulTestCase(test.TestCase):
+ """Performs restful tests against the WSGI app over HTTP.
+
+ This class launches public & admin WSGI servers for every test, which can
+ be accessed by calling ``public_request()`` or ``admin_request()``,
+ respectively.
+
+ ``restful_request()`` and ``request()`` methods are also exposed if you
+ need to bypass restful conventions or access HTTP details in your test
+ implementation.
+
+ Three new asserts are provided:
+
+ * ``assertResponseSuccessful``: called automatically for every request
+ unless an ``expected_status`` is provided
+ * ``assertResponseStatus``: called instead of ``assertResponseSuccessful``,
+ if an ``expected_status`` is provided
+ * ``assertValidResponseHeaders``: validates that the response headers
+ appear as expected
+
+ Requests are automatically serialized according to the defined
+ ``content_type``. Responses are automatically deserialized as well, and
+ available in the ``response.result`` attribute. The original body content is
+ available in the ``response.body`` attribute.
+
+ """
+
+ # default content type to test
+ content_type = 'json'
+
+ def setUp(self):
+ super(RestfulTestCase, self).setUp()
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+ self.public_app = webtest.TestApp(
+ self.loadapp('keystone', name='main'))
+ self.admin_app = webtest.TestApp(
+ self.loadapp('keystone', name='admin'))
+
+ # TODO(termie): add an admin user to the fixtures and use that user;
+ # override the fixtures for now
+ self.metadata_foobar = self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ def tearDown(self):
+ """Kill running servers and release references to avoid leaks."""
+ self.public_app = None
+ self.admin_app = None
+ super(RestfulTestCase, self).tearDown()
+
+ def request(self, app, path, body=None, headers=None, token=None,
+ expected_status=None, **kwargs):
+ if headers:
+ headers = dict([(str(k), str(v)) for k, v in headers.iteritems()])
+ else:
+ headers = {}
+
+ if token:
+ headers['X-Auth-Token'] = str(token)
+
+ # setting body this way because of:
+ # https://github.com/Pylons/webtest/issues/71
+ if body:
+ kwargs['body_file'] = io.BytesIO(body)
+
+ # sets environ['REMOTE_ADDR']
+ kwargs.setdefault('remote_addr', 'localhost')
+
+ response = app.request(path, headers=headers,
+ status=expected_status, **kwargs)
+
+ return response
+
+ def assertResponseSuccessful(self, response):
+ """Asserts that a status code lies inside the 2xx range.
+
+ :param response: :py:class:`httplib.HTTPResponse` to be
+ verified to have a status code between 200 and 299.
+
+ example::
+
+ self.assertResponseSuccessful(response)
+ """
+ self.assertTrue(
+ response.status_code >= 200 and response.status_code <= 299,
+ 'Status code %d is outside of the expected range (2xx)\n\n%s' %
+ (response.status_code, response.body))
+
+ def assertResponseStatus(self, response, expected_status):
+ """Asserts a specific status code on the response.
+
+ :param response: :py:class:`httplib.HTTPResponse`
+ :param expected_status: The specific ``status`` result expected
+
+ example::
+
+ self.assertResponseStatus(response, 203)
+ """
+ self.assertEqual(
+ response.status_code,
+ expected_status,
+ 'Status code %s is not %s, as expected\n\n%s' %
+ (response.status_code, expected_status, response.body))
+
+ def assertValidResponseHeaders(self, response):
+ """Ensures that response headers appear as expected."""
+ self.assertIn('X-Auth-Token', response.headers.get('Vary'))
+
+ def _to_content_type(self, body, headers, content_type=None):
+ """Attempt to encode JSON and XML automatically."""
+ content_type = content_type or self.content_type
+
+ if content_type == 'json':
+ headers['Accept'] = 'application/json'
+ if body:
+ headers['Content-Type'] = 'application/json'
+ return jsonutils.dumps(body)
+ elif content_type == 'xml':
+ headers['Accept'] = 'application/xml'
+ if body:
+ headers['Content-Type'] = 'application/xml'
+ return serializer.to_xml(body)
+
+ def _from_content_type(self, response, content_type=None):
+ """Attempt to decode JSON and XML automatically, if detected."""
+ content_type = content_type or self.content_type
+
+ if response.body is not None and response.body.strip():
+ # if a body is provided, a Content-Type is also expected
+ header = response.headers.get('Content-Type', None)
+ self.assertIn(content_type, header)
+
+ if content_type == 'json':
+ response.result = jsonutils.loads(response.body)
+ elif content_type == 'xml':
+ response.result = etree.fromstring(response.body)
+
+ def restful_request(self, method='GET', headers=None, body=None,
+ content_type=None, **kwargs):
+ """Serializes/deserializes json/xml as request/response body.
+
+ .. WARNING::
+
+ * Existing Accept header will be overwritten.
+ * Existing Content-Type header will be overwritten.
+
+ """
+ # Initialize headers dictionary
+ headers = {} if not headers else headers
+
+ body = self._to_content_type(body, headers, content_type)
+
+ # Perform the HTTP request/response
+ response = self.request(method=method, headers=headers, body=body,
+ **kwargs)
+
+ self._from_content_type(response, content_type)
+
+ # we can save some code & improve coverage by always doing this
+ if method != 'HEAD' and response.status_code >= 400:
+ self.assertValidErrorResponse(response)
+
+ # Contains the decoded response.body
+ return response
+
+ def _request(self, convert=True, **kwargs):
+ if convert:
+ response = self.restful_request(**kwargs)
+ else:
+ response = self.request(**kwargs)
+
+ self.assertValidResponseHeaders(response)
+ return response
+
+ def public_request(self, **kwargs):
+ return self._request(app=self.public_app, **kwargs)
+
+ def admin_request(self, **kwargs):
+ return self._request(app=self.admin_app, **kwargs)
+
+ def _get_token(self, body):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.public_request(method='POST', path='/v2.0/tokens', body=body)
+ return self._get_token_id(r)
+
+ def get_unscoped_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ return self._get_token({
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ },
+ })
+
+ def get_scoped_token(self, tenant_id=None):
+ """Convenience method so that we can test authenticated requests."""
+ if not tenant_id:
+ tenant_id = self.tenant_bar['id']
+ return self._get_token({
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ 'tenantId': tenant_id,
+ },
+ })
+
+ def _get_token_id(self, r):
+ """Helper method to return a token ID from a response.
+
+ This needs to be overridden by child classes based on their content type.
+
+ """
+ raise NotImplementedError()
+
+
+class CoreApiTests(object):
+ def assertValidError(self, error):
+ """Applicable to XML and JSON."""
+ self.assertIsNotNone(error.get('code'))
+ self.assertIsNotNone(error.get('title'))
+ self.assertIsNotNone(error.get('message'))
+
+ def assertValidVersion(self, version):
+ """Applicable to XML and JSON.
+
+ However, navigating links and media-types differs between content
+ types so they need to be validated separately.
+
+ """
+ self.assertIsNotNone(version)
+ self.assertIsNotNone(version.get('id'))
+ self.assertIsNotNone(version.get('status'))
+ self.assertIsNotNone(version.get('updated'))
+
+ def assertValidExtension(self, extension):
+ """Applicable to XML and JSON.
+
+ However, navigating extension links differs between content types.
+ They need to be validated separately with assertValidExtensionLink.
+
+ """
+ self.assertIsNotNone(extension)
+ self.assertIsNotNone(extension.get('name'))
+ self.assertIsNotNone(extension.get('namespace'))
+ self.assertIsNotNone(extension.get('alias'))
+ self.assertIsNotNone(extension.get('updated'))
+
+ def assertValidExtensionLink(self, link):
+ """Applicable to XML and JSON."""
+ self.assertIsNotNone(link.get('rel'))
+ self.assertIsNotNone(link.get('type'))
+ self.assertIsNotNone(link.get('href'))
+
+ def assertValidTenant(self, tenant):
+ """Applicable to XML and JSON."""
+ self.assertIsNotNone(tenant.get('id'))
+ self.assertIsNotNone(tenant.get('name'))
+
+ def assertValidUser(self, user):
+ """Applicable to XML and JSON."""
+ self.assertIsNotNone(user.get('id'))
+ self.assertIsNotNone(user.get('name'))
+
+ def assertValidRole(self, tenant):
+ """Applicable to XML and JSON."""
+ self.assertIsNotNone(tenant.get('id'))
+ self.assertIsNotNone(tenant.get('name'))
+
+ def test_public_not_found(self):
+ r = self.public_request(
+ path='/%s' % uuid.uuid4().hex,
+ expected_status=404)
+ self.assertValidErrorResponse(r)
+
+ def test_admin_not_found(self):
+ r = self.admin_request(
+ path='/%s' % uuid.uuid4().hex,
+ expected_status=404)
+ self.assertValidErrorResponse(r)
+
+ def test_public_multiple_choice(self):
+ r = self.public_request(path='/', expected_status=300)
+ self.assertValidMultipleChoiceResponse(r)
+
+ def test_admin_multiple_choice(self):
+ r = self.admin_request(path='/', expected_status=300)
+ self.assertValidMultipleChoiceResponse(r)
+
+ def test_public_version(self):
+ r = self.public_request(path='/v2.0/')
+ self.assertValidVersionResponse(r)
+
+ def test_admin_version(self):
+ r = self.admin_request(path='/v2.0/')
+ self.assertValidVersionResponse(r)
+
+ def test_public_extensions(self):
+ r = self.public_request(path='/v2.0/extensions')
+ self.assertValidExtensionListResponse(r,
+ extension.PUBLIC_EXTENSIONS)
+
+ def test_admin_extensions(self):
+ r = self.admin_request(path='/v2.0/extensions')
+ self.assertValidExtensionListResponse(r,
+ extension.ADMIN_EXTENSIONS)
+
+ def test_admin_extensions_404(self):
+ self.admin_request(path='/v2.0/extensions/invalid-extension',
+ expected_status=404)
+
+ def test_public_osksadm_extension_404(self):
+ self.public_request(path='/v2.0/extensions/OS-KSADM',
+ expected_status=404)
+
+ def test_admin_osksadm_extension(self):
+ r = self.admin_request(path='/v2.0/extensions/OS-KSADM')
+ self.assertValidExtensionResponse(r,
+ extension.ADMIN_EXTENSIONS)
+
+ def test_authenticate(self):
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ 'tenantId': self.tenant_bar['id'],
+ },
+ },
+ expected_status=200)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_authenticate_unscoped(self):
+ r = self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ body={
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_foo['name'],
+ 'password': self.user_foo['password'],
+ },
+ },
+ },
+ expected_status=200)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_get_tenants_for_token(self):
+ r = self.public_request(path='/v2.0/tenants',
+ token=self.get_scoped_token())
+ self.assertValidTenantListResponse(r)
+
+ def test_validate_token(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tokens/%(token_id)s' % {
+ 'token_id': token,
+ },
+ token=token)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_validate_token_service_role(self):
+ self.metadata_foobar = self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_service['id'],
+ self.role_service['id'])
+
+ token = self.get_scoped_token(tenant_id='service')
+ r = self.admin_request(
+ path='/v2.0/tokens/%s' % token,
+ token=token)
+ self.assertValidAuthenticationResponse(r)
+
+ def test_validate_token_belongs_to(self):
+ token = self.get_scoped_token()
+ path = ('/v2.0/tokens/%s?belongsTo=%s' % (token,
+ self.tenant_bar['id']))
+ r = self.admin_request(path=path, token=token)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_validate_token_no_belongs_to_still_returns_catalog(self):
+ token = self.get_scoped_token()
+ path = ('/v2.0/tokens/%s' % token)
+ r = self.admin_request(path=path, token=token)
+ self.assertValidAuthenticationResponse(r, require_service_catalog=True)
+
+ def test_validate_token_head(self):
+ """The same call as above, except using HEAD.
+
+ There's no response to validate here, but this is included for the
+ sake of completely covering the core API.
+
+ """
+ token = self.get_scoped_token()
+ self.admin_request(
+ method='HEAD',
+ path='/v2.0/tokens/%(token_id)s' % {
+ 'token_id': token,
+ },
+ token=token,
+ expected_status=204)
+
+ def test_endpoints(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tokens/%(token_id)s/endpoints' % {
+ 'token_id': token,
+ },
+ token=token)
+ self.assertValidEndpointListResponse(r)
+
+ def test_get_tenant(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants/%(tenant_id)s' % {
+ 'tenant_id': self.tenant_bar['id'],
+ },
+ token=token)
+ self.assertValidTenantResponse(r)
+
+ def test_get_tenant_by_name(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants?name=%(tenant_name)s' % {
+ 'tenant_name': self.tenant_bar['name'],
+ },
+ token=token)
+ self.assertValidTenantResponse(r)
+
+ def test_get_user_roles(self):
+ self.skipTest('Blocked by bug 933565')
+
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/users/%(user_id)s/roles' % {
+ 'user_id': self.user_foo['id'],
+ },
+ token=token)
+ self.assertValidRoleListResponse(r)
+
+ def test_get_user_roles_with_tenant(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
+ 'tenant_id': self.tenant_bar['id'],
+ 'user_id': self.user_foo['id'],
+ },
+ token=token)
+ self.assertValidRoleListResponse(r)
+
+ def test_get_user(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ },
+ token=token)
+ self.assertValidUserResponse(r)
+
+ def test_get_user_by_name(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ path='/v2.0/users?name=%(user_name)s' % {
+ 'user_name': self.user_foo['name'],
+ },
+ token=token)
+ self.assertValidUserResponse(r)
+
+ def test_create_update_user_invalid_enabled_type(self):
+ # Enforce usage of boolean for 'enabled' field in JSON and XML
+ token = self.get_scoped_token()
+
+ # Test CREATE request
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ # In XML, only "true|false" are converted to boolean.
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ # In JSON, 0|1 are not booleans
+ 'enabled': 0,
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ # Test UPDATE request
+ path = '/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ }
+
+ r = self.admin_request(
+ method='PUT',
+ path=path,
+ body={
+ 'user': {
+ # In XML, only "true|false" are converted to boolean.
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(
+ method='PUT',
+ path=path,
+ body={
+ 'user': {
+ # In JSON, 0|1 are not booleans
+ 'enabled': 1,
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ def test_error_response(self):
+ """This triggers assertValidErrorResponse by convention."""
+ self.public_request(path='/v2.0/tenants', expected_status=401)
+
+ def test_invalid_parameter_error_response(self):
+ token = self.get_scoped_token()
+ bad_body = {
+ 'OS-KSADM:service%s' % uuid.uuid4().hex: {
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ },
+ }
+ res = self.admin_request(method='POST',
+ path='/v2.0/OS-KSADM/services',
+ body=bad_body,
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(res)
+ res = self.admin_request(method='POST',
+ path='/v2.0/users',
+ body=bad_body,
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(res)
+
+
+class JsonTestCase(RestfulTestCase, CoreApiTests):
+ content_type = 'json'
+
+ def _get_token_id(self, r):
+ """Applicable only to JSON."""
+ return r.result['access']['token']['id']
+
+ def assertValidErrorResponse(self, r):
+ self.assertIsNotNone(r.result.get('error'))
+ self.assertValidError(r.result['error'])
+ self.assertEqual(r.result['error']['code'], r.status_code)
+
+ def assertValidExtension(self, extension, expected):
+ super(JsonTestCase, self).assertValidExtension(extension)
+ descriptions = [ext['description'] for ext in expected.itervalues()]
+ description = extension.get('description')
+ self.assertIsNotNone(description)
+ self.assertIn(description, descriptions)
+ self.assertIsNotNone(extension.get('links'))
+ self.assertNotEmpty(extension.get('links'))
+ for link in extension.get('links'):
+ self.assertValidExtensionLink(link)
+
+ def assertValidExtensionListResponse(self, r, expected):
+ self.assertIsNotNone(r.result.get('extensions'))
+ self.assertIsNotNone(r.result['extensions'].get('values'))
+ self.assertNotEmpty(r.result['extensions'].get('values'))
+ for extension in r.result['extensions']['values']:
+ self.assertValidExtension(extension, expected)
+
+ def assertValidExtensionResponse(self, r, expected):
+ self.assertValidExtension(r.result.get('extension'), expected)
+
+ def assertValidAuthenticationResponse(self, r,
+ require_service_catalog=False):
+ self.assertIsNotNone(r.result.get('access'))
+ self.assertIsNotNone(r.result['access'].get('token'))
+ self.assertIsNotNone(r.result['access'].get('user'))
+
+ # validate token
+ self.assertIsNotNone(r.result['access']['token'].get('id'))
+ self.assertIsNotNone(r.result['access']['token'].get('expires'))
+ tenant = r.result['access']['token'].get('tenant')
+ if tenant is not None:
+ # validate tenant
+ self.assertIsNotNone(tenant.get('id'))
+ self.assertIsNotNone(tenant.get('name'))
+
+ # validate user
+ self.assertIsNotNone(r.result['access']['user'].get('id'))
+ self.assertIsNotNone(r.result['access']['user'].get('name'))
+
+ if require_service_catalog:
+ # roles are only provided with a service catalog
+ roles = r.result['access']['user'].get('roles')
+ self.assertNotEmpty(roles)
+ for role in roles:
+ self.assertIsNotNone(role.get('name'))
+
+ serviceCatalog = r.result['access'].get('serviceCatalog')
+ # validate service catalog
+ if require_service_catalog:
+ self.assertIsNotNone(serviceCatalog)
+ if serviceCatalog is not None:
+ self.assertTrue(isinstance(serviceCatalog, list))
+ if require_service_catalog:
+ self.assertNotEmpty(serviceCatalog)
+ for service in r.result['access']['serviceCatalog']:
+ # validate service
+ self.assertIsNotNone(service.get('name'))
+ self.assertIsNotNone(service.get('type'))
+
+ # services contain at least one endpoint
+ self.assertIsNotNone(service.get('endpoints'))
+ self.assertNotEmpty(service['endpoints'])
+ for endpoint in service['endpoints']:
+ # validate service endpoint
+ self.assertIsNotNone(endpoint.get('publicURL'))
+
+ def assertValidTenantListResponse(self, r):
+ self.assertIsNotNone(r.result.get('tenants'))
+ self.assertNotEmpty(r.result['tenants'])
+ for tenant in r.result['tenants']:
+ self.assertValidTenant(tenant)
+ self.assertIsNotNone(tenant.get('enabled'))
+ self.assertIn(tenant.get('enabled'), [True, False])
+
+ def assertValidUserResponse(self, r):
+ self.assertIsNotNone(r.result.get('user'))
+ self.assertValidUser(r.result['user'])
+
+ def assertValidTenantResponse(self, r):
+ self.assertIsNotNone(r.result.get('tenant'))
+ self.assertValidTenant(r.result['tenant'])
+
+ def assertValidRoleListResponse(self, r):
+ self.assertIsNotNone(r.result.get('roles'))
+ self.assertNotEmpty(r.result['roles'])
+ for role in r.result['roles']:
+ self.assertValidRole(role)
+
+ def assertValidVersion(self, version):
+ super(JsonTestCase, self).assertValidVersion(version)
+
+ self.assertIsNotNone(version.get('links'))
+ self.assertNotEmpty(version.get('links'))
+ for link in version.get('links'):
+ self.assertIsNotNone(link.get('rel'))
+ self.assertIsNotNone(link.get('href'))
+
+ self.assertIsNotNone(version.get('media-types'))
+ self.assertNotEmpty(version.get('media-types'))
+ for media in version.get('media-types'):
+ self.assertIsNotNone(media.get('base'))
+ self.assertIsNotNone(media.get('type'))
+
+ def assertValidMultipleChoiceResponse(self, r):
+ self.assertIsNotNone(r.result.get('versions'))
+ self.assertIsNotNone(r.result['versions'].get('values'))
+ self.assertNotEmpty(r.result['versions']['values'])
+ for version in r.result['versions']['values']:
+ self.assertValidVersion(version)
+
+ def assertValidVersionResponse(self, r):
+ self.assertValidVersion(r.result.get('version'))
+
+ def assertValidEndpointListResponse(self, r):
+ self.assertIsNotNone(r.result.get('endpoints'))
+ self.assertNotEmpty(r.result['endpoints'])
+ for endpoint in r.result['endpoints']:
+ self.assertIsNotNone(endpoint.get('id'))
+ self.assertIsNotNone(endpoint.get('name'))
+ self.assertIsNotNone(endpoint.get('type'))
+ self.assertIsNotNone(endpoint.get('publicURL'))
+ self.assertIsNotNone(endpoint.get('internalURL'))
+ self.assertIsNotNone(endpoint.get('adminURL'))
+
+ def test_service_crud_requires_auth(self):
+ """Service CRUD should 401 without an X-Auth-Token (bug 1006822)."""
+ # values here don't matter because we should 401 before they're checked
+ service_path = '/v2.0/OS-KSADM/services/%s' % uuid.uuid4().hex
+ service_body = {
+ 'OS-KSADM:service': {
+ 'name': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ },
+ }
+
+ r = self.admin_request(method='GET',
+ path='/v2.0/OS-KSADM/services',
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='POST',
+ path='/v2.0/OS-KSADM/services',
+ body=service_body,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='GET',
+ path=service_path,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ r = self.admin_request(method='DELETE',
+ path=service_path,
+ expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ def test_user_role_list_requires_auth(self):
+ """User role list should 401 without an X-Auth-Token (bug 1006815)."""
+ # values here don't matter because we should 401 before they're checked
+ path = '/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
+ 'tenant_id': uuid.uuid4().hex,
+ 'user_id': uuid.uuid4().hex,
+ }
+
+ r = self.admin_request(path=path, expected_status=401)
+ self.assertValidErrorResponse(r)
+
+ def test_fetch_revocation_list_nonadmin_fails(self):
+ self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/revoked',
+ expected_status=401)
+
+ def test_fetch_revocation_list_admin_200(self):
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ method='GET',
+ path='/v2.0/tokens/revoked',
+ token=token,
+ expected_status=200)
+ self.assertValidRevocationListResponse(r)
+
+ def assertValidRevocationListResponse(self, response):
+ self.assertIsNotNone(response.result['signed'])
+
+ def test_create_update_user_json_invalid_enabled_type(self):
+ # Enforce usage of boolean for 'enabled' field in JSON
+ token = self.get_scoped_token()
+
+ # Test CREATE request
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/users',
+ body={
+ 'user': {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ # In JSON, "true|false" are not boolean
+ 'enabled': "true",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ # Test UPDATE request
+ r = self.admin_request(
+ method='PUT',
+ path='/v2.0/users/%(user_id)s' % {
+ 'user_id': self.user_foo['id'],
+ },
+ body={
+ 'user': {
+ # In JSON, "true|false" are not boolean
+ 'enabled': "true",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+
+class XmlTestCase(RestfulTestCase, CoreApiTests):
+ xmlns = 'http://docs.openstack.org/identity/api/v2.0'
+ content_type = 'xml'
+
+ def _get_token_id(self, r):
+ return r.result.find(self._tag('token')).get('id')
+
+ def _tag(self, tag_name, xmlns=None):
+ """Helper method to build an namespaced element name."""
+ return '{%(ns)s}%(tag)s' % {'ns': xmlns or self.xmlns, 'tag': tag_name}
+
+ def assertValidErrorResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('error'))
+
+ self.assertValidError(xml)
+ self.assertEqual(xml.get('code'), str(r.status_code))
+
+ def assertValidExtension(self, extension, expected):
+ super(XmlTestCase, self).assertValidExtension(extension)
+
+ self.assertIsNotNone(extension.find(self._tag('description')))
+ self.assertTrue(extension.find(self._tag('description')).text)
+ links = extension.find(self._tag('links'))
+ self.assertNotEmpty(links.findall(self._tag('link')))
+ descriptions = [ext['description'] for ext in expected.itervalues()]
+ description = extension.find(self._tag('description')).text
+ self.assertIn(description, descriptions)
+ for link in links.findall(self._tag('link')):
+ self.assertValidExtensionLink(link)
+
+ def assertValidExtensionListResponse(self, r, expected):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('extensions'))
+ self.assertNotEmpty(xml.findall(self._tag('extension')))
+ for ext in xml.findall(self._tag('extension')):
+ self.assertValidExtension(ext, expected)
+
+ def assertValidExtensionResponse(self, r, expected):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('extension'))
+
+ self.assertValidExtension(xml, expected)
+
+ def assertValidVersion(self, version):
+ super(XmlTestCase, self).assertValidVersion(version)
+
+ links = version.find(self._tag('links'))
+ self.assertIsNotNone(links)
+ self.assertNotEmpty(links.findall(self._tag('link')))
+ for link in links.findall(self._tag('link')):
+ self.assertIsNotNone(link.get('rel'))
+ self.assertIsNotNone(link.get('href'))
+
+ media_types = version.find(self._tag('media-types'))
+ self.assertIsNotNone(media_types)
+ self.assertNotEmpty(media_types.findall(self._tag('media-type')))
+ for media in media_types.findall(self._tag('media-type')):
+ self.assertIsNotNone(media.get('base'))
+ self.assertIsNotNone(media.get('type'))
+
+ def assertValidMultipleChoiceResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('versions'))
+
+ self.assertNotEmpty(xml.findall(self._tag('version')))
+ for version in xml.findall(self._tag('version')):
+ self.assertValidVersion(version)
+
+ def assertValidVersionResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('version'))
+
+ self.assertValidVersion(xml)
+
+ def assertValidEndpointListResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('endpoints'))
+
+ self.assertNotEmpty(xml.findall(self._tag('endpoint')))
+ for endpoint in xml.findall(self._tag('endpoint')):
+ self.assertIsNotNone(endpoint.get('id'))
+ self.assertIsNotNone(endpoint.get('name'))
+ self.assertIsNotNone(endpoint.get('type'))
+ self.assertIsNotNone(endpoint.get('publicURL'))
+ self.assertIsNotNone(endpoint.get('internalURL'))
+ self.assertIsNotNone(endpoint.get('adminURL'))
+
+ def assertValidTenantResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('tenant'))
+
+ self.assertValidTenant(xml)
+
+ def assertValidUserResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('user'))
+
+ self.assertValidUser(xml)
+
+ def assertValidRoleListResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('roles'))
+
+ self.assertNotEmpty(r.result.findall(self._tag('role')))
+ for role in r.result.findall(self._tag('role')):
+ self.assertValidRole(role)
+
+ def assertValidAuthenticationResponse(self, r,
+ require_service_catalog=False):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('access'))
+
+ # validate token
+ token = xml.find(self._tag('token'))
+ self.assertIsNotNone(token)
+ self.assertIsNotNone(token.get('id'))
+ self.assertIsNotNone(token.get('expires'))
+ tenant = token.find(self._tag('tenant'))
+ if tenant is not None:
+ # validate tenant
+ self.assertValidTenant(tenant)
+ self.assertIn(tenant.get('enabled'), ['true', 'false'])
+
+ user = xml.find(self._tag('user'))
+ self.assertIsNotNone(user)
+ self.assertIsNotNone(user.get('id'))
+ self.assertIsNotNone(user.get('name'))
+
+ if require_service_catalog:
+ # roles are only provided with a service catalog
+ roles = user.findall(self._tag('role'))
+ self.assertNotEmpty(roles)
+ for role in roles:
+ self.assertIsNotNone(role.get('name'))
+
+ serviceCatalog = xml.find(self._tag('serviceCatalog'))
+ # validate the serviceCatalog
+ if require_service_catalog:
+ self.assertIsNotNone(serviceCatalog)
+ if serviceCatalog is not None:
+ services = serviceCatalog.findall(self._tag('service'))
+ if require_service_catalog:
+ self.assertNotEmpty(services)
+ for service in services:
+ # validate service
+ self.assertIsNotNone(service.get('name'))
+ self.assertIsNotNone(service.get('type'))
+
+ # services contain at least one endpoint
+ endpoints = service.findall(self._tag('endpoint'))
+ self.assertNotEmpty(endpoints)
+ for endpoint in endpoints:
+ # validate service endpoint
+ self.assertIsNotNone(endpoint.get('publicURL'))
+
+ def assertValidTenantListResponse(self, r):
+ xml = r.result
+ self.assertEqual(xml.tag, self._tag('tenants'))
+
+ self.assertNotEmpty(r.result)
+ for tenant in r.result.findall(self._tag('tenant')):
+ self.assertValidTenant(tenant)
+ self.assertIn(tenant.get('enabled'), ['true', 'false'])
+
+ def test_authenticate_with_invalid_xml_in_password(self):
+ # public_request would auto escape the ampersand
+ self.public_request(
+ method='POST',
+ path='/v2.0/tokens',
+ headers={
+ 'Content-Type': 'application/xml'
+ },
+ body="""
+ <?xml version="1.0" encoding="UTF-8"?>
+ <auth xmlns="http://docs.openstack.org/identity/api/v2.0"
+ tenantId="bar">
+ <passwordCredentials username="FOO" password="&"/>
+ </auth>
+ """,
+ expected_status=400,
+ convert=False)
+
+ def test_add_tenant_xml(self):
+ """Create a tenant without providing description field."""
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/tenants',
+ headers={
+ 'Content-Type': 'application/xml',
+ 'X-Auth-Token': token
+ },
+ body="""
+ <?xml version="1.0" encoding="UTF-8"?>
+ <tenant xmlns="http://docs.openstack.org/identity/api/v2.0"
+ enabled="true" name="ACME Corp">
+ <description></description>
+ </tenant>
+ """,
+ convert=False)
+ self._from_content_type(r, 'json')
+ self.assertIsNotNone(r.result.get('tenant'))
+ self.assertValidTenant(r.result['tenant'])
+ self.assertEqual(r.result['tenant'].get('description'), "")
+
+ def test_add_tenant_json(self):
+ """Create a tenant without providing description field."""
+ token = self.get_scoped_token()
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/tenants',
+ headers={
+ 'Content-Type': 'application/json',
+ 'X-Auth-Token': token
+ },
+ body="""
+ {"tenant":{
+ "name":"test1",
+ "description":"",
+ "enabled":true}
+ }
+ """,
+ convert=False)
+ self._from_content_type(r, 'json')
+ self.assertIsNotNone(r.result.get('tenant'))
+ self.assertValidTenant(r.result['tenant'])
+ self.assertEqual(r.result['tenant'].get('description'), "")
+
+ def test_create_project_invalid_enabled_type_string(self):
+ # Forbidden usage of string for 'enabled' field in JSON and XML
+ token = self.get_scoped_token()
+
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/tenants',
+ body={
+ 'tenant': {
+ 'name': uuid.uuid4().hex,
+ # In XML, only "true|false" are converted to boolean.
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
+
+ def test_update_project_invalid_enabled_type_string(self):
+ # Forbidden usage of string for 'enabled' field in JSON and XML
+ token = self.get_scoped_token()
+
+ path = '/v2.0/tenants/%(tenant_id)s' % {
+ 'tenant_id': self.tenant_bar['id'],
+ }
+
+ r = self.admin_request(
+ method='PUT',
+ path=path,
+ body={
+ 'tenant': {
+ # In XML, only "true|false" are converted to boolean.
+ 'enabled': "False",
+ },
+ },
+ token=token,
+ expected_status=400)
+ self.assertValidErrorResponse(r)
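RestfulTestCase above is a wrapper around webtest; the same request/deserialize cycle can be sketched without the harness. This is illustrative only: the trivial WSGI app below stands in for keystone's real 'main' and 'admin' applications loaded via self.loadapp().

    import webtest

    from keystone.openstack.common import jsonutils

    def trivial_app(environ, start_response):
        # stand-in for the apps loaded in the tests above
        start_response('200 OK', [('Content-Type', 'application/json')])
        return ['{"version": {"id": "v2.0", "status": "stable"}}']

    app = webtest.TestApp(trivial_app)
    resp = app.request('/v2.0/', headers={'Accept': 'application/json'},
                       status=200)
    body = jsonutils.loads(resp.body)
    assert body['version']['id'] == 'v2.0'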
diff --git a/keystone/tests/test_contrib_s3_core.py b/keystone/tests/test_contrib_s3_core.py
new file mode 100644
index 00000000..3cf799bc
--- /dev/null
+++ b/keystone/tests/test_contrib_s3_core.py
@@ -0,0 +1,61 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests import core as test
+
+from keystone.contrib import ec2
+from keystone.contrib import s3
+
+from keystone import exception
+
+
+class S3ContribCore(test.TestCase):
+ def setUp(self):
+ super(S3ContribCore, self).setUp()
+
+ self.load_backends()
+
+ self.ec2_api = ec2.Manager()
+ self.controller = s3.S3Controller()
+
+ def test_good_signature(self):
+ creds_ref = {'secret':
+ 'b121dd41cdcc42fe9f70e572e84295aa'}
+ credentials = {'token':
+ 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
+ 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
+ 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
+ 'vbV9zMy50eHQ=',
+ 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='}
+
+ self.assertIsNone(self.controller.check_signature(creds_ref,
+ credentials))
+
+ def test_bad_signature(self):
+ creds_ref = {'secret':
+ 'b121dd41cdcc42fe9f70e572e84295aa'}
+ credentials = {'token':
+ 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
+ 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
+ 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
+ 'vbV9zMy50eHQ=',
+ 'signature': uuid.uuid4().hex}
+
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, credentials)
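The fixture values above follow the conventional AWS S3 (v2) request-signing scheme: the 'token' is the base64-encoded string-to-sign and the signature is base64(HMAC-SHA1(secret, string_to_sign)). A standalone sketch of reproducing the accepted signature, assuming that scheme (keystone's actual check lives in s3.S3Controller.check_signature):

    import base64
    import hashlib
    import hmac

    secret = 'b121dd41cdcc42fe9f70e572e84295aa'
    token = ('UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
             'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
             'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
             'vbV9zMy50eHQ=')
    string_to_sign = base64.urlsafe_b64decode(token)
    signature = base64.b64encode(
        hmac.new(secret, string_to_sign, hashlib.sha1).digest())
    # If the scheme assumption holds, this equals the value accepted by
    # test_good_signature above: 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='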
diff --git a/keystone/tests/test_contrib_stats_core.py b/keystone/tests/test_contrib_stats_core.py
new file mode 100644
index 00000000..567c485e
--- /dev/null
+++ b/keystone/tests/test_contrib_stats_core.py
@@ -0,0 +1,45 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib import stats
+
+from keystone import config
+from keystone.tests import core as test
+
+
+CONF = config.CONF
+
+
+class StatsContribCore(test.TestCase):
+ def setUp(self):
+ super(StatsContribCore, self).setUp()
+ self.stats_middleware = stats.StatsMiddleware(None)
+
+ def test_admin_request(self):
+ host_admin = "127.0.0.1:%s" % CONF.admin_port
+ self.assertEqual("admin",
+ self.stats_middleware._resolve_api(host_admin))
+
+ def test_public_request(self):
+ host_public = "127.0.0.1:%s" % CONF.public_port
+ self.assertEqual("public",
+ self.stats_middleware._resolve_api(host_public))
+
+ def test_other_request(self):
+ host_public = "127.0.0.1:%s" % CONF.public_port
+ host_other = host_public + "1"
+ self.assertEqual(host_other,
+ self.stats_middleware._resolve_api(host_other))
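An illustrative reimplementation of the host-to-API mapping these assertions describe (not keystone's actual _resolve_api; the 35357/5000 defaults are keystone's conventional ports and are assumptions here):

    def resolve_api(host, admin_port=35357, public_port=5000):
        # map a 'host:port' string to the API name the stats middleware tracks
        port = host.rsplit(':', 1)[-1]
        if port == str(admin_port):
            return 'admin'
        if port == str(public_port):
            return 'public'
        return host  # unknown endpoints are tracked under their own key

    assert resolve_api('127.0.0.1:35357') == 'admin'
    assert resolve_api('127.0.0.1:5000') == 'public'
    assert resolve_api('127.0.0.1:50001') == '127.0.0.1:50001'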
diff --git a/keystone/tests/test_drivers.py b/keystone/tests/test_drivers.py
new file mode 100644
index 00000000..c83c1a89
--- /dev/null
+++ b/keystone/tests/test_drivers.py
@@ -0,0 +1,57 @@
+import inspect
+import unittest2 as unittest
+
+from keystone import assignment
+from keystone import catalog
+from keystone import exception
+from keystone import identity
+from keystone import policy
+from keystone import token
+
+
+class TestDrivers(unittest.TestCase):
+ """Asserts that drivers are written as expected.
+
+ Public methods on drivers should raise keystone.exception.NotImplemented,
+ which renders to the API as an HTTP 501 Not Implemented.
+
+ """
+
+ def assertMethodNotImplemented(self, f):
+ """Asserts that a given method raises 501 Not Implemented.
+
+ Provides each argument with a value of None, ignoring optional
+ arguments.
+ """
+ args = inspect.getargspec(f).args
+ args.remove('self')
+ kwargs = dict(zip(args, [None] * len(args)))
+ with self.assertRaises(exception.NotImplemented):
+ f(**kwargs)
+
+ def assertInterfaceNotImplemented(self, interface):
+ """Public methods on an interface class should not be implemented."""
+ for name in dir(interface):
+ method = getattr(interface, name)
+ if name[0] != '_' and callable(method):
+ self.assertMethodNotImplemented(method)
+
+ def test_assignment_driver_unimplemented(self):
+ interface = assignment.Driver()
+ self.assertInterfaceNotImplemented(interface)
+
+ def test_catalog_driver_unimplemented(self):
+ interface = catalog.Driver()
+ self.assertInterfaceNotImplemented(interface)
+
+ def test_identity_driver_unimplemented(self):
+ interface = identity.Driver()
+ self.assertInterfaceNotImplemented(interface)
+
+ def test_policy_driver_unimplemented(self):
+ interface = policy.Driver()
+ self.assertInterfaceNotImplemented(interface)
+
+ def test_token_driver_unimplemented(self):
+ interface = token.Driver()
+ self.assertInterfaceNotImplemented(interface)
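A small sketch of the driver convention this test enforces, using an invented ExampleDriver (not a real keystone class): base interface methods raise exception.NotImplemented, which the API layer renders as HTTP 501, and concrete backends override them.

    from keystone import exception

    class ExampleDriver(object):
        """Invented interface; the real ones are assignment.Driver, etc."""
        def get_thing(self, thing_id):
            raise exception.NotImplemented()

    class SqlExampleDriver(ExampleDriver):
        def get_thing(self, thing_id):
            return {'id': thing_id}  # concrete backend supplies behaviour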
diff --git a/keystone/tests/test_exception.py b/keystone/tests/test_exception.py
new file mode 100644
index 00000000..9658ed19
--- /dev/null
+++ b/keystone/tests/test_exception.py
@@ -0,0 +1,163 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.tests import core as test
+
+from keystone.common import wsgi
+from keystone import config
+from keystone import exception
+from keystone.openstack.common import jsonutils
+
+
+CONF = config.CONF
+
+
+class ExceptionTestCase(test.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def assertValidJsonRendering(self, e):
+ resp = wsgi.render_exception(e)
+ self.assertEqual(resp.status_int, e.code)
+ self.assertEqual(resp.status, '%s %s' % (e.code, e.title))
+
+ j = jsonutils.loads(resp.body)
+ self.assertIsNotNone(j.get('error'))
+ self.assertIsNotNone(j['error'].get('code'))
+ self.assertIsNotNone(j['error'].get('title'))
+ self.assertIsNotNone(j['error'].get('message'))
+ self.assertNotIn('\n', j['error']['message'])
+ self.assertNotIn('  ', j['error']['message'])  # no double spaces
+ self.assertTrue(type(j['error']['code']) is int)
+
+ def test_all_json_renderings(self):
+ """Everything callable in the exception module should be renderable.
+
+ ... except for the base error class (exception.Error), which is not
+ user-facing.
+
+ This test provides a custom message to bypass docstring parsing, which
+ should be tested separately.
+
+ """
+ for cls in [x for x in exception.__dict__.values() if callable(x)]:
+ if (isinstance(cls, type) and issubclass(cls, exception.Error)
+ and cls is not exception.Error):
+ self.assertValidJsonRendering(cls(message='Overridden.'))
+
+ def test_validation_error(self):
+ target = uuid.uuid4().hex
+ attribute = uuid.uuid4().hex
+ e = exception.ValidationError(target=target, attribute=attribute)
+ self.assertValidJsonRendering(e)
+ self.assertIn(target, unicode(e))
+ self.assertIn(attribute, unicode(e))
+
+ def test_not_found(self):
+ target = uuid.uuid4().hex
+ e = exception.NotFound(target=target)
+ self.assertValidJsonRendering(e)
+ self.assertIn(target, unicode(e))
+
+ def test_403_title(self):
+ e = exception.Forbidden()
+ resp = wsgi.render_exception(e)
+ j = jsonutils.loads(resp.body)
+ self.assertEqual('Forbidden', e.title)
+ self.assertEqual('Forbidden', j['error'].get('title'))
+
+ def test_unicode_message(self):
+ message = u'Comment \xe7a va'
+ e = exception.Error(message)
+
+ try:
+ self.assertEqual(unicode(e), message)
+ except UnicodeEncodeError:
+ self.fail("unicode error message not supported")
+
+
+class SecurityErrorTestCase(ExceptionTestCase):
+ """Tests whether security-related info is exposed to the API user."""
+ def test_unauthorized_exposure(self):
+ self.opt(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Unauthorized(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, unicode(e))
+
+ def test_unauthorized_exposure_in_debug(self):
+ self.opt(debug=True)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Unauthorized(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, unicode(e))
+
+ def test_forbidden_exposure(self):
+ self.opt(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, unicode(e))
+
+ def test_forbidden_exposure_in_debug(self):
+ self.opt(debug=True)
+
+ risky_info = uuid.uuid4().hex
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, unicode(e))
+
+ def test_forbidden_action_exposure(self):
+ self.opt(debug=False)
+
+ risky_info = uuid.uuid4().hex
+ action = uuid.uuid4().hex
+ e = exception.ForbiddenAction(message=risky_info, action=action)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, unicode(e))
+ self.assertIn(action, unicode(e))
+
+ e = exception.ForbiddenAction(action=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, unicode(e))
+
+ def test_forbidden_action_exposure_in_debug(self):
+ self.opt(debug=True)
+
+ risky_info = uuid.uuid4().hex
+
+ e = exception.ForbiddenAction(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, unicode(e))
+
+ e = exception.ForbiddenAction(action=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertIn(risky_info, unicode(e))
+
+ def test_unicode_argument_message(self):
+ self.opt(debug=False)
+
+ risky_info = u'\u7ee7\u7eed\u884c\u7f29\u8fdb\u6216'
+ e = exception.Forbidden(message=risky_info)
+ self.assertValidJsonRendering(e)
+ self.assertNotIn(risky_info, unicode(e))
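The assertions above all revolve around one rendering path; a condensed sketch of it follows (the 400 code for ValidationError is keystone's convention and is assumed here):

    from keystone.common import wsgi
    from keystone import exception
    from keystone.openstack.common import jsonutils

    e = exception.ValidationError(target='user', attribute='name')
    resp = wsgi.render_exception(e)
    body = jsonutils.loads(resp.body)
    # body['error'] carries 'code', 'title' and 'message';
    # resp.status_int matches e.code (400 for a validation error).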
diff --git a/keystone/tests/test_import_legacy.py b/keystone/tests/test_import_legacy.py
new file mode 100644
index 00000000..b3b83c0f
--- /dev/null
+++ b/keystone/tests/test_import_legacy.py
@@ -0,0 +1,120 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+try:
+ import sqlite3 as dbapi
+except ImportError:
+ from pysqlite2 import dbapi2 as dbapi
+
+from keystone.tests import core as test
+
+from keystone.catalog.backends import templated as catalog_templated
+from keystone.common.sql import legacy
+from keystone import config
+from keystone import identity
+from keystone.identity.backends import sql as identity_sql
+
+
+CONF = config.CONF
+
+
+class ImportLegacy(test.TestCase):
+ def setUp(self):
+ super(ImportLegacy, self).setUp()
+ self.config([test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf'),
+ test.testsdir('backend_sql_disk.conf')])
+ test.setup_test_database()
+ self.identity_man = identity.Manager()
+ self.identity_api = identity_sql.Identity()
+
+ def tearDown(self):
+ test.teardown_test_database()
+ super(ImportLegacy, self).tearDown()
+
+ def setup_old_database(self, sql_dump):
+ sql_path = test.testsdir(sql_dump)
+ db_path = test.tmpdir('%s.db' % sql_dump)
+ try:
+ os.unlink(db_path)
+ except OSError:
+ pass
+ script_str = open(sql_path).read().strip()
+ conn = dbapi.connect(db_path)
+ conn.executescript(script_str)
+ conn.commit()
+ return db_path
+
+ def test_import_d5(self):
+ db_path = self.setup_old_database('legacy_d5.sqlite')
+ migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
+ migration.migrate_all()
+
+ admin_id = '1'
+ user_ref = self.identity_api.get_user(admin_id)
+ self.assertEquals(user_ref['name'], 'admin')
+ self.assertEquals(user_ref['enabled'], True)
+
+ # check password hashing
+ user_ref = self.identity_man.authenticate(
+ user_id=admin_id, password='secrete')
+
+ # check catalog
+ self._check_catalog(migration)
+
+ def test_import_diablo(self):
+ db_path = self.setup_old_database('legacy_diablo.sqlite')
+ migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
+ migration.migrate_all()
+
+ admin_id = '1'
+ user_ref = self.identity_api.get_user(admin_id)
+ self.assertEquals(user_ref['name'], 'admin')
+ self.assertEquals(user_ref['enabled'], True)
+
+ # check password hashing
+ user_ref = self.identity_man.authenticate(
+ user_id=admin_id, password='secrete')
+
+ # check catalog
+ self._check_catalog(migration)
+
+ def test_import_essex(self):
+ db_path = self.setup_old_database('legacy_essex.sqlite')
+ migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
+ migration.migrate_all()
+
+ admin_id = 'c93b19ea3fa94484824213db8ac0afce'
+ user_ref = self.identity_api.get_user(admin_id)
+ self.assertEquals(user_ref['name'], 'admin')
+ self.assertEquals(user_ref['enabled'], True)
+
+ # check password hashing
+ user_ref = self.identity_man.authenticate(
+ user_id=admin_id, password='secrete')
+
+ # check catalog
+ self._check_catalog(migration)
+
+ def _check_catalog(self, migration):
+ catalog_lines = migration.dump_catalog()
+ catalog = catalog_templated.parse_templates(catalog_lines)
+ self.assert_('RegionOne' in catalog)
+ self.assert_('compute' in catalog['RegionOne'])
+ self.assert_('adminURL' in catalog['RegionOne']['compute'])
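setup_old_database() above amounts to "replay a SQL dump into a throwaway SQLite file"; a standalone version of that step, with illustrative paths:

    import sqlite3

    def load_dump(sql_path, db_path):
        with open(sql_path) as f:
            script = f.read().strip()
        conn = sqlite3.connect(db_path)
        conn.executescript(script)  # replay the legacy dump
        conn.commit()
        conn.close()
        return db_path

    # e.g. load_dump('keystone/tests/legacy_d5.sqlite', '/tmp/legacy_d5.db'),
    # then LegacyMigration('sqlite:////tmp/legacy_d5.db').migrate_all()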
diff --git a/keystone/tests/test_injection.py b/keystone/tests/test_injection.py
new file mode 100644
index 00000000..36cd0126
--- /dev/null
+++ b/keystone/tests/test_injection.py
@@ -0,0 +1,211 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import unittest2 as unittest
+import uuid
+
+from keystone.common import dependency
+
+
+class TestDependencyInjection(unittest.TestCase):
+ def tearDown(self):
+ dependency.reset()
+ super(TestDependencyInjection, self).tearDown()
+
+ def test_dependency_injection(self):
+ class Interface(object):
+ def do_work(self):
+ assert False
+
+ @dependency.provider('first_api')
+ class FirstImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.provider('second_api')
+ class SecondImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.requires('first_api', 'second_api')
+ class Consumer(object):
+ def do_work_with_dependencies(self):
+ assert self.first_api.do_work()
+ assert self.second_api.do_work()
+
+ # initialize dependency providers
+ first_api = FirstImplementation()
+ second_api = SecondImplementation()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Consumer()
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.first_api, first_api)
+ self.assertIs(consumer.second_api, second_api)
+ self.assertIsInstance(consumer.first_api, Interface)
+ self.assertIsInstance(consumer.second_api, Interface)
+ consumer.do_work_with_dependencies()
+
+ def test_dependency_provider_configuration(self):
+ @dependency.provider('api')
+ class Configurable(object):
+ def __init__(self, value=None):
+ self.value = value
+
+ def get_value(self):
+ return self.value
+
+ @dependency.requires('api')
+ class Consumer(object):
+ def get_value(self):
+ return self.api.get_value()
+
+ # initialize dependency providers
+ api = Configurable(value=True)
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Consumer()
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.api, api)
+ self.assertIsInstance(consumer.api, Configurable)
+ self.assertTrue(consumer.get_value())
+
+ def test_dependency_consumer_configuration(self):
+ @dependency.provider('api')
+ class Provider(object):
+ def get_value(self):
+ return True
+
+ @dependency.requires('api')
+ class Configurable(object):
+ def __init__(self, value=None):
+ self.value = value
+
+ def get_value(self):
+ if self.value:
+ return self.api.get_value()
+
+ # initialize dependency providers
+ api = Provider()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = Configurable(value=True)
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.api, api)
+ self.assertIsInstance(consumer.api, Provider)
+ self.assertTrue(consumer.get_value())
+
+ def test_inherited_dependency(self):
+ class Interface(object):
+ def do_work(self):
+ assert False
+
+ @dependency.provider('first_api')
+ class FirstImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.provider('second_api')
+ class SecondImplementation(Interface):
+ def do_work(self):
+ return True
+
+ @dependency.requires('first_api')
+ class ParentConsumer(object):
+ def do_work_with_dependencies(self):
+ assert self.first_api.do_work()
+
+ @dependency.requires('second_api')
+ class ChildConsumer(ParentConsumer):
+ def do_work_with_dependencies(self):
+ assert self.second_api.do_work()
+ super(ChildConsumer, self).do_work_with_dependencies()
+
+ # initialize dependency providers
+ first_api = FirstImplementation()
+ second_api = SecondImplementation()
+
+ # ... sometime later, initialize a dependency consumer
+ consumer = ChildConsumer()
+
+ # dependencies should be naturally inherited
+ self.assertEqual(
+ ParentConsumer._dependencies,
+ set(['first_api']))
+ self.assertEqual(
+ ChildConsumer._dependencies,
+ set(['first_api', 'second_api']))
+ self.assertEqual(
+ consumer._dependencies,
+ set(['first_api', 'second_api']))
+
+ # the expected dependencies should be available to the consumer
+ self.assertIs(consumer.first_api, first_api)
+ self.assertIs(consumer.second_api, second_api)
+ self.assertIsInstance(consumer.first_api, Interface)
+ self.assertIsInstance(consumer.second_api, Interface)
+ consumer.do_work_with_dependencies()
+
+ def test_unresolvable_dependency(self):
+ @dependency.requires(uuid.uuid4().hex)
+ class Consumer(object):
+ pass
+
+ with self.assertRaises(dependency.UnresolvableDependencyException):
+ Consumer()
+ dependency.resolve_future_dependencies()
+
+ def test_circular_dependency(self):
+ p1_name = uuid.uuid4().hex
+ p2_name = uuid.uuid4().hex
+
+ @dependency.provider(p1_name)
+ @dependency.requires(p2_name)
+ class P1(object):
+ pass
+
+ @dependency.provider(p2_name)
+ @dependency.requires(p1_name)
+ class P2(object):
+ pass
+
+ p1 = P1()
+ p2 = P2()
+
+ dependency.resolve_future_dependencies()
+
+ self.assertIs(getattr(p1, p2_name), p2)
+ self.assertIs(getattr(p2, p1_name), p1)
+
+ def test_reset(self):
+ # Can reset the registry of providers.
+
+ p_id = uuid.uuid4().hex
+
+ @dependency.provider(p_id)
+ class P(object):
+ pass
+
+ p_inst = P()
+
+ self.assertIs(dependency.REGISTRY[p_id], p_inst)
+
+ dependency.reset()
+
+ self.assertFalse(dependency.REGISTRY)
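Condensed usage of the dependency helpers these tests exercise (the 'clock_api' name is invented purely for illustration):

    from keystone.common import dependency

    @dependency.provider('clock_api')
    class Clock(object):
        def now(self):
            return 'noon'

    @dependency.requires('clock_api')
    class Scheduler(object):
        def next_run(self):
            return self.clock_api.now()

    clock = Clock()          # registers itself as the 'clock_api' provider
    scheduler = Scheduler()  # gets the provider injected as self.clock_api
    assert scheduler.next_run() == 'noon'
    dependency.reset()       # clear the registry, as tearDown() does above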
diff --git a/keystone/tests/test_ipv6.py b/keystone/tests/test_ipv6.py
new file mode 100644
index 00000000..fa64bc43
--- /dev/null
+++ b/keystone/tests/test_ipv6.py
@@ -0,0 +1,51 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from keystone.tests import core as test
+
+from keystone.common import environment
+from keystone import config
+
+
+CONF = config.CONF
+
+
+class IPv6TestCase(test.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.skip_if_no_ipv6()
+
+ def setUp(self):
+ super(IPv6TestCase, self).setUp()
+ self.load_backends()
+
+ def test_ipv6_ok(self):
+ """Make sure both public and admin API work with ipv6."""
+ self.public_server = self.serveapp('keystone', name='main',
+ host="::1", port=0)
+ self.admin_server = self.serveapp('keystone', name='admin',
+ host="::1", port=0)
+ # Verify Admin
+ conn = environment.httplib.HTTPConnection('::1', CONF.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+ # Verify Public
+ conn = environment.httplib.HTTPConnection('::1', CONF.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
diff --git a/keystone/tests/test_keystoneclient.py b/keystone/tests/test_keystoneclient.py
new file mode 100644
index 00000000..7e59885d
--- /dev/null
+++ b/keystone/tests/test_keystoneclient.py
@@ -0,0 +1,1174 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+import webob
+
+from keystone import config
+from keystone.openstack.common import jsonutils
+from keystone.openstack.common import timeutils
+from keystone.tests import core as test
+
+import default_fixtures
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+OPENSTACK_REPO = 'https://review.openstack.org/p/openstack'
+KEYSTONECLIENT_REPO = '%s/python-keystoneclient.git' % OPENSTACK_REPO
+
+
+class CompatTestCase(test.TestCase):
+ def setUp(self):
+ super(CompatTestCase, self).setUp()
+
+ # The backends should be loaded and initialized before the servers are
+ # started because the servers use the backends.
+
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+
+        # TODO(termie): add an admin user to the fixtures and use that user;
+        # override the fixtures for now
+ self.metadata_foobar = self.identity_api.add_role_to_user_and_project(
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ self.public_server = self.serveapp('keystone', name='main')
+ self.admin_server = self.serveapp('keystone', name='admin')
+
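+        # checkout_vendor fetches the requested python-keystoneclient revision
+        # into a local vendor directory; clearing any already-imported
+        # 'keystoneclient' modules ensures the checked-out copy is the one the
+        # tests below import.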
+ revdir = test.checkout_vendor(*self.get_checkout())
+ self.add_path(revdir)
+ self.clear_module('keystoneclient')
+
+ def tearDown(self):
+ self.public_server.kill()
+ self.admin_server.kill()
+ self.public_server = None
+ self.admin_server = None
+ super(CompatTestCase, self).tearDown()
+
+ def _public_url(self):
+ public_port = self.public_server.socket_info['socket'][1]
+ return "http://localhost:%s/v2.0" % public_port
+
+ def _admin_url(self):
+ admin_port = self.admin_server.socket_info['socket'][1]
+ return "http://localhost:%s/v2.0" % admin_port
+
+ def _client(self, admin=False, **kwargs):
+ from keystoneclient.v2_0 import client as ks_client
+
+ url = self._admin_url() if admin else self._public_url()
+ kc = ks_client.Client(endpoint=url,
+ auth_url=self._public_url(),
+ **kwargs)
+ kc.authenticate()
+ # have to manually overwrite the management url after authentication
+ kc.management_url = url
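+        # (the service catalog advertises the endpoints from the config file,
+        # not the ephemeral port this test server is actually listening on)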
+ return kc
+
+ def get_client(self, user_ref=None, tenant_ref=None, admin=False):
+ if user_ref is None:
+ user_ref = self.user_foo
+ if tenant_ref is None:
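+            # default to the first tenant listed for this user in the fixtures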
+ for user in default_fixtures.USERS:
+ if user['id'] == user_ref['id']:
+ tenant_id = user['tenants'][0]
+ else:
+ tenant_id = tenant_ref['id']
+
+ return self._client(username=user_ref['name'],
+ password=user_ref['password'],
+ tenant_id=tenant_id,
+ admin=admin)
+
+
+class KeystoneClientTests(object):
+ """Tests for all versions of keystoneclient."""
+
+ def test_authenticate_tenant_name_and_tenants(self):
+ client = self.get_client()
+ tenants = client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ def test_authenticate_tenant_id_and_tenants(self):
+ client = self._client(username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id='bar')
+ tenants = client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ def test_authenticate_invalid_tenant_id(self):
+ from keystoneclient import exceptions as client_exceptions
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_id='baz')
+
+ def test_authenticate_token_no_tenant(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token)
+ tenants = token_client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ def test_authenticate_token_tenant_id(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token, tenant_id='bar')
+ tenants = token_client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ def test_authenticate_token_invalid_tenant_id(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ token = client.auth_token
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client, token=token,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_authenticate_token_invalid_tenant_name(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ token = client.auth_token
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client, token=token,
+ tenant_name=uuid.uuid4().hex)
+
+ def test_authenticate_token_tenant_name(self):
+ client = self.get_client()
+ token = client.auth_token
+ token_client = self._client(token=token, tenant_name='BAR')
+ tenants = token_client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ def test_authenticate_and_delete_token(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client(admin=True)
+ token = client.auth_token
+ token_client = self._client(token=token)
+ tenants = token_client.tenants.list()
+ self.assertEquals(tenants[0].id, self.tenant_bar['id'])
+
+ client.tokens.delete(token_client.auth_token)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ token_client.tenants.list)
+
+ def test_authenticate_no_password(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ user_ref = self.user_foo.copy()
+ user_ref['password'] = None
+ self.assertRaises(client_exceptions.AuthorizationFailure,
+ self.get_client,
+ user_ref)
+
+ def test_authenticate_no_username(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ user_ref = self.user_foo.copy()
+ user_ref['name'] = None
+ self.assertRaises(client_exceptions.AuthorizationFailure,
+ self.get_client,
+ user_ref)
+
+ def test_authenticate_disabled_tenant(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ admin_client = self.get_client(admin=True)
+
+ tenant = {
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': False,
+ }
+ tenant_ref = admin_client.tenants.create(
+ tenant_name=tenant['name'],
+ description=tenant['description'],
+ enabled=tenant['enabled'])
+ tenant['id'] = tenant_ref.id
+
+ user = {
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'email': uuid.uuid4().hex,
+ 'tenant_id': tenant['id'],
+ }
+ user_ref = admin_client.users.create(
+ name=user['name'],
+ password=user['password'],
+ email=user['email'],
+ tenant_id=user['tenant_id'])
+ user['id'] = user_ref.id
+
+ # password authentication
+ self.assertRaises(
+ client_exceptions.Unauthorized,
+ self._client,
+ username=user['name'],
+ password=user['password'],
+ tenant_id=tenant['id'])
+
+ # token authentication
+ client = self._client(
+ username=user['name'],
+ password=user['password'])
+ self.assertRaises(
+ client_exceptions.Unauthorized,
+ self._client,
+ token=client.auth_token,
+ tenant_id=tenant['id'])
+
+    # FIXME(ja): this test should require the "keystone:admin" role
+ # (probably the role set via --keystone_admin_role flag)
+ # FIXME(ja): add a test that admin endpoint is only sent to admin user
+ # FIXME(ja): add a test that admin endpoint returns unauthorized if not
+ # admin
+ def test_tenant_create_update_and_delete(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ tenant_name = 'original_tenant'
+ tenant_description = 'My original tenant!'
+ tenant_enabled = True
+ client = self.get_client(admin=True)
+
+ # create, get, and list a tenant
+ tenant = client.tenants.create(tenant_name=tenant_name,
+ description=tenant_description,
+ enabled=tenant_enabled)
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ tenant = client.tenants.get(tenant_id=tenant.id)
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ # update, get, and list a tenant
+ tenant_name = 'updated_tenant'
+ tenant_description = 'Updated tenant!'
+ tenant_enabled = False
+ tenant = client.tenants.update(tenant_id=tenant.id,
+ tenant_name=tenant_name,
+ enabled=tenant_enabled,
+ description=tenant_description)
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ tenant = client.tenants.get(tenant_id=tenant.id)
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
+ self.assertEquals(tenant.name, tenant_name)
+ self.assertEquals(tenant.description, tenant_description)
+ self.assertEquals(tenant.enabled, tenant_enabled)
+
+ # delete, get, and list a tenant
+ client.tenants.delete(tenant=tenant.id)
+ self.assertRaises(client_exceptions.NotFound, client.tenants.get,
+ tenant.id)
+ self.assertFalse([t for t in client.tenants.list()
+ if t.id == tenant.id])
+
+ def test_tenant_create_no_name(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.create,
+ tenant_name="")
+
+ def test_tenant_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.delete,
+ tenant=uuid.uuid4().hex)
+
+ def test_tenant_get_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.get,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_tenant_update_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.tenants.update,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_tenant_list(self):
+ client = self.get_client()
+ tenants = client.tenants.list()
+ self.assertEquals(len(tenants), 1)
+
+ # Admin endpoint should return *all* tenants
+ client = self.get_client(admin=True)
+ tenants = client.tenants.list()
+ self.assertEquals(len(tenants), len(default_fixtures.TENANTS))
+
+ def test_invalid_password(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ good_client = self._client(username=self.user_foo['name'],
+ password=self.user_foo['password'])
+ good_client.tenants.list()
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=self.user_foo['name'],
+ password=uuid.uuid4().hex)
+
+ def test_invalid_user_and_password(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+
+ def test_change_password_invalidates_token(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client(admin=True)
+
+ username = uuid.uuid4().hex
+ passwd = uuid.uuid4().hex
+ user = client.users.create(name=username, password=passwd,
+ email=uuid.uuid4().hex)
+
+ token_id = client.tokens.authenticate(username=username,
+ password=passwd).id
+
+ # authenticate with a token should work before a password change
+ client.tokens.authenticate(token=token_id)
+
+ client.users.update_password(user=user.id, password=uuid.uuid4().hex)
+
+ # authenticate with a token should not work after a password change
+ self.assertRaises(client_exceptions.Unauthorized,
+ client.tokens.authenticate,
+ token=token_id)
+
+ def test_disable_user_invalidates_token(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ admin_client = self.get_client(admin=True)
+ foo_client = self.get_client(self.user_foo)
+
+ admin_client.users.update_enabled(user=self.user_foo['id'],
+ enabled=False)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ foo_client.tokens.authenticate,
+ token=foo_client.auth_token)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client,
+ self.user_foo)
+
+ def test_delete_user_invalidates_token(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ admin_client = self.get_client(admin=True)
+ client = self.get_client(admin=False)
+
+ username = uuid.uuid4().hex
+ password = uuid.uuid4().hex
+ user_id = admin_client.users.create(
+ name=username, password=password, email=uuid.uuid4().hex).id
+
+ token_id = client.tokens.authenticate(
+ username=username, password=password).id
+
+ # token should be usable before the user is deleted
+ client.tokens.authenticate(token=token_id)
+
+ admin_client.users.delete(user=user_id)
+
+ # authenticate with a token should not work after the user is deleted
+ self.assertRaises(client_exceptions.Unauthorized,
+ client.tokens.authenticate,
+ token=token_id)
+
+ def test_token_expiry_maintained(self):
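+        # Freeze (and then manually advance) time so the expiry comparison
+        # below is deterministic.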
+ timeutils.set_time_override()
+ foo_client = self.get_client(self.user_foo)
+
+ orig_token = foo_client.service_catalog.catalog['token']
+ timeutils.advance_time_seconds(1)
+ reauthenticated_token = foo_client.tokens.authenticate(
+ token=foo_client.auth_token)
+
+ self.assertCloseEnoughForGovernmentWork(
+ timeutils.parse_isotime(orig_token['expires']),
+ timeutils.parse_isotime(reauthenticated_token.expires))
+
+ def test_user_create_update_delete(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ test_username = 'new_user'
+ client = self.get_client(admin=True)
+ user = client.users.create(name=test_username,
+ password='password',
+ email='user1@test.com')
+ self.assertEquals(user.name, test_username)
+
+ user = client.users.get(user=user.id)
+ self.assertEquals(user.name, test_username)
+
+ user = client.users.update(user=user,
+ name=test_username,
+ email='user2@test.com')
+ self.assertEquals(user.email, 'user2@test.com')
+
+ # NOTE(termie): update_enabled doesn't return anything, probably a bug
+ client.users.update_enabled(user=user, enabled=False)
+ user = client.users.get(user.id)
+ self.assertFalse(user.enabled)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=test_username,
+ password='password')
+ client.users.update_enabled(user, True)
+
+ user = client.users.update_password(user=user, password='password2')
+
+ self._client(username=test_username,
+ password='password2')
+
+ user = client.users.update_tenant(user=user, tenant='bar')
+ # TODO(ja): once keystonelight supports default tenant
+ # when you login without specifying tenant, the
+ # token should be scoped to tenant 'bar'
+
+ client.users.delete(user.id)
+ self.assertRaises(client_exceptions.NotFound, client.users.get,
+ user.id)
+
+ # Test creating a user with a tenant (auto-add to tenant)
+ user2 = client.users.create(name=test_username,
+ password='password',
+ email='user1@test.com',
+ tenant_id='bar')
+ self.assertEquals(user2.name, test_username)
+
+ def test_update_default_tenant_to_existing_value(self):
+ client = self.get_client(admin=True)
+
+ user = client.users.create(
+ name=uuid.uuid4().hex,
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex,
+ tenant_id=self.tenant_bar['id'])
+
+ # attempting to update the tenant with the existing value should work
+ user = client.users.update_tenant(
+ user=user, tenant=self.tenant_bar['id'])
+
+ def test_user_create_no_name(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.users.create,
+ name="",
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex)
+
+ def test_user_create_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.create,
+ name=uuid.uuid4().hex,
+ password=uuid.uuid4().hex,
+ email=uuid.uuid4().hex,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_user_get_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.get,
+ user=uuid.uuid4().hex)
+
+ def test_user_list_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.list,
+ tenant_id=uuid.uuid4().hex)
+
+ def test_user_update_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.update,
+ user=uuid.uuid4().hex)
+
+ def test_user_update_tenant_404(self):
+ self.skipTest('N/A')
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.update,
+ user=self.user_foo['id'],
+ tenant_id=uuid.uuid4().hex)
+
+ def test_user_update_password_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.update_password,
+ user=uuid.uuid4().hex,
+ password=uuid.uuid4().hex)
+
+ def test_user_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.users.delete,
+ user=uuid.uuid4().hex)
+
+ def test_user_list(self):
+ client = self.get_client(admin=True)
+ users = client.users.list()
+ self.assertTrue(len(users) > 0)
+ user = users[0]
+ self.assertRaises(AttributeError, lambda: user.password)
+
+ def test_user_get(self):
+ client = self.get_client(admin=True)
+ user = client.users.get(user=self.user_foo['id'])
+ self.assertRaises(AttributeError, lambda: user.password)
+
+ def test_role_get(self):
+ client = self.get_client(admin=True)
+ role = client.roles.get(role=self.role_admin['id'])
+ self.assertEquals(role.id, self.role_admin['id'])
+
+ def test_role_crud(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ test_role = 'new_role'
+ client = self.get_client(admin=True)
+ role = client.roles.create(name=test_role)
+ self.assertEquals(role.name, test_role)
+
+ role = client.roles.get(role=role.id)
+ self.assertEquals(role.name, test_role)
+
+ client.roles.delete(role=role.id)
+
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.delete,
+ role=role.id)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.get,
+ role=role.id)
+
+ def test_role_create_no_name(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.BadRequest,
+ client.roles.create,
+ name="")
+
+ def test_role_get_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.get,
+ role=uuid.uuid4().hex)
+
+ def test_role_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.delete,
+ role=uuid.uuid4().hex)
+
+ def test_role_list_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=uuid.uuid4().hex,
+ tenant=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=self.user_foo['id'],
+ tenant=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.roles_for_user,
+ user=uuid.uuid4().hex,
+ tenant=self.tenant_bar['id'])
+
+ def test_role_list(self):
+ client = self.get_client(admin=True)
+ roles = client.roles.list()
+ # TODO(devcamcar): This assert should be more specific.
+ self.assertTrue(len(roles) > 0)
+
+ def test_ec2_credential_crud(self):
+ client = self.get_client()
+ creds = client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEquals(creds, [])
+
+ cred = client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+ creds = client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEquals(creds, [cred])
+
+ got = client.ec2.get(user_id=self.user_foo['id'], access=cred.access)
+ self.assertEquals(cred, got)
+
+ client.ec2.delete(user_id=self.user_foo['id'], access=cred.access)
+ creds = client.ec2.list(user_id=self.user_foo['id'])
+ self.assertEquals(creds, [])
+
+ def test_ec2_credentials_create_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ self.assertRaises(client_exceptions.NotFound,
+ client.ec2.create,
+ user_id=uuid.uuid4().hex,
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.ec2.create,
+ user_id=self.user_foo['id'],
+ tenant_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ self.assertRaises(client_exceptions.NotFound,
+ client.ec2.delete,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_get_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ self.assertRaises(client_exceptions.NotFound,
+ client.ec2.get,
+ user_id=uuid.uuid4().hex,
+ access=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client()
+ self.assertRaises(client_exceptions.NotFound,
+ client.ec2.list,
+ user_id=uuid.uuid4().hex)
+
+ def test_ec2_credentials_list_user_forbidden(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
+ user_id=self.user_foo['id'])
+
+ def test_ec2_credentials_get_user_forbidden(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ foo = self.get_client()
+ cred = foo.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ foo.ec2.delete(user_id=self.user_foo['id'], access=cred.access)
+
+ def test_ec2_credentials_delete_user_forbidden(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ foo = self.get_client()
+ cred = foo.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ two = self.get_client(self.user_two)
+ self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
+ user_id=self.user_foo['id'], access=cred.access)
+
+ foo.ec2.delete(user_id=self.user_foo['id'], access=cred.access)
+
+ def test_service_crud(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+
+ service_name = uuid.uuid4().hex
+ service_type = uuid.uuid4().hex
+ service_desc = uuid.uuid4().hex
+
+ # create & read
+ service = client.services.create(name=service_name,
+ service_type=service_type,
+ description=service_desc)
+ self.assertEquals(service_name, service.name)
+ self.assertEquals(service_type, service.type)
+ self.assertEquals(service_desc, service.description)
+
+ service = client.services.get(id=service.id)
+ self.assertEquals(service_name, service.name)
+ self.assertEquals(service_type, service.type)
+ self.assertEquals(service_desc, service.description)
+
+ service = [x for x in client.services.list() if x.id == service.id][0]
+ self.assertEquals(service_name, service.name)
+ self.assertEquals(service_type, service.type)
+ self.assertEquals(service_desc, service.description)
+
+ # update is not supported in API v2...
+
+ # delete & read
+ client.services.delete(id=service.id)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.get,
+ id=service.id)
+ services = [x for x in client.services.list() if x.id == service.id]
+ self.assertEquals(len(services), 0)
+
+ def test_service_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.delete,
+ id=uuid.uuid4().hex)
+
+ def test_service_get_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.services.get,
+ id=uuid.uuid4().hex)
+
+ def test_endpoint_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.delete,
+ id=uuid.uuid4().hex)
+
+ def test_admin_requires_adminness(self):
+ from keystoneclient import exceptions as client_exceptions
+ # FIXME(ja): this should be Unauthorized
+ exception = client_exceptions.ClientException
+
+ two = self.get_client(self.user_two, admin=True) # non-admin user
+
+ # USER CRUD
+ self.assertRaises(exception,
+ two.users.list)
+ self.assertRaises(exception,
+ two.users.get,
+ user=self.user_two['id'])
+ self.assertRaises(exception,
+ two.users.create,
+ name='oops',
+ password='password',
+ email='oops@test.com')
+ self.assertRaises(exception,
+ two.users.delete,
+ user=self.user_foo['id'])
+
+ # TENANT CRUD
+ self.assertRaises(exception,
+ two.tenants.list)
+ self.assertRaises(exception,
+ two.tenants.get,
+ tenant_id=self.tenant_bar['id'])
+ self.assertRaises(exception,
+ two.tenants.create,
+ tenant_name='oops',
+ description="shouldn't work!",
+ enabled=True)
+ self.assertRaises(exception,
+ two.tenants.delete,
+ tenant=self.tenant_baz['id'])
+
+ # ROLE CRUD
+ self.assertRaises(exception,
+ two.roles.get,
+ role=self.role_admin['id'])
+ self.assertRaises(exception,
+ two.roles.list)
+ self.assertRaises(exception,
+ two.roles.create,
+ name='oops')
+ self.assertRaises(exception,
+ two.roles.delete,
+ role=self.role_admin['id'])
+
+ # TODO(ja): MEMBERSHIP CRUD
+ # TODO(ja): determine what else todo
+
+
+class KcMasterTestCase(CompatTestCase, KeystoneClientTests):
+ def get_checkout(self):
+ return KEYSTONECLIENT_REPO, 'master'
+
+ def test_ec2_auth(self):
+ client = self.get_client()
+ cred = client.ec2.create(user_id=self.user_foo['id'],
+ tenant_id=self.tenant_bar['id'])
+
+ from keystoneclient.contrib.ec2 import utils as ec2_utils
+ signer = ec2_utils.Ec2Signer(cred.secret)
+ credentials = {'params': {'SignatureVersion': '2'},
+ 'access': cred.access,
+ 'verb': 'GET',
+ 'host': 'localhost',
+ 'path': '/thisisgoingtowork'}
+ signature = signer.generate(credentials)
+ credentials['signature'] = signature
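+        # Keystone looks up the credential by access key, recomputes the
+        # signature with the stored secret, and issues a normal token when
+        # the signatures match.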
+ url = '%s/ec2tokens' % (client.auth_url)
+ (resp, token) = client.request(url=url,
+ method='POST',
+ body={'credentials': credentials})
+ # make sure we have a v2 token
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn('access', token)
+
+ def test_tenant_add_and_remove_user(self):
+ client = self.get_client(admin=True)
+ client.roles.add_user_role(tenant=self.tenant_bar['id'],
+ user=self.user_two['id'],
+ role=self.role_other['id'])
+ user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
+        self.assertIn(self.user_two['id'], [x.id for x in user_refs])
+ client.roles.remove_user_role(tenant=self.tenant_bar['id'],
+ user=self.user_two['id'],
+ role=self.role_other['id'])
+ roles = client.roles.roles_for_user(user=self.user_foo['id'],
+ tenant=self.tenant_bar['id'])
+ self.assertNotIn(self.role_other['id'], roles)
+ user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], [x.id for x in user_refs])
+
+ def test_user_role_add_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.add_user_role,
+ tenant=uuid.uuid4().hex,
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.add_user_role,
+ tenant=self.tenant_baz['id'],
+ user=uuid.uuid4().hex,
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.add_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=uuid.uuid4().hex)
+
+ def test_user_role_remove_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=uuid.uuid4().hex,
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=uuid.uuid4().hex,
+ role=self.role_member['id'])
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=uuid.uuid4().hex)
+ self.assertRaises(client_exceptions.NotFound,
+ client.roles.remove_user_role,
+ tenant=self.tenant_baz['id'],
+ user=self.user_foo['id'],
+ role=self.role_member['id'])
+
+ def test_tenant_list_marker(self):
+ client = self.get_client()
+
+ # Add two arbitrary tenants to user for testing purposes
+ for i in range(2):
+ tenant_id = uuid.uuid4().hex
+ tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project(tenant_id, tenant)
+ self.identity_api.add_user_to_project(tenant_id,
+ self.user_foo['id'])
+
+ tenants = client.tenants.list()
+ self.assertEqual(len(tenants), 3)
+
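+        # Paging with marker should return only the tenants after the given id.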
+ tenants_marker = client.tenants.list(marker=tenants[0].id)
+ self.assertEqual(len(tenants_marker), 2)
+ self.assertEqual(tenants[1].name, tenants_marker[0].name)
+ self.assertEqual(tenants[2].name, tenants_marker[1].name)
+
+ def test_tenant_list_marker_not_found(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client()
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, marker=uuid.uuid4().hex)
+
+ def test_tenant_list_limit(self):
+ client = self.get_client()
+
+ # Add two arbitrary tenants to user for testing purposes
+ for i in range(2):
+ tenant_id = uuid.uuid4().hex
+ tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
+ 'domain_id': DEFAULT_DOMAIN_ID}
+ self.identity_api.create_project(tenant_id, tenant)
+ self.identity_api.add_user_to_project(tenant_id,
+ self.user_foo['id'])
+
+ tenants = client.tenants.list()
+ self.assertEqual(len(tenants), 3)
+
+ tenants_limited = client.tenants.list(limit=2)
+ self.assertEqual(len(tenants_limited), 2)
+ self.assertEqual(tenants[0].name, tenants_limited[0].name)
+ self.assertEqual(tenants[1].name, tenants_limited[1].name)
+
+ def test_tenant_list_limit_bad_value(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client()
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, limit='a')
+ self.assertRaises(client_exceptions.BadRequest,
+ client.tenants.list, limit=-1)
+
+ def test_roles_get_by_user(self):
+ client = self.get_client(admin=True)
+ roles = client.roles.roles_for_user(user=self.user_foo['id'],
+ tenant=self.tenant_bar['id'])
+ self.assertTrue(len(roles) > 0)
+
+ def test_user_can_update_passwd(self):
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
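+        # Until then the public WSGI app is called directly; FakeResponse just
+        # records the status and headers passed to start_response.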
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+ self.public_server.application(req.environ,
+ responseobject.start_fake_response)
+
+ self.user_two['password'] = new_password
+ self.get_client(self.user_two)
+
+ def test_user_cannot_update_other_users_passwd(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_foo['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+ self.public_server.application(req.environ,
+ responseobject.start_fake_response)
+ self.assertEquals(403, responseobject.response_status)
+
+ self.user_two['password'] = new_password
+ self.assertRaises(client_exceptions.Unauthorized,
+ self.get_client, self.user_two)
+
+ def test_tokens_after_user_update_passwd(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client(self.user_two)
+
+ token_id = client.auth_token
+ new_password = uuid.uuid4().hex
+
+ # TODO(derekh): Update to use keystoneclient when available
+ class FakeResponse(object):
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+ responseobject = FakeResponse()
+
+ req = webob.Request.blank(
+ '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
+ headers={'X-Auth-Token': token_id})
+ req.method = 'PATCH'
+ req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
+ (new_password, self.user_two['password']))
+
+ rv = self.public_server.application(
+ req.environ,
+ responseobject.start_fake_response)
+ response_json = jsonutils.loads(rv.pop())
+ new_token_id = response_json['access']['token']['id']
+
+ self.assertRaises(client_exceptions.Unauthorized, client.tenants.list)
+ client.auth_token = new_token_id
+ client.tenants.list()
+
+
+class KcEssex3TestCase(CompatTestCase, KeystoneClientTests):
+ def get_checkout(self):
+ return KEYSTONECLIENT_REPO, 'essex-3'
+
+ def test_tenant_add_and_remove_user(self):
+ client = self.get_client(admin=True)
+ client.roles.add_user_to_tenant(tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_member['id'])
+ role_refs = client.roles.get_user_role_refs(
+ user_id=self.user_two['id'])
+        self.assertIn(self.tenant_baz['id'], [x.tenantId for x in role_refs])
+
+ # get the "role_refs" so we get the proper id, this is how the clients
+ # do it
+ roleref_refs = client.roles.get_user_role_refs(
+ user_id=self.user_two['id'])
+ for roleref_ref in roleref_refs:
+ if (roleref_ref.roleId == self.role_member['id']
+ and roleref_ref.tenantId == self.tenant_baz['id']):
+ # use python's scope fall through to leave roleref_ref set
+ break
+
+ client.roles.remove_user_from_tenant(tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=roleref_ref.id)
+
+ role_refs = client.roles.get_user_role_refs(
+ user_id=self.user_two['id'])
+        self.assertNotIn(self.tenant_baz['id'],
+                         [x.tenantId for x in role_refs])
+
+ def test_roles_get_by_user(self):
+ client = self.get_client(admin=True)
+ roles = client.roles.get_user_role_refs(user_id='foo')
+ self.assertTrue(len(roles) > 0)
+
+ def test_role_list_404(self):
+ self.skipTest('N/A')
+
+ def test_authenticate_and_delete_token(self):
+ self.skipTest('N/A')
+
+ def test_user_create_update_delete(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ test_username = 'new_user'
+ client = self.get_client(admin=True)
+ user = client.users.create(name=test_username,
+ password='password',
+ email='user1@test.com')
+ self.assertEquals(user.name, test_username)
+
+ user = client.users.get(user=user.id)
+ self.assertEquals(user.name, test_username)
+
+ user = client.users.update_email(user=user, email='user2@test.com')
+ self.assertEquals(user.email, 'user2@test.com')
+
+ # NOTE(termie): update_enabled doesn't return anything, probably a bug
+ client.users.update_enabled(user=user, enabled=False)
+ user = client.users.get(user.id)
+ self.assertFalse(user.enabled)
+
+ self.assertRaises(client_exceptions.Unauthorized,
+ self._client,
+ username=test_username,
+ password='password')
+ client.users.update_enabled(user, True)
+
+ user = client.users.update_password(user=user, password='password2')
+
+ self._client(username=test_username,
+ password='password2')
+
+ user = client.users.update_tenant(user=user, tenant='bar')
+ # TODO(ja): once keystonelight supports default tenant
+ # when you login without specifying tenant, the
+ # token should be scoped to tenant 'bar'
+
+ client.users.delete(user.id)
+ self.assertRaises(client_exceptions.NotFound, client.users.get,
+ user.id)
+
+ def test_user_update_404(self):
+ self.skipTest('N/A')
+
+ def test_endpoint_create_404(self):
+ self.skipTest('N/A')
+
+ def test_endpoint_delete_404(self):
+ self.skipTest('N/A')
+
+ def test_policy_crud(self):
+ self.skipTest('N/A due to lack of endpoint CRUD')
+
+
+class Kc11TestCase(CompatTestCase, KeystoneClientTests):
+ def get_checkout(self):
+ return KEYSTONECLIENT_REPO, '0.1.1'
+
+ def test_policy_crud(self):
+ self.skipTest('N/A')
diff --git a/keystone/tests/test_keystoneclient_sql.py b/keystone/tests/test_keystoneclient_sql.py
new file mode 100644
index 00000000..105d8353
--- /dev/null
+++ b/keystone/tests/test_keystoneclient_sql.py
@@ -0,0 +1,175 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.common import sql
+from keystone import config
+from keystone.tests import core as test
+
+import test_keystoneclient
+
+
+CONF = config.CONF
+
+
+class KcMasterSqlTestCase(test_keystoneclient.KcMasterTestCase, sql.Base):
+ def config(self, config_files):
+ super(KcMasterSqlTestCase, self).config([
+ test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf')])
+
+ self.load_backends()
+ self.engine = self.get_engine()
+ sql.ModelBase.metadata.create_all(bind=self.engine)
+
+ def tearDown(self):
+ sql.ModelBase.metadata.drop_all(bind=self.engine)
+ self.engine.dispose()
+ sql.set_global_engine(None)
+ super(KcMasterSqlTestCase, self).tearDown()
+
+ def test_endpoint_crud(self):
+ from keystoneclient import exceptions as client_exceptions
+
+ client = self.get_client(admin=True)
+
+ service = client.services.create(name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+
+ endpoint_region = uuid.uuid4().hex
+ invalid_service_id = uuid.uuid4().hex
+ endpoint_publicurl = uuid.uuid4().hex
+ endpoint_internalurl = uuid.uuid4().hex
+ endpoint_adminurl = uuid.uuid4().hex
+
+ # a non-existent service ID should trigger a 404
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.create,
+ region=endpoint_region,
+ service_id=invalid_service_id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ endpoint = client.endpoints.create(region=endpoint_region,
+ service_id=service.id,
+ publicurl=endpoint_publicurl,
+ adminurl=endpoint_adminurl,
+ internalurl=endpoint_internalurl)
+
+ self.assertEquals(endpoint.region, endpoint_region)
+ self.assertEquals(endpoint.service_id, service.id)
+ self.assertEquals(endpoint.publicurl, endpoint_publicurl)
+ self.assertEquals(endpoint.internalurl, endpoint_internalurl)
+ self.assertEquals(endpoint.adminurl, endpoint_adminurl)
+
+ client.endpoints.delete(id=endpoint.id)
+ self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
+ id=endpoint.id)
+
+ def test_endpoint_create_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.create,
+ region=uuid.uuid4().hex,
+ service_id=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex)
+
+ def test_endpoint_delete_404(self):
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+ self.assertRaises(client_exceptions.NotFound,
+ client.endpoints.delete,
+ id=uuid.uuid4().hex)
+
+ def test_policy_crud(self):
+ # FIXME(dolph): this test was written prior to the v3 implementation of
+ # the client and essentially refers to a non-existent
+ # policy manager in the v2 client. this test needs to be
+ # moved to a test suite running against the v3 api
+ self.skipTest('Written prior to v3 client; needs refactor')
+
+ from keystoneclient import exceptions as client_exceptions
+ client = self.get_client(admin=True)
+
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ service = client.services.create(
+ name=uuid.uuid4().hex,
+ service_type=uuid.uuid4().hex,
+ description=uuid.uuid4().hex)
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ # create
+ policy = client.policies.create(
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+ self.assertEquals(policy_blob, policy.policy)
+ self.assertEquals(policy_type, policy.type)
+ self.assertEquals(endpoint.id, policy.endpoint_id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEquals(policy_blob, policy.policy)
+ self.assertEquals(policy_type, policy.type)
+ self.assertEquals(endpoint.id, policy.endpoint_id)
+
+ endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
+ endpoint = endpoints[0]
+ self.assertEquals(policy_blob, policy.policy)
+ self.assertEquals(policy_type, policy.type)
+ self.assertEquals(endpoint.id, policy.endpoint_id)
+
+ # update
+ policy_blob = uuid.uuid4().hex
+ policy_type = uuid.uuid4().hex
+ endpoint = client.endpoints.create(
+ service_id=service.id,
+ region=uuid.uuid4().hex,
+ adminurl=uuid.uuid4().hex,
+ internalurl=uuid.uuid4().hex,
+ publicurl=uuid.uuid4().hex)
+
+ policy = client.policies.update(
+ policy=policy.id,
+ blob=policy_blob,
+ type=policy_type,
+ endpoint=endpoint.id)
+
+ policy = client.policies.get(policy=policy.id)
+ self.assertEquals(policy_blob, policy.policy)
+ self.assertEquals(policy_type, policy.type)
+ self.assertEquals(endpoint.id, policy.endpoint_id)
+
+ # delete
+ client.policies.delete(policy=policy.id)
+ self.assertRaises(
+ client_exceptions.NotFound,
+ client.policies.get,
+ policy=policy.id)
+ policies = [x for x in client.policies.list() if x.id == policy.id]
+ self.assertEquals(len(policies), 0)
diff --git a/keystone/tests/test_middleware.py b/keystone/tests/test_middleware.py
new file mode 100644
index 00000000..df33d172
--- /dev/null
+++ b/keystone/tests/test_middleware.py
@@ -0,0 +1,163 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import webob
+
+from keystone.tests import core as test
+
+from keystone import config
+from keystone import middleware
+from keystone.openstack.common import jsonutils
+
+
+CONF = config.CONF
+
+
+def make_request(**kwargs):
+ accept = kwargs.pop('accept', None)
+ method = kwargs.pop('method', 'GET')
+ body = kwargs.pop('body', None)
+ req = webob.Request.blank('/', **kwargs)
+ req.method = method
+ if body is not None:
+ req.body = body
+ if accept is not None:
+ req.accept = accept
+ return req
+
+
+def make_response(**kwargs):
+ body = kwargs.pop('body', None)
+ return webob.Response(body)
+
+
+class TokenAuthMiddlewareTest(test.TestCase):
+ def test_request(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = 'MAGIC'
+ middleware.TokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertEqual(context['token_id'], 'MAGIC')
+
+
+class AdminTokenAuthMiddlewareTest(test.TestCase):
+ def test_request_admin(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = CONF.admin_token
+ middleware.AdminTokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertTrue(context['is_admin'])
+
+ def test_request_non_admin(self):
+ req = make_request()
+ req.headers[middleware.AUTH_TOKEN_HEADER] = 'NOT-ADMIN'
+ middleware.AdminTokenAuthMiddleware(None).process_request(req)
+ context = req.environ[middleware.CONTEXT_ENV]
+ self.assertFalse(context['is_admin'])
+
+
+class PostParamsMiddlewareTest(test.TestCase):
+ def test_request_with_params(self):
+ req = make_request(body="arg1=one", method='POST')
+ middleware.PostParamsMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual(params, {"arg1": "one"})
+
+
+class JsonBodyMiddlewareTest(test.TestCase):
+ def test_request_with_params(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ content_type='application/json',
+ method='POST')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual(params, {"arg1": "one", "arg2": ["a"]})
+
+ def test_malformed_json(self):
+ req = make_request(body='{"arg1": "on',
+ content_type='application/json',
+ method='POST')
+ resp = middleware.JsonBodyMiddleware(None).process_request(req)
+ self.assertEqual(resp.status_int, 400)
+
+ def test_no_content_type(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ method='POST')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ[middleware.PARAMS_ENV]
+ self.assertEqual(params, {"arg1": "one", "arg2": ["a"]})
+
+ def test_unrecognized_content_type(self):
+ req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
+ content_type='text/plain',
+ method='POST')
+ resp = middleware.JsonBodyMiddleware(None).process_request(req)
+ self.assertEqual(resp.status_int, 400)
+
+ def test_unrecognized_content_type_without_body(self):
+ req = make_request(content_type='text/plain',
+ method='GET')
+ middleware.JsonBodyMiddleware(None).process_request(req)
+ params = req.environ.get(middleware.PARAMS_ENV, {})
+ self.assertEqual(params, {})
+
+
+class XmlBodyMiddlewareTest(test.TestCase):
+ def test_client_wants_xml_back(self):
+ """Clients requesting XML should get what they ask for."""
+ body = '{"container": {"attribute": "value"}}'
+ req = make_request(body=body, method='POST', accept='application/xml')
+ middleware.XmlBodyMiddleware(None).process_request(req)
+ resp = make_response(body=body)
+ middleware.XmlBodyMiddleware(None).process_response(req, resp)
+ self.assertEqual(resp.content_type, 'application/xml')
+
+ def test_client_wants_json_back(self):
+ """Clients requesting JSON should definitely not get XML back."""
+ body = '{"container": {"attribute": "value"}}'
+ req = make_request(body=body, method='POST', accept='application/json')
+ middleware.XmlBodyMiddleware(None).process_request(req)
+ resp = make_response(body=body)
+ middleware.XmlBodyMiddleware(None).process_response(req, resp)
+ self.assertNotIn('application/xml', resp.content_type)
+
+ def test_client_fails_to_specify_accept(self):
+ """If client does not specify an Accept header, default to JSON."""
+ body = '{"container": {"attribute": "value"}}'
+ req = make_request(body=body, method='POST')
+ middleware.XmlBodyMiddleware(None).process_request(req)
+ resp = make_response(body=body)
+ middleware.XmlBodyMiddleware(None).process_response(req, resp)
+ self.assertNotIn('application/xml', resp.content_type)
+
+ def test_xml_replaced_by_json(self):
+ """XML requests should be replaced by JSON requests."""
+ req = make_request(
+ body='<container><element attribute="value" /></container>',
+ content_type='application/xml',
+ method='POST')
+ middleware.XmlBodyMiddleware(None).process_request(req)
+        self.assertEqual(req.content_type, 'application/json')
+ self.assertTrue(jsonutils.loads(req.body))
+
+    def test_json_unaffected(self):
+ """JSON-only requests should be unaffected by the XML middleware."""
+ content_type = 'application/json'
+ body = '{"container": {"attribute": "value"}}'
+ req = make_request(body=body, content_type=content_type, method='POST')
+ middleware.XmlBodyMiddleware(None).process_request(req)
+ self.assertEqual(req.body, body)
+ self.assertEqual(req.content_type, content_type)
diff --git a/keystone/tests/test_no_admin_token_auth.py b/keystone/tests/test_no_admin_token_auth.py
new file mode 100644
index 00000000..3a7113d8
--- /dev/null
+++ b/keystone/tests/test_no_admin_token_auth.py
@@ -0,0 +1,47 @@
+
+import os
+import webtest
+
+from keystone.tests import core as test
+
+
+def _generate_paste_config():
+ # Generate a file, based on keystone-paste.ini, that doesn't include
+ # admin_token_auth in the pipeline
+
+ with open(test.etcdir('keystone-paste.ini'), 'r') as f:
+ contents = f.read()
+
+ new_contents = contents.replace(' admin_token_auth ', ' ')
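+    # For example, a pipeline entry such as
+    #   "pipeline = ... admin_token_auth json_body ..."
+    # becomes
+    #   "pipeline = ... json_body ..."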
+
+ with open(test.tmpdir('no_admin_token_auth-paste.ini'), 'w') as f:
+ f.write(new_contents)
+
+
+class TestNoAdminTokenAuth(test.TestCase):
+ def setUp(self):
+ super(TestNoAdminTokenAuth, self).setUp()
+ self.load_backends()
+
+ _generate_paste_config()
+
+ self.admin_app = webtest.TestApp(
+ self.loadapp(test.tmpdir('no_admin_token_auth'), name='admin'),
+ extra_environ=dict(REMOTE_ADDR='127.0.0.1'))
+
+ def tearDown(self):
+ self.admin_app = None
+ os.remove(test.tmpdir('no_admin_token_auth-paste.ini'))
+
+ def test_request_no_admin_token_auth(self):
+ # This test verifies that if the admin_token_auth middleware isn't
+        # in the paste pipeline, users can still make requests.
+
+ # Note(blk-u): Picked /v2.0/tenants because it's an operation that
+        # requires is_admin in the context; any operation that requires
+ # is_admin would work for this test.
+ REQ_PATH = '/v2.0/tenants'
+
+ # If the following does not raise, then the test is successful.
+ self.admin_app.get(REQ_PATH, headers={'X-Auth-Token': 'NotAdminToken'},
+ status=401)
diff --git a/keystone/tests/test_overrides.conf b/keystone/tests/test_overrides.conf
new file mode 100644
index 00000000..aac29f26
--- /dev/null
+++ b/keystone/tests/test_overrides.conf
@@ -0,0 +1,20 @@
+[DEFAULT]
+crypt_strength = 1000
+
+[identity]
+driver = keystone.identity.backends.kvs.Identity
+
+[catalog]
+driver = keystone.catalog.backends.templated.TemplatedCatalog
+template_file = default_catalog.templates
+
+[trust]
+driver = keystone.trust.backends.kvs.Trust
+
+[token]
+driver = keystone.token.backends.kvs.Token
+
+[signing]
+certfile = ../../examples/pki/certs/signing_cert.pem
+keyfile = ../../examples/pki/private/signing_key.pem
+ca_certs = ../../examples/pki/certs/cacert.pem
diff --git a/keystone/tests/test_pki_token_provider.conf b/keystone/tests/test_pki_token_provider.conf
new file mode 100644
index 00000000..255972c3
--- /dev/null
+++ b/keystone/tests/test_pki_token_provider.conf
@@ -0,0 +1,2 @@
+[token]
+provider = keystone.token.providers.pki.Provider
diff --git a/keystone/tests/test_policy.py b/keystone/tests/test_policy.py
new file mode 100644
index 00000000..bdf91c94
--- /dev/null
+++ b/keystone/tests/test_policy.py
@@ -0,0 +1,191 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2011 Piston Cloud Computing, Inc.
+# All Rights Reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import StringIO
+import tempfile
+import urllib2
+
+from keystone.tests import core as test
+
+from keystone import config
+from keystone import exception
+from keystone.openstack.common import policy as common_policy
+from keystone.policy.backends import rules
+
+
+CONF = config.CONF
+
+
+class PolicyFileTestCase(test.TestCase):
+ def setUp(self):
+ super(PolicyFileTestCase, self).setUp()
+ self.orig_policy_file = CONF.policy_file
+ rules.reset()
+ _unused, self.tmpfilename = tempfile.mkstemp()
+ self.opt(policy_file=self.tmpfilename)
+ self.target = {}
+
+ def tearDown(self):
+ super(PolicyFileTestCase, self).tearDown()
+ rules.reset()
+ self.opt(policy_file=self.orig_policy_file)
+
+ def test_modified_policy_reloads(self):
+ action = "example:test"
+ empty_credentials = {}
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write("""{"example:test": []}""")
+ rules.enforce(empty_credentials, action, self.target)
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write("""{"example:test": ["false:false"]}""")
+ # NOTE(vish): reset stored policy cache so we don't have to sleep(1)
+ rules._POLICY_CACHE = {}
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ empty_credentials, action, self.target)
+
+
+class PolicyTestCase(test.TestCase):
+ def setUp(self):
+ super(PolicyTestCase, self).setUp()
+ rules.reset()
+ # NOTE(vish): preload rules to circumvent reloading from file
+ rules.init()
+ self.rules = {
+ "true": [],
+ "example:allowed": [],
+ "example:denied": [["false:false"]],
+ "example:get_http": [["http:http://www.example.com"]],
+ "example:my_file": [["role:compute_admin"],
+ ["project_id:%(project_id)s"]],
+ "example:early_and_fail": [["false:false", "rule:true"]],
+ "example:early_or_success": [["rule:true"], ["false:false"]],
+ "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
+ "example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
+ }
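+        # NOTE: in this legacy rule format the outer list is OR'd and each
+        # inner list is AND'd, e.g. "example:my_file" passes for either the
+        # compute_admin role or a matching project_id.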
+
+ # NOTE(vish): then overload underlying policy engine
+ self._set_rules()
+ self.credentials = {}
+ self.target = {}
+
+ def _set_rules(self):
+ these_rules = common_policy.Rules(
+ dict((k, common_policy.parse_rule(v))
+ for k, v in self.rules.items()))
+ common_policy.set_rules(these_rules)
+
+ def tearDown(self):
+ rules.reset()
+ super(PolicyTestCase, self).tearDown()
+
+ def test_enforce_nonexistent_action_throws(self):
+ action = "example:noexist"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_enforce_bad_action_throws(self):
+ action = "example:denied"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_enforce_good_action(self):
+ action = "example:allowed"
+ rules.enforce(self.credentials, action, self.target)
+
+ def test_enforce_http_true(self):
+
+ def fakeurlopen(url, post_data):
+ return StringIO.StringIO("True")
+
+ self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
+ action = "example:get_http"
+ target = {}
+ result = rules.enforce(self.credentials, action, target)
+ self.assertTrue(result)
+
+ def test_enforce_http_false(self):
+
+ def fakeurlopen(url, post_data):
+ return StringIO.StringIO("False")
+ self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
+ action = "example:get_http"
+ target = {}
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, target)
+
+ def test_templatized_enforcement(self):
+ target_mine = {'project_id': 'fake'}
+ target_not_mine = {'project_id': 'another'}
+ credentials = {'project_id': 'fake', 'roles': []}
+ action = "example:my_file"
+ rules.enforce(credentials, action, target_mine)
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ credentials, action, target_not_mine)
+
+ def test_early_AND_enforcement(self):
+ action = "example:early_and_fail"
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, action, self.target)
+
+ def test_early_OR_enforcement(self):
+ action = "example:early_or_success"
+ rules.enforce(self.credentials, action, self.target)
+
+ def test_ignore_case_role_check(self):
+ lowercase_action = "example:lowercase_admin"
+ uppercase_action = "example:uppercase_admin"
+ # NOTE(dprince) we mix case in the Admin role here to ensure
+ # case is ignored
+ admin_credentials = {'roles': ['AdMiN']}
+ rules.enforce(admin_credentials, lowercase_action, self.target)
+ rules.enforce(admin_credentials, uppercase_action, self.target)
+
+
+class DefaultPolicyTestCase(test.TestCase):
+ def setUp(self):
+ super(DefaultPolicyTestCase, self).setUp()
+ rules.reset()
+ rules.init()
+
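+ # NOTE: when an action has no matching rule, the enforcer falls back
+ # to the rule named by default_rule ('default' here); if that rule
+ # does not exist either, enforcement is denied (see
+ # test_default_not_found below).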
+ self.rules = {
+ "default": [],
+ "example:exist": [["false:false"]]
+ }
+ self._set_rules('default')
+ self.credentials = {}
+
+ def _set_rules(self, default_rule):
+ these_rules = common_policy.Rules(
+ dict((k, common_policy.parse_rule(v))
+ for k, v in self.rules.items()), default_rule)
+ common_policy.set_rules(these_rules)
+
+ def tearDown(self):
+ super(DefaultPolicyTestCase, self).tearDown()
+ rules.reset()
+
+ def test_policy_called(self):
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, "example:exist", {})
+
+ def test_not_found_policy_calls_default(self):
+ rules.enforce(self.credentials, "example:noexist", {})
+
+ def test_default_not_found(self):
+ self._set_rules("default_noexist")
+ self.assertRaises(exception.ForbiddenAction, rules.enforce,
+ self.credentials, "example:noexist", {})
diff --git a/keystone/tests/test_s3_token_middleware.py b/keystone/tests/test_s3_token_middleware.py
new file mode 100644
index 00000000..ec31f2ac
--- /dev/null
+++ b/keystone/tests/test_s3_token_middleware.py
@@ -0,0 +1,233 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import unittest2 as unittest
+import webob
+
+from keystone.middleware import s3_token
+from keystone.openstack.common import jsonutils
+
+
+class FakeHTTPResponse(object):
+ def __init__(self, status, body):
+ self.status = status
+ self.body = body
+ self.reason = ""
+
+ def read(self):
+ return self.body
+
+
+class FakeApp(object):
+ """This represents a WSGI app protected by the auth_token middleware."""
+ def __call__(self, env, start_response):
+ resp = webob.Response()
+ resp.environ = env
+ return resp(env, start_response)
+
+
+class FakeHTTPConnection(object):
+ def __init__(self, *args):
+ return
+
+ def getresponse(self):
+ return self.resp
+
+ def close(self):
+ pass
+
+ def request(self, method, path, **kwargs):
+ pass
+
+
+class S3TokenMiddlewareTestBase(unittest.TestCase):
+ def setUp(self):
+ super(S3TokenMiddlewareTestBase, self).setUp()
+
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+
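+# NOTE: good_request is patched in as FakeHTTPConnection.request (see
+# S3TokenMiddlewareTestGood.setup_middleware_fake); 'cls' is really the
+# fake connection instance, and the canned body mimics a successful
+# keystone S3 token validation response.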
+def good_request(cls, method, path, **kwargs):
+ cls.status = 201
+ ret = {'access': {'token':
+ {'id': 'TOKEN_ID',
+ 'tenant': {'id': 'TENANT_ID'}}}}
+ body = jsonutils.dumps(ret)
+ cls.resp = FakeHTTPResponse(cls.status, body)
+
+
+class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
+ def setup_middleware_fake(self):
+ self.middleware.http_client_class = FakeHTTPConnection
+ self.middleware.http_client_class.request = good_request
+
+ def setUp(self):
+ self.middleware = s3_token.S3Token(FakeApp(), {})
+ self.setup_middleware_fake()
+ super(S3TokenMiddlewareTestGood, self).setUp()
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no path has been specified.
+ def test_no_path_request(self):
+ req = webob.Request.blank('/')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no Authorization header has been specified.
+ def test_without_authorization(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def test_without_auth_storage_token(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'badboy'
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def test_authorized(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ self.assertTrue(req.path.startswith('/v1/AUTH_TENANT_ID'))
+ self.assertEqual(req.headers['X-Auth-Token'], 'TOKEN_ID')
+
+ def test_authorized_http(self):
+ self.middleware = (
+ s3_token.filter_factory({'auth_protocol': 'http'})(FakeApp()))
+ self.setup_middleware_fake()
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ self.assertTrue(req.path.startswith('/v1/AUTH_TENANT_ID'))
+ self.assertEqual(req.headers['X-Auth-Token'], 'TOKEN_ID')
+
+ def test_authorization_nova_toconnect(self):
+ req = webob.Request.blank('/v1/AUTH_swiftint/c/o')
+ req.headers['Authorization'] = 'access:FORCED_TENANT_ID:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ req.get_response(self.middleware)
+ path = req.environ['PATH_INFO']
+ self.assertTrue(path.startswith('/v1/AUTH_FORCED_TENANT_ID'))
+
+
+class S3TokenMiddlewareTestBad(S3TokenMiddlewareTestBase):
+ def setUp(self):
+ self.middleware = s3_token.S3Token(FakeApp(), {})
+ self.middleware.http_client_class = FakeHTTPConnection
+ super(S3TokenMiddlewareTestBad, self).setUp()
+
+ def test_unauthorized_token(self):
+ def request(self, method, path, **kwargs):
+ ret = {"error":
+ {"message": "EC2 access key not found.",
+ "code": 401,
+ "title": "Unauthorized"}}
+ body = jsonutils.dumps(ret)
+ self.status = 403
+ self.resp = FakeHTTPResponse(self.status, body)
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ self.middleware.http_client_class.request = request
+ resp = req.get_response(self.middleware)
+ s3_denied_req = self.middleware.deny_request('AccessDenied')
+ self.assertEqual(resp.body, s3_denied_req.body)
+ self.assertEqual(resp.status_int, s3_denied_req.status_int)
+
+ def test_bogus_authorization(self):
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'badboy'
+ req.headers['X-Storage-Token'] = 'token'
+ resp = req.get_response(self.middleware)
+ self.assertEqual(resp.status_int, 400)
+ s3_invalid_req = self.middleware.deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+ def test_fail_to_connect_to_keystone(self):
+ def request(self, method, path, **kwargs):
+ raise s3_token.ServiceError
+ self.middleware.http_client_class.request = request
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ self.middleware.http_client_class.status = 503
+ resp = req.get_response(self.middleware)
+ s3_invalid_req = self.middleware.deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+ def test_bad_reply(self):
+ def request(self, method, path, **kwargs):
+ body = "<badreply>"
+ self.status = 201
+ self.resp = FakeHTTPResponse(self.status, body)
+
+ req = webob.Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'access:signature'
+ req.headers['X-Storage-Token'] = 'token'
+ self.middleware.http_client_class.request = request
+ resp = req.get_response(self.middleware)
+ s3_invalid_req = self.middleware.deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_req.body)
+ self.assertEqual(resp.status_int, s3_invalid_req.status_int)
+
+
+class S3TokenMiddlewareTestUtil(unittest.TestCase):
+ def test_split_path_failed(self):
+ self.assertRaises(ValueError, s3_token.split_path, '')
+ self.assertRaises(ValueError, s3_token.split_path, '/')
+ self.assertRaises(ValueError, s3_token.split_path, '//')
+ self.assertRaises(ValueError, s3_token.split_path, '//a')
+ self.assertRaises(ValueError, s3_token.split_path, '/a/c')
+ self.assertRaises(ValueError, s3_token.split_path, '//c')
+ self.assertRaises(ValueError, s3_token.split_path, '/a/c/')
+ self.assertRaises(ValueError, s3_token.split_path, '/a//')
+ self.assertRaises(ValueError, s3_token.split_path, '/a', 2)
+ self.assertRaises(ValueError, s3_token.split_path, '/a', 2, 3)
+ self.assertRaises(ValueError, s3_token.split_path, '/a', 2, 3, True)
+ self.assertRaises(ValueError, s3_token.split_path, '/a/c/o/r', 3, 3)
+ self.assertRaises(ValueError, s3_token.split_path, '/a', 5, 4)
+
+ def test_split_path_success(self):
+ self.assertEquals(s3_token.split_path('/a'), ['a'])
+ self.assertEquals(s3_token.split_path('/a/'), ['a'])
+ self.assertEquals(s3_token.split_path('/a/c', 2), ['a', 'c'])
+ self.assertEquals(s3_token.split_path('/a/c/o', 3), ['a', 'c', 'o'])
+ self.assertEquals(s3_token.split_path('/a/c/o/r', 3, 3, True),
+ ['a', 'c', 'o/r'])
+ self.assertEquals(s3_token.split_path('/a/c', 2, 3, True),
+ ['a', 'c', None])
+ self.assertEquals(s3_token.split_path('/a/c/', 2), ['a', 'c'])
+ self.assertEquals(s3_token.split_path('/a/c/', 2, 3), ['a', 'c', ''])
+
+ def test_split_path_invalid_path(self):
+ try:
+ s3_token.split_path('o\nn e', 2)
+ except ValueError, err:
+ self.assertEquals(str(err), 'Invalid path: o%0An%20e')
+ try:
+ s3_token.split_path('o\nn e', 2, 3, True)
+ except ValueError, err:
+ self.assertEquals(str(err), 'Invalid path: o%0An%20e')
diff --git a/keystone/tests/test_serializer.py b/keystone/tests/test_serializer.py
new file mode 100644
index 00000000..260a533c
--- /dev/null
+++ b/keystone/tests/test_serializer.py
@@ -0,0 +1,297 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+
+from keystone.common import serializer
+from keystone.tests import core as test
+
+
+class XmlSerializerTestCase(test.TestCase):
+ def assertSerializeDeserialize(self, d, xml, xmlns=None):
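+ """Assert that the dict serializes to the given XML and back again."""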
+ self.assertEqualXML(
+ serializer.to_xml(copy.deepcopy(d), xmlns),
+ xml)
+ self.assertEqual(serializer.from_xml(xml), d)
+
+ # operations should be invertible
+ self.assertEqual(
+ serializer.from_xml(serializer.to_xml(copy.deepcopy(d), xmlns)),
+ d)
+ self.assertEqualXML(
+ serializer.to_xml(serializer.from_xml(xml), xmlns),
+ xml)
+
+ def test_auth_request(self):
+ d = {
+ "auth": {
+ "passwordCredentials": {
+ "username": "test_user",
+ "password": "mypass"
+ },
+ "tenantName": "customer-x"
+ }
+ }
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <auth xmlns="http://docs.openstack.org/identity/api/v2.0"
+ tenantName="customer-x">
+ <passwordCredentials
+ username="test_user"
+ password="mypass"/>
+ </auth>
+ """
+
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_role_crud(self):
+ d = {
+ "role": {
+ "id": "123",
+ "name": "Guest",
+ "description": "Guest Access"
+ }
+ }
+
+ # TODO(dolph): examples show this description as an attribute?
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <role xmlns="http://docs.openstack.org/identity/api/v2.0"
+ id="123"
+ name="Guest">
+ <description>Guest Access</description>
+ </role>
+ """
+
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_service_crud(self):
+ xmlns = "http://docs.openstack.org/identity/api/ext/OS-KSADM/v1.0"
+
+ d = {
+ "OS-KSADM:service": {
+ "id": "123",
+ "name": "nova",
+ "type": "compute",
+ "description": "OpenStack Compute Service"
+ }
+ }
+
+ # TODO(dolph): examples show this description as an attribute?
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <service
+ xmlns="%(xmlns)s"
+ type="compute"
+ id="123"
+ name="nova">
+ <description>OpenStack Compute Service</description>
+ </service>
+ """ % {'xmlns': xmlns}
+
+ self.assertSerializeDeserialize(d, xml, xmlns=xmlns)
+
+ def test_tenant_crud(self):
+ d = {
+ "tenant": {
+ "id": "1234",
+ "name": "ACME corp",
+ "description": "A description...",
+ "enabled": True
+ }
+ }
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <tenant
+ xmlns="http://docs.openstack.org/identity/api/v2.0"
+ enabled="true"
+ id="1234"
+ name="ACME corp">
+ <description>A description...</description>
+ </tenant>
+ """
+
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_tenant_crud_no_description(self):
+ d = {
+ "tenant": {
+ "id": "1234",
+ "name": "ACME corp",
+ "description": "",
+ "enabled": True
+ }
+ }
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <tenant
+ xmlns="http://docs.openstack.org/identity/api/v2.0"
+ enabled="true"
+ id="1234"
+ name="ACME corp">
+ <description></description>
+ </tenant>
+ """
+
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_policy_list(self):
+ d = {"policies": [{"id": "ab12cd"}]}
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <policies xmlns="http://docs.openstack.org/identity/api/v2.0">
+ <policy id="ab12cd"/>
+ </policies>
+ """
+ self.assertEqualXML(serializer.to_xml(d), xml)
+
+ def test_values_list(self):
+ d = {
+ "objects": {
+ "values": [{
+ "attribute": "value1",
+ }, {
+ "attribute": "value2",
+ }]
+ }
+ }
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <objects xmlns="http://docs.openstack.org/identity/api/v2.0">
+ <object attribute="value1"/>
+ <object attribute="value2"/>
+ </objects>
+ """
+
+ self.assertEqualXML(serializer.to_xml(d), xml)
+
+ def test_collection_list(self):
+ d = {
+ "links": {
+ "next": "http://localhost:5000/v3/objects?page=3",
+ "previous": None,
+ "self": "http://localhost:5000/v3/objects"
+ },
+ "objects": [{
+ "attribute": "value1",
+ "links": {
+ "self": "http://localhost:5000/v3/objects/abc123def",
+ "anotherobj": "http://localhost:5000/v3/anotherobjs/123"
+ }
+ }, {
+ "attribute": "value2",
+ "links": {
+ "self": "http://localhost:5000/v3/objects/abc456"
+ }
+ }]}
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <objects xmlns="http://docs.openstack.org/identity/api/v2.0">
+ <object attribute="value1">
+ <links>
+ <link rel="self"
+ href="http://localhost:5000/v3/objects/abc123def"/>
+ <link rel="anotherobj"
+ href="http://localhost:5000/v3/anotherobjs/123"/>
+ </links>
+ </object>
+ <object attribute="value2">
+ <links>
+ <link rel="self"
+ href="http://localhost:5000/v3/objects/abc456"/>
+ </links>
+ </object>
+ <links>
+ <link rel="self"
+ href="http://localhost:5000/v3/objects"/>
+ <link rel="next"
+ href="http://localhost:5000/v3/objects?page=3"/>
+ </links>
+ </objects>
+ """
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_collection_member(self):
+ d = {
+ "object": {
+ "attribute": "value",
+ "links": {
+ "self": "http://localhost:5000/v3/objects/abc123def",
+ "anotherobj": "http://localhost:5000/v3/anotherobjs/123"}}}
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <object xmlns="http://docs.openstack.org/identity/api/v2.0"
+ attribute="value">
+ <links>
+ <link rel="self"
+ href="http://localhost:5000/v3/objects/abc123def"/>
+ <link rel="anotherobj"
+ href="http://localhost:5000/v3/anotherobjs/123"/>
+ </links>
+ </object>
+ """
+ self.assertSerializeDeserialize(d, xml)
+
+ def test_v2_links_special_case(self):
+ # There's special-case code (for backward compatibility) where, if the
+ # data is v2 version data, the link elements are also added to the
+ # main element.
+
+ d = {
+ "object": {
+ "id": "v2.0",
+ "status": "stable",
+ "updated": "2013-03-06T00:00:00Z",
+ "links": [{"href": "http://localhost:5000/v2.0/",
+ "rel": "self"},
+ {"href": "http://docs.openstack.org/api/openstack-"
+ "identity-service/2.0/content/",
+ "type": "text/html", "rel": "describedby"},
+ {"href": "http://docs.openstack.org/api/openstack-"
+ "identity-service/2.0/"
+ "identity-dev-guide-2.0.pdf",
+ "type": "application/pdf", "rel": "describedby"}]
+ }}
+
+ xml = """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <object xmlns="http://docs.openstack.org/identity/api/v2.0"
+ id="v2.0" status="stable" updated="2013-03-06T00:00:00Z">
+ <links>
+ <link rel="self" href="http://localhost:5000/v2.0/"/>
+ <link rel="describedby"
+ href="http://docs.openstack.org/api/openstack-\
+identity-service/2.0/content/" type="text/html"/>
+ <link rel="describedby"
+ href="http://docs.openstack.org/api/openstack-\
+identity-service/2.0/identity-dev-guide-2.0.pdf" type="application/pdf"/>
+ </links>
+ <link rel="self" href="http://localhost:5000/v2.0/"/>
+ <link rel="describedby"
+ href="http://docs.openstack.org/api/openstack-\
+identity-service/2.0/content/" type="text/html"/>
+ <link rel="describedby"
+ href="http://docs.openstack.org/api/openstack-\
+identity-service/2.0/identity-dev-guide-2.0.pdf" type="application/pdf"/>
+ </object>
+ """
+ self.assertEqualXML(serializer.to_xml(d), xml)
diff --git a/keystone/tests/test_singular_plural.py b/keystone/tests/test_singular_plural.py
new file mode 100644
index 00000000..ea3ad27c
--- /dev/null
+++ b/keystone/tests/test_singular_plural.py
@@ -0,0 +1,52 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ast
+
+from keystone.contrib.admin_crud import core as admin_crud_core
+from keystone.contrib.ec2 import core as ec2_core
+from keystone.contrib.s3 import core as s3_core
+from keystone.contrib.stats import core as stats_core
+from keystone.contrib.user_crud import core as user_crud_core
+from keystone.identity import core as identity_core
+from keystone import service
+
+
+class TestSingularPlural(object):
+ def test_keyword_arg_condition_or_methods(self):
+ """Raise if we see a keyword arg called 'condition' or 'methods'."""
+ modules = [admin_crud_core, ec2_core, s3_core, stats_core,
+ user_crud_core, identity_core, service]
+ for module in modules:
+ filename = module.__file__
+ if filename.endswith(".pyc"):
+ # In Python 2, the .py and .pyc files are in the same dir.
+ filename = filename[:-1]
+ with open(filename) as fil:
+ source = fil.read()
+ module = ast.parse(source, filename)
+ last_stmt_or_expr = None
+ for node in ast.walk(module):
+ if isinstance(node, ast.stmt) or isinstance(node, ast.expr):
+ # keyword nodes don't have line numbers, so we need to
+ # get that information from the parent stmt or expr.
+ last_stmt_or_expr = node
+ elif isinstance(node, ast.keyword):
+ for bad_word in ["condition", "methods"]:
+ if node.arg == bad_word:
+ raise AssertionError(
+ "Suspicious name '%s' at %s line %s" %
+ (bad_word, filename, last_stmt_or_expr.lineno))
diff --git a/keystone/tests/test_sizelimit.py b/keystone/tests/test_sizelimit.py
new file mode 100644
index 00000000..a37b0e31
--- /dev/null
+++ b/keystone/tests/test_sizelimit.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2013 OpenStack, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import webob
+
+from keystone.tests import core as test
+
+from keystone import config
+from keystone import exception
+from keystone import middleware
+
+CONF = config.CONF
+MAX_REQUEST_BODY_SIZE = CONF.max_request_body_size
+
+
+class TestRequestBodySizeLimiter(test.TestCase):
+
+ def setUp(self):
+ super(TestRequestBodySizeLimiter, self).setUp()
+
+ @webob.dec.wsgify()
+ def fake_app(req):
+ return webob.Response(req.body)
+
+ self.middleware = middleware.RequestBodySizeLimiter(fake_app)
+ self.request = webob.Request.blank('/', method='POST')
+
+ def test_content_length_acceptable(self):
+ self.request.headers['Content-Length'] = MAX_REQUEST_BODY_SIZE
+ self.request.body = "0" * MAX_REQUEST_BODY_SIZE
+ response = self.request.get_response(self.middleware)
+ self.assertEqual(response.status_int, 200)
+
+ def test_content_length_too_large(self):
+ self.request.headers['Content-Length'] = MAX_REQUEST_BODY_SIZE + 1
+ self.request.body = "0" * (MAX_REQUEST_BODY_SIZE + 1)
+ self.assertRaises(exception.RequestTooLarge,
+ self.request.get_response,
+ self.middleware)
+
+ def test_request_too_large_no_content_length(self):
+ self.request.body = "0" * (MAX_REQUEST_BODY_SIZE + 1)
+ self.request.headers['Content-Length'] = None
+ self.assertRaises(exception.RequestTooLarge,
+ self.request.get_response,
+ self.middleware)
diff --git a/keystone/tests/test_sql_core.py b/keystone/tests/test_sql_core.py
new file mode 100644
index 00000000..e3379152
--- /dev/null
+++ b/keystone/tests/test_sql_core.py
@@ -0,0 +1,182 @@
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone.common import sql
+from keystone.tests import core as test
+
+
+class CallbackMonitor:
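+ """Records whether call_this() has been invoked.
+
+ check() raises if the callback was expected but never called; passing
+ raise_=True makes call_this() itself raise, to exercise error handling
+ in the caller.
+ """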
+ def __init__(self, expect_called=True, raise_=False):
+ self.expect_called = expect_called
+ self.called = False
+ self._complete = False
+ self._raise = raise_
+
+ def call_this(self):
+ if self._complete:
+ return
+
+ if not self.expect_called:
+ raise Exception("Did not expect callback.")
+
+ if self.called:
+ raise Exception("Callback already called.")
+
+ self.called = True
+
+ if self._raise:
+ raise Exception("When called, raises.")
+
+ def check(self):
+ if self.expect_called:
+ if not self.called:
+ raise Exception("Expected function to be called.")
+ self._complete = True
+
+
+class TestGlobalEngine(test.TestCase):
+
+ def tearDown(self):
+ sql.set_global_engine(None)
+ super(TestGlobalEngine, self).tearDown()
+
+ def test_notify_on_set(self):
+ # If sql.set_global_engine() is called, registered callbacks get called.
+
+ cb_mon = CallbackMonitor()
+
+ sql.register_global_engine_callback(cb_mon.call_this)
+ fake_engine = object()
+ sql.set_global_engine(fake_engine)
+
+ cb_mon.check()
+
+ def test_multi_notify(self):
+ # You can also set multiple notify callbacks and they each get called.
+
+ cb_mon1 = CallbackMonitor()
+ cb_mon2 = CallbackMonitor()
+
+ sql.register_global_engine_callback(cb_mon1.call_this)
+ sql.register_global_engine_callback(cb_mon2.call_this)
+
+ fake_engine = object()
+ sql.set_global_engine(fake_engine)
+
+ cb_mon1.check()
+ cb_mon2.check()
+
+ def test_notify_once(self):
+ # After a callback is called, it's not called again if the global
+ # engine is set again.
+
+ cb_mon = CallbackMonitor()
+
+ sql.register_global_engine_callback(cb_mon.call_this)
+ fake_engine = object()
+ sql.set_global_engine(fake_engine)
+
+ fake_engine = object()
+ # Note that cb_mon.call_this would raise if it's called again.
+ sql.set_global_engine(fake_engine)
+
+ cb_mon.check()
+
+ def test_set_same_engine(self):
+ # If you set the global engine to the same engine, callbacks don't get
+ # called.
+
+ fake_engine = object()
+
+ sql.set_global_engine(fake_engine)
+
+ cb_mon = CallbackMonitor(expect_called=False)
+ sql.register_global_engine_callback(cb_mon.call_this)
+
+ # Note that cb_mon.call_this would raise if it's called.
+ sql.set_global_engine(fake_engine)
+
+ cb_mon.check()
+
+ def test_notify_register_same(self):
+ # If you register the same callback twice, it only gets called once.
+ cb_mon = CallbackMonitor()
+
+ sql.register_global_engine_callback(cb_mon.call_this)
+ sql.register_global_engine_callback(cb_mon.call_this)
+
+ fake_engine = object()
+ # Note that cb_mon.call_this would raise if it's called twice.
+ sql.set_global_engine(fake_engine)
+
+ cb_mon.check()
+
+ def test_callback_throws(self):
+ # If a callback function raises,
+ # a) the caller doesn't know about it,
+ # b) other callbacks are still called
+
+ cb_mon1 = CallbackMonitor(raise_=True)
+ cb_mon2 = CallbackMonitor()
+
+ sql.register_global_engine_callback(cb_mon1.call_this)
+ sql.register_global_engine_callback(cb_mon2.call_this)
+
+ fake_engine = object()
+ sql.set_global_engine(fake_engine)
+
+ cb_mon1.check()
+ cb_mon2.check()
+
+
+class TestBase(test.TestCase):
+
+ def tearDown(self):
+ sql.set_global_engine(None)
+ super(TestBase, self).tearDown()
+
+ def test_get_engine_global(self):
+ # If get_engine() is called twice, the same global engine is returned.
+ base = sql.Base()
+ engine1 = base.get_engine()
+ self.assertIsNotNone(engine1)
+ engine2 = base.get_engine()
+ self.assertIs(engine1, engine2)
+
+ def test_get_engine_not_global(self):
+ # If get_engine() is called twice, once with allow_global_engine=True
+ # and once with allow_global_engine=False, different engines are
+ # returned.
+ base = sql.Base()
+ engine1 = base.get_engine()
+ engine2 = base.get_engine(allow_global_engine=False)
+ self.assertIsNot(engine1, engine2)
+
+ def test_get_session(self):
+ # autocommit and expire_on_commit flags to get_session() are passed on
+ # to the session created.
+
+ base = sql.Base()
+ session = base.get_session(autocommit=False, expire_on_commit=True)
+
+ self.assertFalse(session.autocommit)
+ self.assertTrue(session.expire_on_commit)
+
+ def test_get_session_invalidated(self):
+ # If the global engine is cleared, a new engine is used for get_session().
+ base = sql.Base()
+ session1 = base.get_session()
+ sql.set_global_engine(None)
+ session2 = base.get_session()
+ self.assertIsNot(session1.bind, session2.bind)
diff --git a/keystone/tests/test_sql_migrate_extensions.py b/keystone/tests/test_sql_migrate_extensions.py
new file mode 100644
index 00000000..4a529559
--- /dev/null
+++ b/keystone/tests/test_sql_migrate_extensions.py
@@ -0,0 +1,47 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+To run these tests against a live database:
+1. Modify the file `keystone/tests/backend_sql.conf` to use the connection
+ for your live database.
+2. Set up a blank, live database.
+3. Run the tests using
+ ./run_tests.sh -N test_sql_migrate_extensions
+ WARNING::
+ Your database will be wiped.
+ Do not do this against a database with valuable data as
+ all data will be lost.
+"""
+
+from keystone.contrib import example
+
+import test_sql_upgrade
+
+
+class SqlUpgradeExampleExtension(test_sql_upgrade.SqlMigrateBase):
+ def repo_package(self):
+ return example
+
+ def test_upgrade(self):
+ self.assertTableDoesNotExist('example')
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('example', ['id', 'type', 'extra'])
+
+ def test_downgrade(self):
+ self.upgrade(1, repository=self.repo_path)
+ self.assertTableColumns('example', ['id', 'type', 'extra'])
+ self.downgrade(0, repository=self.repo_path)
+ self.assertTableDoesNotExist('example')
diff --git a/keystone/tests/test_sql_upgrade.py b/keystone/tests/test_sql_upgrade.py
new file mode 100644
index 00000000..e904d6a7
--- /dev/null
+++ b/keystone/tests/test_sql_upgrade.py
@@ -0,0 +1,1378 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+To run these tests against a live database:
+1. Modify the file `keystone/tests/backend_sql.conf` to use the connection for
+ your live database
+2. Set up a blank, live database.
+3. Run the tests using
+ ./run_tests.sh -N test_sql_upgrade
+ WARNING::
+ Your database will be wiped.
+ Do not do this against a database with valuable data as
+ all data will be lost.
+"""
+import copy
+import json
+import uuid
+
+from migrate.versioning import api as versioning_api
+import sqlalchemy
+
+from keystone.tests import core as test
+
+from keystone.common import sql
+from keystone.common.sql import migration
+from keystone import config
+
+import default_fixtures
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class SqlMigrateBase(test.TestCase):
+ def initialize_sql(self):
+ self.metadata = sqlalchemy.MetaData()
+ self.metadata.bind = self.engine
+
+ _config_file_list = [test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf')]
+
+ # Override this to specify the complete list of configuration files.
+ def config_files(self):
+ return self._config_file_list
+
+ def repo_package(self):
+ return None
+
+ def setUp(self):
+ super(SqlMigrateBase, self).setUp()
+
+ self.config(self.config_files())
+ self.base = sql.Base()
+
+ # create and share a single sqlalchemy engine for testing
+ self.engine = self.base.get_engine(allow_global_engine=False)
+ self.Session = self.base.get_sessionmaker(engine=self.engine,
+ autocommit=False)
+
+ self.initialize_sql()
+ self.repo_path = migration.find_migrate_repo(self.repo_package())
+ self.schema = versioning_api.ControlledSchema.create(
+ self.engine,
+ self.repo_path, 0)
+
+ # auto-detect the highest available schema version in the migrate_repo
+ self.max_version = self.schema.repository.version().version
+
+ def tearDown(self):
+ sqlalchemy.orm.session.Session.close_all()
+ table = sqlalchemy.Table("migrate_version", self.metadata,
+ autoload=True)
+ self.downgrade(0)
+ table.drop(self.engine, checkfirst=True)
+ super(SqlMigrateBase, self).tearDown()
+
+ def select_table(self, name):
+ table = sqlalchemy.Table(name,
+ self.metadata,
+ autoload=True)
+ s = sqlalchemy.select([table])
+ return s
+
+ def assertTableExists(self, table_name):
+ try:
+ self.select_table(table_name)
+ except sqlalchemy.exc.NoSuchTableError:
+ raise AssertionError('Table "%s" does not exist' % table_name)
+
+ def assertTableDoesNotExist(self, table_name):
+ """Asserts that a given table exists cannot be selected by name."""
+ # Switch to a different metadata otherwise you might still
+ # detect renamed or dropped tables
+ try:
+ temp_metadata = sqlalchemy.MetaData()
+ temp_metadata.bind = self.engine
+ sqlalchemy.Table(table_name, temp_metadata, autoload=True)
+ except sqlalchemy.exc.NoSuchTableError:
+ pass
+ else:
+ raise AssertionError('Table "%s" already exists' % table_name)
+
+ def upgrade(self, *args, **kwargs):
+ self._migrate(*args, **kwargs)
+
+ def downgrade(self, *args, **kwargs):
+ self._migrate(*args, downgrade=True, **kwargs)
+
+ def _migrate(self, version, repository=None, downgrade=False,
+ current_schema=None):
+ repository = repository or self.repo_path
+ err = ''
+ version = versioning_api._migrate_version(self.schema,
+ version,
+ not downgrade,
+ err)
+ if not current_schema:
+ current_schema = self.schema
+ changeset = current_schema.changeset(version)
+ for ver, change in changeset:
+ self.schema.runchange(ver, change, changeset.step)
+ self.assertEqual(self.schema.version, version)
+
+ def assertTableColumns(self, table_name, expected_cols):
+ """Asserts that the table contains the expected set of columns."""
+ self.initialize_sql()
+ table = self.select_table(table_name)
+ actual_cols = [col.name for col in table.columns]
+ self.assertEqual(expected_cols, actual_cols, '%s table' % table_name)
+
+
+class SqlUpgradeTests(SqlMigrateBase):
+
+ def test_blank_db_to_start(self):
+ self.assertTableDoesNotExist('user')
+
+ def test_start_version_0(self):
+ version = migration.db_version()
+ self.assertEqual(version, 0, "DB is at version 0")
+
+ def test_two_steps_forward_one_step_back(self):
+ """You should be able to cleanly undo and re-apply all upgrades.
+
+ Upgrades are run in the following order::
+
+ 0 -> 1 -> 0 -> 1 -> 2 -> 1 -> 2 -> 3 -> 2 -> 3 ...
+ ^---------^ ^---------^ ^---------^
+
+ """
+ for x in range(1, self.max_version + 1):
+ self.upgrade(x)
+ self.downgrade(x - 1)
+ self.upgrade(x)
+
+ def test_upgrade_add_initial_tables(self):
+ self.upgrade(1)
+ self.assertTableColumns("user", ["id", "name", "extra"])
+ self.assertTableColumns("tenant", ["id", "name", "extra"])
+ self.assertTableColumns("role", ["id", "name"])
+ self.assertTableColumns("user_tenant_membership",
+ ["user_id", "tenant_id"])
+ self.assertTableColumns("metadata", ["user_id", "tenant_id", "data"])
+ self.populate_user_table()
+
+ def test_upgrade_add_policy(self):
+ self.upgrade(5)
+ self.assertTableDoesNotExist('policy')
+
+ self.upgrade(6)
+ self.assertTableExists('policy')
+ self.assertTableColumns('policy', ['id', 'type', 'blob', 'extra'])
+
+ def test_upgrade_normalize_identity(self):
+ self.upgrade(8)
+ self.populate_user_table()
+ self.populate_tenant_table()
+ self.upgrade(10)
+ self.assertTableColumns("user",
+ ["id", "name", "extra",
+ "password", "enabled"])
+ self.assertTableColumns("tenant",
+ ["id", "name", "extra", "description",
+ "enabled"])
+ self.assertTableColumns("role", ["id", "name", "extra"])
+ self.assertTableColumns("user_tenant_membership",
+ ["user_id", "tenant_id"])
+ self.assertTableColumns("metadata", ["user_id", "tenant_id", "data"])
+ session = self.Session()
+ user_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ a_user = session.query(user_table).filter("id='foo'").one()
+ self.assertTrue(a_user.enabled)
+ a_user = session.query(user_table).filter("id='badguy'").one()
+ self.assertFalse(a_user.enabled)
+ tenant_table = sqlalchemy.Table("tenant",
+ self.metadata,
+ autoload=True)
+ a_tenant = session.query(tenant_table).filter("id='baz'").one()
+ self.assertEqual(a_tenant.description, 'description')
+ session.commit()
+ session.close()
+
+ def test_normalized_enabled_states(self):
+ self.upgrade(8)
+
+ users = {
+ 'bool_enabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': True})},
+ 'bool_disabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': False})},
+ 'str_enabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': 'True'})},
+ 'str_disabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': 'False'})},
+ 'int_enabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': 1})},
+ 'int_disabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': 0})},
+ 'null_enabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({'enabled': None})},
+ 'unset_enabled_user': {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ 'extra': json.dumps({})}}
+
+ session = self.Session()
+ for user in users.values():
+ self.insert_dict(session, 'user', user)
+ session.commit()
+
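+ # Migration 10 pulls 'enabled' out of the JSON 'extra' blob into a
+ # real column; anything other than an explicit false value (False,
+ # 'False', 0) ends up enabled, including null and unset.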
+ self.upgrade(10)
+
+ user_table = sqlalchemy.Table('user', self.metadata, autoload=True)
+ q = session.query(user_table, 'enabled')
+
+ user = q.filter_by(id=users['bool_enabled_user']['id']).one()
+ self.assertTrue(user.enabled)
+
+ user = q.filter_by(id=users['bool_disabled_user']['id']).one()
+ self.assertFalse(user.enabled)
+
+ user = q.filter_by(id=users['str_enabled_user']['id']).one()
+ self.assertTrue(user.enabled)
+
+ user = q.filter_by(id=users['str_disabled_user']['id']).one()
+ self.assertFalse(user.enabled)
+
+ user = q.filter_by(id=users['int_enabled_user']['id']).one()
+ self.assertTrue(user.enabled)
+
+ user = q.filter_by(id=users['int_disabled_user']['id']).one()
+ self.assertFalse(user.enabled)
+
+ user = q.filter_by(id=users['null_enabled_user']['id']).one()
+ self.assertTrue(user.enabled)
+
+ user = q.filter_by(id=users['unset_enabled_user']['id']).one()
+ self.assertTrue(user.enabled)
+
+ def test_downgrade_10_to_8(self):
+ self.upgrade(10)
+ self.populate_user_table(with_pass_enab=True)
+ self.populate_tenant_table(with_desc_enab=True)
+ self.downgrade(8)
+ self.assertTableColumns('user',
+ ['id', 'name', 'extra'])
+ self.assertTableColumns('tenant',
+ ['id', 'name', 'extra'])
+ session = self.Session()
+ user_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ a_user = session.query(user_table).filter("id='badguy'").one()
+ self.assertEqual(a_user.name, default_fixtures.USERS[2]['name'])
+ tenant_table = sqlalchemy.Table("tenant",
+ self.metadata,
+ autoload=True)
+ a_tenant = session.query(tenant_table).filter("id='baz'").one()
+ self.assertEqual(a_tenant.name, default_fixtures.TENANTS[1]['name'])
+ session.commit()
+ session.close()
+
+ def test_upgrade_endpoints(self):
+ self.upgrade(10)
+ service_extra = {
+ 'name': uuid.uuid4().hex,
+ }
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'extra': json.dumps(service_extra),
+ }
+ endpoint_extra = {
+ 'publicurl': uuid.uuid4().hex,
+ 'internalurl': uuid.uuid4().hex,
+ 'adminurl': uuid.uuid4().hex,
+ }
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'extra': json.dumps(endpoint_extra),
+ }
+
+ session = self.Session()
+ self.insert_dict(session, 'service', service)
+ self.insert_dict(session, 'endpoint', endpoint)
+ session.commit()
+ session.close()
+
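+ # Migration 13 splits each legacy endpoint into one row per interface
+ # (public/internal/admin), keeping the original id in
+ # legacy_endpoint_id.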
+ self.upgrade(13)
+ self.assertTableColumns(
+ 'service',
+ ['id', 'type', 'extra'])
+ self.assertTableColumns(
+ 'endpoint',
+ ['id', 'legacy_endpoint_id', 'interface', 'region', 'service_id',
+ 'url', 'extra'])
+
+ endpoint_table = sqlalchemy.Table(
+ 'endpoint', self.metadata, autoload=True)
+
+ session = self.Session()
+ self.assertEqual(session.query(endpoint_table).count(), 3)
+ for interface in ['public', 'internal', 'admin']:
+ q = session.query(endpoint_table)
+ q = q.filter_by(legacy_endpoint_id=endpoint['id'])
+ q = q.filter_by(interface=interface)
+ ref = q.one()
+ self.assertNotEqual(ref.id, endpoint['id'])
+ self.assertEqual(ref.legacy_endpoint_id, endpoint['id'])
+ self.assertEqual(ref.interface, interface)
+ self.assertEqual(ref.region, endpoint['region'])
+ self.assertEqual(ref.service_id, endpoint['service_id'])
+ self.assertEqual(ref.url, endpoint_extra['%surl' % interface])
+ self.assertEqual(ref.extra, '{}')
+ session.commit()
+ session.close()
+
+ def assertTenantTables(self):
+ self.assertTableExists('tenant')
+ self.assertTableExists('user_tenant_membership')
+ self.assertTableDoesNotExist('project')
+ self.assertTableDoesNotExist('user_project_membership')
+
+ def assertProjectTables(self):
+ self.assertTableExists('project')
+ self.assertTableExists('user_project_membership')
+ self.assertTableDoesNotExist('tenant')
+ self.assertTableDoesNotExist('user_tenant_membership')
+
+ def test_upgrade_tenant_to_project(self):
+ self.upgrade(14)
+ self.assertTenantTables()
+ self.upgrade(15)
+ self.assertProjectTables()
+
+ def test_downgrade_project_to_tenant(self):
+ # TODO(henry-nash): Debug why we need to re-load the tenant
+ # or user_tenant_membership ahead of upgrading to project
+ # in order for the assertProjectTables to work on sqlite
+ # (MySQL is fine without it)
+ self.upgrade(14)
+ self.assertTenantTables()
+ self.upgrade(15)
+ self.assertProjectTables()
+ self.downgrade(14)
+ self.assertTenantTables()
+
+ def test_upgrade_add_group_tables(self):
+ self.upgrade(13)
+ self.upgrade(14)
+ self.assertTableExists('group')
+ self.assertTableExists('group_project_metadata')
+ self.assertTableExists('group_domain_metadata')
+ self.assertTableExists('user_group_membership')
+
+ def test_upgrade_14_to_16(self):
+ self.upgrade(14)
+ self.populate_user_table(with_pass_enab=True)
+ self.populate_tenant_table(with_desc_enab=True)
+ self.upgrade(16)
+
+ self.assertTableColumns("user",
+ ["id", "name", "extra",
+ "password", "enabled", "domain_id"])
+ session = self.Session()
+ user_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ a_user = session.query(user_table).filter("id='foo'").one()
+ self.assertTrue(a_user.enabled)
+ self.assertEqual(a_user.domain_id, DEFAULT_DOMAIN_ID)
+ a_user = session.query(user_table).filter("id='badguy'").one()
+ self.assertEqual(a_user.name, default_fixtures.USERS[2]['name'])
+ self.assertEqual(a_user.domain_id, DEFAULT_DOMAIN_ID)
+ project_table = sqlalchemy.Table("project",
+ self.metadata,
+ autoload=True)
+ a_project = session.query(project_table).filter("id='baz'").one()
+ self.assertEqual(a_project.description,
+ default_fixtures.TENANTS[1]['description'])
+ self.assertEqual(a_project.domain_id, DEFAULT_DOMAIN_ID)
+
+ session.commit()
+ session.close()
+
+ self.check_uniqueness_constraints()
+
+ def test_downgrade_16_to_14(self):
+ self.upgrade(16)
+ self.populate_user_table(with_pass_enab_domain=True)
+ self.populate_tenant_table(with_desc_enab_domain=True)
+ self.downgrade(14)
+ self.assertTableColumns("user",
+ ["id", "name", "extra",
+ "password", "enabled"])
+ session = self.Session()
+ user_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ a_user = session.query(user_table).filter("id='foo'").one()
+ self.assertTrue(a_user.enabled)
+ a_user = session.query(user_table).filter("id='badguy'").one()
+ self.assertEqual(a_user.name, default_fixtures.USERS[2]['name'])
+ tenant_table = sqlalchemy.Table("tenant",
+ self.metadata,
+ autoload=True)
+ a_tenant = session.query(tenant_table).filter("id='baz'").one()
+ self.assertEqual(a_tenant.description,
+ default_fixtures.TENANTS[1]['description'])
+ session.commit()
+ session.close()
+
+ def test_downgrade_remove_group_tables(self):
+ self.upgrade(14)
+ self.downgrade(13)
+ self.assertTableDoesNotExist('group')
+ self.assertTableDoesNotExist('group_project_metadata')
+ self.assertTableDoesNotExist('group_domain_metadata')
+ self.assertTableDoesNotExist('user_group_membership')
+
+ def test_downgrade_endpoints(self):
+ self.upgrade(13)
+
+ service_extra = {
+ 'name': uuid.uuid4().hex,
+ }
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'type': uuid.uuid4().hex,
+ 'extra': json.dumps(service_extra),
+ }
+
+ common_endpoint_attrs = {
+ 'legacy_endpoint_id': uuid.uuid4().hex,
+ 'region': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'extra': json.dumps({}),
+ }
+ endpoints = {
+ 'public': {
+ 'id': uuid.uuid4().hex,
+ 'interface': 'public',
+ 'url': uuid.uuid4().hex,
+ },
+ 'internal': {
+ 'id': uuid.uuid4().hex,
+ 'interface': 'internal',
+ 'url': uuid.uuid4().hex,
+ },
+ 'admin': {
+ 'id': uuid.uuid4().hex,
+ 'interface': 'admin',
+ 'url': uuid.uuid4().hex,
+ },
+ }
+
+ session = self.Session()
+ self.insert_dict(session, 'service', service)
+ for endpoint in endpoints.values():
+ endpoint.update(common_endpoint_attrs)
+ self.insert_dict(session, 'endpoint', endpoint)
+ session.commit()
+ session.close()
+
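+ # Downgrading below 13 collapses the three per-interface rows back
+ # into a single endpoint keyed by legacy_endpoint_id, with the URLs
+ # folded back into the 'extra' blob.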
+ self.downgrade(9)
+
+ self.assertTableColumns(
+ 'service',
+ ['id', 'type', 'extra'])
+ self.assertTableColumns(
+ 'endpoint',
+ ['id', 'region', 'service_id', 'extra'])
+
+ endpoint_table = sqlalchemy.Table(
+ 'endpoint', self.metadata, autoload=True)
+
+ session = self.Session()
+ self.assertEqual(session.query(endpoint_table).count(), 1)
+ q = session.query(endpoint_table)
+ q = q.filter_by(id=common_endpoint_attrs['legacy_endpoint_id'])
+ ref = q.one()
+ self.assertEqual(ref.id, common_endpoint_attrs['legacy_endpoint_id'])
+ self.assertEqual(ref.region, endpoint['region'])
+ self.assertEqual(ref.service_id, endpoint['service_id'])
+ extra = json.loads(ref.extra)
+ for interface in ['public', 'internal', 'admin']:
+ expected_url = endpoints[interface]['url']
+ self.assertEqual(extra['%surl' % interface], expected_url)
+ session.commit()
+ session.close()
+
+ def insert_dict(self, session, table_name, d):
+ """Naively inserts key-value pairs into a table, given a dictionary."""
+ this_table = sqlalchemy.Table(table_name, self.metadata, autoload=True)
+ insert = this_table.insert()
+ insert.execute(d)
+ session.commit()
+
+ def test_downgrade_to_0(self):
+ self.upgrade(self.max_version)
+
+ if self.engine.name == 'mysql':
+ self._mysql_check_all_tables_innodb()
+
+ self.downgrade(0)
+ for table_name in ["user", "token", "role", "user_tenant_membership",
+ "metadata"]:
+ self.assertTableDoesNotExist(table_name)
+
+ def test_upgrade_add_domain_tables(self):
+ self.upgrade(6)
+ self.assertTableDoesNotExist('credential')
+ self.assertTableDoesNotExist('domain')
+ self.assertTableDoesNotExist('user_domain_metadata')
+
+ self.upgrade(7)
+ self.assertTableExists('credential')
+ self.assertTableColumns('credential', ['id', 'user_id', 'project_id',
+ 'blob', 'type', 'extra'])
+ self.assertTableExists('domain')
+ self.assertTableColumns('domain', ['id', 'name', 'enabled', 'extra'])
+ self.assertTableExists('user_domain_metadata')
+ self.assertTableColumns('user_domain_metadata',
+ ['user_id', 'domain_id', 'data'])
+
+ def test_metadata_table_migration(self):
+ # Scaffolding
+ session = self.Session()
+
+ self.upgrade(16)
+ domain_table = sqlalchemy.Table('domain', self.metadata, autoload=True)
+ user_table = sqlalchemy.Table('user', self.metadata, autoload=True)
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ project_table = sqlalchemy.Table(
+ 'project', self.metadata, autoload=True)
+ metadata_table = sqlalchemy.Table(
+ 'metadata', self.metadata, autoload=True)
+
+ # Create a Domain
+ domain = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ session.execute(domain_table.insert().values(domain))
+
+ # Create a Project
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': "{}"}
+ session.execute(project_table.insert().values(project))
+
+ # Create another Project
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': "{}"}
+ session.execute(project_table.insert().values(project2))
+
+ # Create a User
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'password': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': json.dumps({})}
+ session.execute(user_table.insert().values(user))
+
+ # Create a Role
+ role = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ session.execute(role_table.insert().values(role))
+
+ # And another role
+ role2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ session.execute(role_table.insert().values(role2))
+
+ # Grant Role to User
+ role_grant = {'user_id': user['id'],
+ 'tenant_id': project['id'],
+ 'data': json.dumps({"roles": [role['id']]})}
+ session.execute(metadata_table.insert().values(role_grant))
+
+ role_grant = {'user_id': user['id'],
+ 'tenant_id': project2['id'],
+ 'data': json.dumps({"roles": [role2['id']]})}
+ session.execute(metadata_table.insert().values(role_grant))
+
+ # Create another user to test the case where member_role_id is already
+ # assigned.
+ user2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'password': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': json.dumps({})}
+ session.execute(user_table.insert().values(user2))
+
+ # Grant CONF.member_role_id to User2
+ role_grant = {'user_id': user2['id'],
+ 'tenant_id': project['id'],
+ 'data': json.dumps({"roles": [CONF.member_role_id]})}
+ session.execute(metadata_table.insert().values(role_grant))
+
+ session.commit()
+
+ self.upgrade(17)
+
+ user_project_metadata_table = sqlalchemy.Table(
+ 'user_project_metadata', self.metadata, autoload=True)
+
+ s = sqlalchemy.select([metadata_table.c.data]).where(
+ (metadata_table.c.user_id == user['id']) &
+ (metadata_table.c.tenant_id == project['id']))
+ r = session.execute(s)
+ test_project1 = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(test_project1['roles']), 1)
+ self.assertIn(role['id'], test_project1['roles'])
+
+ # Test user in project2 has role2
+ s = sqlalchemy.select([metadata_table.c.data]).where(
+ (metadata_table.c.user_id == user['id']) &
+ (metadata_table.c.tenant_id == project2['id']))
+ r = session.execute(s)
+ test_project2 = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(test_project2['roles']), 1)
+ self.assertIn(role2['id'], test_project2['roles'])
+
+ # Test for user in project has role in user_project_metadata
+ # Migration 17 does not properly migrate this data, so this should
+ # be None.
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project['id']))
+ r = session.execute(s)
+ self.assertIsNone(r.fetchone())
+
+ # Create a conflicting user-project in user_project_metadata with
+ # a different role
+ data = json.dumps({"roles": [role2['id']]})
+ role_grant = {'user_id': user['id'],
+ 'project_id': project['id'],
+ 'data': data}
+ cmd = user_project_metadata_table.insert().values(role_grant)
+ self.engine.execute(cmd)
+
+ # Create another conflicting user-project for User2
+ data = json.dumps({"roles": [role2['id']]})
+ role_grant = {'user_id': user2['id'],
+ 'project_id': project['id'],
+ 'data': data}
+ cmd = user_project_metadata_table.insert().values(role_grant)
+ self.engine.execute(cmd)
+ # End Scaffolding
+
+ session.commit()
+
+ # Migrate to 20
+ self.upgrade(20)
+
+ # The user-project pairs should have all roles from the previous
+ # metadata table in addition to any roles currently in
+ # user_project_metadata
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project['id']))
+ r = session.execute(s)
+ role_ids = json.loads(r.fetchone()['data'])['roles']
+ self.assertEqual(len(role_ids), 3)
+ self.assertIn(CONF.member_role_id, role_ids)
+ self.assertIn(role['id'], role_ids)
+ self.assertIn(role2['id'], role_ids)
+
+ # pairs that only existed in old metadata table should be in
+ # user_project_metadata
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project2['id']))
+ r = session.execute(s)
+ role_ids = json.loads(r.fetchone()['data'])['roles']
+ self.assertEqual(len(role_ids), 2)
+ self.assertIn(CONF.member_role_id, role_ids)
+ self.assertIn(role2['id'], role_ids)
+
+ self.assertTableDoesNotExist('metadata')
+
+ def test_upgrade_default_roles(self):
+ def count_member_roles():
+ session = self.Session()
+ query_string = ("select count(*) as c from role "
+ "where name='%s'" % config.CONF.member_role_name)
+ role_count = session.execute(query_string).fetchone()['c']
+ session.close()
+ return role_count
+
+ self.upgrade(16)
+ self.assertEquals(0, count_member_roles())
+ self.upgrade(17)
+ self.assertEquals(1, count_member_roles())
+ self.downgrade(16)
+ self.assertEquals(0, count_member_roles())
+
+ def check_uniqueness_constraints(self):
+ # Check uniqueness constraints for User & Project tables are
+ # correct following schema modification. The Group table's
+ # schema is never modified, so we don't bother to check that.
+ domain_table = sqlalchemy.Table('domain',
+ self.metadata,
+ autoload=True)
+ domain1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ domain2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ cmd = domain_table.insert().values(domain1)
+ self.engine.execute(cmd)
+ cmd = domain_table.insert().values(domain2)
+ self.engine.execute(cmd)
+
+ # First, the User table.
+ this_table = sqlalchemy.Table('user',
+ self.metadata,
+ autoload=True)
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'password': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': json.dumps({})}
+ cmd = this_table.insert().values(user)
+ self.engine.execute(cmd)
+ # now insert a user with the same name into a different
+ # domain - which should work.
+ user['id'] = uuid.uuid4().hex
+ user['domain_id'] = domain2['id']
+ cmd = this_table.insert().values(user)
+ self.engine.execute(cmd)
+ # TODO(henry-nash): For now, as part of clean-up we delete one of these
+ # users. Although not part of this test, unless we do so the
+ # downgrade(16->15) that is part of teardown will fail due to having
+ # two users with clashing names as we try to revert to a single global
+ # name space. This limitation is raised as Bug #1125046 and the delete
+ # could be removed depending on how that bug is resolved.
+ cmd = this_table.delete(id=user['id'])
+ self.engine.execute(cmd)
+
+ # Now, the Project table.
+ this_table = sqlalchemy.Table('project',
+ self.metadata,
+ autoload=True)
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain1['id'],
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': json.dumps({})}
+ cmd = this_table.insert().values(project)
+ self.engine.execute(cmd)
+ # now insert a project with the same name into a different
+ # domain - which should work.
+ project['id'] = uuid.uuid4().hex
+ project['domain_id'] = domain2['id']
+ cmd = this_table.insert().values(project)
+ self.engine.execute(cmd)
+ # TODO(henry-nash): For now, we delete one of the projects for the same
+ # reason as we delete one of the users (Bug #1125046). This delete
+ # could be removed depending on that bug resolution.
+ cmd = this_table.delete(id=project['id'])
+ self.engine.execute(cmd)
+
+ def test_upgrade_trusts(self):
+ self.assertEqual(self.schema.version, 0, "DB is at version 0")
+ self.upgrade(20)
+ self.assertTableColumns("token",
+ ["id", "expires", "extra", "valid"])
+ self.upgrade(21)
+ self.assertTableColumns("trust",
+ ["id", "trustor_user_id",
+ "trustee_user_id",
+ "project_id", "impersonation",
+ "deleted_at",
+ "expires_at", "extra"])
+ self.assertTableColumns("trust_role",
+ ["trust_id", "role_id"])
+ self.assertTableColumns("token",
+ ["id", "expires", "extra", "valid",
+ "trust_id", "user_id"])
+
+ def test_fixup_role(self):
+ session = self.Session()
+ self.assertEqual(self.schema.version, 0, "DB is at version 0")
+ self.upgrade(1)
+ self.insert_dict(session, "role", {"id": "test", "name": "test"})
+ self.upgrade(18)
+ self.insert_dict(session, "role", {"id": "test2",
+ "name": "test2",
+ "extra": None})
+ r = session.execute('select count(*) as c from role '
+ 'where extra is null')
+ self.assertEqual(r.fetchone()['c'], 2)
+ session.commit()
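+        # Migration 19 is expected to backfill the rows whose 'extra' column
+        # is still null, which the count below verifies.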
+ self.upgrade(19)
+ r = session.execute('select count(*) as c from role '
+ 'where extra is null')
+ self.assertEqual(r.fetchone()['c'], 0)
+
+ def test_legacy_endpoint_id(self):
+ session = self.Session()
+ self.upgrade(21)
+
+ service = {
+ 'id': uuid.uuid4().hex,
+ 'name': 'keystone',
+ 'type': 'identity'}
+ self.insert_dict(session, 'service', service)
+
+ legacy_endpoint_id = uuid.uuid4().hex
+ endpoint = {
+ 'id': uuid.uuid4().hex,
+ 'service_id': service['id'],
+ 'interface': uuid.uuid4().hex[:8],
+ 'url': uuid.uuid4().hex,
+ 'extra': json.dumps({
+ 'legacy_endpoint_id': legacy_endpoint_id})}
+ self.insert_dict(session, 'endpoint', endpoint)
+
+ session.commit()
+ self.upgrade(22)
+
+ endpoint_table = sqlalchemy.Table(
+ 'endpoint', self.metadata, autoload=True)
+
+ self.assertEqual(session.query(endpoint_table).count(), 1)
+ ref = session.query(endpoint_table).one()
+ self.assertEqual(ref.id, endpoint['id'], ref)
+ self.assertEqual(ref.service_id, endpoint['service_id'])
+ self.assertEqual(ref.interface, endpoint['interface'])
+ self.assertEqual(ref.url, endpoint['url'])
+ self.assertEqual(ref.legacy_endpoint_id, legacy_endpoint_id)
+ self.assertEqual(ref.extra, '{}')
+
+ def test_group_project_FK_fixup(self):
+        # To create the test data we must start from before we broke the FK
+        # in the group_project_metadata table in migration 015.
+ self.upgrade(14)
+ session = self.Session()
+
+ domain_table = sqlalchemy.Table('domain', self.metadata, autoload=True)
+ group_table = sqlalchemy.Table('group', self.metadata, autoload=True)
+ tenant_table = sqlalchemy.Table('tenant', self.metadata, autoload=True)
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ group_project_metadata_table = sqlalchemy.Table(
+ 'group_project_metadata', self.metadata, autoload=True)
+
+ # Create a Domain
+ domain = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ session.execute(domain_table.insert().values(domain))
+
+ # Create two Tenants
+ tenant = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'extra': "{}"}
+ session.execute(tenant_table.insert().values(tenant))
+
+ tenant1 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'extra': "{}"}
+ session.execute(tenant_table.insert().values(tenant1))
+
+ # Create a Group
+ group = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': json.dumps({})}
+ session.execute(group_table.insert().values(group))
+
+ # Create roles
+ role_list = []
+ for _ in range(2):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ session.execute(role_table.insert().values(role))
+ role_list.append(role)
+
+ # Grant Role to User on Project
+ role_grant = {'group_id': group['id'],
+ 'project_id': tenant['id'],
+ 'data': json.dumps({'roles': [role_list[0]['id']]})}
+ session.execute(
+ group_project_metadata_table.insert().values(role_grant))
+
+ role_grant = {'group_id': group['id'],
+ 'project_id': tenant1['id'],
+ 'data': json.dumps({'roles': [role_list[1]['id']]})}
+ session.execute(
+ group_project_metadata_table.insert().values(role_grant))
+
+ session.commit()
+
+ # Now upgrade and fix up the FKs
+ self.upgrade(28)
+ self.assertTableExists('group_project_metadata')
+ self.assertTableExists('project')
+ self.assertTableDoesNotExist('tenant')
+
+ s = sqlalchemy.select([group_project_metadata_table.c.data]).where(
+ (group_project_metadata_table.c.group_id == group['id']) &
+ (group_project_metadata_table.c.project_id == tenant['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[0]['id'], data['roles'])
+
+ s = sqlalchemy.select([group_project_metadata_table.c.data]).where(
+ (group_project_metadata_table.c.group_id == group['id']) &
+ (group_project_metadata_table.c.project_id == tenant1['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[1]['id'], data['roles'])
+
+ self.downgrade(27)
+ self.assertTableExists('group_project_metadata')
+ self.assertTableExists('project')
+ self.assertTableDoesNotExist('tenant')
+
+ def test_assignment_metadata_migration(self):
+ self.upgrade(28)
+ # Scaffolding
+ session = self.Session()
+
+ domain_table = sqlalchemy.Table('domain', self.metadata, autoload=True)
+ user_table = sqlalchemy.Table('user', self.metadata, autoload=True)
+ group_table = sqlalchemy.Table('group', self.metadata, autoload=True)
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ project_table = sqlalchemy.Table(
+ 'project', self.metadata, autoload=True)
+ user_project_metadata_table = sqlalchemy.Table(
+ 'user_project_metadata', self.metadata, autoload=True)
+ user_domain_metadata_table = sqlalchemy.Table(
+ 'user_domain_metadata', self.metadata, autoload=True)
+ group_project_metadata_table = sqlalchemy.Table(
+ 'group_project_metadata', self.metadata, autoload=True)
+ group_domain_metadata_table = sqlalchemy.Table(
+ 'group_domain_metadata', self.metadata, autoload=True)
+
+ # Create a Domain
+ domain = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ session.execute(domain_table.insert().values(domain))
+
+        # Create another Domain
+ domain2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True}
+ session.execute(domain_table.insert().values(domain2))
+
+ # Create a Project
+ project = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': "{}"}
+ session.execute(project_table.insert().values(project))
+
+ # Create another Project
+ project2 = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': "{}"}
+ session.execute(project_table.insert().values(project2))
+
+ # Create a User
+ user = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'password': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': json.dumps({})}
+ session.execute(user_table.insert().values(user))
+
+ # Create a Group
+ group = {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': domain['id'],
+ 'extra': json.dumps({})}
+ session.execute(group_table.insert().values(group))
+
+ # Create roles
+ role_list = []
+ for _ in range(7):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ session.execute(role_table.insert().values(role))
+ role_list.append(role)
+
+ # Grant Role to User on Project
+ role_grant = {'user_id': user['id'],
+ 'project_id': project['id'],
+ 'data': json.dumps({'roles': [role_list[0]['id']]})}
+ session.execute(
+ user_project_metadata_table.insert().values(role_grant))
+
+ role_grant = {'user_id': user['id'],
+ 'project_id': project2['id'],
+ 'data': json.dumps({'roles': [role_list[1]['id']]})}
+ session.execute(
+ user_project_metadata_table.insert().values(role_grant))
+
+ # Grant Role to Group on different Project
+ role_grant = {'group_id': group['id'],
+ 'project_id': project2['id'],
+ 'data': json.dumps({'roles': [role_list[2]['id']]})}
+ session.execute(
+ group_project_metadata_table.insert().values(role_grant))
+
+ # Grant Role to User on Domain
+ role_grant = {'user_id': user['id'],
+ 'domain_id': domain['id'],
+ 'data': json.dumps({'roles': [role_list[3]['id']]})}
+ session.execute(user_domain_metadata_table.insert().values(role_grant))
+
+ # Grant Role to Group on Domain
+ role_grant = {'group_id': group['id'],
+ 'domain_id': domain['id'],
+ 'data': json.dumps(
+ {'roles': [role_list[4]['id']],
+ 'other': 'somedata'})}
+ session.execute(
+ group_domain_metadata_table.insert().values(role_grant))
+
+ session.commit()
+
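+        # Migration 29 is expected to rewrite each grant's role list from
+        # plain ids (['<id>']) to dicts ([{'id': '<id>'}]); the assertions
+        # below check exactly that.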
+ self.upgrade(29)
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn({'id': role_list[0]['id']}, data['roles'])
+
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project2['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn({'id': role_list[1]['id']}, data['roles'])
+
+ s = sqlalchemy.select([group_project_metadata_table.c.data]).where(
+ (group_project_metadata_table.c.group_id == group['id']) &
+ (group_project_metadata_table.c.project_id == project2['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn({'id': role_list[2]['id']}, data['roles'])
+
+ s = sqlalchemy.select([user_domain_metadata_table.c.data]).where(
+ (user_domain_metadata_table.c.user_id == user['id']) &
+ (user_domain_metadata_table.c.domain_id == domain['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn({'id': role_list[3]['id']}, data['roles'])
+
+ s = sqlalchemy.select([group_domain_metadata_table.c.data]).where(
+ (group_domain_metadata_table.c.group_id == group['id']) &
+ (group_domain_metadata_table.c.domain_id == domain['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn({'id': role_list[4]['id']}, data['roles'])
+ self.assertIn('other', data)
+
+ # Now add an entry that has one regular and one inherited role
+ role_grant = {'user_id': user['id'],
+ 'domain_id': domain2['id'],
+ 'data': json.dumps(
+ {'roles': [{'id': role_list[5]['id']},
+ {'id': role_list[6]['id'],
+ 'inherited_to': 'projects'}]})}
+ session.execute(user_domain_metadata_table.insert().values(role_grant))
+
+ session.commit()
+ self.downgrade(28)
+
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[0]['id'], data['roles'])
+
+ s = sqlalchemy.select([user_project_metadata_table.c.data]).where(
+ (user_project_metadata_table.c.user_id == user['id']) &
+ (user_project_metadata_table.c.project_id == project2['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[1]['id'], data['roles'])
+
+ s = sqlalchemy.select([group_project_metadata_table.c.data]).where(
+ (group_project_metadata_table.c.group_id == group['id']) &
+ (group_project_metadata_table.c.project_id == project2['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[2]['id'], data['roles'])
+
+ s = sqlalchemy.select([user_domain_metadata_table.c.data]).where(
+ (user_domain_metadata_table.c.user_id == user['id']) &
+ (user_domain_metadata_table.c.domain_id == domain['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[3]['id'], data['roles'])
+
+ s = sqlalchemy.select([group_domain_metadata_table.c.data]).where(
+ (group_domain_metadata_table.c.group_id == group['id']) &
+ (group_domain_metadata_table.c.domain_id == domain['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[4]['id'], data['roles'])
+ self.assertIn('other', data)
+
+        # For user-domain2, where we had one regular and one inherited role,
+        # only the direct role should remain; the inherited role should
+        # have been deleted during the downgrade.
+ s = sqlalchemy.select([user_domain_metadata_table.c.data]).where(
+ (user_domain_metadata_table.c.user_id == user['id']) &
+ (user_domain_metadata_table.c.domain_id == domain2['id']))
+ r = session.execute(s)
+ data = json.loads(r.fetchone()['data'])
+ self.assertEqual(len(data['roles']), 1)
+ self.assertIn(role_list[5]['id'], data['roles'])
+
+ def test_drop_credential_constraint(self):
+ ec2_credential = {
+ 'id': '100',
+ 'user_id': 'foo',
+ 'project_id': 'bar',
+ 'type': 'ec2',
+ 'blob': json.dumps({
+ "access": "12345",
+ "secret": "12345"
+ })
+ }
+ user = {
+ 'id': 'foo',
+ 'name': 'FOO',
+ 'password': 'foo2',
+ 'enabled': True,
+ 'email': 'foo@bar.com',
+ 'extra': json.dumps({'enabled': True})
+ }
+ tenant = {
+ 'id': 'bar',
+ 'name': 'BAR',
+ 'description': 'description',
+ 'enabled': True,
+ 'extra': json.dumps({'enabled': True})
+ }
+ session = self.Session()
+ self.upgrade(7)
+ self.insert_dict(session, 'user', user)
+ self.insert_dict(session, 'tenant', tenant)
+ self.insert_dict(session, 'credential', ec2_credential)
+ session.commit()
+ self.upgrade(30)
+ cred_table = sqlalchemy.Table('credential',
+ self.metadata,
+ autoload=True)
+ cred = session.query(cred_table).filter("id='100'").one()
+ self.assertEqual(cred.user_id,
+ ec2_credential['user_id'])
+
+ def test_drop_credential_indexes(self):
+ self.upgrade(31)
+ table = sqlalchemy.Table('credential', self.metadata, autoload=True)
+ self.assertEqual(len(table.indexes), 0)
+
+ def test_downgrade_30(self):
+ self.upgrade(31)
+ self.downgrade(30)
+ table = sqlalchemy.Table('credential', self.metadata, autoload=True)
+ index_data = [(idx.name, idx.columns.keys())
+ for idx in table.indexes]
+ if self.engine.name == 'mysql':
+ self.assertIn(('user_id', ['user_id']), index_data)
+ self.assertIn(('credential_project_id_fkey', ['project_id']),
+ index_data)
+ else:
+ self.assertEqual(len(index_data), 0)
+
+ def populate_user_table(self, with_pass_enab=False,
+ with_pass_enab_domain=False):
+ # Populate the appropriate fields in the user
+ # table, depending on the parameters:
+ #
+ # Default: id, name, extra
+ # pass_enab: Add password, enabled as well
+ # pass_enab_domain: Add password, enabled and domain as well
+ #
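+        # For example (illustrative values only), in the default mode a
+        # fixture like {'id': 'u1', 'name': 'foo', 'password': 'pw'} ends up
+        # stored as id='u1', name='foo', extra='{"password": "pw"}', because
+        # everything except id and name is folded into 'extra'.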
+ this_table = sqlalchemy.Table("user",
+ self.metadata,
+ autoload=True)
+ for user in default_fixtures.USERS:
+ extra = copy.deepcopy(user)
+ extra.pop('id')
+ extra.pop('name')
+
+ if with_pass_enab:
+ password = extra.pop('password', None)
+ enabled = extra.pop('enabled', True)
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'password': password,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ if with_pass_enab_domain:
+ password = extra.pop('password', None)
+ enabled = extra.pop('enabled', True)
+ extra.pop('domain_id')
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'domain_id': user['domain_id'],
+ 'password': password,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ ins = this_table.insert().values(
+ {'id': user['id'],
+ 'name': user['name'],
+ 'extra': json.dumps(extra)})
+ self.engine.execute(ins)
+
+ def populate_tenant_table(self, with_desc_enab=False,
+ with_desc_enab_domain=False):
+ # Populate the appropriate fields in the tenant or
+ # project table, depending on the parameters
+ #
+ # Default: id, name, extra
+ # desc_enab: Add description, enabled as well
+ # desc_enab_domain: Add description, enabled and domain as well,
+ # plus use project instead of tenant
+ #
+ if with_desc_enab_domain:
+ # By this time tenants are now projects
+ this_table = sqlalchemy.Table("project",
+ self.metadata,
+ autoload=True)
+ else:
+ this_table = sqlalchemy.Table("tenant",
+ self.metadata,
+ autoload=True)
+
+ for tenant in default_fixtures.TENANTS:
+ extra = copy.deepcopy(tenant)
+ extra.pop('id')
+ extra.pop('name')
+
+ if with_desc_enab:
+ desc = extra.pop('description', None)
+ enabled = extra.pop('enabled', True)
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'description': desc,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ if with_desc_enab_domain:
+ desc = extra.pop('description', None)
+ enabled = extra.pop('enabled', True)
+ extra.pop('domain_id')
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'domain_id': tenant['domain_id'],
+ 'description': desc,
+ 'enabled': bool(enabled),
+ 'extra': json.dumps(extra)})
+ else:
+ ins = this_table.insert().values(
+ {'id': tenant['id'],
+ 'name': tenant['name'],
+ 'extra': json.dumps(extra)})
+ self.engine.execute(ins)
+
+ def _mysql_check_all_tables_innodb(self):
+ database = self.engine.url.database
+
+ connection = self.engine.connect()
+ # sanity check
+ total = connection.execute("SELECT count(*) "
+ "from information_schema.TABLES "
+ "where TABLE_SCHEMA='%(database)s'" %
+ locals())
+ self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
+
+ noninnodb = connection.execute("SELECT table_name "
+ "from information_schema.TABLES "
+ "where TABLE_SCHEMA='%(database)s' "
+ "and ENGINE!='InnoDB' "
+ "and TABLE_NAME!='migrate_version'" %
+ locals())
+ names = [x[0] for x in noninnodb]
+ self.assertEqual(names, [],
+ "Non-InnoDB tables exist")
+
+ connection.close()
diff --git a/keystone/tests/test_ssl.py b/keystone/tests/test_ssl.py
new file mode 100644
index 00000000..cb6b5fdc
--- /dev/null
+++ b/keystone/tests/test_ssl.py
@@ -0,0 +1,154 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import ssl
+
+from keystone.tests import core as test
+
+from keystone.common import environment
+from keystone import config
+
+CONF = config.CONF
+
+CERTDIR = test.rootdir("examples/pki/certs")
+KEYDIR = test.rootdir("examples/pki/private")
+CERT = os.path.join(CERTDIR, 'ssl_cert.pem')
+KEY = os.path.join(KEYDIR, 'ssl_key.pem')
+CA = os.path.join(CERTDIR, 'cacert.pem')
+CLIENT = os.path.join(CERTDIR, 'middleware.pem')
+
+
+class SSLTestCase(test.TestCase):
+ def setUp(self):
+ super(SSLTestCase, self).setUp()
+ self.load_backends()
+
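+    # The 300 (Multiple Choices) status asserted below is what keystone
+    # returns for GET / (its API version listing), so a 300 response marks a
+    # successful HTTPS round trip.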
+ def test_1way_ssl_ok(self):
+ """Make sure both public and admin API work with 1-way SSL."""
+ self.public_server = self.serveapp('keystone', name='main',
+ cert=CERT, key=KEY, ca=CA)
+ self.admin_server = self.serveapp('keystone', name='admin',
+ cert=CERT, key=KEY, ca=CA)
+ # Verify Admin
+ conn = environment.httplib.HTTPSConnection('127.0.0.1',
+ CONF.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+ # Verify Public
+ conn = environment.httplib.HTTPSConnection('127.0.0.1',
+ CONF.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+
+ def test_2way_ssl_ok(self):
+ """Make sure both public and admin API work with 2-way SSL.
+
+ Requires client certificate.
+ """
+ self.public_server = self.serveapp(
+ 'keystone', name='main', cert=CERT,
+ key=KEY, ca=CA, cert_required=True)
+ self.admin_server = self.serveapp(
+ 'keystone', name='admin', cert=CERT,
+ key=KEY, ca=CA, cert_required=True)
+ # Verify Admin
+ conn = environment.httplib.HTTPSConnection(
+ '127.0.0.1', CONF.admin_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+ # Verify Public
+ conn = environment.httplib.HTTPSConnection(
+ '127.0.0.1', CONF.public_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+
+ def test_1way_ssl_with_ipv6_ok(self):
+ """Make sure both public and admin API work with 1-way ipv6 & SSL."""
+ self.skip_if_no_ipv6()
+ self.public_server = self.serveapp('keystone', name='main',
+ cert=CERT, key=KEY, ca=CA,
+ host="::1", port=0)
+ self.admin_server = self.serveapp('keystone', name='admin',
+ cert=CERT, key=KEY, ca=CA,
+ host="::1", port=0)
+ # Verify Admin
+ conn = environment.httplib.HTTPSConnection('::1', CONF.admin_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+ # Verify Public
+ conn = environment.httplib.HTTPSConnection('::1', CONF.public_port)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+
+ def test_2way_ssl_with_ipv6_ok(self):
+ """Make sure both public and admin API work with 2-way ipv6 & SSL.
+
+ Requires client certificate.
+ """
+ self.skip_if_no_ipv6()
+ self.public_server = self.serveapp(
+ 'keystone', name='main', cert=CERT,
+ key=KEY, ca=CA, cert_required=True,
+ host="::1", port=0)
+ self.admin_server = self.serveapp(
+ 'keystone', name='admin', cert=CERT,
+ key=KEY, ca=CA, cert_required=True,
+ host="::1", port=0)
+ # Verify Admin
+ conn = environment.httplib.HTTPSConnection(
+ '::1', CONF.admin_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+ # Verify Public
+ conn = environment.httplib.HTTPSConnection(
+ '::1', CONF.public_port, CLIENT, CLIENT)
+ conn.request('GET', '/')
+ resp = conn.getresponse()
+ self.assertEqual(resp.status, 300)
+
+ def test_2way_ssl_fail(self):
+ """Expect to fail when client does not present proper certificate."""
+ self.public_server = self.serveapp(
+ 'keystone', name='main', cert=CERT,
+ key=KEY, ca=CA, cert_required=True)
+ self.admin_server = self.serveapp(
+ 'keystone', name='admin', cert=CERT,
+ key=KEY, ca=CA, cert_required=True)
+ # Verify Admin
+ conn = environment.httplib.HTTPSConnection('127.0.0.1',
+ CONF.admin_port)
+ try:
+ conn.request('GET', '/')
+            self.fail('Admin API should have failed the SSL handshake!')
+ except ssl.SSLError:
+ pass
+ # Verify Public
+ conn = environment.httplib.HTTPSConnection('127.0.0.1',
+ CONF.public_port)
+ try:
+ conn.request('GET', '/')
+            self.fail('Public API should have failed the SSL handshake!')
+ except ssl.SSLError:
+ pass
diff --git a/keystone/tests/test_token_bind.py b/keystone/tests/test_token_bind.py
new file mode 100644
index 00000000..ae398ea1
--- /dev/null
+++ b/keystone/tests/test_token_bind.py
@@ -0,0 +1,182 @@
+# Copyright 2013 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import wsgi
+from keystone import config
+from keystone import exception
+from keystone.tests import core as test
+
+CONF = config.CONF
+
+KERBEROS_BIND = 'USER@REALM'
+
+# The only thing the function checks for is the presence of 'bind'.
+TOKEN_BIND_KERB = {'bind': {'kerberos': KERBEROS_BIND}}
+TOKEN_BIND_UNKNOWN = {'bind': {'FOO': 'BAR'}}
+TOKEN_BIND_NONE = {}
+
+ANY = 'any'
+ALL_TOKENS = [TOKEN_BIND_KERB, TOKEN_BIND_UNKNOWN, TOKEN_BIND_NONE]
+
+
+class BindTest(test.TestCase):
+ """Test binding tokens to a Principal.
+
+    Even though everything in this file references Kerberos, the same
+    concepts will apply to all future binding mechanisms.
+ """
+
+ def assert_kerberos_bind(self, tokens, bind_level,
+ use_kerberos=True, success=True):
+ if not isinstance(tokens, dict):
+ for token in tokens:
+ self.assert_kerberos_bind(token, bind_level,
+ use_kerberos=use_kerberos,
+ success=success)
+ elif use_kerberos == ANY:
+ for val in (True, False):
+ self.assert_kerberos_bind(tokens, bind_level,
+ use_kerberos=val, success=success)
+ else:
+ context = {}
+ CONF.token.enforce_token_bind = bind_level
+
+ if use_kerberos:
+ context['REMOTE_USER'] = KERBEROS_BIND
+ context['AUTH_TYPE'] = 'Negotiate'
+
+ if not success:
+ self.assertRaises(exception.Unauthorized,
+ wsgi.validate_token_bind,
+ context, tokens)
+ else:
+ wsgi.validate_token_bind(context, tokens)
+
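+    # A rough summary of the enforce_token_bind levels exercised below:
+    # 'disabled' ignores bind information entirely, 'permissive' enforces
+    # only bind types it understands, 'strict' additionally rejects unknown
+    # bind types, 'required' insists on a known bind being present, and a
+    # named level such as 'kerberos' insists on that specific mechanism.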
+ # DISABLED
+
+ def test_bind_disabled_with_kerb_user(self):
+ self.assert_kerberos_bind(ALL_TOKENS,
+ bind_level='disabled',
+ use_kerberos=ANY,
+ success=True)
+
+ # PERMISSIVE
+
+ def test_bind_permissive_with_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='permissive',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_permissive_with_regular_token(self):
+ self.assert_kerberos_bind(TOKEN_BIND_NONE,
+ bind_level='permissive',
+ use_kerberos=ANY,
+ success=True)
+
+ def test_bind_permissive_without_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='permissive',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_permissive_with_unknown_bind(self):
+ self.assert_kerberos_bind(TOKEN_BIND_UNKNOWN,
+ bind_level='permissive',
+ use_kerberos=ANY,
+ success=True)
+
+ # STRICT
+
+ def test_bind_strict_with_regular_token(self):
+ self.assert_kerberos_bind(TOKEN_BIND_NONE,
+ bind_level='strict',
+ use_kerberos=ANY,
+ success=True)
+
+ def test_bind_strict_with_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='strict',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_strict_without_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='strict',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_strict_with_unknown_bind(self):
+ self.assert_kerberos_bind(TOKEN_BIND_UNKNOWN,
+ bind_level='strict',
+ use_kerberos=ANY,
+ success=False)
+
+ # REQUIRED
+
+ def test_bind_required_with_regular_token(self):
+ self.assert_kerberos_bind(TOKEN_BIND_NONE,
+ bind_level='required',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_required_with_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='required',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_required_without_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='required',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_required_with_unknown_bind(self):
+ self.assert_kerberos_bind(TOKEN_BIND_UNKNOWN,
+ bind_level='required',
+ use_kerberos=ANY,
+ success=False)
+
+ # NAMED
+
+ def test_bind_named_with_regular_token(self):
+ self.assert_kerberos_bind(TOKEN_BIND_NONE,
+ bind_level='kerberos',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_named_with_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='kerberos',
+ use_kerberos=True,
+ success=True)
+
+ def test_bind_named_without_kerb_user(self):
+ self.assert_kerberos_bind(TOKEN_BIND_KERB,
+ bind_level='kerberos',
+ use_kerberos=False,
+ success=False)
+
+ def test_bind_named_with_unknown_bind(self):
+ self.assert_kerberos_bind(TOKEN_BIND_UNKNOWN,
+ bind_level='kerberos',
+ use_kerberos=ANY,
+ success=False)
+
+ def test_bind_named_with_unknown_scheme(self):
+ self.assert_kerberos_bind(ALL_TOKENS,
+ bind_level='unknown',
+ use_kerberos=ANY,
+ success=False)
diff --git a/keystone/tests/test_token_provider.py b/keystone/tests/test_token_provider.py
new file mode 100644
index 00000000..08fab35d
--- /dev/null
+++ b/keystone/tests/test_token_provider.py
@@ -0,0 +1,439 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone import exception
+from keystone.tests import core as test
+from keystone import token
+
+
+SAMPLE_V2_TOKEN = {
+ "access": {
+ "trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456"
+ },
+ "serviceCatalog": [
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8774/v1.1/01257",
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "internalURL": "http://localhost:8774/v1.1/01257",
+ "publicURL": "http://localhost:8774/v1.1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "nova",
+ "type": "compute"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:9292",
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "internalURL": "http://localhost:9292",
+ "publicURL": "http://localhost:9292",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "glance",
+ "type": "image"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8776/v1/01257",
+ "id": "077d82df25304abeac2294004441db5a",
+ "internalURL": "http://localhost:8776/v1/01257",
+ "publicURL": "http://localhost:8776/v1/01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "volume",
+ "type": "volume"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8773/services/Admin",
+ "id": "b06997fd08414903ad458836efaa9067",
+ "internalURL": "http://localhost:8773/services/Cloud",
+ "publicURL": "http://localhost:8773/services/Cloud",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "ec2",
+ "type": "ec2"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:8888/v1",
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "internalURL": "http://localhost:8888/v1/AUTH_01257",
+ "publicURL": "http://localhost:8888/v1/AUTH_01257",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "swift",
+ "type": "object-store"
+ },
+ {
+ "endpoints": [
+ {
+ "adminURL": "http://localhost:35357/v2.0",
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "internalURL": "http://localhost:5000/v2.0",
+ "publicURL": "http://localhost:5000/v2.0",
+ "region": "RegionOne"
+ }
+ ],
+ "endpoints_links": [],
+ "name": "keystone",
+ "type": "identity"
+ }
+ ],
+ "token": {
+ "expires": "2013-05-22T00:02:43.941430Z",
+ "id": "ce4fc2d36eea4cc9a36e666ac2f1029a",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "tenant": {
+ "enabled": True,
+ "id": "01257",
+ "name": "service"
+ }
+ },
+ "user": {
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova",
+ "roles": [
+ {
+ "name": "_member_"
+ },
+ {
+ "name": "admin"
+ }
+ ],
+ "roles_links": [],
+ "username": "nova"
+ }
+ }
+}
+
+SAMPLE_V3_TOKEN = {
+ "token": {
+ "catalog": [
+ {
+ "endpoints": [
+ {
+ "id": "02850c5d1d094887bdc46e81e1e15dc7",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:35357/v2.0"
+ },
+ {
+ "id": "446e244b75034a9ab4b0811e82d0b7c8",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ },
+ {
+ "id": "47fa3d9f499240abb5dfcf2668f168cd",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:5000/v2.0"
+ }
+ ],
+ "id": "26d7541715a44a4d9adad96f9872b633",
+ "type": "identity",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "aaa17a539e364297a7845d67c7c7cc4b",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "4fa9620e42394cb1974736dce0856c71",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ },
+ {
+ "id": "9673687f9bc441d88dec37942bfd603b",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:9292"
+ }
+ ],
+ "id": "d27a41843f4e4b0e8cf6dac4082deb0d",
+ "type": "image",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8888/v1"
+ },
+ {
+ "id": "43bef154594d4ccb8e49014d20624e1d",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8888/v1/AUTH_01257"
+ },
+ {
+ "id": "e63b5f5d7aa3493690189d0ff843b9b3",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8888/v1/AUTH_01257"
+ }
+ ],
+ "id": "a669e152f1104810a4b6701aade721bb",
+ "type": "object-store",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "51934fe63a5b4ac0a32664f64eb462c3",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "869b535eea0d42e483ae9da0d868ebad",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ },
+ {
+ "id": "93583824c18f4263a2245ca432b132a6",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8774/v1.1/01257"
+ }
+ ],
+ "id": "7f32cc2af6c9476e82d75f80e8b3bbb8",
+ "type": "compute",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "b06997fd08414903ad458836efaa9067",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Admin"
+ },
+ {
+ "id": "411f7de7c9a8484c9b46c254fb2676e2",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ },
+ {
+ "id": "f21c93f3da014785854b4126d0109c49",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8773/services/Cloud"
+ }
+ ],
+ "id": "b08c9c7d4ef543eba5eeb766f72e5aa1",
+ "type": "ec2",
+ },
+ {
+ "endpoints": [
+ {
+ "id": "077d82df25304abeac2294004441db5a",
+ "interface": "admin",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "875bf282362c40219665278b4fd11467",
+ "interface": "internal",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ },
+ {
+ "id": "cd229aa6df0640dc858a8026eb7e640c",
+ "interface": "public",
+ "region": "RegionOne",
+ "url": "http://localhost:8776/v1/01257"
+ }
+ ],
+ "id": "5db21b82617f4a95816064736a7bec22",
+ "type": "volume",
+ }
+ ],
+ "expires_at": "2013-05-22T00:02:43.941430Z",
+ "issued_at": "2013-05-21T00:02:43.941473Z",
+ "methods": [
+ "password"
+ ],
+ "project": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "01257",
+ "name": "service"
+ },
+ "roles": [
+ {
+ "id": "9fe2ff9ee4384b1894a90878d3e92bab",
+ "name": "_member_"
+ },
+ {
+ "id": "53bff13443bd4450b97f978881d47b18",
+ "name": "admin"
+ }
+ ],
+ "user": {
+ "domain": {
+ "id": "default",
+ "name": "Default"
+ },
+ "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
+ "name": "nova"
+ },
+ "OS-TRUST:trust": {
+ "id": "abc123",
+ "trustee_user_id": "123456",
+ "trustor_user_id": "333333",
+ "impersonation": False
+ }
+ }
+}
+
+
+class TestTokenProvider(test.TestCase):
+ def setUp(self):
+ super(TestTokenProvider, self).setUp()
+ self.load_backends()
+
+ def test_get_token_version(self):
+ self.assertEqual(
+ token.provider.V2,
+ self.token_provider_api.get_token_version(SAMPLE_V2_TOKEN))
+ self.assertEqual(
+ token.provider.V3,
+ self.token_provider_api.get_token_version(SAMPLE_V3_TOKEN))
+ self.assertRaises(token.provider.UnsupportedTokenVersionException,
+ self.token_provider_api.get_token_version,
+ 'bogus')
+
+ def test_issue_token(self):
+ self.assertRaises(token.provider.UnsupportedTokenVersionException,
+ self.token_provider_api.issue_token,
+ 'bogus_version')
+
+ def test_validate_token(self):
+ self.assertRaises(token.provider.UnsupportedTokenVersionException,
+ self.token_provider_api.validate_token,
+ uuid.uuid4().hex,
+ None,
+ 'bogus_version')
+
+ def test_token_format_provider_mismatch(self):
+ self.opt_in_group('signing', token_format='UUID')
+ self.opt_in_group('token',
+ provider=token.provider.PKI_PROVIDER)
+ try:
+ token.provider.Manager()
+ raise Exception(
+ 'expecting ValueError on token provider misconfiguration')
+ except exception.UnexpectedError:
+ pass
+
+ self.opt_in_group('signing', token_format='PKI')
+ self.opt_in_group('token',
+ provider=token.provider.UUID_PROVIDER)
+ try:
+ token.provider.Manager()
+ raise Exception(
+ 'expecting ValueError on token provider misconfiguration')
+ except exception.UnexpectedError:
+ pass
+
+ # should be OK as token_format and provider aligns
+ self.opt_in_group('signing', token_format='PKI')
+ self.opt_in_group('token',
+ provider=token.provider.PKI_PROVIDER)
+ token.provider.Manager()
+
+ self.opt_in_group('signing', token_format='UUID')
+ self.opt_in_group('token',
+ provider=token.provider.UUID_PROVIDER)
+ token.provider.Manager()
+
+ # custom provider should be OK too
+ self.opt_in_group('signing', token_format='CUSTOM')
+ self.opt_in_group('token',
+ provider=token.provider.PKI_PROVIDER)
+ token.provider.Manager()
+
+ def test_default_token_format(self):
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ token.provider.PKI_PROVIDER)
+
+ def test_uuid_token_format_and_no_provider(self):
+ self.opt_in_group('signing', token_format='UUID')
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ token.provider.UUID_PROVIDER)
+
+ def test_unsupported_token_format(self):
+ self.opt_in_group('signing', token_format='CUSTOM')
+ self.assertRaises(exception.UnexpectedError,
+ token.provider.Manager.get_token_provider)
+
+ def test_uuid_provider(self):
+ self.opt_in_group('token', provider=token.provider.UUID_PROVIDER)
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ token.provider.UUID_PROVIDER)
+
+ def test_provider_override_token_format(self):
+ self.opt_in_group('token',
+ provider='keystone.token.providers.pki.Test')
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ 'keystone.token.providers.pki.Test')
+
+ self.opt_in_group('signing', token_format='UUID')
+ self.opt_in_group('token',
+ provider=token.provider.UUID_PROVIDER)
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ token.provider.UUID_PROVIDER)
+
+ self.opt_in_group('signing', token_format='PKI')
+ self.opt_in_group('token',
+ provider=token.provider.PKI_PROVIDER)
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ token.provider.PKI_PROVIDER)
+
+ self.opt_in_group('signing', token_format='CUSTOM')
+ self.opt_in_group('token',
+ provider='my.package.MyProvider')
+ self.assertEqual(token.provider.Manager.get_token_provider(),
+ 'my.package.MyProvider')
diff --git a/keystone/tests/test_url_middleware.py b/keystone/tests/test_url_middleware.py
new file mode 100644
index 00000000..436eb8d4
--- /dev/null
+++ b/keystone/tests/test_url_middleware.py
@@ -0,0 +1,56 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import webob
+
+from keystone.tests import core as test
+
+from keystone import middleware
+
+
+class FakeApp(object):
+ """Fakes a WSGI app URL normalized."""
+ def __call__(self, env, start_response):
+ resp = webob.Response()
+ resp.body = 'SUCCESS'
+ return resp(env, start_response)
+
+
+class UrlMiddlewareTest(test.TestCase):
+ def setUp(self):
+ self.middleware = middleware.NormalizingFilter(FakeApp())
+ self.response_status = None
+ self.response_headers = None
+ super(UrlMiddlewareTest, self).setUp()
+
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+ def test_trailing_slash_normalization(self):
+ """Tests /v2.0/tokens and /v2.0/tokens/ normalized URLs match."""
+ req1 = webob.Request.blank('/v2.0/tokens')
+ req2 = webob.Request.blank('/v2.0/tokens/')
+ self.middleware(req1.environ, self.start_fake_response)
+ self.middleware(req2.environ, self.start_fake_response)
+ self.assertEqual(req1.path_url, req2.path_url)
+ self.assertEqual(req1.path_url, 'http://localhost/v2.0/tokens')
+
+ def test_rewrite_empty_path(self):
+ """Tests empty path is rewritten to root."""
+ req = webob.Request.blank('')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(req.path_url, 'http://localhost/')
diff --git a/keystone/tests/test_utils.py b/keystone/tests/test_utils.py
new file mode 100644
index 00000000..19535a7b
--- /dev/null
+++ b/keystone/tests/test_utils.py
@@ -0,0 +1,66 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Copyright 2012 Justin Santa Barbara
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests import core as test
+
+from keystone.common import utils
+
+
+class UtilsTestCase(test.TestCase):
+ def test_hash(self):
+ password = 'right'
+ wrong = 'wrongwrong' # Two wrongs don't make a right
+ hashed = utils.hash_password(password)
+ self.assertTrue(utils.check_password(password, hashed))
+ self.assertFalse(utils.check_password(wrong, hashed))
+
+ def test_hash_long_password(self):
+ bigboy = '0' * 9999999
+ hashed = utils.hash_password(bigboy)
+ self.assertTrue(utils.check_password(bigboy, hashed))
+
+ def test_hash_edge_cases(self):
+ hashed = utils.hash_password('secret')
+ self.assertFalse(utils.check_password('', hashed))
+ self.assertFalse(utils.check_password(None, hashed))
+
+ def test_hash_unicode(self):
+ password = u'Comment \xe7a va'
+ wrong = 'Comment ?a va'
+ hashed = utils.hash_password(password)
+ self.assertTrue(utils.check_password(password, hashed))
+ self.assertFalse(utils.check_password(wrong, hashed))
+
+ def test_auth_str_equal(self):
+ self.assertTrue(utils.auth_str_equal('abc123', 'abc123'))
+ self.assertFalse(utils.auth_str_equal('a', 'aaaaa'))
+ self.assertFalse(utils.auth_str_equal('aaaaa', 'a'))
+ self.assertFalse(utils.auth_str_equal('ABC123', 'abc123'))
diff --git a/keystone/tests/test_uuid_token_provider.conf b/keystone/tests/test_uuid_token_provider.conf
new file mode 100644
index 00000000..d127ea3b
--- /dev/null
+++ b/keystone/tests/test_uuid_token_provider.conf
@@ -0,0 +1,2 @@
+[token]
+provider = keystone.token.providers.uuid.Provider
diff --git a/keystone/tests/test_v3.py b/keystone/tests/test_v3.py
new file mode 100644
index 00000000..7db14c84
--- /dev/null
+++ b/keystone/tests/test_v3.py
@@ -0,0 +1,971 @@
+import datetime
+import uuid
+
+from lxml import etree
+import webtest
+
+from keystone import auth
+from keystone.common import serializer
+from keystone import config
+from keystone.openstack.common import timeutils
+from keystone.policy.backends import rules
+from keystone.tests import core as test
+
+import test_content_types
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+
+class RestfulTestCase(test_content_types.RestfulTestCase):
+ _config_file_list = [test.etcdir('keystone.conf.sample'),
+ test.testsdir('test_overrides.conf'),
+ test.testsdir('backend_sql.conf'),
+ test.testsdir('backend_sql_disk.conf')]
+
+    # Override this to specify the complete list of configuration files.
+ def config_files(self):
+ return self._config_file_list
+
+ def setUp(self, load_sample_data=True):
+ """Setup for v3 Restful Test Cases.
+
+        If a child class wants to create its own sample data
+        and provide its own auth data to obtain tokens, then
+        load_sample_data should be set to False.
+
+ """
+ self.config(self.config_files())
+
+ test.setup_test_database()
+ self.load_backends()
+
+ self.public_app = webtest.TestApp(
+ self.loadapp('keystone', name='main'))
+ self.admin_app = webtest.TestApp(
+ self.loadapp('keystone', name='admin'))
+
+ if load_sample_data:
+ self.domain_id = uuid.uuid4().hex
+ self.domain = self.new_domain_ref()
+ self.domain['id'] = self.domain_id
+ self.identity_api.create_domain(self.domain_id, self.domain)
+
+ self.project_id = uuid.uuid4().hex
+ self.project = self.new_project_ref(
+ domain_id=self.domain_id)
+ self.project['id'] = self.project_id
+ self.identity_api.create_project(self.project_id, self.project)
+
+ self.user_id = uuid.uuid4().hex
+ self.user = self.new_user_ref(
+ domain_id=self.domain_id,
+ project_id=self.project_id)
+ self.user['id'] = self.user_id
+ self.identity_api.create_user(self.user_id, self.user)
+
+ self.default_domain_project_id = uuid.uuid4().hex
+ self.default_domain_project = self.new_project_ref(
+ domain_id=DEFAULT_DOMAIN_ID)
+ self.default_domain_project['id'] = self.default_domain_project_id
+ self.identity_api.create_project(self.default_domain_project_id,
+ self.default_domain_project)
+
+ self.default_domain_user_id = uuid.uuid4().hex
+ self.default_domain_user = self.new_user_ref(
+ domain_id=DEFAULT_DOMAIN_ID,
+ project_id=self.default_domain_project_id)
+ self.default_domain_user['id'] = self.default_domain_user_id
+ self.identity_api.create_user(self.default_domain_user_id,
+ self.default_domain_user)
+
+ # create & grant policy.json's default role for admin_required
+ self.role_id = uuid.uuid4().hex
+ self.role = self.new_role_ref()
+ self.role['id'] = self.role_id
+ self.role['name'] = 'admin'
+ self.identity_api.create_role(self.role_id, self.role)
+ self.identity_api.add_role_to_user_and_project(
+ self.user_id, self.project_id, self.role_id)
+ self.identity_api.add_role_to_user_and_project(
+ self.default_domain_user_id, self.default_domain_project_id,
+ self.role_id)
+ self.identity_api.add_role_to_user_and_project(
+ self.default_domain_user_id, self.project_id,
+ self.role_id)
+
+ self.public_server = self.serveapp('keystone', name='main')
+ self.admin_server = self.serveapp('keystone', name='admin')
+
+ def tearDown(self):
+ self.public_server.kill()
+ self.admin_server.kill()
+ self.public_server = None
+ self.admin_server = None
+ test.teardown_test_database()
+ # need to reset the plug-ins
+ auth.controllers.AUTH_METHODS = {}
+        # drop the policy rules
+ CONF.reset()
+ rules.reset()
+
+ def new_ref(self):
+ """Populates a ref with attributes common to all API entities."""
+ return {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True}
+
+ def new_service_ref(self):
+ ref = self.new_ref()
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def new_endpoint_ref(self, service_id):
+ ref = self.new_ref()
+ ref['interface'] = uuid.uuid4().hex[:8]
+ ref['service_id'] = service_id
+ ref['url'] = uuid.uuid4().hex
+ ref['region'] = uuid.uuid4().hex
+ return ref
+
+ def new_domain_ref(self):
+ ref = self.new_ref()
+ return ref
+
+ def new_project_ref(self, domain_id):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ return ref
+
+ def new_user_ref(self, domain_id, project_id=None):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ ref['email'] = uuid.uuid4().hex
+ ref['password'] = uuid.uuid4().hex
+ if project_id:
+ ref['project_id'] = project_id
+ return ref
+
+ def new_group_ref(self, domain_id):
+ ref = self.new_ref()
+ ref['domain_id'] = domain_id
+ return ref
+
+ def new_credential_ref(self, user_id, project_id=None):
+ ref = self.new_ref()
+ ref['user_id'] = user_id
+ ref['blob'] = uuid.uuid4().hex
+ ref['type'] = uuid.uuid4().hex
+ if project_id:
+ ref['project_id'] = project_id
+ return ref
+
+ def new_role_ref(self):
+ ref = self.new_ref()
+ return ref
+
+ def new_policy_ref(self):
+ ref = self.new_ref()
+ ref['blob'] = uuid.uuid4().hex
+ ref['type'] = uuid.uuid4().hex
+ return ref
+
+ def new_trust_ref(self, trustor_user_id, trustee_user_id, project_id=None,
+ impersonation=None, expires=None, role_ids=None,
+ role_names=None):
+ ref = self.new_ref()
+
+ ref['trustor_user_id'] = trustor_user_id
+ ref['trustee_user_id'] = trustee_user_id
+ ref['impersonation'] = impersonation or False
+ ref['project_id'] = project_id
+
+ if isinstance(expires, basestring):
+ ref['expires_at'] = expires
+ elif isinstance(expires, dict):
+ ref['expires_at'] = timeutils.strtime(
+ timeutils.utcnow() + datetime.timedelta(**expires),
+ fmt=TIME_FORMAT)
+ elif expires is None:
+ pass
+ else:
+ raise NotImplementedError('Unexpected value for "expires"')
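+        # For reference, passing e.g. expires={'minutes': 10} (an assumed,
+        # illustrative value) yields an expires_at ten minutes from now,
+        # rendered using TIME_FORMAT.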
+
+ role_ids = role_ids or []
+ role_names = role_names or []
+ if role_ids or role_names:
+ ref['roles'] = []
+ for role_id in role_ids:
+ ref['roles'].append({'id': role_id})
+ for role_name in role_names:
+ ref['roles'].append({'name': role_name})
+
+ return ref
+
+ def admin_request(self, *args, **kwargs):
+ """Translates XML responses to dicts.
+
+ This implies that we only have to write assertions for JSON.
+
+ """
+ r = super(RestfulTestCase, self).admin_request(*args, **kwargs)
+ if r.headers.get('Content-Type') == 'application/xml':
+ r.result = serializer.from_xml(etree.tostring(r.result))
+ return r
+
+ def get_scoped_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body={
+ 'auth': {
+ 'identity': {
+ 'methods': ['password'],
+ 'password': {
+ 'user': {
+ 'name': self.user['name'],
+ 'password': self.user['password'],
+ 'domain': {
+ 'id': self.user['domain_id']
+ }
+ }
+ }
+ },
+ 'scope': {
+ 'project': {
+ 'id': self.project['id'],
+ }
+ }
+ }
+ })
+ return r.headers.get('X-Subject-Token')
+
+ def get_requested_token(self, auth):
+ """Request the specific token we want."""
+
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body=auth)
+ return r.headers.get('X-Subject-Token')
+
+ def v3_request(self, path, **kwargs):
+ # Check if the caller has passed in auth details for
+ # use in requesting the token
+ auth = kwargs.pop('auth', None)
+ if auth:
+ token = self.get_requested_token(auth)
+ else:
+ token = kwargs.pop('token', None)
+ if not token:
+ token = self.get_scoped_token()
+ path = '/v3' + path
+
+ return self.admin_request(path=path, token=token, **kwargs)
+
+ def get(self, path, **kwargs):
+ r = self.v3_request(method='GET', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 200)
+ return r
+
+ def head(self, path, **kwargs):
+ r = self.v3_request(method='HEAD', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ return r
+
+ def post(self, path, **kwargs):
+ r = self.v3_request(method='POST', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 201)
+ return r
+
+ def put(self, path, **kwargs):
+ r = self.v3_request(method='PUT', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ return r
+
+ def patch(self, path, **kwargs):
+ r = self.v3_request(method='PATCH', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 200)
+ return r
+
+ def delete(self, path, **kwargs):
+ r = self.v3_request(method='DELETE', path=path, **kwargs)
+ if 'expected_status' not in kwargs:
+ self.assertResponseStatus(r, 204)
+ return r
+
+ def assertValidErrorResponse(self, r):
+ if r.headers.get('Content-Type') == 'application/xml':
+ resp = serializer.from_xml(etree.tostring(r.result))
+ else:
+ resp = r.result
+ self.assertIsNotNone(resp.get('error'))
+ self.assertIsNotNone(resp['error'].get('code'))
+ self.assertIsNotNone(resp['error'].get('title'))
+ self.assertIsNotNone(resp['error'].get('message'))
+ self.assertEqual(int(resp['error']['code']), r.status_code)
+
+ def assertValidListLinks(self, links):
+ self.assertIsNotNone(links)
+ self.assertIsNotNone(links.get('self'))
+ self.assertIn(CONF.public_endpoint % CONF, links['self'])
+
+ self.assertIn('next', links)
+ if links['next'] is not None:
+ self.assertIn(
+ CONF.public_endpoint % CONF,
+ links['next'])
+
+ self.assertIn('previous', links)
+ if links['previous'] is not None:
+ self.assertIn(
+ CONF.public_endpoint % CONF,
+ links['previous'])
+
+ def assertValidListResponse(self, resp, key, entity_validator, ref=None,
+ expected_length=None, keys_to_check=None):
+ """Make assertions common to all API list responses.
+
+        If a reference is provided, its ID will be searched for in the
+        response, and the matching entity asserted to be equal.
+
+ """
+ entities = resp.result.get(key)
+ self.assertIsNotNone(entities)
+
+ if expected_length is not None:
+ self.assertEqual(len(entities), expected_length)
+ elif ref is not None:
+ # we're at least expecting the ref
+ self.assertNotEmpty(entities)
+
+ # collections should have relational links
+ self.assertValidListLinks(resp.result.get('links'))
+
+ for entity in entities:
+ self.assertIsNotNone(entity)
+ self.assertValidEntity(entity, keys_to_check=keys_to_check)
+ entity_validator(entity)
+ if ref:
+ entity = [x for x in entities if x['id'] == ref['id']][0]
+ self.assertValidEntity(entity, ref=ref,
+ keys_to_check=keys_to_check)
+ entity_validator(entity, ref)
+ return entities
+
+ def assertValidResponse(self, resp, key, entity_validator, *args,
+ **kwargs):
+ """Make assertions common to all API responses."""
+ entity = resp.result.get(key)
+ self.assertIsNotNone(entity)
+ keys = kwargs.pop('keys_to_check', None)
+ self.assertValidEntity(entity, keys_to_check=keys, *args, **kwargs)
+ entity_validator(entity, *args, **kwargs)
+ return entity
+
+ def assertValidEntity(self, entity, ref=None, keys_to_check=None):
+ """Make assertions common to all API entities.
+
+ If a reference is provided, the entity will also be compared against
+ the reference.
+ """
+ if keys_to_check:
+ keys = keys_to_check
+ else:
+ keys = ['name', 'description', 'enabled']
+
+ for k in ['id'] + keys:
+ msg = '%s unexpectedly None in %s' % (k, entity)
+ self.assertIsNotNone(entity.get(k), msg)
+
+ self.assertIsNotNone(entity.get('links'))
+ self.assertIsNotNone(entity['links'].get('self'))
+ self.assertIn(CONF.public_endpoint % CONF, entity['links']['self'])
+ self.assertIn(entity['id'], entity['links']['self'])
+
+ if ref:
+ for k in keys:
+ msg = '%s not equal: %s != %s' % (k, ref[k], entity[k])
+ self.assertEquals(ref[k], entity[k])
+
+ return entity
+
+ # auth validation
+
+ def assertValidISO8601ExtendedFormatDatetime(self, dt):
+ try:
+ return timeutils.parse_strtime(dt, fmt=TIME_FORMAT)
+ except Exception:
+ msg = '%s is not a valid ISO 8601 extended format date time.' % dt
+ raise AssertionError(msg)
+ self.assertTrue(isinstance(dt, datetime.datetime))
+
+ def assertValidTokenResponse(self, r, user=None):
+ self.assertTrue(r.headers.get('X-Subject-Token'))
+ token = r.result['token']
+
+ self.assertIsNotNone(token.get('expires_at'))
+ expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ token['expires_at'])
+ self.assertIsNotNone(token.get('issued_at'))
+ issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ token['issued_at'])
+ self.assertTrue(issued_at < expires_at)
+
+ self.assertIn('user', token)
+ self.assertIn('id', token['user'])
+ self.assertIn('name', token['user'])
+ self.assertIn('domain', token['user'])
+ self.assertIn('id', token['user']['domain'])
+
+ if user is not None:
+ self.assertEqual(user['id'], token['user']['id'])
+ self.assertEqual(user['name'], token['user']['name'])
+ self.assertEqual(user['domain_id'], token['user']['domain']['id'])
+
+ return token
+
+ def assertValidUnscopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidTokenResponse(r, *args, **kwargs)
+
+ self.assertNotIn('roles', token)
+ self.assertNotIn('catalog', token)
+ self.assertNotIn('project', token)
+ self.assertNotIn('domain', token)
+
+ return token
+
+ def assertValidScopedTokenResponse(self, r, *args, **kwargs):
+ require_catalog = kwargs.pop('require_catalog', True)
+ token = self.assertValidTokenResponse(r, *args, **kwargs)
+
+ if require_catalog:
+ self.assertIn('catalog', token)
+ else:
+ self.assertNotIn('catalog', token)
+
+ self.assertIn('roles', token)
+ self.assertTrue(token['roles'])
+ for role in token['roles']:
+ self.assertIn('id', role)
+ self.assertIn('name', role)
+
+ return token
+
+ def assertValidProjectScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
+
+ self.assertIn('project', token)
+ self.assertIn('id', token['project'])
+ self.assertIn('name', token['project'])
+ self.assertIn('domain', token['project'])
+ self.assertIn('id', token['project']['domain'])
+ self.assertIn('name', token['project']['domain'])
+
+ self.assertEqual(self.role_id, token['roles'][0]['id'])
+
+ return token
+
+ def assertValidProjectTrustScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidProjectScopedTokenResponse(r, *args, **kwargs)
+
+ trust = token.get('OS-TRUST:trust')
+ self.assertIsNotNone(trust)
+ self.assertIsNotNone(trust.get('id'))
+ self.assertTrue(isinstance(trust.get('impersonation'), bool))
+ self.assertIsNotNone(trust.get('trustor_user'))
+ self.assertIsNotNone(trust.get('trustee_user'))
+ self.assertIsNotNone(trust['trustor_user'].get('id'))
+ self.assertIsNotNone(trust['trustee_user'].get('id'))
+
+ def assertValidDomainScopedTokenResponse(self, r, *args, **kwargs):
+ token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
+
+ self.assertIn('domain', token)
+ self.assertIn('id', token['domain'])
+ self.assertIn('name', token['domain'])
+
+ return token
+
+ def assertEqualTokens(self, a, b):
+ """Assert that two tokens are equal.
+
+ Compare two tokens except for their ids. This also truncates
+ the time in the comparison.
+ """
+ def normalize(token):
+ del token['token']['expires_at']
+ del token['token']['issued_at']
+ return token
+
+ a_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ a['token']['expires_at'])
+ b_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
+ b['token']['expires_at'])
+ self.assertCloseEnoughForGovernmentWork(a_expires_at, b_expires_at)
+
+ a_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ a['token']['issued_at'])
+ b_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
+ b['token']['issued_at'])
+ self.assertCloseEnoughForGovernmentWork(a_issued_at, b_issued_at)
+
+ return self.assertDictEqual(normalize(a), normalize(b))
+
+ # service validation
+
+ def assertValidServiceListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'services',
+ self.assertValidService,
+ *args,
+ **kwargs)
+
+ def assertValidServiceResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'service',
+ self.assertValidService,
+ *args,
+ **kwargs)
+
+ def assertValidService(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('type'))
+ if ref:
+ self.assertEqual(ref['type'], entity['type'])
+ return entity
+
+ # endpoint validation
+
+ def assertValidEndpointListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'endpoints',
+ self.assertValidEndpoint,
+ *args,
+ **kwargs)
+
+ def assertValidEndpointResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'endpoint',
+ self.assertValidEndpoint,
+ *args,
+ **kwargs)
+
+ def assertValidEndpoint(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('interface'))
+ self.assertIsNotNone(entity.get('service_id'))
+
+ # this is intended to be an unexposed implementation detail
+ self.assertNotIn('legacy_endpoint_id', entity)
+
+ if ref:
+ self.assertEqual(ref['interface'], entity['interface'])
+ self.assertEqual(ref['service_id'], entity['service_id'])
+ return entity
+
+ # domain validation
+
+ def assertValidDomainListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'domains',
+ self.assertValidDomain,
+ *args,
+ **kwargs)
+
+ def assertValidDomainResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'domain',
+ self.assertValidDomain,
+ *args,
+ **kwargs)
+
+ def assertValidDomain(self, entity, ref=None):
+ if ref:
+ pass
+ return entity
+
+ # project validation
+
+ def assertValidProjectListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'projects',
+ self.assertValidProject,
+ *args,
+ **kwargs)
+
+ def assertValidProjectResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'project',
+ self.assertValidProject,
+ *args,
+ **kwargs)
+
+ def assertValidProject(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('domain_id'))
+ if ref:
+ self.assertEqual(ref['domain_id'], entity['domain_id'])
+ return entity
+
+ # user validation
+
+ def assertValidUserListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'users',
+ self.assertValidUser,
+ *args,
+ **kwargs)
+
+ def assertValidUserResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'user',
+ self.assertValidUser,
+ *args,
+ **kwargs)
+
+ def assertValidUser(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('domain_id'))
+ self.assertIsNotNone(entity.get('email'))
+ self.assertIsNone(entity.get('password'))
+ if ref:
+ self.assertEqual(ref['domain_id'], entity['domain_id'])
+ self.assertEqual(ref['email'], entity['email'])
+ return entity
+
+ # group validation
+
+ def assertValidGroupListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'groups',
+ self.assertValidGroup,
+ *args,
+ **kwargs)
+
+ def assertValidGroupResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'group',
+ self.assertValidGroup,
+ *args,
+ **kwargs)
+
+ def assertValidGroup(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('name'))
+ if ref:
+ self.assertEqual(ref['name'], entity['name'])
+ return entity
+
+ # credential validation
+
+ def assertValidCredentialListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'credentials',
+ self.assertValidCredential,
+ *args,
+ **kwargs)
+
+ def assertValidCredentialResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'credential',
+ self.assertValidCredential,
+ *args,
+ **kwargs)
+
+ def assertValidCredential(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('user_id'))
+ self.assertIsNotNone(entity.get('blob'))
+ self.assertIsNotNone(entity.get('type'))
+ if ref:
+ self.assertEqual(ref['user_id'], entity['user_id'])
+ self.assertEqual(ref['blob'], entity['blob'])
+ self.assertEqual(ref['type'], entity['type'])
+ self.assertEqual(ref.get('project_id'), entity.get('project_id'))
+ return entity
+
+ # role validation
+
+ def assertValidRoleListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'roles',
+ self.assertValidRole,
+ keys_to_check=['name'],
+ *args,
+ **kwargs)
+
+ def assertValidRoleResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'role',
+ self.assertValidRole,
+ keys_to_check=['name'],
+ *args,
+ **kwargs)
+
+ def assertValidRole(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('name'))
+ if ref:
+ self.assertEqual(ref['name'], entity['name'])
+ return entity
+
+ def assertValidRoleAssignmentListResponse(self, resp, ref=None,
+ expected_length=None):
+
+ entities = resp.result.get('role_assignments')
+
+ if expected_length is not None:
+ self.assertEqual(len(entities), expected_length)
+ elif ref is not None:
+ # we're at least expecting the ref
+ self.assertNotEmpty(entities)
+
+ # collections should have relational links
+ self.assertValidListLinks(resp.result.get('links'))
+
+ for entity in entities:
+ self.assertIsNotNone(entity)
+ self.assertValidRoleAssignment(entity)
+ if ref:
+ self.assertValidRoleAssignment(entity, ref)
+ return entities
+
+ def assertValidRoleAssignment(self, entity, ref=None, url=None):
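+        # Expected shape (a sketch derived from the assertions below):
+        #   {'role': {'id': ...},
+        #    'user': {'id': ...} or 'group': {'id': ...} (exactly one),
+        #    'scope': {'project': {'id': ...}} or {'domain': {'id': ...}}
+        #             (exactly one),
+        #    'links': {'assignment': '<URL>'}}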
+ self.assertIsNotNone(entity.get('role'))
+ self.assertIsNotNone(entity.get('scope'))
+
+ # Only one of user or group should be present
+ self.assertIsNotNone(entity.get('user') or
+ entity.get('group'))
+ self.assertIsNone(entity.get('user') and
+ entity.get('group'))
+
+ # Only one of domain or project should be present
+ self.assertIsNotNone(entity['scope'].get('project') or
+ entity['scope'].get('domain'))
+ self.assertIsNone(entity['scope'].get('project') and
+ entity['scope'].get('domain'))
+
+ if entity['scope'].get('project'):
+ self.assertIsNotNone(entity['scope']['project'].get('id'))
+ else:
+ self.assertIsNotNone(entity['scope']['domain'].get('id'))
+ self.assertIsNotNone(entity.get('links'))
+ self.assertIsNotNone(entity['links'].get('assignment'))
+
+ if ref:
+ if ref.get('user'):
+ self.assertEqual(ref['user']['id'], entity['user']['id'])
+ if ref.get('group'):
+ self.assertEqual(ref['group']['id'], entity['group']['id'])
+ if ref.get('role'):
+ self.assertEqual(ref['role']['id'], entity['role']['id'])
+ if ref['scope'].get('project'):
+ self.assertEqual(ref['scope']['project']['id'],
+ entity['scope']['project']['id'])
+ if ref['scope'].get('domain'):
+ self.assertEqual(ref['scope']['domain']['id'],
+ entity['scope']['domain']['id'])
+ if url:
+ self.assertIn(url, entity['links']['assignment'])
+
+ def assertRoleAssignmentInListResponse(
+ self, resp, ref, link_url=None, expected=1):
+
+ found_count = 0
+ for entity in resp.result.get('role_assignments'):
+ try:
+ self.assertValidRoleAssignment(
+ entity, ref=ref, url=link_url)
+ except Exception:
+ # It doesn't match, so let's go onto the next one
+ pass
+ else:
+ found_count += 1
+ self.assertEqual(found_count, expected)
+
+ def assertRoleAssignmentNotInListResponse(
+ self, resp, ref, link_url=None):
+
+ self.assertRoleAssignmentInListResponse(
+ resp, ref=ref, link_url=link_url, expected=0)
+
+ # policy validation
+
+ def assertValidPolicyListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'policies',
+ self.assertValidPolicy,
+ *args,
+ **kwargs)
+
+ def assertValidPolicyResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'policy',
+ self.assertValidPolicy,
+ *args,
+ **kwargs)
+
+ def assertValidPolicy(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('blob'))
+ self.assertIsNotNone(entity.get('type'))
+ if ref:
+ self.assertEqual(ref['blob'], entity['blob'])
+ self.assertEqual(ref['type'], entity['type'])
+ return entity
+
+ # trust validation
+
+ def assertValidTrustListResponse(self, resp, *args, **kwargs):
+ return self.assertValidListResponse(
+ resp,
+ 'trusts',
+ self.assertValidTrust,
+ *args,
+ **kwargs)
+
+ def assertValidTrustResponse(self, resp, *args, **kwargs):
+ return self.assertValidResponse(
+ resp,
+ 'trust',
+ self.assertValidTrust,
+ *args,
+ **kwargs)
+
+ def assertValidTrust(self, entity, ref=None):
+ self.assertIsNotNone(entity.get('trustor_user_id'))
+ self.assertIsNotNone(entity.get('trustee_user_id'))
+
+ self.assertIn('expires_at', entity)
+ if entity['expires_at'] is not None:
+ self.assertValidISO8601ExtendedFormatDatetime(entity['expires_at'])
+
+        # always disallow roles xor project_id (neither or both is allowed)
+ has_roles = bool(entity.get('roles'))
+ has_project = bool(entity.get('project_id'))
+ self.assertFalse(has_roles ^ has_project)
+
+ for role in entity['roles']:
+ self.assertIsNotNone(role)
+ self.assertValidEntity(role)
+ self.assertValidRole(role)
+
+ self.assertValidListLinks(entity.get('roles_links'))
+
+ # these were used during dev and shouldn't land in final impl
+ self.assertNotIn('role_ids', entity)
+ self.assertNotIn('role_names', entity)
+
+ if ref:
+ self.assertEqual(ref['trustor_user_id'], entity['trustor_user_id'])
+ self.assertEqual(ref['trustee_user_id'], entity['trustee_user_id'])
+ self.assertEqual(ref['project_id'], entity['project_id'])
+ if entity.get('expires_at') or ref.get('expires_at'):
+ entity_exp = self.assertValidISO8601ExtendedFormatDatetime(
+ entity['expires_at'])
+ ref_exp = self.assertValidISO8601ExtendedFormatDatetime(
+ ref['expires_at'])
+ self.assertCloseEnoughForGovernmentWork(entity_exp, ref_exp)
+ else:
+ self.assertEqual(ref.get('expires_at'),
+ entity.get('expires_at'))
+
+ return entity
+
+ def build_auth_scope(self, project_id=None, project_name=None,
+ project_domain_id=None, project_domain_name=None,
+ domain_id=None, domain_name=None, trust_id=None):
+ scope_data = {}
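+        # Builds the 'scope' portion of a v3 auth request. For example
+        # (hypothetical values), project_id='p1' yields
+        # {'project': {'id': 'p1'}} and trust_id='t1' yields
+        # {'OS-TRUST:trust': {'id': 't1'}}.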
+ if project_id or project_name:
+ scope_data['project'] = {}
+ if project_id:
+ scope_data['project']['id'] = project_id
+ else:
+ scope_data['project']['name'] = project_name
+ if project_domain_id or project_domain_name:
+ project_domain_json = {}
+ if project_domain_id:
+ project_domain_json['id'] = project_domain_id
+ else:
+ project_domain_json['name'] = project_domain_name
+ scope_data['project']['domain'] = project_domain_json
+ if domain_id or domain_name:
+ scope_data['domain'] = {}
+ if domain_id:
+ scope_data['domain']['id'] = domain_id
+ else:
+ scope_data['domain']['name'] = domain_name
+ if trust_id:
+ scope_data['OS-TRUST:trust'] = {}
+ scope_data['OS-TRUST:trust']['id'] = trust_id
+ return scope_data
+
+ def build_password_auth(self, user_id=None, username=None,
+ user_domain_id=None, user_domain_name=None,
+ password=None):
+ password_data = {'user': {}}
+ if user_id:
+ password_data['user']['id'] = user_id
+ else:
+ password_data['user']['name'] = username
+ if user_domain_id or user_domain_name:
+ password_data['user']['domain'] = {}
+ if user_domain_id:
+ password_data['user']['domain']['id'] = user_domain_id
+ else:
+ password_data['user']['domain']['name'] = user_domain_name
+ password_data['user']['password'] = password
+ return password_data
+
+ def build_token_auth(self, token):
+ return {'id': token}
+
+ def build_authentication_request(self, token=None, user_id=None,
+ username=None, user_domain_id=None,
+ user_domain_name=None, password=None,
+ **kwargs):
+ """Build auth dictionary.
+
+        Creates an auth dictionary from whichever of the supported
+        arguments (token, password credentials, and scope) are provided.
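+
+        Example (an illustrative sketch; the literal values below are
+        hypothetical):
+
+            build_authentication_request(user_id='u1', password='s3cr3t',
+                                         project_id='p1')
+            # returns approximately:
+            # {'auth': {'identity': {
+            #               'methods': ['password'],
+            #               'password': {'user': {'id': 'u1',
+            #                                     'password': 's3cr3t'}}},
+            #           'scope': {'project': {'id': 'p1'}}}}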
+ """
+ auth_data = {}
+ auth_data['identity'] = {'methods': []}
+ if token:
+ auth_data['identity']['methods'].append('token')
+ auth_data['identity']['token'] = self.build_token_auth(token)
+ if user_id or username:
+ auth_data['identity']['methods'].append('password')
+ auth_data['identity']['password'] = self.build_password_auth(
+ user_id, username, user_domain_id, user_domain_name, password)
+ if kwargs:
+ auth_data['scope'] = self.build_auth_scope(**kwargs)
+ return {'auth': auth_data}
+
+
+class VersionTestCase(RestfulTestCase):
+ def test_get_version(self):
+ pass
diff --git a/keystone/tests/test_v3_auth.py b/keystone/tests/test_v3_auth.py
new file mode 100644
index 00000000..43f87d98
--- /dev/null
+++ b/keystone/tests/test_v3_auth.py
@@ -0,0 +1,1860 @@
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+import uuid
+
+from keystone import auth
+from keystone.common import cms
+from keystone import config
+from keystone import exception
+from keystone.tests import core as test
+
+import test_v3
+
+
+CONF = config.CONF
+
+
+class TestAuthInfo(test_v3.RestfulTestCase):
+    # TODO(henry-nash) These tests are somewhat inefficient, since by
+ # using the test_v3.RestfulTestCase class to gain access to the auth
+ # building helper functions, they cause backend databases and fixtures
+ # to be loaded unnecessarily. Separating out the helper functions from
+ # this base class would improve efficiency (Bug #1134836)
+ def setUp(self, load_sample_data=False):
+ super(TestAuthInfo, self).setUp(load_sample_data=load_sample_data)
+
+ def test_missing_auth_methods(self):
+ auth_data = {'identity': {}}
+ auth_data['identity']['token'] = {'id': uuid.uuid4().hex}
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_unsupported_auth_method(self):
+ auth_data = {'methods': ['abc']}
+ auth_data['abc'] = {'test': 'test'}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.AuthMethodNotSupported,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_missing_auth_method_data(self):
+ auth_data = {'methods': ['password']}
+ auth_data = {'identity': auth_data}
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_project_name_no_domain(self):
+ auth_data = self.build_authentication_request(
+ username='test',
+ password='test',
+ project_name='abc')['auth']
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_both_project_and_domain_in_scope(self):
+ auth_data = self.build_authentication_request(
+ user_id='test',
+ password='test',
+ project_name='test',
+ domain_name='test')['auth']
+ self.assertRaises(exception.ValidationError,
+ auth.controllers.AuthInfo,
+ None,
+ auth_data)
+
+ def test_get_method_data_invalid_method(self):
+ auth_data = self.build_authentication_request(
+ user_id='test',
+ password='test')['auth']
+ context = None
+ auth_info = auth.controllers.AuthInfo(context, auth_data)
+
+ method_name = uuid.uuid4().hex
+ self.assertRaises(exception.ValidationError,
+ auth_info.get_method_data,
+ method_name)
+
+
+class TestPKITokenAPIs(test_v3.RestfulTestCase):
+ def config_files(self):
+ conf_files = super(TestPKITokenAPIs, self).config_files()
+ conf_files.append(test.testsdir('test_pki_token_provider.conf'))
+ return conf_files
+
+ def setUp(self):
+ super(TestPKITokenAPIs, self).setUp()
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain_id,
+ password=self.user['password'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ self.token_data = resp.result
+ self.token = resp.headers.get('X-Subject-Token')
+ self.headers = {'X-Subject-Token': resp.headers.get('X-Subject-Token')}
+
+ def test_default_fixture_scope_token(self):
+ self.assertIsNotNone(self.get_scoped_token())
+
+ def test_v3_token_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token_data = resp.result
+ token_id = resp.headers.get('X-Subject-Token')
+ self.assertIn('expires_at', token_data['token'])
+
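+        # For PKI tokens the token ID is the CMS signature over the JSON
+        # token body, so signing the response body again should reproduce
+        # the ID returned in X-Subject-Token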
+ expected_token_id = cms.cms_sign_token(json.dumps(token_data),
+ CONF.signing.certfile,
+ CONF.signing.keyfile)
+ self.assertEqual(expected_token_id, token_id)
+ # should be able to validate hash PKI token as well
+ hash_token_id = cms.cms_hash_token(token_id)
+ headers = {'X-Subject-Token': hash_token_id}
+ resp = self.get('/auth/tokens', headers=headers)
+ expected_token_data = resp.result
+ self.assertDictEqual(expected_token_data, token_data)
+
+ def test_v3_v2_intermix_non_default_domain_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_intermix_domain_scoped_token_failed(self):
+ # grant the domain role to user
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_intermix_non_default_project_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.project['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET',
+ expected_status=401)
+
+ def test_v3_v2_unscoped_token_intermix(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+        # v2 token timestamps do not have fraction-of-second precision,
+        # so just make sure the non-fraction part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+
+ def test_v3_v2_token_intermix(self):
+ # FIXME(gyee): PKI tokens are not interchangeable because token
+ # data is baked into the token itself.
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+        # v2 token timestamps do not have fraction-of-second precision,
+        # so just make sure the non-fraction part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
+ token_data['token']['roles'][0]['id'])
+
+ def test_v3_v2_hashed_pki_token_intermix(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token_data = resp.result
+ token = resp.headers.get('X-Subject-Token')
+
+ # should be able to validate a hash PKI token in v2 too
+ token = cms.cms_hash_token(token)
+ path = '/v2.0/tokens/%s' % (token)
+ resp = self.admin_request(path=path,
+ token='ADMIN',
+ method='GET')
+ v2_token = resp.result
+ self.assertEqual(v2_token['access']['user']['id'],
+ token_data['token']['user']['id'])
+        # v2 token timestamps do not have fraction-of-second precision,
+        # so just make sure the non-fraction part agrees
+ self.assertIn(v2_token['access']['token']['expires'][:-1],
+ token_data['token']['expires_at'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
+ token_data['token']['roles'][0]['id'])
+
+ def test_v2_v3_unscoped_token_intermix(self):
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user['id'],
+ 'password': self.user['password']
+ }
+ }}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+ v2_token_data = resp.result
+ v2_token = v2_token_data['access']['token']['id']
+ headers = {'X-Subject-Token': v2_token}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ token_data['token']['user']['id'])
+        # v2 token timestamps do not have fraction-of-second precision,
+        # so just make sure the non-fraction part agrees
+        self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+                      token_data['token']['expires_at'])
+
+ def test_v2_v3_token_intermix(self):
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user['id'],
+ 'password': self.user['password']
+ },
+ 'tenantId': self.project['id']
+ }}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+ v2_token_data = resp.result
+ v2_token = v2_token_data['access']['token']['id']
+ headers = {'X-Subject-Token': v2_token}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+ self.assertEqual(v2_token_data['access']['user']['id'],
+ token_data['token']['user']['id'])
+        # v2 token timestamps do not have fraction-of-second precision,
+        # so just make sure the non-fraction part agrees
+        self.assertIn(v2_token_data['access']['token']['expires'][:-1],
+                      token_data['token']['expires_at'])
+ self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
+ token_data['token']['roles'][0]['name'])
+
+ def test_rescoping_token(self):
+ expires = self.token_data['token']['expires_at']
+ auth_data = self.build_authentication_request(
+ token=self.token,
+ project_id=self.project_id)
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+ # make sure expires stayed the same
+ self.assertEqual(expires, r.result['token']['expires_at'])
+
+ def test_check_token(self):
+ self.head('/auth/tokens', headers=self.headers, expected_status=204)
+
+ def test_validate_token(self):
+ r = self.get('/auth/tokens', headers=self.headers)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_revoke_token(self):
+ headers = {'X-Subject-Token': self.get_scoped_token()}
+ self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.head('/auth/tokens', headers=headers, expected_status=401)
+
+ # make sure we have a CRL
+ r = self.get('/auth/tokens/OS-PKI/revoked')
+ self.assertIn('signed', r.result)
+
+
+class TestUUIDTokenAPIs(TestPKITokenAPIs):
+ def config_files(self):
+ conf_files = super(TestUUIDTokenAPIs, self).config_files()
+ conf_files.append(test.testsdir('test_uuid_token_provider.conf'))
+ return conf_files
+
+ def test_v3_token_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token_data = resp.result
+ token_id = resp.headers.get('X-Subject-Token')
+ self.assertIn('expires_at', token_data['token'])
+ self.assertFalse(cms.is_ans1_token(token_id))
+
+ def test_v3_v2_hashed_pki_token_intermix(self):
+ # this test is only applicable for PKI tokens
+ # skipping it for UUID tokens
+ pass
+
+
+class TestTokenRevoking(test_v3.RestfulTestCase):
+ """Test token revocation on the v3 Identity API."""
+
+ def setUp(self):
+ """Setup for Token Revoking Test Cases.
+
+ As well as the usual housekeeping, create a set of domains,
+ users, groups, roles and projects for the subsequent tests:
+
+ - Two domains: A & B
+ - DomainA has user1, domainB has user2 and user3
+ - DomainA has group1 and group2, domainB has group3
+ - User1 has a role on domainA
+ - Two projects: A & B, both in domainA
+ - All users have a role on projectA
+        - User1 and user2 are members of group1
+        - User3 is a member of group2
+
+ """
+ super(TestTokenRevoking, self).setUp()
+
+ # Start by creating a couple of domains and projects
+ self.domainA = self.new_domain_ref()
+ self.identity_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.identity_api.create_domain(self.domainB['id'], self.domainB)
+ self.projectA = self.new_project_ref(domain_id=self.domainA['id'])
+ self.identity_api.create_project(self.projectA['id'], self.projectA)
+ self.projectB = self.new_project_ref(domain_id=self.domainA['id'])
+ self.identity_api.create_project(self.projectB['id'], self.projectB)
+
+ # Now create some users, one in domainA and two of them in domainB
+ self.user1 = self.new_user_ref(
+ domain_id=self.domainA['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domainB['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+
+ self.user3 = self.new_user_ref(
+ domain_id=self.domainB['id'])
+ self.user3['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user3['id'], self.user3)
+
+ self.group1 = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.identity_api.create_group(self.group1['id'], self.group1)
+
+ self.group2 = self.new_group_ref(
+ domain_id=self.domainA['id'])
+ self.identity_api.create_group(self.group2['id'], self.group2)
+
+ self.group3 = self.new_group_ref(
+ domain_id=self.domainB['id'])
+ self.identity_api.create_group(self.group3['id'], self.group3)
+
+ self.identity_api.add_user_to_group(self.user1['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user2['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user3['id'],
+ self.group2['id'])
+
+ self.role1 = self.new_role_ref()
+ self.identity_api.create_role(self.role1['id'], self.role1)
+ self.role2 = self.new_role_ref()
+ self.identity_api.create_role(self.role2['id'], self.role2)
+
+ self.identity_api.create_grant(self.role1['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+ self.identity_api.create_grant(self.role1['id'],
+ user_id=self.user1['id'],
+ project_id=self.projectA['id'])
+ self.identity_api.create_grant(self.role1['id'],
+ user_id=self.user2['id'],
+ project_id=self.projectA['id'])
+ self.identity_api.create_grant(self.role1['id'],
+ user_id=self.user3['id'],
+ project_id=self.projectA['id'])
+ self.identity_api.create_grant(self.role1['id'],
+ group_id=self.group1['id'],
+ project_id=self.projectA['id'])
+
+ def test_unscoped_token_remains_valid_after_role_assignment(self):
+ r = self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password']))
+ unscoped_token = r.headers.get('X-Subject-Token')
+
+ r = self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ token=unscoped_token,
+ project_id=self.projectA['id']))
+ scoped_token = r.headers.get('X-Subject-Token')
+
+ # confirm both tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=204)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': scoped_token},
+ expected_status=204)
+
+ # create a new role
+ role = self.new_role_ref()
+ self.identity_api.create_role(role['id'], role)
+
+ # assign a new role
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': role['id']})
+
+ # both tokens should remain valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': unscoped_token},
+ expected_status=204)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': scoped_token},
+ expected_status=204)
+
+ def test_deleting_user_grant_revokes_token(self):
+ """Test deleting a user grant revokes token.
+
+ Test Plan:
+ - Get a token for user1, scoped to ProjectA
+ - Delete the grant user1 has on ProjectA
+ - Check token is no longer valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+ # Delete the grant, which should invalidate the token
+ grant_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=401)
+
+ def test_domain_user_role_assignment_maintains_token(self):
+ """Test user-domain role assignment maintains existing token.
+
+ Test Plan:
+ - Get a token for user1, scoped to ProjectA
+ - Create a grant for user1 on DomainB
+ - Check token is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+ # Assign a role, which should not affect the token
+ grant_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/'
+ 'roles/%(role_id)s' % {
+ 'domain_id': self.domainB['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.put(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+
+ def test_deleting_group_grant_revokes_tokens(self):
+ """Test deleting a group grant revokes tokens.
+
+ Test Plan:
+ - Get a token for user1, scoped to ProjectA
+ - Get a token for user2, scoped to ProjectA
+ - Get a token for user3, scoped to ProjectA
+ - Delete the grant group1 has on ProjectA
+ - Check tokens for user1 & user2 are no longer valid,
+ since user1 and user2 are members of group1
+ - Check token for user3 is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token1 = resp.headers.get('X-Subject-Token')
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token2 = resp.headers.get('X-Subject-Token')
+ auth_data = self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token3 = resp.headers.get('X-Subject-Token')
+ # Confirm tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=204)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=204)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token3},
+ expected_status=204)
+ # Delete the group grant, which should invalidate the
+ # tokens for user1 and user2
+ grant_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/'
+ 'roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=401)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=401)
+ # But user3's token should still be valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token3},
+ expected_status=204)
+
+ def test_domain_group_role_assignment_maintains_token(self):
+ """Test domain-group role assignment maintains existing token.
+
+ Test Plan:
+ - Get a token for user1, scoped to ProjectA
+ - Create a grant for group1 on DomainB
+        - Check token is still valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+ # Confirm token is valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+        # Create a grant for group1 on domainB, which should not affect
+        # the existing project-scoped token
+ grant_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/'
+ 'roles/%(role_id)s' % {
+ 'domain_id': self.domainB['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+ self.put(grant_url)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+
+ def test_group_membership_changes_revokes_token(self):
+ """Test add/removal to/from group revokes token.
+
+ Test Plan:
+ - Get a token for user1, scoped to ProjectA
+ - Get a token for user2, scoped to ProjectA
+ - Remove user1 from group1
+ - Check token for user1 is no longer valid
+ - Check token for user2 is still valid, even though
+ user2 is also part of group1
+ - Add user2 to group2
+ - Check token for user2 is now no longer valid
+
+ """
+ auth_data = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token1 = resp.headers.get('X-Subject-Token')
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.projectA['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token2 = resp.headers.get('X-Subject-Token')
+ # Confirm tokens are valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=204)
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=204)
+ # Remove user1 from group1, which should invalidate
+ # the token
+ self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group1['id'],
+ 'user_id': self.user1['id']})
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token1},
+ expected_status=401)
+ # But user2's token should still be valid
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=204)
+ # Adding user2 to a group should invalidate token
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group2['id'],
+ 'user_id': self.user2['id']})
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token2},
+ expected_status=401)
+
+ def test_removing_role_assignment_does_not_affect_other_users(self):
+ """Revoking a role from one user should not affect other users."""
+ r = self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id']))
+ user1_token = r.headers.get('X-Subject-Token')
+
+ r = self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+ user3_token = r.headers.get('X-Subject-Token')
+
+ # delete relationships between user1 and projectA from setUp
+ self.delete(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'project_id': self.projectA['id'],
+ 'user_id': self.user1['id'],
+ 'role_id': self.role1['id']})
+ self.delete(
+ '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
+ {'project_id': self.projectA['id'],
+ 'group_id': self.group1['id'],
+ 'role_id': self.role1['id']})
+
+ # authorization for the first user should now fail
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': user1_token},
+ expected_status=401)
+ self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ project_id=self.projectA['id']),
+ expected_status=401)
+
+ # authorization for the second user should still succeed
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': user3_token},
+ expected_status=204)
+ self.post(
+ '/auth/tokens',
+ body=self.build_authentication_request(
+ user_id=self.user3['id'],
+ password=self.user3['password'],
+ project_id=self.projectA['id']))
+
+
+class TestAuthExternalDisabled(test_v3.RestfulTestCase):
+    def config_files(self):
+        config_files = self._config_file_list[:]
+        config_files.append('auth_plugin_external_disabled.conf')
+        return config_files
+
+ def test_remote_user_disabled(self):
+ auth_data = self.build_authentication_request()['auth']
+ api = auth.controllers.Auth()
+ context = {'REMOTE_USER': '%s@%s' % (self.user['name'],
+ self.domain['id'])}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+
+class TestAuthExternalDomain(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+    def config_files(self):
+        config_files = self._config_file_list[:]
+        config_files.append('auth_plugin_external_domain.conf')
+        return config_files
+
+ def test_remote_user_with_realm(self):
+ auth_data = self.build_authentication_request()['auth']
+ api = auth.controllers.Auth()
+ context = {'REMOTE_USER': '%s@%s' %
+ (self.user['name'], self.domain['name'])}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'], self.user['id'])
+
+ def test_project_id_scoped_with_remote_user(self):
+ CONF.token.bind = ['kerberos']
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidProjectScopedTokenResponse(r)
+        self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+ def test_unscoped_bind_with_remote_user(self):
+ CONF.token.bind = ['kerberos']
+ auth_data = self.build_authentication_request()
+ remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidUnscopedTokenResponse(r)
+        self.assertEqual(token['bind']['kerberos'], self.user['name'])
+
+
+class TestAuthJSON(test_v3.RestfulTestCase):
+ content_type = 'json'
+
+ def test_unscoped_token_with_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_unscoped_token_with_user_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_unscoped_token_with_user_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_project_id_scoped_token_with_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_default_project_id_scoped_token_with_user_id(self):
+ # create a second project to work with
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_default_project_id_scoped_token_with_user_id_no_catalog(self):
+ # create a second project to work with
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # grant the user a role on the project
+ self.put(
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
+ 'user_id': self.user['id'],
+ 'project_id': project['id'],
+ 'role_id': self.role['id']})
+
+ # set the user's preferred project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+ self.assertEqual(r.result['token']['project']['id'], project['id'])
+
+ def test_implicit_project_id_scoped_token_with_user_id_no_catalog(self):
+ # attempt to authenticate without requesting a project
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens?nocatalog', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+
+ def test_default_project_id_scoped_token_with_user_id_401(self):
+ # create a second project to work with
+ ref = self.new_project_ref(domain_id=self.domain['id'])
+ del ref['id']
+ r = self.post('/projects', body={'project': ref})
+ project = self.assertValidProjectResponse(r, ref)
+
+ # set the user's preferred project without having authz on that project
+ body = {'user': {'default_project_id': project['id']}}
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body=body)
+ self.assertValidUserResponse(r)
+
+ # attempt to authenticate without requesting a project
+ # the default_project_id should be the assumed scope of the request,
+ # and fail because the user doesn't have explicit authz on that scope
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_project_id_scoped_token_with_user_id_401(self):
+ project_id = uuid.uuid4().hex
+ project = self.new_project_ref(domain_id=self.domain_id)
+ self.identity_api.create_project(project_id, project)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=project['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_user_and_group_roles_scoped_token(self):
+ """Test correct roles are returned in scoped token.
+
+ Test Plan:
+ - Create a domain, with 1 project, 2 users (user1 and user2)
+ and 2 groups (group1 and group2)
+ - Make user1 a member of group1, user2 a member of group2
+ - Create 8 roles, assigning them to each of the 8 combinations
+ of users/groups on domain/project
+ - Get a project scoped token for user1, checking that the right
+ two roles are returned (one directly assigned, one by virtue
+ of group membership)
+ - Repeat this for a domain scoped token
+ - Make user1 also a member of group2
+ - Get another scoped token making sure the additional role
+ shows up
+ - User2 is just here as a spoiler, to make sure we don't get
+ any roles uniquely assigned to it returned in any of our
+ tokens
+
+ """
+
+ domainA = self.new_domain_ref()
+ self.identity_api.create_domain(domainA['id'], domainA)
+ projectA = self.new_project_ref(domain_id=domainA['id'])
+ self.identity_api.create_project(projectA['id'], projectA)
+
+ user1 = self.new_user_ref(
+ domain_id=domainA['id'])
+ user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user1['id'], user1)
+
+ user2 = self.new_user_ref(
+ domain_id=domainA['id'])
+ user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user2['id'], user2)
+
+ group1 = self.new_group_ref(
+ domain_id=domainA['id'])
+ self.identity_api.create_group(group1['id'], group1)
+
+ group2 = self.new_group_ref(
+ domain_id=domainA['id'])
+ self.identity_api.create_group(group2['id'], group2)
+
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user2['id'],
+ group2['id'])
+
+ # Now create all the roles and assign them
+ role_list = []
+ for _ in range(8):
+ role = self.new_role_ref()
+ self.identity_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ self.identity_api.create_grant(role_list[0]['id'],
+ user_id=user1['id'],
+ domain_id=domainA['id'])
+ self.identity_api.create_grant(role_list[1]['id'],
+ user_id=user1['id'],
+ project_id=projectA['id'])
+ self.identity_api.create_grant(role_list[2]['id'],
+ user_id=user2['id'],
+ domain_id=domainA['id'])
+ self.identity_api.create_grant(role_list[3]['id'],
+ user_id=user2['id'],
+ project_id=projectA['id'])
+ self.identity_api.create_grant(role_list[4]['id'],
+ group_id=group1['id'],
+ domain_id=domainA['id'])
+ self.identity_api.create_grant(role_list[5]['id'],
+ group_id=group1['id'],
+ project_id=projectA['id'])
+ self.identity_api.create_grant(role_list[6]['id'],
+ group_id=group2['id'],
+ domain_id=domainA['id'])
+ self.identity_api.create_grant(role_list[7]['id'],
+ group_id=group2['id'],
+ project_id=projectA['id'])
+
+ # First, get a project scoped token - which should
+ # contain the direct user role and the one by virtue
+ # of group membership
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ project_id=projectA['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for i, ref in enumerate(token['roles']):
+ roles_ids.append(ref['id'])
+ self.assertEqual(len(token['roles']), 2)
+ self.assertIn(role_list[1]['id'], roles_ids)
+ self.assertIn(role_list[5]['id'], roles_ids)
+
+ # Now the same thing for a domain scoped token
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ domain_id=domainA['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for i, ref in enumerate(token['roles']):
+ roles_ids.append(ref['id'])
+ self.assertEqual(len(token['roles']), 2)
+ self.assertIn(role_list[0]['id'], roles_ids)
+ self.assertIn(role_list[4]['id'], roles_ids)
+
+ # Finally, add user1 to the 2nd group, and get a new
+ # scoped token - the extra role should now be included
+ # by virtue of the 2nd group
+ self.identity_api.add_user_to_group(user1['id'],
+ group2['id'])
+ auth_data = self.build_authentication_request(
+ user_id=user1['id'],
+ password=user1['password'],
+ project_id=projectA['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidScopedTokenResponse(r)
+ roles_ids = []
+ for i, ref in enumerate(token['roles']):
+ roles_ids.append(ref['id'])
+ self.assertEqual(len(token['roles']), 3)
+ self.assertIn(role_list[1]['id'], roles_ids)
+ self.assertIn(role_list[5]['id'], roles_ids)
+ self.assertIn(role_list[7]['id'], roles_ids)
+
+ def test_project_id_scoped_token_with_user_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_project_id_scoped_token_with_user_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_domain_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_id_scoped_token_with_user_domain_name(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_domain_id(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=self.domain['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_name_scoped_token_with_user_domain_name(self):
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=self.domain['name'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_token_with_group_role(self):
+ group_id = uuid.uuid4().hex
+ group = self.new_group_ref(
+ domain_id=self.domain_id)
+ group['id'] = group_id
+ self.identity_api.create_group(group_id, group)
+
+ # add user to group
+ self.identity_api.add_user_to_group(self.user['id'], group['id'])
+
+ # grant the domain role to group
+ path = '/domains/%s/groups/%s/roles/%s' % (
+ self.domain['id'], group['id'], self.role['id'])
+ self.put(path=path)
+
+ # now get a domain-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_token_with_name(self):
+ # grant the domain role to user
+ path = '/domains/%s/users/%s/roles/%s' % (
+ self.domain['id'], self.user['id'], self.role['id'])
+ self.put(path=path)
+ # now get a domain-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_name=self.domain['name'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_domain_scope_failed(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_auth_with_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # test token auth
+ auth_data = self.build_authentication_request(token=token)
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_invalid_user_id(self):
+ auth_data = self.build_authentication_request(
+ user_id=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_invalid_user_name(self):
+ auth_data = self.build_authentication_request(
+ username=uuid.uuid4().hex,
+ user_domain_id=self.domain['id'],
+ password=self.user['password'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_invalid_domain_id(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_id=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_invalid_domain_name(self):
+ auth_data = self.build_authentication_request(
+ username=self.user['name'],
+ user_domain_name=uuid.uuid4().hex,
+ password=self.user['password'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_invalid_password(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=uuid.uuid4().hex)
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_remote_user_no_realm(self):
+ CONF.auth.methods = 'external'
+ api = auth.controllers.Auth()
+ auth_data = self.build_authentication_request()['auth']
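+ # external auth takes the user name from the REMOTE_USER variable that
+ # the web server places in the request context (WSGI environ)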
+ context = {'REMOTE_USER': self.default_domain_user['name']}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ api.authenticate(context, auth_info, auth_context)
+ self.assertEqual(auth_context['user_id'],
+ self.default_domain_user['id'])
+
+ def test_remote_user_no_domain(self):
+ auth_data = self.build_authentication_request()['auth']
+ api = auth.controllers.Auth()
+ context = {'REMOTE_USER': self.user['name']}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+ def test_remote_user_and_password(self):
+ #both REMOTE_USER and password methods must pass.
+ #note that they do not have to match
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.domain['id'],
+ username=self.user['name'],
+ password=self.user['password'])['auth']
+ api = auth.controllers.Auth()
+ context = {'REMOTE_USER': self.default_domain_user['name']}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ api.authenticate(context, auth_info, auth_context)
+
+ def test_remote_user_and_explicit_external(self):
+ # both requested methods must pass; the password method would succeed,
+ # but the external method cannot without REMOTE_USER in the context,
+ # so the request as a whole is unauthorized
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.domain['id'],
+ username=self.user['name'],
+ password=self.user['password'])['auth']
+ auth_data['identity']['methods'] = ["password", "external"]
+ auth_data['identity']['external'] = {}
+ api = auth.controllers.Auth()
+ context = {}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+ def test_remote_user_bad_password(self):
+ #both REMOTE_USER and password methods must pass.
+ auth_data = self.build_authentication_request(
+ user_domain_id=self.domain['id'],
+ username=self.user['name'],
+ password='badpassword')['auth']
+ api = auth.controllers.Auth()
+ context = {'REMOTE_USER': self.default_domain_user['name']}
+ auth_info = auth.controllers.AuthInfo(None, auth_data)
+ auth_context = {'extras': {}, 'method_names': []}
+ self.assertRaises(exception.Unauthorized,
+ api.authenticate,
+ context,
+ auth_info,
+ auth_context)
+
+ def test_bind_not_set_with_remote_user(self):
+ CONF.token.bind = []
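+ # with no bind types enabled, the issued token should carry no bind
+ # information even though REMOTE_USER is set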
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertNotIn('bind', token)
+
+ #TODO(ayoung): move to TestPKITokenAPIs; it will be run for both formats
+ def test_verify_with_bound_token(self):
+ self.opt_in_group('token', bind='kerberos')
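+ # enable kerberos binding so the issued token records the kerberos
+ # principal taken from REMOTE_USER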
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+
+ resp = self.post('/auth/tokens', body=auth_data)
+
+ token = resp.headers.get('X-Subject-Token')
+ headers = {'X-Subject-Token': token}
+ r = self.get('/auth/tokens', headers=headers, token=token)
+ token = self.assertValidProjectScopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'],
+ self.default_domain_user['name'])
+
+ def test_auth_with_bind_token(self):
+ CONF.token.bind = ['kerberos']
+
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ r = self.post('/auth/tokens', body=auth_data)
+
+ # the unscoped token should have bind information in it
+ token = self.assertValidUnscopedTokenResponse(r)
+ self.assertEqual(token['bind']['kerberos'], remote_user)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # using unscoped token with remote user succeeds
+ auth_params = {'token': token, 'project_id': self.project_id}
+ auth_data = self.build_authentication_request(**auth_params)
+ r = self.post('/auth/tokens', body=auth_data)
+ token = self.assertValidProjectScopedTokenResponse(r)
+
+ # the bind information should be carried over from the original token
+ self.assertEqual(token['bind']['kerberos'], remote_user)
+
+ def test_v2_v3_bind_token_intermix(self):
+ self.opt_in_group('token', bind='kerberos')
+
+ # we need our own user registered to the default domain because of
+ # the way external auth works.
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
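+ # with REMOTE_USER set, an otherwise empty v2 auth request is
+ # authenticated externally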
+ body = {'auth': {}}
+ resp = self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body)
+
+ v2_token_data = resp.result
+
+ bind = v2_token_data['access']['token']['bind']
+ self.assertEqual(bind['kerberos'], self.default_domain_user['name'])
+
+ v2_token_id = v2_token_data['access']['token']['id']
+ headers = {'X-Subject-Token': v2_token_id}
+ resp = self.get('/auth/tokens', headers=headers)
+ token_data = resp.result
+
+ self.assertDictEqual(v2_token_data['access']['token']['bind'],
+ token_data['token']['bind'])
+
+
+class TestAuthXML(TestAuthJSON):
+ content_type = 'xml'
+
+
+class TestTrustOptional(test_v3.RestfulTestCase):
+ def setUp(self, *args, **kwargs):
+ self.opt_in_group('trust', enabled=False)
+ super(TestTrustOptional, self).setUp(*args, **kwargs)
+
+ def test_trusts_404(self):
+ self.get('/OS-TRUST/trusts', body={'trust': {}}, expected_status=404)
+ self.post('/OS-TRUST/trusts', body={'trust': {}}, expected_status=404)
+
+ def test_auth_with_scope_in_trust_403(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ trust_id=uuid.uuid4().hex)
+ self.post('/auth/tokens', body=auth_data, expected_status=403)
+
+
+class TestTrustAuth(TestAuthInfo):
+ def setUp(self):
+ self.opt_in_group('trust', enabled=True)
+ super(TestTrustAuth, self).setUp(load_sample_data=True)
+
+ # create a trustee to delegate stuff to
+ self.trustee_user_id = uuid.uuid4().hex
+ self.trustee_user = self.new_user_ref(domain_id=self.domain_id)
+ self.trustee_user['id'] = self.trustee_user_id
+ self.identity_api.create_user(self.trustee_user_id, self.trustee_user)
+
+ def test_create_trust_400(self):
+ self.skipTest('Blocked by bug 1133435')
+ self.post('/OS-TRUST/trusts', body={'trust': {}}, expected_status=400)
+
+ def test_create_unscoped_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id)
+ del ref['id']
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ self.assertValidTrustResponse(r, ref)
+
+ def test_trust_crud(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ del ref['id']
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=200)
+ self.assertValidTrustResponse(r, ref)
+
+ # validate roles on the trust
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles' % {
+ 'trust_id': trust['id']},
+ expected_status=200)
+ roles = self.assertValidRoleListResponse(r, self.role)
+ self.assertIn(self.role['id'], [x['id'] for x in roles])
+ self.head(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ expected_status=204)
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ expected_status=200)
+ self.assertValidRoleResponse(r, self.role)
+
+ r = self.get('/OS-TRUST/trusts', expected_status=200)
+ self.assertValidTrustListResponse(r, trust)
+
+ # trusts are immutable
+ self.patch(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ body={'trust': ref},
+ expected_status=404)
+
+ self.delete(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=204)
+
+ self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=404)
+
+ def test_create_trust_trustee_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=uuid.uuid4().hex)
+ del ref['id']
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_trustor_trustee_backwards(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.trustee_user_id,
+ trustee_user_id=self.user_id)
+ del ref['id']
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=403)
+
+ def test_create_trust_project_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=uuid.uuid4().hex,
+ role_ids=[self.role_id])
+ del ref['id']
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_role_id_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[uuid.uuid4().hex])
+ del ref['id']
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_trust_role_name_404(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_names=[uuid.uuid4().hex])
+ del ref['id']
+ self.post('/OS-TRUST/trusts', body={'trust': ref}, expected_status=404)
+
+ def test_create_expired_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ expires=dict(seconds=-1),
+ role_ids=[self.role_id])
+ del ref['id']
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.default_domain_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, self.default_domain_user)
+
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+ def test_v3_v2_intermix_trustee_not_in_default_domain_failed(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.default_domain_project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ r = self.post('/auth/tokens', body=auth_data)
+ token = r.headers.get('X-Subject-Token')
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, self.trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+ def test_v3_v2_intermix_project_not_in_default_domain_failed(self):
+ # create a trustee in default domain to delegate stuff to
+ trustee_user_id = uuid.uuid4().hex
+ trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ trustee_user['id'] = trustee_user_id
+ self.identity_api.create_user(trustee_user_id, trustee_user)
+
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ r = self.post('/auth/tokens', body=auth_data)
+ token = r.headers.get('X-Subject-Token')
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=401)
+
+ def test_v3_v2_intermix(self):
+ # create a trustee in default domain to delegate stuff to
+ trustee_user_id = uuid.uuid4().hex
+ trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ trustee_user['id'] = trustee_user_id
+ self.identity_api.create_user(trustee_user_id, trustee_user)
+
+ ref = self.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=trustee_user_id,
+ project_id=self.default_domain_project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ r = self.post('/auth/tokens', body=auth_data)
+ token = r.headers.get('X-Subject-Token')
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(
+ r, trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token='ADMIN', method='GET', expected_status=200)
+
+ def test_exercise_trust_scoped_token_without_impersonation(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(r, self.trustee_user)
+ self.assertEqual(r.result['token']['user']['id'],
+ self.trustee_user['id'])
+ self.assertEqual(r.result['token']['user']['name'],
+ self.trustee_user['name'])
+ self.assertEqual(r.result['token']['user']['domain']['id'],
+ self.domain['id'])
+ self.assertEqual(r.result['token']['user']['domain']['name'],
+ self.domain['name'])
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+ self.assertEqual(r.result['token']['project']['name'],
+ self.project['name'])
+
+ def test_exercise_trust_scoped_token_with_impersonation(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+ self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ self.assertEqual(r.result['token']['user']['id'], self.user['id'])
+ self.assertEqual(r.result['token']['user']['name'], self.user['name'])
+ self.assertEqual(r.result['token']['user']['domain']['id'],
+ self.domain['id'])
+ self.assertEqual(r.result['token']['user']['domain']['name'],
+ self.domain['name'])
+ self.assertEqual(r.result['token']['project']['id'],
+ self.project['id'])
+ self.assertEqual(r.result['token']['project']['name'],
+ self.project['name'])
+
+ def test_delete_trust(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+
+ trust = self.assertValidTrustResponse(r, ref)
+
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=204)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=404)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_list_trusts(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ for i in range(0, 3):
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ self.assertValidTrustResponse(r, ref)
+
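+ # all three trusts name this user as trustor, and none as trustee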
+ r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=200)
+ trusts = r.result['trusts']
+ self.assertEqual(len(trusts), 3)
+
+ r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
+ self.user_id, expected_status=200)
+ trusts = r.result['trusts']
+ self.assertEqual(len(trusts), 0)
+
+ def test_change_password_invalidates_trust_tokens(self):
+ ref = self.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ del ref['id']
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.post('/auth/tokens', body=auth_data)
+
+ self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ trust_token = r.headers.get('X-Subject-Token')
+
+ self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=200,
+ token=trust_token)
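+ # changing the trustee's own password should invalidate the
+ # trust-scoped token obtained above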
+
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'])
+
+ self.assertValidUserResponse(
+ self.patch('/users/%s' % self.trustee_user['id'],
+ body={'user': {'password': uuid.uuid4().hex}},
+ auth=auth_data,
+ expected_status=200))
+
+ self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id, expected_status=401,
+ token=trust_token)
diff --git a/keystone/tests/test_v3_catalog.py b/keystone/tests/test_v3_catalog.py
new file mode 100644
index 00000000..408670ec
--- /dev/null
+++ b/keystone/tests/test_v3_catalog.py
@@ -0,0 +1,165 @@
+import uuid
+
+import test_v3
+
+
+class CatalogTestCase(test_v3.RestfulTestCase):
+ """Test service & endpoint CRUD."""
+
+ def setUp(self):
+ super(CatalogTestCase, self).setUp()
+
+ self.service_id = uuid.uuid4().hex
+ self.service = self.new_service_ref()
+ self.service['id'] = self.service_id
+ self.catalog_api.create_service(
+ self.service_id,
+ self.service.copy())
+
+ self.endpoint_id = uuid.uuid4().hex
+ self.endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.endpoint['id'] = self.endpoint_id
+ self.catalog_api.create_endpoint(
+ self.endpoint_id,
+ self.endpoint.copy())
+
+ # service crud tests
+
+ def test_create_service(self):
+ """Call ``POST /services``."""
+ ref = self.new_service_ref()
+ r = self.post(
+ '/services',
+ body={'service': ref})
+ return self.assertValidServiceResponse(r, ref)
+
+ def test_list_services(self):
+ """Call ``GET /services``."""
+ r = self.get('/services')
+ self.assertValidServiceListResponse(r, ref=self.service)
+
+ def test_list_services_xml(self):
+ """Call ``GET /services (xml data)``."""
+ r = self.get('/services', content_type='xml')
+ self.assertValidServiceListResponse(r, ref=self.service)
+
+ def test_get_service(self):
+ """Call ``GET /services/{service_id}``."""
+ r = self.get('/services/%(service_id)s' % {
+ 'service_id': self.service_id})
+ self.assertValidServiceResponse(r, self.service)
+
+ def test_update_service(self):
+ """Call ``PATCH /services/{service_id}``."""
+ service = self.new_service_ref()
+ del service['id']
+ r = self.patch('/services/%(service_id)s' % {
+ 'service_id': self.service_id},
+ body={'service': service})
+ self.assertValidServiceResponse(r, service)
+
+ def test_delete_service(self):
+ """Call ``DELETE /services/{service_id}``."""
+ self.delete('/services/%(service_id)s' % {
+ 'service_id': self.service_id})
+
+ # endpoint crud tests
+
+ def test_list_endpoints(self):
+ """Call ``GET /endpoints``."""
+ r = self.get('/endpoints')
+ self.assertValidEndpointListResponse(r, ref=self.endpoint)
+
+ def test_list_endpoints_xml(self):
+ """Call ``GET /endpoints`` (xml data)."""
+ r = self.get('/endpoints', content_type='xml')
+ self.assertValidEndpointListResponse(r, ref=self.endpoint)
+
+ def test_create_endpoint(self):
+ """Call ``POST /endpoints``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ r = self.post(
+ '/endpoints',
+ body={'endpoint': ref})
+ self.assertValidEndpointResponse(r, ref)
+
+ def assertValidErrorResponse(self, response):
+ self.assertTrue(response.status_code in [400])
+
+ def test_create_endpoint_400(self):
+ """Call ``POST /endpoints``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref["region"] = "0" * 256
+ self.post('/endpoints', body={'endpoint': ref}, expected_status=400)
+
+ def test_get_endpoint(self):
+ """Call ``GET /endpoints/{endpoint_id}``."""
+ r = self.get(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+ self.assertValidEndpointResponse(r, self.endpoint)
+
+ def test_update_endpoint(self):
+ """Call ``PATCH /endpoints/{endpoint_id}``."""
+ ref = self.new_endpoint_ref(service_id=self.service_id)
+ del ref['id']
+ r = self.patch(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id},
+ body={'endpoint': ref})
+ self.assertValidEndpointResponse(r, ref)
+
+ def test_delete_endpoint(self):
+ """Call ``DELETE /endpoints/{endpoint_id}``."""
+ self.delete(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+
+ def test_create_endpoint_on_v2(self):
+ # clear the v3 endpoint so we only have endpoints created on v2
+ self.delete(
+ '/endpoints/%(endpoint_id)s' % {
+ 'endpoint_id': self.endpoint_id})
+
+ # create a v3 endpoint ref, and then tweak it back to a v2-style ref
+ ref = self.new_endpoint_ref(service_id=self.service['id'])
+ del ref['id']
+ del ref['interface']
+ ref['publicurl'] = ref.pop('url')
+ ref['internalurl'] = None
+ # don't set adminurl to ensure its absence is handled like internalurl
+
+ # create the endpoint on v2 (using a v3 token)
+ r = self.admin_request(
+ method='POST',
+ path='/v2.0/endpoints',
+ token=self.get_scoped_token(),
+ body={'endpoint': ref})
+ endpoint_v2 = r.result['endpoint']
+
+ # test the endpoint on v3
+ r = self.get('/endpoints')
+ endpoints = self.assertValidEndpointListResponse(r)
+ self.assertEqual(len(endpoints), 1)
+ endpoint_v3 = endpoints.pop()
+
+ # these attributes are identical between both APIs
+ self.assertEqual(endpoint_v3['region'], ref['region'])
+ self.assertEqual(endpoint_v3['service_id'], ref['service_id'])
+ self.assertEqual(endpoint_v3['description'], ref['description'])
+
+ # a v2 endpoint is not quite the same concept as a v3 endpoint, so they
+ # receive different identifiers
+ self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
+
+ # v2 has a publicurl; v3 has a url + interface type
+ self.assertEqual(endpoint_v3['url'], ref['publicurl'])
+ self.assertEqual(endpoint_v3['interface'], 'public')
+
+ # tests for bug 1152632 -- these attributes were being returned by v3
+ self.assertNotIn('publicurl', endpoint_v3)
+ self.assertNotIn('adminurl', endpoint_v3)
+ self.assertNotIn('internalurl', endpoint_v3)
+
+ # test for bug 1152635 -- this attribute was being returned by v3
+ self.assertNotIn('legacy_endpoint_id', endpoint_v3)
diff --git a/keystone/tests/test_v3_credential.py b/keystone/tests/test_v3_credential.py
new file mode 100644
index 00000000..6040cca3
--- /dev/null
+++ b/keystone/tests/test_v3_credential.py
@@ -0,0 +1,78 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import test_v3
+
+
+class CredentialTestCase(test_v3.RestfulTestCase):
+ """Test credential CRUD."""
+ def setUp(self):
+
+ super(CredentialTestCase, self).setUp()
+
+ self.credential_id = uuid.uuid4().hex
+ self.credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential['id'] = self.credential_id
+ self.credential_api.create_credential(
+ self.credential_id,
+ self.credential)
+
+ def test_list_credentials(self):
+ """Call ``GET /credentials``."""
+ r = self.get('/credentials')
+ self.assertValidCredentialListResponse(r, ref=self.credential)
+
+ def test_list_credentials_xml(self):
+ """Call ``GET /credentials`` (xml data)."""
+ r = self.get('/credentials', content_type='xml')
+ self.assertValidCredentialListResponse(r, ref=self.credential)
+
+ def test_create_credential(self):
+ """Call ``POST /credentials``."""
+ ref = self.new_credential_ref(user_id=self.user['id'])
+ r = self.post(
+ '/credentials',
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+
+ def test_get_credential(self):
+ """Call ``GET /credentials/{credential_id}``."""
+ r = self.get(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id})
+ self.assertValidCredentialResponse(r, self.credential)
+
+ def test_update_credential(self):
+ """Call ``PATCH /credentials/{credential_id}``."""
+ ref = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ del ref['id']
+ r = self.patch(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id},
+ body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
+
+ def test_delete_credential(self):
+ """Call ``DELETE /credentials/{credential_id}``."""
+ self.delete(
+ '/credentials/%(credential_id)s' % {
+ 'credential_id': self.credential_id})
diff --git a/keystone/tests/test_v3_identity.py b/keystone/tests/test_v3_identity.py
new file mode 100644
index 00000000..f1e19c42
--- /dev/null
+++ b/keystone/tests/test_v3_identity.py
@@ -0,0 +1,1557 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone import config
+from keystone import exception
+
+import test_v3
+
+
+def _build_role_assignment_url_and_entity(
+ role_id, user_id=None, group_id=None, domain_id=None,
+ project_id=None, inherited_to_projects=False,
+ effective=False):
+
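+ # Helper: build the grant URL and the role_assignment entity expected in
+ # list responses for the given actor (user or group) and target (domain
+ # or project); inherited_to_projects switches to the OS-INHERIT form of
+ # the URL and, for non-effective listings, marks the entity as inherited
+ # to projects.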
+ if user_id and domain_id:
+ url = ('/domains/%(domain_id)s/users/%(user_id)s'
+ '/roles/%(role_id)s' % {
+ 'domain_id': domain_id,
+ 'user_id': user_id,
+ 'role_id': role_id})
+ entity = {'role': {'id': role_id},
+ 'user': {'id': user_id},
+ 'scope': {'domain': {'id': domain_id}}}
+ if inherited_to_projects:
+ url = '/OS-INHERIT%s/inherited_to_projects' % url
+ if not effective:
+ entity['OS-INHERIT:inherited_to'] = 'projects'
+ elif user_id and project_id:
+ url = ('/projects/%(project_id)s/users/%(user_id)s'
+ '/roles/%(role_id)s' % {
+ 'project_id': project_id,
+ 'user_id': user_id,
+ 'role_id': role_id})
+ entity = {'role': {'id': role_id},
+ 'user': {'id': user_id},
+ 'scope': {'project': {'id': project_id}}}
+ if group_id and domain_id:
+ url = ('/domains/%(domain_id)s/groups/%(group_id)s'
+ '/roles/%(role_id)s' % {
+ 'domain_id': domain_id,
+ 'group_id': group_id,
+ 'role_id': role_id})
+ entity = {'role': {'id': role_id},
+ 'group': {'id': group_id},
+ 'scope': {'domain': {'id': domain_id}}}
+ if inherited_to_projects:
+ url = '/OS-INHERIT%s/inherited_to_projects' % url
+ if not effective:
+ entity['OS-INHERIT:inherited_to'] = 'projects'
+ elif group_id and project_id:
+ url = ('/projects/%(project_id)s/groups/%(group_id)s'
+ '/roles/%(role_id)s' % {
+ 'project_id': project_id,
+ 'group_id': group_id,
+ 'role_id': role_id})
+ entity = {'role': {'id': role_id},
+ 'group': {'id': group_id},
+ 'scope': {'project': {'id': project_id}}}
+ return (url, entity)
+
+
+class IdentityTestCase(test_v3.RestfulTestCase):
+ """Test domains, projects, users, groups, & role CRUD."""
+
+ def setUp(self):
+ super(IdentityTestCase, self).setUp()
+
+ self.group_id = uuid.uuid4().hex
+ self.group = self.new_group_ref(
+ domain_id=self.domain_id)
+ self.group['id'] = self.group_id
+ self.identity_api.create_group(self.group_id, self.group)
+
+ self.credential_id = uuid.uuid4().hex
+ self.credential = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential['id'] = self.credential_id
+ self.credential_api.create_credential(
+ self.credential_id,
+ self.credential)
+
+ # domain crud tests
+
+ def test_create_domain(self):
+ """Call ``POST /domains``."""
+ ref = self.new_domain_ref()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ return self.assertValidDomainResponse(r, ref)
+
+ def test_create_domain_400(self):
+ """Call ``POST /domains``."""
+ self.post('/domains', body={'domain': {}}, expected_status=400)
+
+ def test_list_domains(self):
+ """Call ``GET /domains``."""
+ r = self.get('/domains')
+ self.assertValidDomainListResponse(r, ref=self.domain)
+
+ def test_list_domains_xml(self):
+ """Call ``GET /domains (xml data)``."""
+ r = self.get('/domains', content_type='xml')
+ self.assertValidDomainListResponse(r, ref=self.domain)
+
+ def test_get_domain(self):
+ """Call ``GET /domains/{domain_id}``."""
+ r = self.get('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id})
+ self.assertValidDomainResponse(r, self.domain)
+
+ def test_update_domain(self):
+ """Call ``PATCH /domains/{domain_id}``."""
+ ref = self.new_domain_ref()
+ del ref['id']
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ def test_disable_domain(self):
+ """Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
+ # Create a 2nd set of entities in a 2nd domain
+ self.domain2 = self.new_domain_ref()
+ self.identity_api.create_domain(self.domain2['id'], self.domain2)
+
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain2['id'])
+ self.identity_api.create_project(self.project2['id'], self.project2)
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain2['id'],
+ project_id=self.project2['id'])
+ self.identity_api.create_user(self.user2['id'], self.user2)
+
+ self.identity_api.add_user_to_project(self.project2['id'],
+ self.user2['id'])
+
+ # First check a user in that domain can authenticate, via
+ # Both v2 and v3
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user2['id'],
+ 'password': self.user2['password']
+ },
+ 'tenantId': self.project2['id']
+ }
+ }
+ self.admin_request(path='/v2.0/tokens', method='POST', body=body)
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.post('/auth/tokens', body=auth_data)
+
+ # Now disable the domain
+ self.domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, self.domain2)
+
+ # Make sure the user can no longer authenticate, via
+ # either API
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': self.user2['id'],
+ 'password': self.user2['password']
+ },
+ 'tenantId': self.project2['id']
+ }
+ }
+ self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body, expected_status=401)
+
+ # Try looking up in v3 by name and id
+ auth_data = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ auth_data = self.build_authentication_request(
+ username=self.user2['name'],
+ user_domain_id=self.domain2['id'],
+ password=self.user2['password'],
+ project_id=self.project2['id'])
+ self.post('/auth/tokens', body=auth_data, expected_status=401)
+
+ def test_delete_enabled_domain_fails(self):
+ """Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
+
+ # Try deleting an enabled domain, which should fail
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain['id']},
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_delete_domain(self):
+ """Call ``DELETE /domains/{domain_id}``.
+
+ The sample data set up already has a user, group, project
+ and credential that are part of self.domain. Since the user
+ we will authenticate with is in this domain, we create
+ another set of entities in a second domain. Deleting this
+ second domain should delete all these new entities. In addition,
+ all the entities in the regular self.domain should be unaffected
+ by the delete.
+
+ Test Plan:
+ - Create domain2 and a 2nd set of entities
+ - Disable domain2
+ - Delete domain2
+ - Check entities in domain2 have been deleted
+ - Check entities in self.domain are unaffected
+
+ """
+
+ # Create a 2nd set of entities in a 2nd domain
+ self.domain2 = self.new_domain_ref()
+ self.identity_api.create_domain(self.domain2['id'], self.domain2)
+
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain2['id'])
+ self.identity_api.create_project(self.project2['id'], self.project2)
+
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain2['id'],
+ project_id=self.project2['id'])
+ self.identity_api.create_user(self.user2['id'], self.user2)
+
+ self.group2 = self.new_group_ref(
+ domain_id=self.domain2['id'])
+ self.identity_api.create_group(self.group2['id'], self.group2)
+
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user2['id'],
+ project_id=self.project2['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+
+ # Now disable the new domain and delete it
+ self.domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, self.domain2)
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain2['id']})
+
+ # Check all the domain2 relevant entities are gone
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.get_domain,
+ self.domain2['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.identity_api.get_project,
+ self.project2['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ self.group2['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ self.user2['id'])
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ self.credential2['id'])
+
+ # ...and that all self.domain entities are still here
+ r = self.identity_api.get_domain(self.domain['id'])
+ self.assertDictEqual(r, self.domain)
+ r = self.identity_api.get_project(self.project['id'])
+ self.assertDictEqual(r, self.project)
+ r = self.identity_api.get_group(self.group['id'])
+ self.assertDictEqual(r, self.group)
+ r = self.identity_api.get_user(self.user['id'])
+ self.user.pop('password')
+ self.assertDictEqual(r, self.user)
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+
+ # project crud tests
+
+ def test_list_projects(self):
+ """Call ``GET /projects``."""
+ r = self.get('/projects')
+ self.assertValidProjectListResponse(r, ref=self.project)
+
+ def test_list_projects_xml(self):
+ """Call ``GET /projects`` (xml data)."""
+ r = self.get('/projects', content_type='xml')
+ self.assertValidProjectListResponse(r, ref=self.project)
+
+ def test_create_project(self):
+ """Call ``POST /projects``."""
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/projects',
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_create_project_400(self):
+ """Call ``POST /projects``."""
+ self.post('/projects', body={'project': {}}, expected_status=400)
+
+ def test_get_project(self):
+ """Call ``GET /projects/{project_id}``."""
+ r = self.get(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+ self.assertValidProjectResponse(r, self.project)
+
+ def test_update_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ ref = self.new_project_ref(domain_id=self.domain_id)
+ del ref['id']
+ r = self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_delete_project(self):
+ """Call ``DELETE /projects/{project_id}
+
+ As well as making sure the delete succeeds, we ensure
+ that any credentials that reference this projects are
+ also deleted, while other credentials are unaffected.
+
+ """
+ # First check the credential for this project is present
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+ # Create a second credential with a different project
+ self.project2 = self.new_project_ref(
+ domain_id=self.domain['id'])
+ self.identity_api.create_project(self.project2['id'], self.project2)
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user['id'],
+ project_id=self.project2['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+
+ # Now delete the project
+ self.delete(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+
+ # Deleting the project should have deleted any credentials
+ # that reference this project
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential_id=self.credential['id'])
+ # But the credential for project2 is unaffected
+ r = self.credential_api.get_credential(self.credential2['id'])
+ self.assertDictEqual(r, self.credential2)
+
+ # user crud tests
+
+ def test_create_user(self):
+ """Call ``POST /users``."""
+ ref = self.new_user_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/users',
+ body={'user': ref})
+ return self.assertValidUserResponse(r, ref)
+
+ def test_create_user_400(self):
+ """Call ``POST /users``."""
+ self.post('/users', body={'user': {}}, expected_status=400)
+
+ def test_list_users(self):
+ """Call ``GET /users``."""
+ r = self.get('/users')
+ self.assertValidUserListResponse(r, ref=self.user)
+
+ def test_list_users_xml(self):
+ """Call ``GET /users`` (xml data)."""
+ r = self.get('/users', content_type='xml')
+ self.assertValidUserListResponse(r, ref=self.user)
+
+ def test_get_user(self):
+ """Call ``GET /users/{user_id}``."""
+ r = self.get('/users/%(user_id)s' % {
+ 'user_id': self.user['id']})
+ self.assertValidUserResponse(r, self.user)
+
+ def test_add_user_to_group(self):
+ """Call ``PUT /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_list_groups_for_user(self):
+ """Call ``GET /users/{user_id}/groups``."""
+
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user1['id']})
+
+ #Scenarios below are written to test the default policy configuration
+
+ #One should be allowed to list one's own groups
+ auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'])
+ r = self.get('/users/%(user_id)s/groups' % {
+ 'user_id': self.user1['id']}, auth=auth)
+ self.assertValidGroupListResponse(r, ref=self.group)
+
+ #Administrator is allowed to list others' groups
+ r = self.get('/users/%(user_id)s/groups' % {
+ 'user_id': self.user1['id']})
+ self.assertValidGroupListResponse(r, ref=self.group)
+
+ #Ordinary users should not be allowed to list others' groups
+ auth = self.build_authentication_request(
+ user_id=self.user2['id'],
+ password=self.user2['password'])
+ r = self.get('/users/%(user_id)s/groups' % {
+ 'user_id': self.user1['id']}, auth=auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_check_user_in_group(self):
+ """Call ``HEAD /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ self.head('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_list_users_in_group(self):
+ """Call ``GET /groups/{group_id}/users``."""
+ r = self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ r = self.get('/groups/%(group_id)s/users' % {
+ 'group_id': self.group_id})
+ self.assertValidUserListResponse(r, ref=self.user)
+ self.assertIn('/groups/%(group_id)s/users' % {
+ 'group_id': self.group_id}, r.result['links']['self'])
+
+ def test_remove_user_from_group(self):
+ """Call ``DELETE /groups/{group_id}/users/{user_id}``."""
+ self.put('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+ self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
+ 'group_id': self.group_id, 'user_id': self.user['id']})
+
+ def test_update_user(self):
+ """Call ``PATCH /users/{user_id}``."""
+ user = self.new_user_ref(domain_id=self.domain_id)
+ del user['id']
+ r = self.patch('/users/%(user_id)s' % {
+ 'user_id': self.user['id']},
+ body={'user': user})
+ self.assertValidUserResponse(r, user)
+
+ def test_delete_user(self):
+ """Call ``DELETE /users/{user_id}``.
+
+ As well as making sure the delete succeeds, we ensure
+ that any credentials that reference this user are
+ also deleted, while other credentials are unaffected.
+ In addition, no tokens should remain valid for this user.
+
+ """
+ # First check the credential for this user is present
+ r = self.credential_api.get_credential(self.credential['id'])
+ self.assertDictEqual(r, self.credential)
+ # Create a second credential with a different user
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'],
+ project_id=self.project['id'])
+ self.identity_api.create_user(self.user2['id'], self.user2)
+ self.credential2 = self.new_credential_ref(
+ user_id=self.user2['id'],
+ project_id=self.project['id'])
+ self.credential_api.create_credential(
+ self.credential2['id'],
+ self.credential2)
+ # Create a token for this user which we can check later
+ # gets deleted
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id'])
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+ # Confirm token is valid for now
+ self.head('/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=204)
+
+ # Now delete the user
+ self.delete('/users/%(user_id)s' % {
+ 'user_id': self.user['id']})
+
+ # Deleting the user should have deleted any credentials
+ # that reference this user
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ self.credential['id'])
+ # And that no tokens remain valid for this user
+ tokens = self.token_api.list_tokens(self.user['id'])
+ self.assertEquals(len(tokens), 0)
+ # But the credential for user2 is unaffected
+ r = self.credential_api.get_credential(self.credential2['id'])
+ self.assertDictEqual(r, self.credential2)
+
+ # group crud tests
+
+ def test_create_group(self):
+ """Call ``POST /groups``."""
+ ref = self.new_group_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/groups',
+ body={'group': ref})
+ return self.assertValidGroupResponse(r, ref)
+
+ def test_create_group_400(self):
+ """Call ``POST /groups``."""
+ self.post('/groups', body={'group': {}}, expected_status=400)
+
+ def test_list_groups(self):
+ """Call ``GET /groups``."""
+ r = self.get('/groups')
+ self.assertValidGroupListResponse(r, ref=self.group)
+
+ def test_list_groups_xml(self):
+ """Call ``GET /groups`` (xml data)."""
+ r = self.get('/groups', content_type='xml')
+ self.assertValidGroupListResponse(r, ref=self.group)
+
+ def test_get_group(self):
+ """Call ``GET /groups/{group_id}``."""
+ r = self.get('/groups/%(group_id)s' % {
+ 'group_id': self.group_id})
+ self.assertValidGroupResponse(r, self.group)
+
+ def test_update_group(self):
+ """Call ``PATCH /groups/{group_id}``."""
+ group = self.new_group_ref(domain_id=self.domain_id)
+ del group['id']
+ r = self.patch('/groups/%(group_id)s' % {
+ 'group_id': self.group_id},
+ body={'group': group})
+ self.assertValidGroupResponse(r, group)
+
+ def test_delete_group(self):
+ """Call ``DELETE /groups/{group_id}``."""
+ self.delete('/groups/%(group_id)s' % {
+ 'group_id': self.group_id})
+
+ # role crud tests
+
+ def test_create_role(self):
+ """Call ``POST /roles``."""
+ ref = self.new_role_ref()
+ r = self.post(
+ '/roles',
+ body={'role': ref})
+ return self.assertValidRoleResponse(r, ref)
+
+ def test_create_role_400(self):
+ """Call ``POST /roles``."""
+ self.post('/roles', body={'role': {}}, expected_status=400)
+
+ def test_list_roles(self):
+ """Call ``GET /roles``."""
+ r = self.get('/roles')
+ self.assertValidRoleListResponse(r, ref=self.role)
+
+ def test_list_roles_xml(self):
+ """Call ``GET /roles`` (xml data)."""
+ r = self.get('/roles', content_type='xml')
+ self.assertValidRoleListResponse(r, ref=self.role)
+
+ def test_get_role(self):
+ """Call ``GET /roles/{role_id}``."""
+ r = self.get('/roles/%(role_id)s' % {
+ 'role_id': self.role_id})
+ self.assertValidRoleResponse(r, self.role)
+
+ def test_update_role(self):
+ """Call ``PATCH /roles/{role_id}``."""
+ ref = self.new_role_ref()
+ del ref['id']
+ r = self.patch('/roles/%(role_id)s' % {
+ 'role_id': self.role_id},
+ body={'role': ref})
+ self.assertValidRoleResponse(r, ref)
+
+ def test_delete_role(self):
+ """Call ``DELETE /roles/{role_id}``."""
+ self.delete('/roles/%(role_id)s' % {
+ 'role_id': self.role_id})
+
+ def test_crud_user_project_role_grants(self):
+ collection_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': self.project['id'],
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ # FIXME(gyee): this test is no longer valid as user
+ # have no role in the project. Can't get a scoped token
+ #self.delete(member_url)
+ #r = self.get(collection_url)
+ #self.assertValidRoleListResponse(r, expected_length=0)
+ #self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_crud_user_domain_role_grants(self):
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_crud_group_project_role_grants(self):
+ collection_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
+ 'project_id': self.project_id,
+ 'group_id': self.group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_crud_group_domain_role_grants(self):
+ collection_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'group_id': self.group_id})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_get_role_assignments(self):
+ """Call ``GET /role_assignments``.
+
+ The sample data set up already has a user, group and project
+ that is part of self.domain. We use these plus a new user
+ we create as our data set, making sure we ignore any
+ role assignments that are already in existence.
+
+ Since we don't yet support a first class entity for role
+ assignments, we are only testing the LIST API. To create
+ and delete the role assignments we use the old grant APIs.
+
+ Test Plan:
+ - Create extra user for tests
+ - Get a list of all existing role assignments
+ - Add a new assignment for each of the four combinations, i.e.
+ group+domain, user+domain, group+project, user+project, using
+ the same role each time
+ - Get a new list of all role assignments, checking these four new
+ ones have been added
+ - Then delete the four we added
+ - Get a new list of all role assignments, checking the four have
+ been removed
+
+ """
+
+ # Since the default fixtures already assign some roles to the
+ # user it creates, we also need a new user that will not have any
+ # existing assignments
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertIn(collection_url, r.result['links']['self'])
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ # Now add one of each of the four types of assignment, making sure
+ # that we get them all back.
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 1)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+
+ ud_url, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, user_id=self.user1['id'],
+ role_id=self.role_id)
+ self.put(ud_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 2)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+
+ gp_url, gp_entity = _build_role_assignment_url_and_entity(
+ project_id=self.project_id, group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gp_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 3)
+ self.assertRoleAssignmentInListResponse(r, gp_entity, link_url=gp_url)
+
+ up_url, up_entity = _build_role_assignment_url_and_entity(
+ project_id=self.project_id, user_id=self.user1['id'],
+ role_id=self.role_id)
+ self.put(up_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 4)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+
+ # Now delete the four we added and make sure they are removed
+ # from the collection.
+
+ self.delete(gd_url)
+ self.delete(ud_url)
+ self.delete(gp_url)
+ self.delete(up_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments)
+ self.assertRoleAssignmentNotInListResponse(r, gd_entity)
+ self.assertRoleAssignmentNotInListResponse(r, ud_entity)
+ self.assertRoleAssignmentNotInListResponse(r, gp_entity)
+ self.assertRoleAssignmentNotInListResponse(r, up_entity)
+
+ def test_get_effective_role_assignments(self):
+ """Call ``GET /role_assignments?effective``.
+
+ Test Plan:
+        - Create two extra users for tests
+ - Add these users to a group
+ - Add a role assignment for the group on a domain
+ - Get a list of all role assignments, checking one has been added
+ - Then get a list of all effective role assignments - the group
+ assignment should have turned into assignments on the domain
+ for each of the group members.
+
+ """
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+        self.identity_api.add_user_to_group(self.user1['id'],
+                                            self.group['id'])
+        self.identity_api.add_user_to_group(self.user2['id'],
+                                            self.group['id'])
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertIn(collection_url, r.result['links']['self'])
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 1)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+
+ # Now re-read the collection asking for effective roles - this
+ # should mean the group assignment is translated into the two
+ # member user assignments
+ collection_url = '/role_assignments?effective'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 2)
+ unused, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, user_id=self.user1['id'],
+ role_id=self.role_id)
+ gd_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group['id'],
+ role_id=self.role_id)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=gd_url)
+ ud_url, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, user_id=self.user2['id'],
+ role_id=self.role_id)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=gd_url)
+
+ def test_check_effective_values_for_role_assignments(self):
+ """Call ``GET /role_assignments?effective=value``.
+
+ Check the various ways of specifying the 'effective'
+ query parameter. If the 'effective' query parameter
+        is included then this should always be treated as
+        meaning 'True' unless it is specified as:
+
+ {url}?effective=0
+
+ This is by design to match the agreed way of handling
+ policy checking on query/filter parameters.
+
+ Test Plan:
+        - Create two extra users for tests
+ - Add these users to a group
+ - Add a role assignment for the group on a domain
+ - Get a list of all role assignments, checking one has been added
+        - Then issue various requests with different ways of defining
+ the 'effective' query parameter. As we have tested the
+ correctness of the data coming back when we get effective roles
+ in other tests, here we just use the count of entities to
+ know if we are getting effective roles or not
+
+ """
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+        self.identity_api.add_user_to_group(self.user1['id'],
+                                            self.group['id'])
+        self.identity_api.add_user_to_group(self.user2['id'],
+                                            self.group['id'])
+
+ collection_url = '/role_assignments'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ existing_assignments = len(r.result.get('role_assignments'))
+
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group_id,
+ role_id=self.role_id)
+ self.put(gd_url)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 1)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+
+ # Now re-read the collection asking for effective roles,
+        # using the most common way of defining 'effective'. This
+ # should mean the group assignment is translated into the two
+ # member user assignments
+ collection_url = '/role_assignments?effective'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 2)
+ # Now set 'effective' to false explicitly - should get
+ # back the regular roles
+ collection_url = '/role_assignments?effective=0'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 1)
+        # Now try setting 'effective' to 'False' explicitly - this is
+ # NOT supported as a way of setting a query or filter
+ # parameter to false by design. Hence we should get back
+ # effective roles.
+ collection_url = '/role_assignments?effective=False'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 2)
+ # Now set 'effective' to True explicitly
+ collection_url = '/role_assignments?effective=True'
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')),
+ existing_assignments + 2)
+
+ def test_filtered_role_assignments(self):
+ """Call ``GET /role_assignments?filters``.
+
+ Test Plan:
+        - Create extra users, a group, roles and a project for tests
+ - Make the following assignments:
+ Give group1, role1 on project1 and domain
+ Give user1, role2 on project1 and domain
+          Make user1 a member of group1
+ - Test a series of single filter list calls, checking that
+ the correct results are obtained
+ - Test a multi-filtered list call
+ - Test listing all effective roles for a given user
+ - Test the equivalent of the list of roles in a project scoped
+ token (all effective roles for a user on a project)
+
+ """
+
+ # Since the default fixtures already assign some roles to the
+ # user it creates, we also need a new user that will not have any
+ # existing assignments
+ self.user1 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+ self.user2 = self.new_user_ref(
+ domain_id=self.domain['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+ self.group1 = self.new_group_ref(
+ domain_id=self.domain['id'])
+ self.identity_api.create_group(self.group1['id'], self.group1)
+ self.identity_api.add_user_to_group(self.user1['id'],
+ self.group1['id'])
+ self.identity_api.add_user_to_group(self.user2['id'],
+ self.group1['id'])
+ self.project1 = self.new_project_ref(
+ domain_id=self.domain['id'])
+ self.identity_api.create_project(self.project1['id'], self.project1)
+ self.role1 = self.new_role_ref()
+ self.identity_api.create_role(self.role1['id'], self.role1)
+ self.role2 = self.new_role_ref()
+ self.identity_api.create_role(self.role2['id'], self.role2)
+
+ # Now add one of each of the four types of assignment
+
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ self.put(gd_url)
+
+ ud_url, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, user_id=self.user1['id'],
+ role_id=self.role2['id'])
+ self.put(ud_url)
+
+ gp_url, gp_entity = _build_role_assignment_url_and_entity(
+ project_id=self.project1['id'], group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ self.put(gp_url)
+
+ up_url, up_entity = _build_role_assignment_url_and_entity(
+ project_id=self.project1['id'], user_id=self.user1['id'],
+ role_id=self.role2['id'])
+ self.put(up_url)
+
+ # Now list by various filters to make sure we get back the right ones
+
+ collection_url = ('/role_assignments?scope.project.id=%s' %
+ self.project1['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+ self.assertRoleAssignmentInListResponse(r, gp_entity, link_url=gp_url)
+
+ collection_url = ('/role_assignments?scope.domain.id=%s' %
+ self.domain['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+
+ collection_url = '/role_assignments?user.id=%s' % self.user1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+
+ collection_url = '/role_assignments?group.id=%s' % self.group1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+ self.assertRoleAssignmentInListResponse(r, gp_entity, link_url=gp_url)
+
+ collection_url = '/role_assignments?role.id=%s' % self.role1['id']
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+ self.assertRoleAssignmentInListResponse(r, gp_entity, link_url=gp_url)
+
+        # Let's try combining two filters together....
+
+ collection_url = (
+ '/role_assignments?user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': self.user1['id'],
+ 'project_id': self.project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 1)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+
+ # Now for a harder one - filter for user with effective
+        # roles - this should return role assignments that were directly
+        # assigned as well as those gained by virtue of group membership
+
+ collection_url = ('/role_assignments?effective&user.id=%s' %
+ self.user1['id'])
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 4)
+ # Should have the two direct roles...
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+ # ...and the two via group membership...
+ unused, up1_entity = _build_role_assignment_url_and_entity(
+ project_id=self.project1['id'], user_id=self.user1['id'],
+ role_id=self.role1['id'])
+ unused, ud1_entity = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, user_id=self.user1['id'],
+ role_id=self.role1['id'])
+ gp1_url, unused = _build_role_assignment_url_and_entity(
+ project_id=self.project1['id'], group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ gd1_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=self.domain_id, group_id=self.group1['id'],
+ role_id=self.role1['id'])
+ self.assertRoleAssignmentInListResponse(r, up1_entity,
+ link_url=gp1_url)
+ self.assertRoleAssignmentInListResponse(r, ud1_entity,
+ link_url=gd1_url)
+
+ # ...and for the grand-daddy of them all, simulate the request
+ # that would generate the list of effective roles in a project
+ # scoped token.
+
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': self.user1['id'],
+ 'project_id': self.project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ # Should have one direct role and one from group membership...
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=up_url)
+ self.assertRoleAssignmentInListResponse(r, up1_entity,
+ link_url=gp1_url)
+
+
+class IdentityInheritanceTestCase(test_v3.RestfulTestCase):
+ """Test inheritance crud and its effects."""
+
+ def setUp(self):
+ self.orig_extension_enablement = config.CONF.os_inherit.enabled
+ self.opt_in_group('os_inherit', enabled=True)
+        super(IdentityInheritanceTestCase, self).setUp()
+
+ def tearDown(self):
+        super(IdentityInheritanceTestCase, self).tearDown()
+ self.opt_in_group('os_inherit', enabled=self.orig_extension_enablement)
+
+ def test_crud_user_inherited_domain_role_grants(self):
+ role_list = []
+ for _ in range(2):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+ role_list.append(role)
+
+        # Create a non-inherited grant as a spoiler
+ self.assignment_api.create_grant(
+ role_list[1]['id'], user_id=self.user['id'],
+ domain_id=self.domain_id)
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[0]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+
+ # Check we can read it back
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[0])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+        # Now delete and check it's gone
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, expected_length=0)
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ def test_crud_inherited_role_grants_failed_if_disabled(self):
+ # Disable the extension and check no API calls can be issued
+ self.opt_in_group('os_inherit', enabled=False)
+        super(IdentityInheritanceTestCase, self).setUp()
+
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url, expected_status=404)
+ self.head(member_url, expected_status=404)
+ self.get(collection_url, expected_status=404)
+ self.delete(member_url, expected_status=404)
+
+ def test_list_role_assignments_for_inherited_domain_grants(self):
+ """Call ``GET /role_assignments with inherited domain grants``.
+
+ Test Plan:
+ - Create 4 roles
+ - Create a domain with a user and two projects
+ - Assign two direct roles to project1
+ - Assign a spoiler role to project2
+        - Issue the URL to add an inherited role to the domain
+ - Issue the URL to check it is indeed on the domain
+ - Issue the URL to check effective roles on project1 - this
+ should return 3 roles.
+
+ """
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.identity_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user1['id'], user1)
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ..and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ # Now use the list domain role assignments api to check if this
+ # is included
+ collection_url = (
+ '/role_assignments?user.id=%(user_id)s'
+ '&scope.domain.id=%(domain_id)s' % {
+ 'user_id': user1['id'],
+ 'domain_id': domain['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 1)
+ ud_url, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+
+ # Now ask for effective list role assignments - the role should
+ # turn into a project role, along with the two direct roles that are
+ # on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 3)
+ # An effective role for an inherited role will be a project
+ # entity, with a domain link to the inherited assignment
+ unused, up_entity = _build_role_assignment_url_and_entity(
+ project_id=project1['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'])
+ ud_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=ud_url)
+
+ def test_list_role_assignments_for_disabled_inheritance_extension(self):
+ """Call ``GET /role_assignments with inherited domain grants``.
+
+ Test Plan:
+        - Issue the URL to add an inherited role to the domain
+        - Issue the URL to check that the effective roles on the project
+          include the inherited role
+ - Disable the extension
+ - Re-check the effective roles, proving the inherited role no longer
+ shows up.
+
+ """
+
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.identity_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user1['id'], user1)
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ..and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ # Get effective list role assignments - the role should
+ # turn into a project role, along with the two direct roles that are
+ # on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 3)
+
+ unused, up_entity = _build_role_assignment_url_and_entity(
+ project_id=project1['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'])
+ ud_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=ud_url)
+
+        # Disable the extension and re-check the list - the role inherited
+        # from the domain should no longer show up
+ self.opt_in_group('os_inherit', enabled=False)
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+
+ unused, up_entity = _build_role_assignment_url_and_entity(
+ project_id=project1['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'])
+ ud_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentNotInListResponse(r, up_entity,
+ link_url=ud_url)
+
+ def test_list_role_assignments_for_inherited_group_domain_grants(self):
+ """Call ``GET /role_assignments with inherited group domain grants``.
+
+ Test Plan:
+ - Create 4 roles
+        - Create a domain with two users, a group and two projects
+ - Assign two direct roles to project1
+ - Assign a spoiler role to project2
+        - Issue the URL to add an inherited group role to the domain
+ - Issue the URL to check it is indeed on the domain
+ - Issue the URL to check effective roles on project1 - this
+ should return 3 roles.
+
+ """
+ role_list = []
+ for _ in range(4):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.identity_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user1['id'], user1)
+ user2 = self.new_user_ref(
+ domain_id=domain['id'])
+ user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user2['id'], user2)
+ group1 = self.new_group_ref(
+ domain_id=domain['id'])
+ self.identity_api.create_group(group1['id'], group1)
+ self.identity_api.add_user_to_group(user1['id'],
+ group1['id'])
+ self.identity_api.add_user_to_group(user2['id'],
+ group1['id'])
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project2['id'], project2)
+ # Add some roles to the project
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[1]['id'])
+ # ..and one on a different project as a spoiler
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[2]['id'])
+
+ # Now create our inherited role on the domain
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'group_id': group1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ # Now use the list domain role assignments api to check if this
+ # is included
+ collection_url = (
+ '/role_assignments?group.id=%(group_id)s'
+ '&scope.domain.id=%(domain_id)s' % {
+ 'group_id': group1['id'],
+ 'domain_id': domain['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 1)
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], group_id=group1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
+
+ # Now ask for effective list role assignments - the role should
+ # turn into a user project role, along with the two direct roles
+ # that are on the project
+ collection_url = (
+ '/role_assignments?effective&user.id=%(user_id)s'
+ '&scope.project.id=%(project_id)s' % {
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 3)
+ # An effective role for an inherited role will be a project
+ # entity, with a domain link to the inherited assignment
+ unused, up_entity = _build_role_assignment_url_and_entity(
+ project_id=project1['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'])
+ gd_url, unused = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], group_id=group1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, up_entity, link_url=gd_url)
+
+ def test_filtered_role_assignments_for_inherited_grants(self):
+ """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
+
+ Test Plan:
+ - Create 5 roles
+ - Create a domain with a user, group and two projects
+        - Make three direct role assignments as spoilers (two on
+          projects, one on the domain)
+ - Issue the URL to add an inherited user role to the domain
+ - Issue the URL to add an inherited group role to the domain
+ - Issue the URL to filter by inherited roles - this should
+ return just the 2 inherited roles.
+
+ """
+ role_list = []
+ for _ in range(5):
+ role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.assignment_api.create_role(role['id'], role)
+ role_list.append(role)
+
+ domain = self.new_domain_ref()
+ self.identity_api.create_domain(domain['id'], domain)
+ user1 = self.new_user_ref(
+ domain_id=domain['id'])
+ user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(user1['id'], user1)
+ group1 = self.new_group_ref(
+ domain_id=domain['id'])
+ self.identity_api.create_group(group1['id'], group1)
+ project1 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project1['id'], project1)
+ project2 = self.new_project_ref(
+ domain_id=domain['id'])
+ self.assignment_api.create_project(project2['id'], project2)
+ # Add some spoiler roles to the projects
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project1['id'], role_list[0]['id'])
+ self.assignment_api.add_role_to_user_and_project(
+ user1['id'], project2['id'], role_list[1]['id'])
+        # Create a non-inherited grant as a spoiler
+ self.assignment_api.create_grant(
+ role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
+
+ # Now create two inherited roles on the domain, one for a user
+        # and one for a group
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'user_id': user1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[3]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[3])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ base_collection_url = (
+ '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': domain['id'],
+ 'group_id': group1['id']})
+ member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
+ 'collection_url': base_collection_url,
+ 'role_id': role_list[4]['id']}
+ collection_url = base_collection_url + '/inherited_to_projects'
+
+ self.put(member_url)
+ self.head(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=role_list[4])
+ self.assertIn(collection_url, r.result['links']['self'])
+
+ # Now use the list role assignments api to get a list of inherited
+ # roles on the domain - should get back the two roles
+ collection_url = (
+ '/role_assignments?scope.OS-INHERIT:inherited_to=projects')
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(r)
+ self.assertEqual(len(r.result.get('role_assignments')), 2)
+ ud_url, ud_entity = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], user_id=user1['id'],
+ role_id=role_list[3]['id'], inherited_to_projects=True)
+ gd_url, gd_entity = _build_role_assignment_url_and_entity(
+ domain_id=domain['id'], group_id=group1['id'],
+ role_id=role_list[4]['id'], inherited_to_projects=True)
+ self.assertRoleAssignmentInListResponse(r, ud_entity, link_url=ud_url)
+ self.assertRoleAssignmentInListResponse(r, gd_entity, link_url=gd_url)
diff --git a/keystone/tests/test_v3_policy.py b/keystone/tests/test_v3_policy.py
new file mode 100644
index 00000000..d988efd2
--- /dev/null
+++ b/keystone/tests/test_v3_policy.py
@@ -0,0 +1,59 @@
+import uuid
+
+import test_v3
+
+
+class PolicyTestCase(test_v3.RestfulTestCase):
+ """Test policy CRUD."""
+
+ def setUp(self):
+ super(PolicyTestCase, self).setUp()
+ self.policy_id = uuid.uuid4().hex
+ self.policy = self.new_policy_ref()
+ self.policy['id'] = self.policy_id
+ self.policy_api.create_policy(
+ self.policy_id,
+ self.policy.copy())
+
+ # policy crud tests
+
+ def test_create_policy(self):
+ """Call ``POST /policies``."""
+ ref = self.new_policy_ref()
+ r = self.post(
+ '/policies',
+ body={'policy': ref})
+ return self.assertValidPolicyResponse(r, ref)
+
+ def test_list_policies(self):
+ """Call ``GET /policies``."""
+ r = self.get('/policies')
+ self.assertValidPolicyListResponse(r, ref=self.policy)
+
+ def test_list_policies_xml(self):
+ """Call ``GET /policies (xml data)``."""
+ r = self.get('/policies', content_type='xml')
+ self.assertValidPolicyListResponse(r, ref=self.policy)
+
+ def test_get_policy(self):
+ """Call ``GET /policies/{policy_id}``."""
+ r = self.get(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id})
+ self.assertValidPolicyResponse(r, self.policy)
+
+ def test_update_policy(self):
+ """Call ``PATCH /policies/{policy_id}``."""
+ policy = self.new_policy_ref()
+ policy['id'] = self.policy_id
+ r = self.patch(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id},
+ body={'policy': policy})
+ self.assertValidPolicyResponse(r, policy)
+
+ def test_delete_policy(self):
+ """Call ``DELETE /policies/{policy_id}``."""
+ self.delete(
+ '/policies/%(policy_id)s' % {
+ 'policy_id': self.policy_id})
diff --git a/keystone/tests/test_v3_protection.py b/keystone/tests/test_v3_protection.py
new file mode 100644
index 00000000..38e32813
--- /dev/null
+++ b/keystone/tests/test_v3_protection.py
@@ -0,0 +1,308 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import tempfile
+import uuid
+
+from keystone import config
+from keystone import exception
+from keystone.openstack.common import jsonutils
+from keystone.policy.backends import rules
+
+import test_v3
+
+
+CONF = config.CONF
+DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
+
+
+class IdentityTestProtectedCase(test_v3.RestfulTestCase):
+ """Test policy enforcement on the v3 Identity API."""
+
+ def setUp(self):
+ """Setup for Identity Protection Test Cases.
+
+ As well as the usual housekeeping, create a set of domains,
+ users, roles and projects for the subsequent tests:
+
+        - Three domains: A, B and C. C is disabled.
+ - DomainA has user1, DomainB has user2 and user3
+ - DomainA has group1 and group2, DomainB has group3
+ - User1 has a role on DomainA
+
+ Remember that there will also be a fourth domain in existence,
+ the default domain.
+
+ """
+ # Ensure that test_v3.RestfulTestCase doesn't load its own
+ # sample data, which would make checking the results of our
+ # tests harder
+ super(IdentityTestProtectedCase, self).setUp(load_sample_data=False)
+ # Start by creating a couple of domains
+ self.domainA = self.new_domain_ref()
+ self.identity_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = self.new_domain_ref()
+ self.identity_api.create_domain(self.domainB['id'], self.domainB)
+ self.domainC = self.new_domain_ref()
+ self.domainC['enabled'] = False
+ self.identity_api.create_domain(self.domainC['id'], self.domainC)
+
+ # Now create some users, one in domainA and two of them in domainB
+ self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
+ self.user1['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user1['id'], self.user1)
+
+ self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
+ self.user2['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user2['id'], self.user2)
+
+ self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
+ self.user3['password'] = uuid.uuid4().hex
+ self.identity_api.create_user(self.user3['id'], self.user3)
+
+ self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.identity_api.create_group(self.group1['id'], self.group1)
+
+ self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.identity_api.create_group(self.group2['id'], self.group2)
+
+ self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
+ self.identity_api.create_group(self.group3['id'], self.group3)
+
+ self.role = self.new_role_ref()
+ self.identity_api.create_role(self.role['id'], self.role)
+ self.identity_api.create_grant(self.role['id'],
+ user_id=self.user1['id'],
+ domain_id=self.domainA['id'])
+
+        # Initialize the policy engine and create a temp file that each
+        # test can write its own policy rules to
+ self.orig_policy_file = CONF.policy_file
+ rules.reset()
+ _unused, self.tmpfilename = tempfile.mkstemp()
+ self.opt(policy_file=self.tmpfilename)
+
+ # A default auth request we can use - un-scoped user token
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'])
+
+ def tearDown(self):
+ super(IdentityTestProtectedCase, self).tearDown()
+ rules.reset()
+ self.opt(policy_file=self.orig_policy_file)
+
+ def _get_id_list_from_ref_list(self, ref_list):
+ result_list = []
+ for x in ref_list:
+ result_list.append(x['id'])
+ return result_list
+
+ def _set_policy(self, new_policy):
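+        # Overwrite the temp policy file set up in setUp() with the given
+        # rules, e.g. {"identity:list_users": ["domain_id:%(domain_id)s"]};
+        # an empty rule list (as used below) leaves that API unprotected.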
+ with open(self.tmpfilename, "w") as policyfile:
+ policyfile.write(jsonutils.dumps(new_policy))
+
+ def test_list_users_unprotected(self):
+ """GET /users (unprotected)
+
+ Test Plan:
+ - Update policy so api is unprotected
+ - Use an un-scoped token to make sure we can get back all
+ the users independent of domain
+
+ """
+ self._set_policy({"identity:list_users": []})
+ r = self.get('/users', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertIn(self.user1['id'], id_list)
+ self.assertIn(self.user2['id'], id_list)
+ self.assertIn(self.user3['id'], id_list)
+
+ def test_list_users_filtered_by_domain(self):
+ """GET /users?domain_id=mydomain (filtered)
+
+ Test Plan:
+ - Update policy so api is unprotected
+ - Use an un-scoped token to make sure we can filter the
+ users by domainB, getting back the 2 users in that domain
+
+ """
+ self._set_policy({"identity:list_users": []})
+ url_by_name = '/users?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should get back two users, those in DomainB
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertIn(self.user2['id'], id_list)
+ self.assertIn(self.user3['id'], id_list)
+
+ def test_get_user_protected_match_id(self):
+ """GET /users/{id} (match payload)
+
+ Test Plan:
+ - Update policy to protect api by user_id
+        - Get user1 by ID, to check that this will correctly match
+          the user_id in the flattened payload
+
+ """
+ # TODO(henry-nash, ayoung): It would be good to expand this
+ # test for further test flattening, e.g. protect on, say, an
+ # attribute of an object being created
+ new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
+ self._set_policy(new_policy)
+ url_by_name = '/users/%s' % self.user1['id']
+ r = self.get(url_by_name, auth=self.auth)
+        self.assertEqual(self.user1['id'], r.result['user']['id'])
+
+ def test_list_users_protected_by_domain(self):
+ """GET /users?domain_id=mydomain (protected)
+
+ Test Plan:
+ - Update policy to protect api by domain_id
+        - List users using a token scoped to domainA with a filter
+ specifying domainA - we should only get back the one user
+ that is in domainA.
+ - Try and read the users from domainB - this should fail since
+ we don't have a token scoped for domainB
+
+ """
+ new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/users?domain_id=%s' % self.domainA['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should only get back one user, the one in DomainA
+ id_list = self._get_id_list_from_ref_list(r.result.get('users'))
+ self.assertEqual(len(id_list), 1)
+ self.assertIn(self.user1['id'], id_list)
+
+ # Now try for domainB, which should fail
+ url_by_name = '/users?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_list_groups_protected_by_domain(self):
+ """GET /groups?domain_id=mydomain (protected)
+
+ Test Plan:
+ - Update policy to protect api by domain_id
+ - List groups using a token scoped to domainA and make sure
+ we only get back the two groups that are in domainA
+ - Try and read the groups from domainB - this should fail since
+ we don't have a token scoped for domainB
+
+ """
+ new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/groups?domain_id=%s' % self.domainA['id']
+ r = self.get(url_by_name, auth=self.auth)
+ # We should only get back two groups, the ones in DomainA
+ id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
+ self.assertEqual(len(id_list), 2)
+ self.assertIn(self.group1['id'], id_list)
+ self.assertIn(self.group2['id'], id_list)
+
+ # Now try for domainB, which should fail
+ url_by_name = '/groups?domain_id=%s' % self.domainB['id']
+ r = self.get(url_by_name, auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_list_groups_protected_by_domain_and_filtered(self):
+ """GET /groups?domain_id=mydomain&name=myname (protected)
+
+ Test Plan:
+ - Update policy to protect api by domain_id
+ - List groups using a token scoped to domainA with a filter
+          specifying both domainA and the name of group2.
+ - We should only get back the group in domainA that matches
+ the name
+
+ """
+ new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
+ self._set_policy(new_policy)
+ self.auth = self.build_authentication_request(
+ user_id=self.user1['id'],
+ password=self.user1['password'],
+ domain_id=self.domainA['id'])
+ url_by_name = '/groups?domain_id=%s&name=%s' % (
+ self.domainA['id'], self.group2['name'])
+ r = self.get(url_by_name, auth=self.auth)
+        # We should only get back one group, the one in DomainA that matches
+ # the name supplied
+ id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
+ self.assertEqual(len(id_list), 1)
+ self.assertIn(self.group2['id'], id_list)
+
+ def test_list_filtered_domains(self):
+ """GET /domains?enabled=0
+
+ Test Plan:
+ - Update policy for no protection on api
+ - Filter by the 'enabled' boolean to get disabled domains, which
+ should return just domainC
+ - Try the filter using different ways of specifying 'true'
+ to test that our handling of booleans in filter matching is
+ correct
+
+ """
+ new_policy = {"identity:list_domains": []}
+ self._set_policy(new_policy)
+ r = self.get('/domains?enabled=0', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(len(id_list), 1)
+ self.assertIn(self.domainC['id'], id_list)
+
+ # Now try a few ways of specifying 'true' when we should get back
+ # the other two domains, plus the default domain
+ r = self.get('/domains?enabled=1', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(len(id_list), 3)
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIn(self.domainB['id'], id_list)
+ self.assertIn(DEFAULT_DOMAIN_ID, id_list)
+
+ r = self.get('/domains?enabled', auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(len(id_list), 3)
+ self.assertIn(self.domainA['id'], id_list)
+ self.assertIn(self.domainB['id'], id_list)
+ self.assertIn(DEFAULT_DOMAIN_ID, id_list)
+
+ def test_multiple_filters(self):
+ """GET /domains?enabled&name=myname
+
+ Test Plan:
+ - Update policy for no protection on api
+ - Filter by the 'enabled' boolean and name - this should
+ return a single domain
+
+ """
+ new_policy = {"identity:list_domains": []}
+ self._set_policy(new_policy)
+
+        my_url = '/domains?enabled&name=%s' % self.domainA['name']
+ r = self.get(my_url, auth=self.auth)
+ id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
+ self.assertEqual(len(id_list), 1)
+ self.assertIn(self.domainA['id'], id_list)
diff --git a/keystone/tests/test_versions.py b/keystone/tests/test_versions.py
new file mode 100644
index 00000000..933fb246
--- /dev/null
+++ b/keystone/tests/test_versions.py
@@ -0,0 +1,422 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from keystone.tests import core as test
+
+from keystone import config
+from keystone import controllers
+from keystone.openstack.common import jsonutils
+
+
+CONF = config.CONF
+
+v2_MEDIA_TYPES = [
+ {
+ "base": "application/json",
+ "type": "application/"
+ "vnd.openstack.identity-v2.0+json"
+ }, {
+ "base": "application/xml",
+ "type": "application/"
+ "vnd.openstack.identity-v2.0+xml"
+ }
+]
+
+v2_HTML_DESCRIPTION = {
+ "rel": "describedby",
+ "type": "text/html",
+ "href": "http://docs.openstack.org/api/"
+ "openstack-identity-service/2.0/"
+ "content/"
+}
+
+v2_PDF_DESCRIPTION = {
+ "rel": "describedby",
+ "type": "application/pdf",
+ "href": "http://docs.openstack.org/api/"
+ "openstack-identity-service/2.0/"
+ "identity-dev-guide-2.0.pdf"
+}
+
+v2_EXPECTED_RESPONSE = {
+ "id": "v2.0",
+ "status": "stable",
+ "updated": "2013-03-06T00:00:00Z",
+ "links": [
+ {
+ "rel": "self",
+ "href": "", # Will get filled in after initialization
+ },
+ v2_HTML_DESCRIPTION,
+ v2_PDF_DESCRIPTION
+ ],
+ "media-types": v2_MEDIA_TYPES
+}
+
+v2_VERSION_RESPONSE = {
+ "version": v2_EXPECTED_RESPONSE
+}
+
+v3_MEDIA_TYPES = [
+ {
+ "base": "application/json",
+ "type": "application/"
+ "vnd.openstack.identity-v3+json"
+ }, {
+ "base": "application/xml",
+ "type": "application/"
+ "vnd.openstack.identity-v3+xml"
+ }
+]
+
+v3_EXPECTED_RESPONSE = {
+ "id": "v3.0",
+ "status": "stable",
+ "updated": "2013-03-06T00:00:00Z",
+ "links": [
+ {
+ "rel": "self",
+ "href": "", # Will get filled in after initialization
+ }
+ ],
+ "media-types": v3_MEDIA_TYPES
+}
+
+v3_VERSION_RESPONSE = {
+ "version": v3_EXPECTED_RESPONSE
+}
+
+VERSIONS_RESPONSE = {
+ "versions": {
+ "values": [
+ v3_EXPECTED_RESPONSE,
+ v2_EXPECTED_RESPONSE
+ ]
+ }
+}
+
+
+class VersionTestCase(test.TestCase):
+ def setUp(self):
+ super(VersionTestCase, self).setUp()
+ self.load_backends()
+ self.public_app = self.loadapp('keystone', 'main')
+ self.admin_app = self.loadapp('keystone', 'admin')
+
+ self.public_server = self.serveapp('keystone', name='main')
+ self.admin_server = self.serveapp('keystone', name='admin')
+
+ def _paste_in_port(self, response, port):
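+        # Despite its name, 'port' is the full endpoint URL that gets pasted
+        # into the version document's self link.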
+ for link in response['links']:
+ if link['rel'] == 'self':
+ link['href'] = port
+
+ def test_public_versions(self):
+ client = self.client(self.public_app)
+ resp = client.get('/')
+ self.assertEqual(resp.status_int, 300)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v3/' % CONF.public_port)
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v2.0/' % CONF.public_port)
+ self.assertEqual(data, expected)
+
+ def test_admin_versions(self):
+ client = self.client(self.admin_app)
+ resp = client.get('/')
+ self.assertEqual(resp.status_int, 300)
+ data = jsonutils.loads(resp.body)
+ expected = VERSIONS_RESPONSE
+ for version in expected['versions']['values']:
+ if version['id'] == 'v3.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v3/' % CONF.admin_port)
+ elif version['id'] == 'v2.0':
+ self._paste_in_port(
+ version, 'http://localhost:%s/v2.0/' % CONF.admin_port)
+ self.assertEqual(data, expected)
+
+ def test_public_version_v2(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v2.0/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' % CONF.public_port)
+ self.assertEqual(data, expected)
+
+ def test_admin_version_v2(self):
+ client = self.client(self.admin_app)
+ resp = client.get('/v2.0/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' % CONF.admin_port)
+ self.assertEqual(data, expected)
+
+ def test_public_version_v3(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v3/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' % CONF.public_port)
+ self.assertEqual(data, expected)
+
+ def test_admin_version_v3(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v3/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' % CONF.admin_port)
+ self.assertEqual(data, expected)
+
+ def test_v2_disabled(self):
+ self.stubs.Set(controllers, '_VERSIONS', ['v3'])
+ client = self.client(self.public_app)
+ # request to /v2.0 should fail
+ resp = client.get('/v2.0/')
+ self.assertEqual(resp.status_int, 404)
+
+ # request to /v3 should pass
+ resp = client.get('/v3/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v3_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v3/' % CONF.public_port)
+ self.assertEqual(data, expected)
+
+ # only v3 information should be displayed by requests to /
+ v3_only_response = {
+ "versions": {
+ "values": [
+ v3_EXPECTED_RESPONSE
+ ]
+ }
+ }
+ self._paste_in_port(v3_only_response['versions']['values'][0],
+ 'http://localhost:%s/v3/' % CONF.public_port)
+ resp = client.get('/')
+ self.assertEqual(resp.status_int, 300)
+ data = jsonutils.loads(resp.body)
+ self.assertEqual(data, v3_only_response)
+
+ def test_v3_disabled(self):
+ self.stubs.Set(controllers, '_VERSIONS', ['v2.0'])
+ client = self.client(self.public_app)
+ # request to /v3 should fail
+ resp = client.get('/v3/')
+ self.assertEqual(resp.status_int, 404)
+
+ # request to /v2.0 should pass
+ resp = client.get('/v2.0/')
+ self.assertEqual(resp.status_int, 200)
+ data = jsonutils.loads(resp.body)
+ expected = v2_VERSION_RESPONSE
+ self._paste_in_port(expected['version'],
+ 'http://localhost:%s/v2.0/' % CONF.public_port)
+ self.assertEqual(data, expected)
+
+ # only v2 information should be displayed by requests to /
+ v2_only_response = {
+ "versions": {
+ "values": [
+ v2_EXPECTED_RESPONSE
+ ]
+ }
+ }
+ self._paste_in_port(v2_only_response['versions']['values'][0],
+ 'http://localhost:%s/v2.0/' % CONF.public_port)
+ resp = client.get('/')
+ self.assertEqual(resp.status_int, 300)
+ data = jsonutils.loads(resp.body)
+ self.assertEqual(data, v2_only_response)
+
+
+class XmlVersionTestCase(test.TestCase):
+
+ REQUEST_HEADERS = {'Accept': 'application/xml'}
+
+ DOC_INTRO = '<?xml version="1.0" encoding="UTF-8"?>'
+ XML_NAMESPACE_ATTR = 'xmlns="http://docs.openstack.org/identity/api/v2.0"'
+
+ v2_VERSION_DATA = """
+<version %(v2_namespace)s status="stable" updated="2013-03-06T00:00:00Z"
+ id="v2.0">
+ <media-types>
+ <media-type base="application/json" type="application/\
+vnd.openstack.identity-v2.0+json"/>
+ <media-type base="application/xml" type="application/\
+vnd.openstack.identity-v2.0+xml"/>
+ </media-types>
+ <links>
+ <link href="http://localhost:%%(port)s/v2.0/" rel="self"/>
+ <link href="http://docs.openstack.org/api/openstack-identity-service/\
+2.0/content/" type="text/html" rel="describedby"/>
+ <link href="http://docs.openstack.org/api/openstack-identity-service/\
+2.0/identity-dev-guide-2.0.pdf" type="application/pdf" rel="describedby"/>
+ </links>
+ <link href="http://localhost:%%(port)s/v2.0/" rel="self"/>
+ <link href="http://docs.openstack.org/api/openstack-identity-service/\
+2.0/content/" type="text/html" rel="describedby"/>
+ <link href="http://docs.openstack.org/api/openstack-identity-service/\
+2.0/identity-dev-guide-2.0.pdf" type="application/pdf" rel="describedby"/>
+</version>
+"""
+
+ v2_VERSION_RESPONSE = ((DOC_INTRO + v2_VERSION_DATA) %
+ dict(v2_namespace=XML_NAMESPACE_ATTR))
+
+ v3_VERSION_DATA = """
+<version %(v3_namespace)s status="stable" updated="2013-03-06T00:00:00Z"
+ id="v3.0">
+ <media-types>
+ <media-type base="application/json" type="application/\
+vnd.openstack.identity-v3+json"/>
+ <media-type base="application/xml" type="application/\
+vnd.openstack.identity-v3+xml"/>
+ </media-types>
+ <links>
+ <link href="http://localhost:%%(port)s/v3/" rel="self"/>
+ </links>
+</version>
+"""
+
+ v3_VERSION_RESPONSE = ((DOC_INTRO + v3_VERSION_DATA) %
+ dict(v3_namespace=XML_NAMESPACE_ATTR))
+
+ VERSIONS_RESPONSE = ((DOC_INTRO + """
+<versions %(namespace)s>
+""" +
+ v3_VERSION_DATA +
+ v2_VERSION_DATA + """
+</versions>
+""") % dict(namespace=XML_NAMESPACE_ATTR, v3_namespace='', v2_namespace=''))
+
+ def setUp(self):
+ super(XmlVersionTestCase, self).setUp()
+ self.load_backends()
+ self.public_app = self.loadapp('keystone', 'main')
+ self.admin_app = self.loadapp('keystone', 'admin')
+
+ self.public_server = self.serveapp('keystone', name='main')
+ self.admin_server = self.serveapp('keystone', name='admin')
+
+ def test_public_versions(self):
+ client = self.client(self.public_app)
+ resp = client.get('/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 300)
+ data = resp.body
+ expected = self.VERSIONS_RESPONSE % dict(port=CONF.public_port)
+ self.assertEqualXML(data, expected)
+
+ def test_admin_versions(self):
+ client = self.client(self.admin_app)
+ resp = client.get('/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 300)
+ data = resp.body
+ expected = self.VERSIONS_RESPONSE % dict(port=CONF.admin_port)
+ self.assertEqualXML(data, expected)
+
+ def test_public_version_v2(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v2.0/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v2_VERSION_RESPONSE % dict(port=CONF.public_port)
+ self.assertEqualXML(data, expected)
+
+ def test_admin_version_v2(self):
+ client = self.client(self.admin_app)
+ resp = client.get('/v2.0/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v2_VERSION_RESPONSE % dict(port=CONF.admin_port)
+ self.assertEqualXML(data, expected)
+
+ def test_public_version_v3(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v3/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v3_VERSION_RESPONSE % dict(port=CONF.public_port)
+ self.assertEqualXML(data, expected)
+
+ def test_admin_version_v3(self):
+ client = self.client(self.public_app)
+ resp = client.get('/v3/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v3_VERSION_RESPONSE % dict(port=CONF.admin_port)
+ self.assertEqualXML(data, expected)
+
+ def test_v2_disabled(self):
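+ # Monkeypatch the version controller's module-level _VERSIONS list so
+ # that only v3 is advertised for the rest of this test.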
+ self.stubs.Set(controllers, '_VERSIONS', ['v3'])
+ client = self.client(self.public_app)
+
+ # request to /v3 should pass
+ resp = client.get('/v3/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v3_VERSION_RESPONSE % dict(port=CONF.public_port)
+ self.assertEqualXML(data, expected)
+
+ # only v3 information should be displayed by requests to /
+ v3_only_response = ((self.DOC_INTRO + '<versions %(namespace)s>' +
+ self.v3_VERSION_DATA + '</versions>') %
+ dict(namespace=self.XML_NAMESPACE_ATTR,
+ v3_namespace='') %
+ dict(port=CONF.public_port))
+
+ resp = client.get('/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 300)
+ data = resp.body
+ self.assertEqualXML(data, v3_only_response)
+
+ def test_v3_disabled(self):
+ self.stubs.Set(controllers, '_VERSIONS', ['v2.0'])
+ client = self.client(self.public_app)
+
+ # request to /v2.0 should pass
+ resp = client.get('/v2.0/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 200)
+ data = resp.body
+ expected = self.v2_VERSION_RESPONSE % dict(port=CONF.public_port)
+ self.assertEqualXML(data, expected)
+
+ # only v2 information should be displayed by requests to /
+ v2_only_response = ((self.DOC_INTRO + '<versions %(namespace)s>' +
+ self.v2_VERSION_DATA + '</versions>') %
+ dict(namespace=self.XML_NAMESPACE_ATTR,
+ v2_namespace='') %
+ dict(port=CONF.public_port))
+
+ resp = client.get('/', headers=self.REQUEST_HEADERS)
+ self.assertEqual(resp.status_int, 300)
+ data = resp.body
+ self.assertEqualXML(data, v2_only_response)
diff --git a/keystone/tests/test_wsgi.py b/keystone/tests/test_wsgi.py
new file mode 100644
index 00000000..781159e2
--- /dev/null
+++ b/keystone/tests/test_wsgi.py
@@ -0,0 +1,213 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 OpenStack LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import wsgi
+from keystone import exception
+from keystone.openstack.common import jsonutils
+from keystone.tests import core as test
+
+
+class FakeApp(wsgi.Application):
+ def index(self, context):
+ return {'a': 'b'}
+
+
+class BaseWSGITest(test.TestCase):
+ def setUp(self):
+ self.app = FakeApp()
+ super(BaseWSGITest, self).setUp()
+
+ def _make_request(self, url='/'):
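+ # Fake the match dict that the routing middleware would normally put
+ # in the environ, so wsgi.Application dispatches to the 'index' action.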
+ req = wsgi.Request.blank(url)
+ args = {'action': 'index', 'controller': None}
+ req.environ['wsgiorg.routing_args'] = [None, args]
+ return req
+
+
+class ApplicationTest(BaseWSGITest):
+ def test_response_content_type(self):
+ req = self._make_request()
+ resp = req.get_response(self.app)
+ self.assertEqual(resp.content_type, 'application/json')
+
+ def test_query_string_available(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['query_string']
+ req = self._make_request(url='/?1=2')
+ resp = req.get_response(FakeApp())
+ self.assertEqual(jsonutils.loads(resp.body), {'1': '2'})
+
+ def test_headers_available(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return context['headers']
+
+ app = FakeApp()
+ req = self._make_request(url='/?1=2')
+ req.headers['X-Foo'] = "bar"
+ resp = req.get_response(app)
+ # The response body is JSON, so decode it rather than eval()ing it.
+ self.assertIn('X-Foo', jsonutils.loads(resp.body))
+
+ def test_render_response(self):
+ data = {'attribute': 'value'}
+ body = '{"attribute": "value"}'
+
+ resp = wsgi.render_response(body=data)
+ self.assertEqual(resp.status, '200 OK')
+ self.assertEqual(resp.status_int, 200)
+ self.assertEqual(resp.body, body)
+ self.assertEqual(resp.headers.get('Vary'), 'X-Auth-Token')
+ self.assertEqual(resp.headers.get('Content-Length'), str(len(body)))
+
+ def test_render_response_custom_status(self):
+ resp = wsgi.render_response(status=(501, 'Not Implemented'))
+ self.assertEqual(resp.status, '501 Not Implemented')
+ self.assertEqual(resp.status_int, 501)
+
+ def test_render_response_custom_headers(self):
+ resp = wsgi.render_response(headers=[('Custom-Header', 'Some-Value')])
+ self.assertEqual(resp.headers.get('Custom-Header'), 'Some-Value')
+ self.assertEqual(resp.headers.get('Vary'), 'X-Auth-Token')
+
+ def test_render_response_no_body(self):
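+ # With no body, render_response is expected to default to 204 No Content
+ # and to omit the Content-Type header entirely.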
+ resp = wsgi.render_response()
+ self.assertEqual(resp.status, '204 No Content')
+ self.assertEqual(resp.status_int, 204)
+ self.assertEqual(resp.body, '')
+ self.assertEqual(resp.headers.get('Content-Length'), '0')
+ self.assertIsNone(resp.headers.get('Content-Type'))
+
+ def test_application_local_config(self):
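+ # Application.factory is expected to forward the paste local_conf
+ # entries as keyword arguments to the application's constructor.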
+ class FakeApp(wsgi.Application):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ app = FakeApp.factory({}, testkey="test")
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+ def test_render_exception(self):
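+ # A non-ASCII message exercises unicode-safe rendering of the 401.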
+ e = exception.Unauthorized(message=u'\u7f51\u7edc')
+ resp = wsgi.render_exception(e)
+ self.assertEqual(resp.status_int, 401)
+
+
+class ExtensionRouterTest(BaseWSGITest):
+ def test_extensionrouter_local_config(self):
+ class FakeRouter(wsgi.ExtensionRouter):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ factory = FakeRouter.factory({}, testkey="test")
+ app = factory(self.app)
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+
+class MiddlewareTest(BaseWSGITest):
+ def test_middleware_request(self):
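+ # process_request sees the inbound request; the test only checks that
+ # the middleware could annotate the request's WSGI environ.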
+ class FakeMiddleware(wsgi.Middleware):
+ def process_request(self, req):
+ req.environ['fake_request'] = True
+ return req
+ req = self._make_request()
+ resp = FakeMiddleware(None)(req)
+ self.assertIn('fake_request', resp.environ)
+
+ def test_middleware_response(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ response.environ = {}
+ response.environ['fake_response'] = True
+ return response
+ req = self._make_request()
+ resp = FakeMiddleware(self.app)(req)
+ self.assertIn('fake_response', resp.environ)
+
+ def test_middleware_bad_request(self):
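+ # Exceptions raised from a middleware hook should be rendered as the
+ # matching HTTP error response rather than propagating.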
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise exception.Unauthorized()
+
+ req = self._make_request()
+ req.environ['REMOTE_ADDR'] = '127.0.0.1'
+ resp = FakeMiddleware(self.app)(req)
+ self.assertEqual(resp.status_int, exception.Unauthorized.code)
+
+ def test_middleware_type_error(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise TypeError()
+
+ req = self._make_request()
+ req.environ['REMOTE_ADDR'] = '127.0.0.1'
+ resp = FakeMiddleware(self.app)(req)
+ # A TypeError from a middleware hook is rendered as a ValidationError response.
+ self.assertEqual(resp.status_int, exception.ValidationError.code)
+
+ def test_middleware_exception_error(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def process_response(self, request, response):
+ raise exception.UnexpectedError("EXCEPTIONERROR")
+
+ req = self._make_request()
+ resp = FakeMiddleware(self.app)(req)
+ self.assertEqual(resp.status_int, exception.UnexpectedError.code)
+ self.assertIn("EXCEPTIONERROR", resp.body)
+
+ def test_middleware_local_config(self):
+ class FakeMiddleware(wsgi.Middleware):
+ def __init__(self, *args, **kwargs):
+ self.kwargs = kwargs
+
+ factory = FakeMiddleware.factory({}, testkey="test")
+ app = factory(self.app)
+ self.assertIn("testkey", app.kwargs)
+ self.assertEqual("test", app.kwargs["testkey"])
+
+
+class WSGIFunctionTest(test.TestCase):
+ def test_mask_password(self):
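+ # mask_password(message, is_unicode=False, secret="***") is expected to
+ # replace the value that follows a password-like key; the extra
+ # positional arguments below exercise the unicode flag and a custom
+ # replacement string.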
+ message = ("test = 'password': 'aaaaaa', 'param1': 'value1', "
+ "\"new_password\": 'bbbbbb'")
+ self.assertEqual(wsgi.mask_password(message, True),
+ u"test = 'password': '***', 'param1': 'value1', "
+ "\"new_password\": '***'")
+
+ message = "test = 'password' : 'aaaaaa'"
+ self.assertEqual(wsgi.mask_password(message, False, '111'),
+ "test = 'password' : '111'")
+
+ message = u"test = u'password' : u'aaaaaa'"
+ self.assertEqual(wsgi.mask_password(message, True),
+ u"test = u'password' : u'***'")
+
+ message = 'test = "password" : "aaaaaaaaa"'
+ self.assertEqual(wsgi.mask_password(message),
+ 'test = "password" : "***"')
+
+ message = 'test = "original_password" : "aaaaaaaaa"'
+ self.assertEqual(wsgi.mask_password(message),
+ 'test = "original_password" : "***"')
+
+ message = 'test = "original_password" : ""'
+ self.assertEqual(wsgi.mask_password(message),
+ 'test = "original_password" : "***"')
+
+ message = 'test = "param1" : "value"'
+ self.assertEqual(wsgi.mask_password(message),
+ 'test = "param1" : "value"')
diff --git a/keystone/tests/tmp/.gitkeep b/keystone/tests/tmp/.gitkeep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone/tests/tmp/.gitkeep