| author | andy <github@anarkystic.com> | 2010-08-21 15:37:00 +0200 |
|---|---|---|
| committer | andy <github@anarkystic.com> | 2010-08-21 15:37:00 +0200 |
| commit | 152baf34247c5a4b76f643cac0d33c0158de0bfa | |
| tree | 216859744db75835fd28a9cacdb13d7acb790569 | |
| parent | 6f5aa18747384f46f8d89ac0d6c82a710849ce59 | |
Moves auth.manager to the data layer.

A couple of weird things are going on here. I added a try/except in
Manager.delete_project because it seems to have trouble finding the
network to delete; I suspect something else is deleting the network
before the tests get a chance to.

I also stubbed out task.LoopingCall in service_unittest because there
was no good way to kill the task from outside of
service.Service.create().
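For readers unfamiliar with mox, the stubbing pattern referenced above looks roughly like the standalone sketch below. It mirrors the service_unittest hunk in the diff further down; the test-case plumbing around it is assumed, and only names that appear in the diff are used.

```python
# Standalone sketch of the mox pattern used in service_unittest below.
# Assumes nova's `service` module (which exposes twisted's `task`) is
# importable; the surrounding TestCase setup/teardown is omitted.
import mox

from nova import service

m = mox.Mox()
m.StubOutWithMock(service.task, 'LoopingCall', use_mock_anything=True)

# Record the expected calls: Service.create() constructs the timer and
# starts it. Returning the class itself from the constructor expectation
# lets the chained .start() call be recorded on the same mock.
service.task.LoopingCall(
    mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
    service.task.LoopingCall)
service.task.LoopingCall.start(interval=mox.IgnoreArg(),
                               now=mox.IgnoreArg())

m.ReplayAll()
# ... exercise service.Service.create() here ...
m.VerifyAll()
m.UnsetStubs()
```

The stub exists purely to keep the LoopingCall from outliving the test: Service.create() never hands back a reference the test could .stop().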
```
 nova/auth/manager.py           | 35
 nova/db/api.py                 |  8
 nova/db/sqlalchemy/api.py      | 13
 nova/network/service.py        |  7
 nova/tests/network_unittest.py |  4
 nova/tests/service_unittest.py | 11
 6 files changed, 58 insertions(+), 20 deletions(-)
```
```diff
diff --git a/nova/auth/manager.py b/nova/auth/manager.py
index eed67d8c3..070c5508a 100644
--- a/nova/auth/manager.py
+++ b/nova/auth/manager.py
@@ -29,6 +29,7 @@ import uuid
 import zipfile
 
 from nova import crypto
+from nova import db
 from nova import exception
 from nova import flags
 from nova import models
@@ -202,11 +203,6 @@ class Project(AuthBase):
         ip, port = AuthManager().get_project_vpn_data(self)
         return port
 
-    @property
-    def network(self):
-        session = models.create_session()
-        return session.query(models.Network).filter_by(project_id=self.id).first()
-
     def has_manager(self, user):
         return AuthManager().is_project_manager(user, self)
 
@@ -498,8 +494,8 @@ class AuthManager(object):
             return []
         return [Project(**project_dict) for project_dict in project_list]
 
-    def create_project(self, name, manager_user,
-                       description=None, member_users=None):
+    def create_project(self, name, manager_user, description=None,
+                       member_users=None, context=None):
         """Create a project
 
         @type name: str
@@ -530,8 +526,7 @@ class AuthManager(object):
         if project_dict:
             project = Project(**project_dict)
             # FIXME(ja): EVIL HACK
-            net = models.Network(project_id=project.id)
-            net.save()
+            db.network_create(context, {'project_id': project.id})
         return project
 
     def add_to_project(self, user, project):
@@ -558,7 +553,7 @@ class AuthManager(object):
             return drv.remove_from_project(User.safe_id(user),
                                            Project.safe_id(project))
 
-    def get_project_vpn_data(self, project):
+    def get_project_vpn_data(self, project, context=None):
         """Gets vpn ip and port for project
 
         @type project: Project or project_id
@@ -571,19 +566,27 @@
         # FIXME(vish): this shouldn't be messing with the datamodel directly
         if not isinstance(project, Project):
             project = self.get_project(project)
-        if not project.network.vpn_public_port:
+
+        network_ref = db.project_get_network(context, project.id)
+
+        if not network_ref['vpn_public_port']:
             raise exception.NotFound('project network data has not been set')
-        return (project.network.vpn_public_ip_str,
-                project.network.vpn_public_port)
+        return (network_ref['vpn_public_ip_str'],
+                network_ref['vpn_public_port'])
 
-    def delete_project(self, project):
+    def delete_project(self, project, context=None):
         """Deletes a project"""
         # FIXME(ja): EVIL HACK
         if not isinstance(project, Project):
             project = self.get_project(project)
-        project.network.delete()
+        network_ref = db.project_get_network(context, project.id)
+        try:
+            db.network_destroy(context, network_ref['id'])
+        except:
+            logging.exception('Could not destroy network: %s',
+                              network_ref['id'])
         with self.driver() as drv:
-            return drv.delete_project(Project.safe_id(project))
+            drv.delete_project(Project.safe_id(project))
 
     def get_user(self, uid):
         """Retrieves a user by id"""
diff --git a/nova/db/api.py b/nova/db/api.py
index e76e6b057..bbd69ec65 100644
--- a/nova/db/api.py
+++ b/nova/db/api.py
@@ -111,6 +111,14 @@ def network_update(context, network_id, values):
 ###################
 
 
+def project_get_network(context, project_id):
+    """Return the network associated with the project."""
+    return _impl.project_get_network(context, project_id)
+
+
+###################
+
+
 def volume_allocate_shelf_and_blade(context, volume_id):
     """Atomically allocate a free shelf and blade from the pool."""
     return _impl.volume_allocate_shelf_and_blade(context, volume_id)
diff --git a/nova/db/sqlalchemy/api.py b/nova/db/sqlalchemy/api.py
index d80c03c19..e883e14cb 100644
--- a/nova/db/sqlalchemy/api.py
+++ b/nova/db/sqlalchemy/api.py
@@ -82,7 +82,7 @@ def network_create(context, values):
     for (key, value) in values.iteritems():
         network_ref[key] = value
     network_ref.save()
-    return network_ref.id
+    return network_ref
 
 
 def network_destroy(context, network_id):
@@ -104,6 +104,17 @@ def network_update(context, network_id, values):
 ###################
 
 
+def project_get_network(context, project_id):
+    session = models.create_session()
+    rv = session.query(models.Network).filter_by(project_id=project_id).first()
+    if not rv:
+        raise exception.NotFound('No network for project: %s' % project_id)
+    return rv
+
+
+###################
+
+
 def volume_allocate_shelf_and_blade(context, volume_id):
     session = models.NovaBase.get_session()
     query = session.query(models.ExportDevice).filter_by(volume=None)
diff --git a/nova/network/service.py b/nova/network/service.py
index 16ecfbf3e..e47f07ef0 100644
--- a/nova/network/service.py
+++ b/nova/network/service.py
@@ -24,6 +24,7 @@ import logging
 
 import IPy
 
+from nova import db
 from nova import exception
 from nova import flags
 from nova import models
@@ -89,12 +90,12 @@ def setup_compute_network(project_id):
     srv.setup_compute_network(network)
 
 
-def get_network_for_project(project_id):
+def get_network_for_project(project_id, context=None):
     """Get network allocated to project from datastore"""
     project = manager.AuthManager().get_project(project_id)
     if not project:
         raise exception.NotFound("Couldn't find project %s" % project_id)
-    return project.network
+    return db.project_get_network(context, project_id)
 
 
 def get_host_for_project(project_id):
@@ -246,7 +247,7 @@ class VlanNetworkService(BaseNetworkService):
         session.add(network_index)
         session.commit()
 
-    def allocate_fixed_ip(self, project_id, instance_id, is_vpn=False,
+    def allocate_fixed_ip(self, project_id, instance_id, is_vpn=False, *args,
                           **kwargs):
         """Gets a fixed ip from the pool"""
         network = get_network_for_project(project_id)
diff --git a/nova/tests/network_unittest.py b/nova/tests/network_unittest.py
index 0f2ce060d..76c76edbf 100644
--- a/nova/tests/network_unittest.py
+++ b/nova/tests/network_unittest.py
@@ -67,6 +67,8 @@ class NetworkTestCase(test.TrialTestCase):
     def tearDown(self):  # pylint: disable=C0103
         super(NetworkTestCase, self).tearDown()
+        # TODO(termie): this should really be instantiating clean datastores
+        # in between runs, one failure kills all the tests
         for project in self.projects:
             self.manager.delete_project(project)
         self.manager.delete_user(self.user)
@@ -275,6 +277,8 @@ def is_allocated_in_project(address, project_id):
     fixed_ip = models.FixedIp.find_by_ip_str(address)
     project_net = service.get_network_for_project(project_id)
     # instance exists until release
+    logging.error('fixed_ip.instance: %s', fixed_ip.instance)
+    logging.error('project_net: %s', project_net)
     return fixed_ip.instance is not None and fixed_ip.network == project_net
diff --git a/nova/tests/service_unittest.py b/nova/tests/service_unittest.py
index 449494201..482988465 100644
--- a/nova/tests/service_unittest.py
+++ b/nova/tests/service_unittest.py
@@ -43,6 +43,8 @@ class ServiceTestCase(test.BaseTestCase):
     def test_create(self):
         self.mox.StubOutWithMock(rpc, 'AdapterConsumer', use_mock_anything=True)
+        self.mox.StubOutWithMock(
+                service.task, 'LoopingCall', use_mock_anything=True)
         rpc.AdapterConsumer(connection=mox.IgnoreArg(),
                             topic='run_tests.py',
                             proxy=mox.IsA(service.Service)
                             ).AndReturn(rpc.AdapterConsumer)
@@ -52,6 +54,15 @@
                             topic='run_tests.py.%s' % FLAGS.node_name,
                             proxy=mox.IsA(service.Service)
                             ).AndReturn(rpc.AdapterConsumer)
+
+        # Stub out looping call a bit needlessly since we don't have an easy
+        # way to cancel it (yet) when the tests finishes
+        service.task.LoopingCall(
+                mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
+                service.task.LoopingCall)
+        service.task.LoopingCall.start(interval=mox.IgnoreArg(),
+                                       now=mox.IgnoreArg())
+
         rpc.AdapterConsumer.attach_to_twisted()
         rpc.AdapterConsumer.attach_to_twisted()
         self.mox.ReplayAll()
```
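As a usage note, the practical effect of this change is that callers move from the removed Project.network property to the new data-layer call. The following is a minimal sketch, not part of the commit; vpn_port_for is a hypothetical helper, while db.project_get_network and the network_ref fields come from the diff above.

```python
from nova import db


def vpn_port_for(project_id, context=None):
    """Hypothetical helper: look up a project's VPN port via the data layer.

    db.project_get_network raises exception.NotFound when the project has
    no network row, so callers no longer need the None checks that the old
    Project.network property (a bare session.query(...).first()) required.
    """
    network_ref = db.project_get_network(context, project_id)
    return network_ref['vpn_public_port']
```

The nova/db/api.py layer only delegates to a backend implementation (here the sqlalchemy one via _impl), which is what makes the datastore swappable behind a stable call signature.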
