author     Todd Willey <todd@rubidine.com>    2010-07-14 20:14:25 -0400
committer  Todd Willey <todd@rubidine.com>    2010-07-14 20:14:25 -0400
commit     5ff20ec381ca6d051d5052efe99c142da602622f (patch)
tree       4c5e919502c28f1aa8bb68e4c98a1fc9b949670f
parent     70783d594c15f1aafd6b1a6ba28ae6bd25102285 (diff)
Move BasicModel into datastore.
-rw-r--r--   nova/compute/model.py     213
-rw-r--r--   nova/compute/network.py    43
-rw-r--r--   nova/datastore.py         202
-rw-r--r--   nova/volume/storage.py      2
4 files changed, 230 insertions(+), 230 deletions(-)
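
This change lifts the Redis ConnectionError wrapper, the absorb_connection_error decorator, and the BasicModel base class out of nova/compute/model.py and into nova/datastore.py, so that models in nova.compute.network and nova.volume.storage can inherit from datastore.BasicModel without importing the compute model module. The subclassing pattern itself is unchanged by the move; the sketch below is illustrative only (the Widget class and its fields are made up, and it assumes a reachable Redis server behind datastore.Redis.instance()):

    from nova import datastore


    class Widget(datastore.BasicModel):
        """Hypothetical Redis-backed model; not part of this commit."""

        def __init__(self, widget_id):
            # identifier must be resolvable before BasicModel.__init__ runs,
            # because it builds the redis hash key from it.
            self.widget_id = widget_id
            super(Widget, self).__init__()

        @property
        def identifier(self):
            # BasicModel requires a unique string per instance.
            return self.widget_id

        def default_state(self):
            return {'widget_id': self.widget_id}


    w = Widget('w-123')
    w['color'] = 'blue'
    w.save()    # writes the "widget:w-123" hash and adds "w-123" to the "widgets" set
    found = Widget.lookup('w-123')    # returns None if no such record exists
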
diff --git a/nova/compute/model.py b/nova/compute/model.py
index 88b94525c..2ea4fecc9 100644
--- a/nova/compute/model.py
+++ b/nova/compute/model.py
@@ -57,19 +57,6 @@ from nova import utils
FLAGS = flags.FLAGS
-class ConnectionError(exception.Error):
- pass
-
-
-def absorb_connection_error(fn):
- def _wrapper(*args, **kwargs):
- try:
- return fn(*args, **kwargs)
- except redis.exceptions.ConnectionError, ce:
- raise ConnectionError(str(ce))
- return _wrapper
-
-
# TODO(todd): Implement this at the class level for Instance
class InstanceDirectory(object):
"""an api for interacting with the global state of instances"""
@@ -81,7 +68,7 @@ class InstanceDirectory(object):
def __getitem__(self, item):
return self.get(item)
- @absorb_connection_error
+ @datastore.absorb_connection_error
def by_project(self, project):
"""returns a list of instance objects for a project"""
for instance_id in datastore.Redis.instance().smembers('project:%s:instances' % project):
@@ -105,12 +92,12 @@ class InstanceDirectory(object):
"""returns the instance a volume is attached to"""
pass
- @absorb_connection_error
+ @datastore.absorb_connection_error
def exists(self, instance_id):
return datastore.Redis.instance().sismember('instances', instance_id)
@property
- @absorb_connection_error
+ @datastore.absorb_connection_error
def all(self):
"""returns a list of all instances"""
for instance_id in datastore.Redis.instance().smembers('instances'):
@@ -121,196 +108,8 @@ class InstanceDirectory(object):
instance_id = utils.generate_uid('i')
return self.get(instance_id)
-class BasicModel(object):
- """
- All Redis-backed data derives from this class.
-
- You MUST specify an identifier() property that returns a unique string
- per instance.
-
- You MUST have an initializer that takes a single argument that is a value
- returned by identifier() to load a new class with.
-
- You may want to specify a dictionary for default_state().
-
- You may also specify override_type at the class level to use a key other
- than __class__.__name__.
-
- You override save and destroy calls to automatically build and destroy
- associations.
- """
-
- override_type = None
-
- @absorb_connection_error
- def __init__(self):
- self.initial_state = {}
- self.state = datastore.Redis.instance().hgetall(self.__redis_key)
- if self.state:
- self.initial_state = self.state
- else:
- self.state = self.default_state()
-
- def default_state(self):
- """You probably want to define this in your subclass"""
- return {}
-
- @classmethod
- def _redis_name(cls):
- return cls.override_type or cls.__name__
-
- @classmethod
- def lookup(cls, identifier):
- rv = cls(identifier)
- if rv.is_new_record():
- return None
- else:
- return rv
-
- @classmethod
- @absorb_connection_error
- def all(cls):
- """yields all objects in the store"""
- redis_set = cls._redis_set_name(cls.__name__)
- for identifier in datastore.Redis.instance().smembers(redis_set):
- yield cls(identifier)
-
- @classmethod
- @absorb_connection_error
- def associated_to(cls, foreign_type, foreign_id):
- redis_set = cls._redis_association_name(foreign_type, foreign_id)
- for identifier in datastore.Redis.instance().smembers(redis_set):
- yield cls(identifier)
-
- @classmethod
- def _redis_set_name(cls, kls_name):
- # stupidly pluralize (for compatibility with previous codebase)
- return kls_name.lower() + "s"
-
- @classmethod
- def _redis_association_name(cls, foreign_type, foreign_id):
- return cls._redis_set_name("%s:%s:%s" %
- (foreign_type, foreign_id, cls.__name__))
-
- @property
- def identifier(self):
- """You DEFINITELY want to define this in your subclass"""
- raise NotImplementedError("Your subclass should define identifier")
-
- @property
- def __redis_key(self):
- return '%s:%s' % (self.__class__.__name__.lower(), self.identifier)
-
- def __repr__(self):
- return "<%s:%s>" % (self.__class__.__name__, self.identifier)
-
- def keys(self):
- return self.state.keys()
-
- def copy(self):
- copyDict = {}
- for item in self.keys():
- copyDict[item] = self[item]
- return copyDict
-
- def get(self, item, default):
- return self.state.get(item, default)
-
- def update(self, update_dict):
- return self.state.update(update_dict)
-
- def setdefault(self, item, default):
- return self.state.setdefault(item, default)
-
- def __getitem__(self, item):
- return self.state[item]
-
- def __setitem__(self, item, val):
- self.state[item] = val
- return self.state[item]
-
- def __delitem__(self, item):
- """We don't support this"""
- raise Exception("Silly monkey, models NEED all their properties.")
-
- def is_new_record(self):
- return self.initial_state == {}
-
- @absorb_connection_error
- def add_to_index(self):
- set_name = self.__class__._redis_set_name(self.__class__.__name__)
- datastore.Redis.instance().sadd(set_name, self.identifier)
-
- @absorb_connection_error
- def remove_from_index(self):
- set_name = self.__class__._redis_set_name(self.__class__.__name__)
- datastore.Redis.instance().srem(set_name, self.identifier)
-
- @absorb_connection_error
- def remove_from_index(self):
- set_name = self.__class__._redis_set_name(self.__class__.__name__)
- datastore.Redis.instance().srem(set_name, self.identifier)
-
- @absorb_connection_error
- def associate_with(self, foreign_type, foreign_id):
- # note the extra 's' on the end is for plurality
- # to match the old data without requiring a migration of any sort
- self.add_associated_model_to_its_set(foreign_type, foreign_id)
- redis_set = self.__class__._redis_association_name(foreign_type,
- foreign_id)
- datastore.Redis.instance().sadd(redis_set, self.identifier)
-
- @absorb_connection_error
- def unassociate_with(self, foreign_type, foreign_id):
- redis_set = self.__class__._redis_association_name(foreign_type,
- foreign_id)
- datastore.Redis.instance().srem(redis_set, self.identifier)
-
- def add_associated_model_to_its_set(self, my_type, my_id):
- table = globals()
- klsname = my_type.capitalize()
- if table.has_key(klsname):
- my_class = table[klsname]
- my_inst = my_class(my_id)
- my_inst.save()
- else:
- logging.warning("no model class for %s when building"
- " association from %s",
- klsname, self)
-
- @absorb_connection_error
- def save(self):
- """
- update the directory with the state from this model
- also add it to the index of items of the same type
- then set the initial_state = state so new changes are tracked
- """
- # TODO(ja): implement hmset in redis-py and use it
- # instead of multiple calls to hset
- if self.is_new_record():
- self["create_time"] = utils.isotime()
- for key, val in self.state.iteritems():
- # if (not self.initial_state.has_key(key)
- # or self.initial_state[key] != val):
- datastore.Redis.instance().hset(self.__redis_key, key, val)
- self.add_to_index()
- self.initial_state = self.state
- return True
-
- @absorb_connection_error
- def destroy(self):
- """
- deletes all related records from datastore.
- does NOT do anything to running libvirt state.
- """
- logging.info("Destroying datamodel for %s %s",
- self.__class__.__name__, self.identifier)
- datastore.Redis.instance().delete(self.__redis_key)
- self.remove_from_index()
- return True
-
-class Instance(BasicModel):
+class Instance(datastore.BasicModel):
"""Wrapper around stored properties of an instance"""
def __init__(self, instance_id):
@@ -365,7 +164,7 @@ class Instance(BasicModel):
self.unassociate_with("project", self.project)
return super(Instance, self).destroy()
-class Host(BasicModel):
+class Host(datastore.BasicModel):
"""A Host is the machine where a Daemon is running."""
def __init__(self, hostname):
@@ -382,7 +181,7 @@ class Host(BasicModel):
return self.hostname
-class Daemon(BasicModel):
+class Daemon(datastore.BasicModel):
"""A Daemon is a job (compute, api, network, ...) that runs on a host."""
def __init__(self, host_or_combined, binpath=None):
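
The decorator moves verbatim; call sites in this file now simply spell it datastore.absorb_connection_error. Its effect is to turn the redis client's connection failures into a nova-level error, roughly as in this sketch (count_instances is a made-up helper, not code from the commit):

    from nova import datastore


    @datastore.absorb_connection_error
    def count_instances():
        # A redis.exceptions.ConnectionError raised inside the call is caught
        # by the decorator and re-raised as datastore.ConnectionError.
        return datastore.Redis.instance().scard('instances')
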
diff --git a/nova/compute/network.py b/nova/compute/network.py
index 81618269d..15635d707 100644
--- a/nova/compute/network.py
+++ b/nova/compute/network.py
@@ -31,11 +31,10 @@ from nova import vendor
import IPy
from nova import datastore
-import nova.exception
-from nova.compute import exception
from nova import flags
-from nova.compute import model
from nova import utils
+from nova import exception
+from nova.compute import exception as compute_exception
from nova.auth import users
import linux_net
@@ -62,7 +61,7 @@ flags.DEFINE_integer('cloudpipe_start_port', 12000,
logging.getLogger().setLevel(logging.DEBUG)
-class Vlan(model.BasicModel):
+class Vlan(datastore.BasicModel):
def __init__(self, project, vlan):
"""
Since we don't want to try and find a vlan by its identifier,
@@ -82,7 +81,7 @@ class Vlan(model.BasicModel):
return instance
@classmethod
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def lookup(cls, project):
set_name = cls._redis_set_name(cls.__name__)
vlan = datastore.Redis.instance().hget(set_name, project)
@@ -92,14 +91,14 @@ class Vlan(model.BasicModel):
return None
@classmethod
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def dict_by_project(cls):
"""a hash of project:vlan"""
set_name = cls._redis_set_name(cls.__name__)
return datastore.Redis.instance().hgetall(set_name)
@classmethod
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def dict_by_vlan(cls):
"""a hash of vlan:project"""
set_name = cls._redis_set_name(cls.__name__)
@@ -110,13 +109,13 @@ class Vlan(model.BasicModel):
return rv
@classmethod
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def all(cls):
set_name = cls._redis_set_name(cls.__name__)
for project,vlan in datastore.Redis.instance().hgetall(set_name):
yield cls(project, vlan)
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def save(self):
"""
Vlan saves state into a giant hash named "vlans", with keys of
@@ -126,7 +125,7 @@ class Vlan(model.BasicModel):
set_name = self._redis_set_name(self.__class__.__name__)
datastore.Redis.instance().hset(set_name, self.project_id, self.vlan_id)
- @model.absorb_connection_error
+ @datastore.absorb_connection_error
def destroy(self):
set_name = self._redis_set_name(self.__class__.__name__)
datastore.Redis.instance().hdel(set_name, self.project)
@@ -144,7 +143,7 @@ class Vlan(model.BasicModel):
# TODO(ja): does vlanpool "keeper" need to know the min/max - shouldn't FLAGS always win?
# TODO(joshua): Save the IPs at the top of each subnet for cloudpipe vpn clients
-class BaseNetwork(model.BasicModel):
+class BaseNetwork(datastore.BasicModel):
override_type = 'network'
@property
@@ -241,11 +240,11 @@ class BaseNetwork(model.BasicModel):
self._add_host(user_id, project_id, address, mac)
self.express(address=address)
return address
- raise exception.NoMoreAddresses()
+ raise compute_exception.NoMoreAddresses()
def deallocate_ip(self, ip_str):
if not ip_str in self.assigned:
- raise exception.AddressNotAllocated()
+ raise compute_exception.AddressNotAllocated()
self.deexpress(address=ip_str)
self._rem_host(ip_str)
@@ -351,7 +350,7 @@ class DHCPNetwork(BridgedNetwork):
else:
linux_net.start_dnsmasq(self)
-class PublicAddress(model.BasicModel):
+class PublicAddress(datastore.BasicModel):
override_type = "address"
def __init__(self, address):
@@ -422,14 +421,14 @@ class PublicNetworkController(BaseNetwork):
def associate_address(self, public_ip, private_ip, instance_id):
if not public_ip in self.assigned:
- raise exception.AddressNotAllocated()
+ raise compute_exception.AddressNotAllocated()
# TODO(joshua): Keep an index going both ways
for addr in self.host_objs:
if addr.get('private_ip', None) == private_ip:
- raise exception.AddressAlreadyAssociated()
+ raise compute_exception.AddressAlreadyAssociated()
addr = self.get_host(public_ip)
if addr.get('private_ip', 'available') != 'available':
- raise exception.AddressAlreadyAssociated()
+ raise compute_exception.AddressAlreadyAssociated()
addr['private_ip'] = private_ip
addr['instance_id'] = instance_id
addr.save()
@@ -437,10 +436,10 @@ class PublicNetworkController(BaseNetwork):
def disassociate_address(self, public_ip):
if not public_ip in self.assigned:
- raise exception.AddressNotAllocated()
+ raise compute_exception.AddressNotAllocated()
addr = self.get_host(public_ip)
if addr.get('private_ip', 'available') == 'available':
- raise exception.AddressNotAssociated()
+ raise compute_exception.AddressNotAssociated()
self.deexpress(address=public_ip)
addr['private_ip'] = 'available'
addr['instance_id'] = 'available'
@@ -510,14 +509,14 @@ def get_vlan_for_project(project_id):
return vlan
else:
return Vlan.create(project_id, vnum)
- raise exception.AddressNotAllocated("Out of VLANs")
+ raise compute_exception.AddressNotAllocated("Out of VLANs")
def get_network_by_address(address):
for project in users.UserManager.instance().get_projects():
net = get_project_network(project.id)
if address in net.assigned:
return net
- raise exception.AddressNotAllocated()
+ raise compute_exception.AddressNotAllocated()
def allocate_vpn_ip(user_id, project_id, mac):
return get_project_network(project_id).allocate_vpn_ip(mac)
@@ -534,7 +533,7 @@ def get_project_network(project_id, security_group='default'):
# Refactor to still use the LDAP backend, but not User specific.
project = users.UserManager.instance().get_project(project_id)
if not project:
- raise nova.exception.Error("Project %s doesn't exist, uhoh." %
+ raise exception.Error("Project %s doesn't exist, uhoh." %
project_id)
return DHCPNetwork.get_network_for_project(project.project_manager_id,
project.id, security_group)
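
Besides switching its models to datastore.BasicModel, network.py renames its exception imports: framework-level errors now come from nova.exception imported as exception, while compute-specific errors are raised through the compute_exception alias. A small hypothetical helper makes the convention explicit (the function and its arguments are illustrative only):

    from nova import exception
    from nova.compute import exception as compute_exception


    def _check_allocation(project, address, assigned_addresses):
        if project is None:
            # framework-level failure
            raise exception.Error("Project doesn't exist")
        if address not in assigned_addresses:
            # network/compute-specific failure
            raise compute_exception.AddressNotAllocated()
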
diff --git a/nova/datastore.py b/nova/datastore.py
index 5a9b80c62..405b40173 100644
--- a/nova/datastore.py
+++ b/nova/datastore.py
@@ -37,6 +37,7 @@ import time
from nova import vendor
import redis
+from nova import exception
from nova import flags
from nova import utils
@@ -305,3 +306,204 @@ def Keeper(prefix=''):
'sqlite': SqliteKeeper}
return KEEPERS[FLAGS.keeper_backend](prefix)
+class ConnectionError(exception.Error):
+ pass
+
+
+def absorb_connection_error(fn):
+ def _wrapper(*args, **kwargs):
+ try:
+ return fn(*args, **kwargs)
+ except redis.exceptions.ConnectionError, ce:
+ raise ConnectionError(str(ce))
+ return _wrapper
+
+
+class BasicModel(object):
+ """
+ All Redis-backed data derives from this class.
+
+ You MUST specify an identifier() property that returns a unique string
+ per instance.
+
+ You MUST have an initializer that takes a single argument that is a value
+ returned by identifier() to load a new class with.
+
+ You may want to specify a dictionary for default_state().
+
+ You may also specify override_type at the class level to use a key other
+ than __class__.__name__.
+
+ You override save and destroy calls to automatically build and destroy
+ associations.
+ """
+
+ override_type = None
+
+ @absorb_connection_error
+ def __init__(self):
+ self.initial_state = {}
+ self.state = Redis.instance().hgetall(self.__redis_key)
+ if self.state:
+ self.initial_state = self.state
+ else:
+ self.state = self.default_state()
+
+ def default_state(self):
+ """You probably want to define this in your subclass"""
+ return {}
+
+ @classmethod
+ def _redis_name(cls):
+ return cls.override_type or cls.__name__
+
+ @classmethod
+ def lookup(cls, identifier):
+ rv = cls(identifier)
+ if rv.is_new_record():
+ return None
+ else:
+ return rv
+
+ @classmethod
+ @absorb_connection_error
+ def all(cls):
+ """yields all objects in the store"""
+ redis_set = cls._redis_set_name(cls.__name__)
+ for identifier in Redis.instance().smembers(redis_set):
+ yield cls(identifier)
+
+ @classmethod
+ @absorb_connection_error
+ def associated_to(cls, foreign_type, foreign_id):
+ redis_set = cls._redis_association_name(foreign_type, foreign_id)
+ for identifier in Redis.instance().smembers(redis_set):
+ yield cls(identifier)
+
+ @classmethod
+ def _redis_set_name(cls, kls_name):
+ # stupidly pluralize (for compatibility with previous codebase)
+ return kls_name.lower() + "s"
+
+ @classmethod
+ def _redis_association_name(cls, foreign_type, foreign_id):
+ return cls._redis_set_name("%s:%s:%s" %
+ (foreign_type, foreign_id, cls.__name__))
+
+ @property
+ def identifier(self):
+ """You DEFINITELY want to define this in your subclass"""
+ raise NotImplementedError("Your subclass should define identifier")
+
+ @property
+ def __redis_key(self):
+ return '%s:%s' % (self.__class__.__name__.lower(), self.identifier)
+
+ def __repr__(self):
+ return "<%s:%s>" % (self.__class__.__name__, self.identifier)
+
+ def keys(self):
+ return self.state.keys()
+
+ def copy(self):
+ copyDict = {}
+ for item in self.keys():
+ copyDict[item] = self[item]
+ return copyDict
+
+ def get(self, item, default):
+ return self.state.get(item, default)
+
+ def update(self, update_dict):
+ return self.state.update(update_dict)
+
+ def setdefault(self, item, default):
+ return self.state.setdefault(item, default)
+
+ def __getitem__(self, item):
+ return self.state[item]
+
+ def __setitem__(self, item, val):
+ self.state[item] = val
+ return self.state[item]
+
+ def __delitem__(self, item):
+ """We don't support this"""
+ raise Exception("Silly monkey, models NEED all their properties.")
+
+ def is_new_record(self):
+ return self.initial_state == {}
+
+ @absorb_connection_error
+ def add_to_index(self):
+ set_name = self.__class__._redis_set_name(self.__class__.__name__)
+ Redis.instance().sadd(set_name, self.identifier)
+
+ @absorb_connection_error
+ def remove_from_index(self):
+ set_name = self.__class__._redis_set_name(self.__class__.__name__)
+ Redis.instance().srem(set_name, self.identifier)
+
+ @absorb_connection_error
+ def remove_from_index(self):
+ set_name = self.__class__._redis_set_name(self.__class__.__name__)
+ Redis.instance().srem(set_name, self.identifier)
+
+ @absorb_connection_error
+ def associate_with(self, foreign_type, foreign_id):
+ # note the extra 's' on the end is for plurality
+ # to match the old data without requiring a migration of any sort
+ self.add_associated_model_to_its_set(foreign_type, foreign_id)
+ redis_set = self.__class__._redis_association_name(foreign_type,
+ foreign_id)
+ Redis.instance().sadd(redis_set, self.identifier)
+
+ @absorb_connection_error
+ def unassociate_with(self, foreign_type, foreign_id):
+ redis_set = self.__class__._redis_association_name(foreign_type,
+ foreign_id)
+ Redis.instance().srem(redis_set, self.identifier)
+
+ def add_associated_model_to_its_set(self, my_type, my_id):
+ table = globals()
+ klsname = my_type.capitalize()
+ if table.has_key(klsname):
+ my_class = table[klsname]
+ my_inst = my_class(my_id)
+ my_inst.save()
+ else:
+ logging.warning("no model class for %s when building"
+ " association from %s",
+ klsname, self)
+
+ @absorb_connection_error
+ def save(self):
+ """
+ update the directory with the state from this model
+ also add it to the index of items of the same type
+ then set the initial_state = state so new changes are tracked
+ """
+ # TODO(ja): implement hmset in redis-py and use it
+ # instead of multiple calls to hset
+ if self.is_new_record():
+ self["create_time"] = utils.isotime()
+ for key, val in self.state.iteritems():
+ # if (not self.initial_state.has_key(key)
+ # or self.initial_state[key] != val):
+ Redis.instance().hset(self.__redis_key, key, val)
+ self.add_to_index()
+ self.initial_state = self.state
+ return True
+
+ @absorb_connection_error
+ def destroy(self):
+ """
+ deletes all related records from datastore.
+ does NOT do anything to running libvirt state.
+ """
+ logging.info("Destroying datamodel for %s %s",
+ self.__class__.__name__, self.identifier)
+ Redis.instance().delete(self.__redis_key)
+ self.remove_from_index()
+ return True
+
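
For reference, save() on the relocated BasicModel still issues one HSET per state key (the TODO about batching with hmset remains), adds the identifier to the per-class index set, and stamps create_time on new records. In raw redis-py terms, saving a hypothetical Host('node1') whose state is just {'hostname': 'node1'} amounts to roughly the following (a sketch only; the timestamp value is made up):

    import redis

    r = redis.Redis()    # stand-in for the connection Redis.instance() returns
    r.hset('host:node1', 'hostname', 'node1')                    # one hset per state key
    r.hset('host:node1', 'create_time', '2010-07-14T20:14:25Z')  # only for new records
    r.sadd('hosts', 'node1')                                      # add_to_index()
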
diff --git a/nova/volume/storage.py b/nova/volume/storage.py
index 82d7a4c22..4a92c3b38 100644
--- a/nova/volume/storage.py
+++ b/nova/volume/storage.py
@@ -156,7 +156,7 @@ class BlockStore(object):
utils.runthis("PVCreate returned: %s", "sudo pvcreate %s" % (FLAGS.storage_dev))
utils.runthis("VGCreate returned: %s", "sudo vgcreate %s %s" % (FLAGS.volume_group, FLAGS.storage_dev))
-class Volume(model.BasicModel):
+class Volume(datastore.BasicModel):
def __init__(self, volume_id=None):
self.volume_id = volume_id