summaryrefslogtreecommitdiffstats
path: root/nova/compute
diff options
context:
space:
mode:
authorTodd Willey <todd@rubidine.com>2010-06-25 18:55:14 -0400
committerTodd Willey <todd@rubidine.com>2010-06-25 18:55:14 -0400
commit849282175c38ec419fc037b1698cb4de4efdb833 (patch)
tree0deab000fe0a95e05df55d3392a5169aa35cbe3b /nova/compute
parent8a2f1763cbadd47f6ce26467ac9f82bc9b436d2a (diff)
Admin API + Worker Tracking.
Diffstat (limited to 'nova/compute')
-rw-r--r--nova/compute/model.py281
-rw-r--r--nova/compute/node.py16
2 files changed, 258 insertions, 39 deletions
diff --git a/nova/compute/model.py b/nova/compute/model.py
index 86f912f01..ddcb120e4 100644
--- a/nova/compute/model.py
+++ b/nova/compute/model.py
@@ -43,16 +43,30 @@ True
"""
import logging
+import time
from nova import vendor
+import redis
from nova import datastore
+from nova import exception
from nova import flags
from nova import utils
FLAGS = flags.FLAGS
+class ConnectionError(exception.Error):
+ pass
+
+def absorb_connection_error(fn):
+ def _wrapper(*args, **kwargs):
+ try:
+ return fn(*args, **kwargs)
+ except redis.exceptions.ConnectionError, ce:
+ raise ConnectionError(str(ce))
+ return _wrapper
+
# TODO(ja): singleton instance of the directory
class InstanceDirectory(object):
"""an api for interacting with the global state of instances """
@@ -64,6 +78,7 @@ class InstanceDirectory(object):
def __getitem__(self, item):
return self.get(item)
+ @absorb_connection_error
def by_project(self, project):
""" returns a list of instance objects for a project """
for instance_id in datastore.Redis.instance().smembers('project:%s:instances' % project):
@@ -87,10 +102,12 @@ class InstanceDirectory(object):
""" returns the instance a volume is attached to """
pass
+ @absorb_connection_error
def exists(self, instance_id):
return datastore.Redis.instance().sismember('instances', instance_id)
@property
+ @absorb_connection_error
def all(self):
""" returns a list of all instances """
for instance_id in datastore.Redis.instance().smembers('instances'):
@@ -101,34 +118,66 @@ class InstanceDirectory(object):
instance_id = utils.generate_uid('i')
return self.get(instance_id)
-
-
-class Instance(object):
- """ Wrapper around stored properties of an instance """
-
- def __init__(self, instance_id):
- """ loads an instance from the datastore if exists """
- self.instance_id = instance_id
+class BasicModel(object):
+ @absorb_connection_error
+ def __init__(self):
self.initial_state = {}
self.state = datastore.Redis.instance().hgetall(self.__redis_key)
if self.state:
self.initial_state = self.state
else:
- self.state = {'state': 0,
- 'state_description': 'pending',
- 'instance_id': instance_id,
- 'node_name': 'unassigned',
- 'project_id': 'unassigned',
- 'user_id': 'unassigned'
- }
+ self.state = self.default_state()
+
+ def default_state(self):
+ """ You probably want to define this in your subclass """
+ return {}
+
+ @classmethod
+ def lookup(cls, identifier):
+ rv = cls(identifier)
+ if rv.new_record():
+ return None
+ else:
+ return rv
+
+ @classmethod
+ @absorb_connection_error
+ def all(cls):
+ """ yields all objects in the store """
+ redis_set = cls._redis_set_name(cls.__name__)
+ for identifier in datastore.Redis.instance().smembers(redis_set):
+ yield cls(identifier)
+
+ @classmethod
+ @absorb_connection_error
+ def associated_to(cls, foreign_type, foreign_id):
+ redis_set = cls._redis_association_name(foreign_type, foreign_id)
+ for identifier in datastore.Redis.instance().smembers(redis_set):
+ yield cls(identifier)
+
+ @classmethod
+ def _redis_set_name(cls, kls_name):
+ # stupidly pluralize (for compatibility with previous codebase)
+ return kls_name.lower() + "s"
+
+ @classmethod
+ def _redis_association_name(cls, foreign_type, foreign_id):
+ return cls._redis_set_name(
+ "%s:%s:%s" %
+ (foreign_type, foreign_id, cls.__name__)
+ )
+
+ @property
+ def identifier(self):
+ """ You DEFINITELY want to define this in your subclass """
+ raise Exception("Your sublcass should define identifier")
@property
def __redis_key(self):
- """ Magic string for instance keys """
- return 'instance:%s' % self.instance_id
+ return '%s:%s' % (self.__class__.__name__.lower(), self.identifier)
def __repr__(self):
- return "<Instance:%s>" % self.instance_id
+ return "<%s:%s>" % (self.__class__.__name__, self.identifier)
def keys(self):
return self.state.keys()
@@ -157,12 +206,59 @@ class Instance(object):
def __delitem__(self, item):
""" We don't support this """
- raise Exception("Silly monkey, Instances NEED all their properties.")
+ raise Exception("Silly monkey, models NEED all their properties.")
+
+ def new_record(self):
+ return self.initial_state == {}
+
+ @absorb_connection_error
+ def add_to_index(self):
+ set_name = self.__class__._redis_set_name(self.__class__.__name__)
+ datastore.Redis.instance().sadd(set_name, self.identifier)
+
+ @absorb_connection_error
+ def remove_from_index(self):
+ set_name = self.__class__._redis_set_name(self.__class__.__name__)
+ datastore.Redis.instance().srem(set_name, self.identifier)
+
+ @absorb_connection_error
+ def associate_with(self, foreign_type, foreign_id):
+ # note the extra 's' on the end is for plurality
+ # to match the old data without requiring a migration of any sort
+ self.add_associated_model_to_its_set(foreign_type, foreign_id)
+ redis_set = self.__class__._redis_association_name(
+ foreign_type,
+ foreign_id
+ )
+ datastore.Redis.instance().sadd(redis_set, self.identifier)
+
+ @absorb_connection_error
+ def unassociate_with(self, foreign_type, foreign_id):
+ redis_set = self.__class__._redis_association_name(
+ foreign_type,
+ foreign_id
+ )
+ datastore.Redis.instance().srem(redis_set, self.identifier)
+
+ def add_associated_model_to_its_set(self, my_type, my_id):
+ table = globals()
+ klsname = my_type.capitalize()
+ if table.has_key(klsname):
+ my_class = table[klsname]
+ my_inst = my_class(my_id)
+ my_inst.save()
+ else:
+ logging.warning(
+ "no model class for %s when building association from %s" %
+ (klsname, self)
+ )
+ @absorb_connection_error
def save(self):
- """ update the directory with the state from this instance
- make sure you've set the project_id and user_id before you call save
- for the first time.
+ """
+ update the directory with the state from this model
+ also add it to the index of items of the same type
+ then set the initial_state = state so new changes are tracked
"""
# TODO(ja): implement hmset in redis-py and use it
# instead of multiple calls to hset
@@ -170,29 +266,53 @@ class Instance(object):
# if (not self.initial_state.has_key(key)
# or self.initial_state[key] != val):
datastore.Redis.instance().hset(self.__redis_key, key, val)
- if self.initial_state == {}:
- datastore.Redis.instance().sadd('project:%s:instances' % self.project,
- self.instance_id)
- datastore.Redis.instance().sadd('instances', self.instance_id)
+ self.add_to_index()
self.initial_state = self.state
return True
+ @absorb_connection_error
+ def destroy(self):
+ """
+ deletes all related records from datastore.
+ does NOT do anything to running libvirt state.
+ """
+ logging.info(
+ "Destroying datamodel for %s %s",
+ (self.__class__.__name__, self.identifier)
+ )
+ datastore.Redis.instance().delete(self.__redis_key)
+ return True
+
+
+class Instance(BasicModel):
+ """ Wrapper around stored properties of an instance """
+
+ def __init__(self, instance_id):
+ """ loads an instance from the datastore if exists """
+ # set instance data before super call since it uses default_state
+ self.instance_id = instance_id
+ super(Instance, self).__init__()
+
+ def default_state(self):
+ return {
+ 'state': 0,
+ 'state_description': 'pending',
+ 'instance_id': self.instance_id,
+ 'node_name': 'unassigned',
+ 'project_id': 'unassigned',
+ 'user_id': 'unassigned'
+ }
+
+ @property
+ def identifier(self):
+ return self.instance_id
+
@property
def project(self):
if self.state.get('project_id', None):
return self.state['project_id']
return self.state.get('owner_id', 'unassigned')
- def destroy(self):
- """ deletes all related records from datastore.
- does NOT do anything to running libvirt state.
- """
- logging.info("Destroying datamodel for instance %s", self.instance_id)
- datastore.Redis.instance().srem('project:%s:instances' % self.project,
- self.instance_id)
- datastore.Redis.instance().srem('instances', self.instance_id)
- return True
-
@property
def volumes(self):
""" returns a list of attached volumes """
@@ -203,6 +323,95 @@ class Instance(object):
""" Returns a reservation object """
pass
+ def save(self):
+ """ Call into superclass to save object, then save associations """
+ # XXX: doesn't track migration between projects, just adds the first one
+ should_update_project = self.new_record()
+ success = super(Instance, self).save()
+ if success and should_update_project:
+ self.associate_with("project", self.project)
+ return True
+
+ def destroy(self):
+ """ Destroy associations, then destroy the object """
+ self.unassociate_with("project", self.project)
+ return super(Instance, self).destroy()
+
+class Host(BasicModel):
+ """
+ A Host is the base machine that runs many virtualized Instances.
+ Hosts are usually controlled via nova.compute.node.Node; this model
+ just stores stats about a host in redis.
+ """
+
+ def __init__(self, hostname):
+ """ loads an instance from the datastore if exists """
+ # set instance data before super call since it uses default_state
+ self.hostname = hostname
+ super(Host, self).__init__()
+
+ def default_state(self):
+ return {
+ "hostname": self.hostname
+ }
+
+ @property
+ def identifier(self):
+ return self.hostname
+
+
+class Worker(BasicModel):
+ """
+ A Worker is a job (compute, api, network, ...) that runs on a host.
+ """
+
+ def __init__(self, host_or_combined, binpath=None):
+ """ loads an instance from the datastore if exists """
+ # set instance data before super call since it uses default_state
+ # since loading from datastore expects a combined key that
+ # is equivalent to identifier, we need to expect that, while
+ # maintaining meaningful semantics (2 arguments) when creating
+ # from within other code like the bin/nova-* scripts
+ if binpath:
+ self.hostname = host_or_combined
+ self.binary = binpath
+ else:
+ self.hostname, self.binary = host_or_combined.split(":")
+ super(Worker, self).__init__()
+
+ def default_state(self):
+ return {
+ "hostname": self.hostname,
+ "binary": self.binary,
+ "updated_at": utils.timestamp()
+ }
+
+ @property
+ def identifier(self):
+ return "%s:%s" % (self.hostname, self.binary)
+
+ def save(self):
+ """ Call into superclass to save object, then save associations """
+ # XXX: doesn't clear out from host list after crash, termination, etc
+ success = super(Worker, self).save()
+ if success:
+ self.associate_with("host", self.hostname)
+ return True
+
+ def destroy(self):
+ """ Destroy associations, then destroy the object """
+ self.unassociate_with("host", self.hostname)
+ return super(Worker, self).destroy()
+
+ def heartbeat(self):
+ self['updated_at'] = utils.timestamp()
+ self.save()
+ return True
+
+ @classmethod
+ def by_host(cls, hostname):
+ for x in cls.associated_to("host", hostname):
+ yield x
if __name__ == "__main__":
import doctest
diff --git a/nova/compute/node.py b/nova/compute/node.py
index c217056f5..b0f6173c9 100644
--- a/nova/compute/node.py
+++ b/nova/compute/node.py
@@ -142,9 +142,19 @@ class Node(object, service.Service):
return retval
@defer.inlineCallbacks
- def report_state(self):
- logging.debug("Reporting State")
- return
+ def report_state(self, hostname, worker):
+ try:
+ record = model.Worker(hostname, worker)
+ record.heartbeat()
+ if getattr(self, "model_disconnected", False):
+ self.model_disconnected = False
+ logging.error("Recovered model server connection!")
+
+ except model.ConnectionError, ex:
+ if not getattr(self, "model_disconnected", False):
+ self.model_disconnected = True
+ logging.exception("model server went away")
+ yield
# @exception.wrap_exception
def run_instance(self, instance_id, **_kwargs):