author    Sandy Walsh <sandy.walsh@rackspace.com>  2011-09-13 16:46:53 -0700
committer Sandy Walsh <sandy.walsh@rackspace.com>  2011-09-13 16:46:53 -0700
commit    86b60c769a1e6c482afcd11fe6d11a6dd8bae2c0 (patch)
tree      09221662579485b46a42915185f20e7541cdf158
parent    8a9b192f5436fa53b58b713e193733ea677766cc (diff)
parent    7f1a0a05ec32ecb07c3a5f2286f841c4abc8f5e0 (diff)
trunk merge
-rw-r--r--  MANIFEST.in | 2
-rw-r--r--  nova/api/ec2/cloud.py | 30
-rw-r--r--  nova/api/openstack/common.py | 66
-rw-r--r--  nova/api/openstack/flavors.py | 69
-rw-r--r--  nova/api/openstack/image_metadata.py | 58
-rw-r--r--  nova/api/openstack/images.py | 135
-rw-r--r--  nova/api/openstack/ips.py | 59
-rw-r--r--  nova/api/openstack/limits.py | 78
-rw-r--r--  nova/api/openstack/schemas/v1.1/addresses.rng | 14
-rw-r--r--  nova/api/openstack/schemas/v1.1/flavor.rng | 10
-rw-r--r--  nova/api/openstack/schemas/v1.1/flavors.rng | 6
-rw-r--r--  nova/api/openstack/schemas/v1.1/flavors_index.rng | 12
-rw-r--r--  nova/api/openstack/schemas/v1.1/image.rng | 30
-rw-r--r--  nova/api/openstack/schemas/v1.1/images.rng | 6
-rw-r--r--  nova/api/openstack/schemas/v1.1/images_index.rng | 12
-rw-r--r--  nova/api/openstack/schemas/v1.1/limits.rng | 28
-rw-r--r--  nova/api/openstack/schemas/v1.1/metadata.rng | 9
-rw-r--r--  nova/api/openstack/schemas/v1.1/server.rng | 6
-rw-r--r--  nova/api/openstack/servers.py | 226
-rw-r--r--  nova/api/openstack/versions.py | 242
-rw-r--r--  nova/api/openstack/views/versions.py | 2
-rw-r--r--  nova/api/openstack/wsgi.py | 5
-rw-r--r--  nova/compute/api.py | 2
-rw-r--r--  nova/db/sqlalchemy/migrate_repo/versions/046_add_instance_swap.py | 48
-rw-r--r--  nova/db/sqlalchemy/models.py | 2
-rw-r--r--  nova/image/fake.py | 3
-rw-r--r--  nova/image/glance.py | 80
-rw-r--r--  nova/image/s3.py | 3
-rw-r--r--  nova/image/service.py | 200
-rw-r--r--  nova/tests/api/ec2/public_key/dummy.fingerprint (renamed from nova/tests/public_key/dummy.fingerprint) | 0
-rw-r--r--  nova/tests/api/ec2/public_key/dummy.pub (renamed from nova/tests/public_key/dummy.pub) | 0
-rw-r--r--  nova/tests/api/ec2/test_cloud.py (renamed from nova/tests/test_cloud.py) | 4
-rw-r--r--  nova/tests/api/openstack/common.py | 22
-rw-r--r--  nova/tests/api/openstack/contrib/test_createserverext.py | 6
-rw-r--r--  nova/tests/api/openstack/fakes.py | 127
-rw-r--r--  nova/tests/api/openstack/test_common.py | 136
-rw-r--r--  nova/tests/api/openstack/test_flavors.py | 207
-rw-r--r--  nova/tests/api/openstack/test_image_metadata.py | 163
-rw-r--r--  nova/tests/api/openstack/test_images.py | 1040
-rw-r--r--  nova/tests/api/openstack/test_limits.py | 90
-rw-r--r--  nova/tests/api/openstack/test_servers.py | 280
-rw-r--r--  nova/tests/api/openstack/test_versions.py | 604
-rw-r--r--  nova/tests/glance/stubs.py | 68
-rw-r--r--  nova/tests/image/test_glance.py | 637
-rw-r--r--  nova/tests/integrated/test_xml.py | 12
-rw-r--r--  nova/tests/test_direct.py | 2
-rw-r--r--  nova/tests/test_libvirt.py | 45
-rw-r--r--  nova/tests/test_xenapi.py | 3
-rw-r--r--  nova/virt/disk.py | 41
-rw-r--r--  nova/virt/libvirt/connection.py | 23
-rw-r--r--  nova/virt/xenapi/vmops.py | 12
-rw-r--r--  nova/volume/driver.py | 9
-rw-r--r--  tools/pip-requires | 1
53 files changed, 2493 insertions(+), 2482 deletions(-)
diff --git a/MANIFEST.in b/MANIFEST.in
index 883aba8a1..5451ace4b 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -37,7 +37,7 @@ include nova/tests/bundle/1mb.manifest.xml
include nova/tests/bundle/1mb.no_kernel_or_ramdisk.manifest.xml
include nova/tests/bundle/1mb.part.0
include nova/tests/bundle/1mb.part.1
-include nova/tests/public_key/*
+include nova/tests/api/ec2/public_key/*
include nova/tests/db/nova.austin.sqlite
include plugins/xenapi/README
include plugins/xenapi/etc/xapi.d/plugins/objectstore
diff --git a/nova/api/ec2/cloud.py b/nova/api/ec2/cloud.py
index 4f7030a5a..0efb90d6e 100644
--- a/nova/api/ec2/cloud.py
+++ b/nova/api/ec2/cloud.py
@@ -272,11 +272,23 @@ class CloudController(object):
mappings = {}
mappings['ami'] = block_device.strip_dev(root_device_name)
mappings['root'] = root_device_name
-
- # 'ephemeralN' and 'swap'
+ default_local_device = instance_ref.get('default_local_device')
+ if default_local_device:
+ mappings['ephemeral0'] = default_local_device
+ default_swap_device = instance_ref.get('default_swap_device')
+ if default_swap_device:
+ mappings['swap'] = default_swap_device
+ ebs_devices = []
+
+ # 'ephemeralN', 'swap' and ebs
for bdm in db.block_device_mapping_get_all_by_instance(
ctxt, instance_ref['id']):
- if (bdm['volume_id'] or bdm['snapshot_id'] or bdm['no_device']):
+ if bdm['no_device']:
+ continue
+
+ # ebs volume case
+ if (bdm['volume_id'] or bdm['snapshot_id']):
+ ebs_devices.append(bdm['device_name'])
continue
virtual_name = bdm['virtual_name']
@@ -286,6 +298,16 @@ class CloudController(object):
if block_device.is_swap_or_ephemeral(virtual_name):
mappings[virtual_name] = bdm['device_name']
+ # NOTE(yamahata): I'm not sure how ebs device should be numbered.
+ # Right now sort by device name for deterministic
+ # result.
+ if ebs_devices:
+ nebs = 0
+ ebs_devices.sort()
+ for ebs in ebs_devices:
+ mappings['ebs%d' % nebs] = ebs
+ nebs += 1
+
return mappings
def get_metadata(self, address):
@@ -1467,7 +1489,7 @@ class CloudController(object):
return image
def _format_image(self, image):
- """Convert from format defined by BaseImageService to S3 format."""
+ """Convert from format defined by GlanceImageService to S3 format."""
i = {}
image_type = self._image_type(image.get('container_format'))
ec2_id = self.image_ec2_id(image.get('id'), image_type)
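Note: the net effect of the block-device-mapping hunk above is easiest to see on a concrete example. A minimal sketch of the dict the mapping code now builds, using hypothetical device names that are not taken from this commit:

    # Hypothetical instance values, for illustration only.
    instance_ref = {'root_device_name': '/dev/vda',
                    'default_local_device': '/dev/vdb',
                    'default_swap_device': '/dev/vdc'}
    # Two EBS block-device-mapping rows (device names deliberately unsorted).
    ebs_devices = ['/dev/vdf', '/dev/vde']

    # With those inputs, the code above would return:
    # {'ami': 'vda', 'root': '/dev/vda',
    #  'ephemeral0': '/dev/vdb', 'swap': '/dev/vdc',
    #  'ebs0': '/dev/vde', 'ebs1': '/dev/vdf'}   # ebs entries sorted by device name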
diff --git a/nova/api/openstack/common.py b/nova/api/openstack/common.py
index d743a66ef..a836a584c 100644
--- a/nova/api/openstack/common.py
+++ b/nova/api/openstack/common.py
@@ -16,6 +16,7 @@
# under the License.
import functools
+from lxml import etree
import re
import urlparse
from xml.dom import minidom
@@ -27,6 +28,7 @@ from nova import flags
from nova import log as logging
from nova import quota
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
from nova.compute import vm_states
from nova.compute import task_states
@@ -308,54 +310,48 @@ class MetadataHeadersSerializer(wsgi.ResponseHeadersSerializer):
class MetadataXMLSerializer(wsgi.XMLDictSerializer):
+
+ NSMAP = {None: xmlutil.XMLNS_V11}
+
def __init__(self, xmlns=wsgi.XMLNS_V11):
super(MetadataXMLSerializer, self).__init__(xmlns=xmlns)
- def _meta_item_to_xml(self, doc, key, value):
- node = doc.createElement('meta')
- doc.appendChild(node)
- node.setAttribute('key', '%s' % key)
- text = doc.createTextNode('%s' % value)
- node.appendChild(text)
- return node
-
- def meta_list_to_xml(self, xml_doc, meta_items):
- container_node = xml_doc.createElement('metadata')
- for (key, value) in meta_items:
- item_node = self._meta_item_to_xml(xml_doc, key, value)
- container_node.appendChild(item_node)
- return container_node
-
- def _meta_list_to_xml_string(self, metadata_dict):
- xml_doc = minidom.Document()
- items = metadata_dict['metadata'].items()
- container_node = self.meta_list_to_xml(xml_doc, items)
- xml_doc.appendChild(container_node)
- self._add_xmlns(container_node)
- return xml_doc.toxml('UTF-8')
+ def populate_metadata(self, metadata_elem, meta_dict):
+ for (key, value) in meta_dict.items():
+ elem = etree.SubElement(metadata_elem, 'meta')
+ elem.set('key', str(key))
+ elem.text = value
+
+ def _populate_meta_item(self, meta_elem, meta_item_dict):
+ """Populate a meta xml element from a dict."""
+ (key, value) = meta_item_dict.items()[0]
+ meta_elem.set('key', str(key))
+ meta_elem.text = value
def index(self, metadata_dict):
- return self._meta_list_to_xml_string(metadata_dict)
+ metadata = etree.Element('metadata', nsmap=self.NSMAP)
+ self.populate_metadata(metadata, metadata_dict.get('metadata', {}))
+ return self._to_xml(metadata)
def create(self, metadata_dict):
- return self._meta_list_to_xml_string(metadata_dict)
+ metadata = etree.Element('metadata', nsmap=self.NSMAP)
+ self.populate_metadata(metadata, metadata_dict.get('metadata', {}))
+ return self._to_xml(metadata)
def update_all(self, metadata_dict):
- return self._meta_list_to_xml_string(metadata_dict)
-
- def _meta_item_to_xml_string(self, meta_item_dict):
- xml_doc = minidom.Document()
- item_key, item_value = meta_item_dict.items()[0]
- item_node = self._meta_item_to_xml(xml_doc, item_key, item_value)
- xml_doc.appendChild(item_node)
- self._add_xmlns(item_node)
- return xml_doc.toxml('UTF-8')
+ metadata = etree.Element('metadata', nsmap=self.NSMAP)
+ self.populate_metadata(metadata, metadata_dict.get('metadata', {}))
+ return self._to_xml(metadata)
def show(self, meta_item_dict):
- return self._meta_item_to_xml_string(meta_item_dict['meta'])
+ meta = etree.Element('meta', nsmap=self.NSMAP)
+ self._populate_meta_item(meta, meta_item_dict['meta'])
+ return self._to_xml(meta)
def update(self, meta_item_dict):
- return self._meta_item_to_xml_string(meta_item_dict['meta'])
+ meta = etree.Element('meta', nsmap=self.NSMAP)
+ self._populate_meta_item(meta, meta_item_dict['meta'])
+ return self._to_xml(meta)
def default(self, *args, **kwargs):
return ''
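Note: the rewritten serializers all follow the same lxml pattern. A standalone sketch of what populate_metadata plus _to_xml emit, with an assumed one-entry metadata dict; the namespace URI matches the ns attribute used in the v1.1 schemas added below:

    from lxml import etree

    XMLNS_V11 = 'http://docs.openstack.org/compute/api/v1.1'

    metadata = etree.Element('metadata', nsmap={None: XMLNS_V11})
    for key, value in {'kernel_id': 'nokernel'}.items():
        meta = etree.SubElement(metadata, 'meta')
        meta.set('key', str(key))
        meta.text = value

    print etree.tostring(metadata, encoding='UTF-8', xml_declaration=True)
    # <?xml version='1.0' encoding='UTF-8'?>
    # <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"><meta key="kernel_id">nokernel</meta></metadata>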
diff --git a/nova/api/openstack/flavors.py b/nova/api/openstack/flavors.py
index fd36060da..805aad772 100644
--- a/nova/api/openstack/flavors.py
+++ b/nova/api/openstack/flavors.py
@@ -16,12 +16,13 @@
# under the License.
import webob
-import xml.dom.minidom as minidom
+from lxml import etree
from nova import db
from nova import exception
from nova.api.openstack import views
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
class Controller(object):
@@ -78,48 +79,44 @@ class ControllerV11(Controller):
class FlavorXMLSerializer(wsgi.XMLDictSerializer):
+ NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
+
def __init__(self):
super(FlavorXMLSerializer, self).__init__(xmlns=wsgi.XMLNS_V11)
- def _flavor_to_xml(self, xml_doc, flavor, detailed):
- flavor_node = xml_doc.createElement('flavor')
- flavor_node.setAttribute('id', str(flavor['id']))
- flavor_node.setAttribute('name', flavor['name'])
+ def _populate_flavor(self, flavor_elem, flavor_dict, detailed=False):
+ """Populate a flavor xml element from a dict."""
+ flavor_elem.set('name', flavor_dict['name'])
+ flavor_elem.set('id', str(flavor_dict['id']))
if detailed:
- flavor_node.setAttribute('ram', str(flavor['ram']))
- flavor_node.setAttribute('disk', str(flavor['disk']))
-
- link_nodes = self._create_link_nodes(xml_doc, flavor['links'])
- for link_node in link_nodes:
- flavor_node.appendChild(link_node)
- return flavor_node
-
- def _flavors_list_to_xml(self, xml_doc, flavors, detailed):
- container_node = xml_doc.createElement('flavors')
-
- for flavor in flavors:
- item_node = self._flavor_to_xml(xml_doc, flavor, detailed)
- container_node.appendChild(item_node)
- return container_node
+ flavor_elem.set('ram', str(flavor_dict['ram']))
+ flavor_elem.set('disk', str(flavor_dict['disk']))
+ for link in flavor_dict.get('links', []):
+ elem = etree.SubElement(flavor_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return flavor_elem
def show(self, flavor_container):
- xml_doc = minidom.Document()
- flavor = flavor_container['flavor']
- node = self._flavor_to_xml(xml_doc, flavor, True)
- return self.to_xml_string(node, True)
-
- def detail(self, flavors_container):
- xml_doc = minidom.Document()
- flavors = flavors_container['flavors']
- node = self._flavors_list_to_xml(xml_doc, flavors, True)
- return self.to_xml_string(node, True)
-
- def index(self, flavors_container):
- xml_doc = minidom.Document()
- flavors = flavors_container['flavors']
- node = self._flavors_list_to_xml(xml_doc, flavors, False)
- return self.to_xml_string(node, True)
+ flavor = etree.Element('flavor', nsmap=self.NSMAP)
+ self._populate_flavor(flavor, flavor_container['flavor'], True)
+ return self._to_xml(flavor)
+
+ def detail(self, flavors_dict):
+ flavors = etree.Element('flavors', nsmap=self.NSMAP)
+ for flavor_dict in flavors_dict['flavors']:
+ flavor = etree.SubElement(flavors, 'flavor')
+ self._populate_flavor(flavor, flavor_dict, True)
+ return self._to_xml(flavors)
+
+ def index(self, flavors_dict):
+ flavors = etree.Element('flavors', nsmap=self.NSMAP)
+ for flavor_dict in flavors_dict['flavors']:
+ flavor = etree.SubElement(flavors, 'flavor')
+ self._populate_flavor(flavor, flavor_dict, False)
+ return self._to_xml(flavors)
def create_resource(version='1.0'):
diff --git a/nova/api/openstack/image_metadata.py b/nova/api/openstack/image_metadata.py
index 4d615ea96..adb6bee4b 100644
--- a/nova/api/openstack/image_metadata.py
+++ b/nova/api/openstack/image_metadata.py
@@ -17,6 +17,7 @@
from webob import exc
+from nova import exception
from nova import flags
from nova import image
from nova import utils
@@ -33,21 +34,22 @@ class Controller(object):
def __init__(self):
self.image_service = image.get_default_image_service()
- def _get_metadata(self, context, image_id, image=None):
- if not image:
- image = self.image_service.show(context, image_id)
- metadata = image.get('properties', {})
- return metadata
+ def _get_image(self, context, image_id):
+ try:
+ return self.image_service.show(context, image_id)
+ except exception.NotFound:
+ msg = _("Image not found.")
+ raise exc.HTTPNotFound(explanation=msg)
def index(self, req, image_id):
"""Returns the list of metadata for a given instance"""
context = req.environ['nova.context']
- metadata = self._get_metadata(context, image_id)
+ metadata = self._get_image(context, image_id)['properties']
return dict(metadata=metadata)
def show(self, req, image_id, id):
context = req.environ['nova.context']
- metadata = self._get_metadata(context, image_id)
+ metadata = self._get_image(context, image_id)['properties']
if id in metadata:
return {'meta': {id: metadata[id]}}
else:
@@ -55,15 +57,13 @@ class Controller(object):
def create(self, req, image_id, body):
context = req.environ['nova.context']
- img = self.image_service.show(context, image_id)
- metadata = self._get_metadata(context, image_id, img)
+ image = self._get_image(context, image_id)
if 'metadata' in body:
for key, value in body['metadata'].iteritems():
- metadata[key] = value
- common.check_img_metadata_quota_limit(context, metadata)
- img['properties'] = metadata
- self.image_service.update(context, image_id, img, None)
- return dict(metadata=metadata)
+ image['properties'][key] = value
+ common.check_img_metadata_quota_limit(context, image['properties'])
+ self.image_service.update(context, image_id, image, None)
+ return dict(metadata=image['properties'])
def update(self, req, image_id, id, body):
context = req.environ['nova.context']
@@ -80,32 +80,30 @@ class Controller(object):
if len(meta) > 1:
expl = _('Request body contains too many items')
raise exc.HTTPBadRequest(explanation=expl)
- img = self.image_service.show(context, image_id)
- metadata = self._get_metadata(context, image_id, img)
- metadata[id] = meta[id]
- common.check_img_metadata_quota_limit(context, metadata)
- img['properties'] = metadata
- self.image_service.update(context, image_id, img, None)
+
+ image = self._get_image(context, image_id)
+ image['properties'][id] = meta[id]
+ common.check_img_metadata_quota_limit(context, image['properties'])
+ self.image_service.update(context, image_id, image, None)
return dict(meta=meta)
def update_all(self, req, image_id, body):
context = req.environ['nova.context']
- img = self.image_service.show(context, image_id)
+ image = self._get_image(context, image_id)
metadata = body.get('metadata', {})
common.check_img_metadata_quota_limit(context, metadata)
- img['properties'] = metadata
- self.image_service.update(context, image_id, img, None)
+ image['properties'] = metadata
+ self.image_service.update(context, image_id, image, None)
return dict(metadata=metadata)
def delete(self, req, image_id, id):
context = req.environ['nova.context']
- img = self.image_service.show(context, image_id)
- metadata = self._get_metadata(context, image_id)
- if not id in metadata:
- raise exc.HTTPNotFound()
- metadata.pop(id)
- img['properties'] = metadata
- self.image_service.update(context, image_id, img, None)
+ image = self._get_image(context, image_id)
+ if not id in image['properties']:
+ msg = _("Invalid metadata key")
+ raise exc.HTTPNotFound(explanation=msg)
+ image['properties'].pop(id)
+ self.image_service.update(context, image_id, image, None)
def create_resource():
diff --git a/nova/api/openstack/images.py b/nova/api/openstack/images.py
index 1c8fc10c9..4340cbe3e 100644
--- a/nova/api/openstack/images.py
+++ b/nova/api/openstack/images.py
@@ -16,8 +16,8 @@
import urlparse
import os.path
+from lxml import etree
import webob.exc
-from xml.dom import minidom
from nova import compute
from nova import exception
@@ -29,6 +29,7 @@ from nova.api.openstack import image_metadata
from nova.api.openstack import servers
from nova.api.openstack.views import images as images_view
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
LOG = log.getLogger('nova.api.openstack.images')
@@ -50,7 +51,7 @@ class Controller(object):
"""Initialize new `ImageController`.
:param compute_service: `nova.compute.api:API`
- :param image_service: `nova.image.service:BaseImageService`
+ :param image_service: `nova.image.glance:GlanceImageService`
"""
self._compute_service = compute_service or compute.API()
@@ -206,93 +207,71 @@ class ControllerV11(Controller):
class ImageXMLSerializer(wsgi.XMLDictSerializer):
- xmlns = wsgi.XMLNS_V11
+ NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
def __init__(self):
self.metadata_serializer = common.MetadataXMLSerializer()
- def _image_to_xml(self, xml_doc, image):
- image_node = xml_doc.createElement('image')
- image_node.setAttribute('id', str(image['id']))
- image_node.setAttribute('name', image['name'])
- link_nodes = self._create_link_nodes(xml_doc,
- image['links'])
- for link_node in link_nodes:
- image_node.appendChild(link_node)
- return image_node
-
- def _image_to_xml_detailed(self, xml_doc, image):
- image_node = xml_doc.createElement('image')
- self._add_image_attributes(image_node, image)
-
- if 'server' in image:
- server_node = self._create_server_node(xml_doc, image['server'])
- image_node.appendChild(server_node)
-
- metadata = image.get('metadata', {}).items()
- if len(metadata) > 0:
- metadata_node = self._create_metadata_node(xml_doc, metadata)
- image_node.appendChild(metadata_node)
-
- link_nodes = self._create_link_nodes(xml_doc,
- image['links'])
- for link_node in link_nodes:
- image_node.appendChild(link_node)
-
- return image_node
-
- def _add_image_attributes(self, node, image):
- node.setAttribute('id', str(image['id']))
- node.setAttribute('name', image['name'])
- node.setAttribute('created', image['created'])
- node.setAttribute('updated', image['updated'])
- node.setAttribute('status', image['status'])
- if 'progress' in image:
- node.setAttribute('progress', str(image['progress']))
-
- def _create_metadata_node(self, xml_doc, metadata):
- return self.metadata_serializer.meta_list_to_xml(xml_doc, metadata)
-
- def _create_server_node(self, xml_doc, server):
- server_node = xml_doc.createElement('server')
- server_node.setAttribute('id', str(server['id']))
- link_nodes = self._create_link_nodes(xml_doc,
- server['links'])
- for link_node in link_nodes:
- server_node.appendChild(link_node)
- return server_node
-
- def _image_list_to_xml(self, xml_doc, images, detailed):
- container_node = xml_doc.createElement('images')
+ def _create_metadata_node(self, metadata_dict):
+ metadata_elem = etree.Element('metadata', nsmap=self.NSMAP)
+ self.metadata_serializer.populate_metadata(metadata_elem,
+ metadata_dict)
+ return metadata_elem
+
+ def _create_server_node(self, server_dict):
+ server_elem = etree.Element('server', nsmap=self.NSMAP)
+ server_elem.set('id', str(server_dict['id']))
+ for link in server_dict.get('links', []):
+ elem = etree.SubElement(server_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return server_elem
+
+ def _populate_image(self, image_elem, image_dict, detailed=False):
+ """Populate an image xml element from a dict."""
+
+ image_elem.set('name', image_dict['name'])
+ image_elem.set('id', str(image_dict['id']))
if detailed:
- image_to_xml = self._image_to_xml_detailed
- else:
- image_to_xml = self._image_to_xml
-
- for image in images:
- item_node = image_to_xml(xml_doc, image)
- container_node.appendChild(item_node)
- return container_node
+ image_elem.set('updated', str(image_dict['updated']))
+ image_elem.set('created', str(image_dict['created']))
+ image_elem.set('status', str(image_dict['status']))
+ if 'progress' in image_dict:
+ image_elem.set('progress', str(image_dict['progress']))
+ if 'server' in image_dict:
+ server_elem = self._create_server_node(image_dict['server'])
+ image_elem.append(server_elem)
+
+ meta_elem = self._create_metadata_node(
+ image_dict.get('metadata', {}))
+ image_elem.append(meta_elem)
+
+ for link in image_dict.get('links', []):
+ elem = etree.SubElement(image_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return image_elem
def index(self, images_dict):
- xml_doc = minidom.Document()
- node = self._image_list_to_xml(xml_doc,
- images_dict['images'],
- detailed=False)
- return self.to_xml_string(node, True)
+ images = etree.Element('images', nsmap=self.NSMAP)
+ for image_dict in images_dict['images']:
+ image = etree.SubElement(images, 'image')
+ self._populate_image(image, image_dict, False)
+ return self._to_xml(images)
def detail(self, images_dict):
- xml_doc = minidom.Document()
- node = self._image_list_to_xml(xml_doc,
- images_dict['images'],
- detailed=True)
- return self.to_xml_string(node, True)
+ images = etree.Element('images', nsmap=self.NSMAP)
+ for image_dict in images_dict['images']:
+ image = etree.SubElement(images, 'image')
+ self._populate_image(image, image_dict, True)
+ return self._to_xml(images)
def show(self, image_dict):
- xml_doc = minidom.Document()
- node = self._image_to_xml_detailed(xml_doc,
- image_dict['image'])
- return self.to_xml_string(node, True)
+ image = etree.Element('image', nsmap=self.NSMAP)
+ self._populate_image(image, image_dict['image'], True)
+ return self._to_xml(image)
def create_resource(version='1.0'):
diff --git a/nova/api/openstack/ips.py b/nova/api/openstack/ips.py
index a74fae487..7e644ba04 100644
--- a/nova/api/openstack/ips.py
+++ b/nova/api/openstack/ips.py
@@ -15,14 +15,15 @@
# License for the specific language governing permissions and limitations
# under the License.
+from lxml import etree
import time
-from xml.dom import minidom
from webob import exc
import nova
import nova.api.openstack.views.addresses
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
from nova import db
@@ -102,42 +103,36 @@ class ControllerV11(Controller):
class IPXMLSerializer(wsgi.XMLDictSerializer):
+
+ NSMAP = {None: xmlutil.XMLNS_V11}
+
def __init__(self, xmlns=wsgi.XMLNS_V11):
super(IPXMLSerializer, self).__init__(xmlns=xmlns)
- def _ip_to_xml(self, xml_doc, ip_dict):
- ip_node = xml_doc.createElement('ip')
- ip_node.setAttribute('addr', ip_dict['addr'])
- ip_node.setAttribute('version', str(ip_dict['version']))
- return ip_node
-
- def _network_to_xml(self, xml_doc, network_id, ip_dicts):
- network_node = xml_doc.createElement('network')
- network_node.setAttribute('id', network_id)
+ def populate_addresses_node(self, addresses_elem, addresses_dict):
+ for (network_id, ip_dicts) in addresses_dict.items():
+ network_elem = self._create_network_node(network_id, ip_dicts)
+ addresses_elem.append(network_elem)
+ def _create_network_node(self, network_id, ip_dicts):
+ network_elem = etree.Element('network', nsmap=self.NSMAP)
+ network_elem.set('id', str(network_id))
for ip_dict in ip_dicts:
- ip_node = self._ip_to_xml(xml_doc, ip_dict)
- network_node.appendChild(ip_node)
-
- return network_node
-
- def networks_to_xml(self, xml_doc, networks_container):
- addresses_node = xml_doc.createElement('addresses')
- for (network_id, ip_dicts) in networks_container.items():
- network_node = self._network_to_xml(xml_doc, network_id, ip_dicts)
- addresses_node.appendChild(network_node)
- return addresses_node
-
- def show(self, network_container):
- (network_id, ip_dicts) = network_container.items()[0]
- xml_doc = minidom.Document()
- node = self._network_to_xml(xml_doc, network_id, ip_dicts)
- return self.to_xml_string(node, False)
-
- def index(self, addresses_container):
- xml_doc = minidom.Document()
- node = self.networks_to_xml(xml_doc, addresses_container['addresses'])
- return self.to_xml_string(node, False)
+ ip_elem = etree.SubElement(network_elem, 'ip')
+ ip_elem.set('version', str(ip_dict['version']))
+ ip_elem.set('addr', ip_dict['addr'])
+ return network_elem
+
+ def show(self, network_dict):
+ (network_id, ip_dicts) = network_dict.items()[0]
+ network = self._create_network_node(network_id, ip_dicts)
+ return self._to_xml(network)
+
+ def index(self, addresses_dict):
+ addresses = etree.Element('addresses', nsmap=self.NSMAP)
+ self.populate_addresses_node(addresses,
+ addresses_dict.get('addresses', {}))
+ return self._to_xml(addresses)
def create_resource(version):
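Note: index() and show() above take slightly different input shapes. A sketch with hypothetical addresses, not taken from this commit:

    # index() takes the full addresses container:
    addresses_dict = {'addresses': {
        'public': [{'version': 4, 'addr': '67.23.10.132'}],
        'private': [{'version': 4, 'addr': '10.0.0.1'}]}}
    # -> <addresses><network id="public"><ip version="4" addr="67.23.10.132"/></network>
    #    <network id="private">...</network></addresses>

    # show() takes a single-network dict and serializes just that <network> element:
    network_dict = {'public': [{'version': 4, 'addr': '67.23.10.132'}]}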
diff --git a/nova/api/openstack/limits.py b/nova/api/openstack/limits.py
index 86afa3b62..f6df94eea 100644
--- a/nova/api/openstack/limits.py
+++ b/nova/api/openstack/limits.py
@@ -20,12 +20,12 @@ Module dedicated functions/classes dealing with rate limiting requests.
import copy
import httplib
import json
+from lxml import etree
import math
import re
import time
import urllib
import webob.exc
-from xml.dom import minidom
from collections import defaultdict
@@ -38,6 +38,7 @@ from nova.api.openstack import common
from nova.api.openstack import faults
from nova.api.openstack.views import limits as limits_views
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
# Convenience constants for the limits dictionary passed to Limiter().
@@ -81,52 +82,49 @@ class LimitsXMLSerializer(wsgi.XMLDictSerializer):
xmlns = wsgi.XMLNS_V11
+ NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
+
def __init__(self):
pass
- def _create_rates_node(self, xml_doc, rates):
- rates_node = xml_doc.createElement('rates')
+ def _create_rates_node(self, rates):
+ rates_elem = etree.Element('rates', nsmap=self.NSMAP)
for rate in rates:
- rate_node = xml_doc.createElement('rate')
- rate_node.setAttribute('uri', rate['uri'])
- rate_node.setAttribute('regex', rate['regex'])
-
+ rate_node = etree.SubElement(rates_elem, 'rate')
+ rate_node.set('uri', rate['uri'])
+ rate_node.set('regex', rate['regex'])
for limit in rate['limit']:
- limit_node = xml_doc.createElement('limit')
- limit_node.setAttribute('value', str(limit['value']))
- limit_node.setAttribute('verb', limit['verb'])
- limit_node.setAttribute('remaining', str(limit['remaining']))
- limit_node.setAttribute('unit', limit['unit'])
- limit_node.setAttribute('next-available',
- str(limit['next-available']))
- rate_node.appendChild(limit_node)
-
- rates_node.appendChild(rate_node)
- return rates_node
-
- def _create_absolute_node(self, xml_doc, absolutes):
- absolute_node = xml_doc.createElement('absolute')
- for key, value in absolutes.iteritems():
- limit_node = xml_doc.createElement('limit')
- limit_node.setAttribute('name', key)
- limit_node.setAttribute('value', str(value))
- absolute_node.appendChild(limit_node)
- return absolute_node
-
- def _limits_to_xml(self, xml_doc, limits):
- limits_node = xml_doc.createElement('limits')
- rates_node = self._create_rates_node(xml_doc, limits['rate'])
- limits_node.appendChild(rates_node)
-
- absolute_node = self._create_absolute_node(xml_doc, limits['absolute'])
- limits_node.appendChild(absolute_node)
-
- return limits_node
+ limit_elem = etree.SubElement(rate_node, 'limit')
+ limit_elem.set('value', str(limit['value']))
+ limit_elem.set('verb', str(limit['verb']))
+ limit_elem.set('remaining', str(limit['remaining']))
+ limit_elem.set('unit', str(limit['unit']))
+ limit_elem.set('next-available', str(limit['next-available']))
+ return rates_elem
+
+ def _create_absolute_node(self, absolute_dict):
+ absolute_elem = etree.Element('absolute', nsmap=self.NSMAP)
+ for key, value in absolute_dict.items():
+ limit_elem = etree.SubElement(absolute_elem, 'limit')
+ limit_elem.set('name', str(key))
+ limit_elem.set('value', str(value))
+ return absolute_elem
+
+ def _populate_limits(self, limits_elem, limits_dict):
+ """Populate a limits xml element from a dict."""
+
+ rates_elem = self._create_rates_node(
+ limits_dict.get('rate', []))
+ limits_elem.append(rates_elem)
+
+ absolutes_elem = self._create_absolute_node(
+ limits_dict.get('absolute', {}))
+ limits_elem.append(absolutes_elem)
def index(self, limits_dict):
- xml_doc = minidom.Document()
- node = self._limits_to_xml(xml_doc, limits_dict['limits'])
- return self.to_xml_string(node, False)
+ limits = etree.Element('limits', nsmap=self.NSMAP)
+ self._populate_limits(limits, limits_dict['limits'])
+ return self._to_xml(limits)
def create_resource(version='1.0'):
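Note: index() above expects the dict shape produced by the limits view builder. A sketch with made-up values, only to show what gets serialized:

    # Hypothetical input for LimitsXMLSerializer().index()
    limits_dict = {'limits': {
        'rate': [{'uri': '*', 'regex': '.*',
                  'limit': [{'value': 10, 'verb': 'POST', 'remaining': 2,
                             'unit': 'MINUTE',
                             'next-available': '2011-07-21T18:17:06Z'}]}],
        'absolute': {'maxServerMeta': 128, 'maxPersonality': 5},
    }}
    # LimitsXMLSerializer().index(limits_dict) returns a single <limits> document
    # with a <rates> child (one <rate>/<limit> per entry) and an <absolute> child
    # (one <limit name=... value=...> per key), matching limits.rng below.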
diff --git a/nova/api/openstack/schemas/v1.1/addresses.rng b/nova/api/openstack/schemas/v1.1/addresses.rng
new file mode 100644
index 000000000..b498e8a63
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/addresses.rng
@@ -0,0 +1,14 @@
+<element name="addresses" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <zeroOrMore>
+ <element name="network">
+ <attribute name="id"> <text/> </attribute>
+ <zeroOrMore>
+ <element name="ip">
+ <attribute name="version"> <text/> </attribute>
+ <attribute name="addr"> <text/> </attribute>
+ </element>
+ </zeroOrMore>
+ </element>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/flavor.rng b/nova/api/openstack/schemas/v1.1/flavor.rng
new file mode 100644
index 000000000..a00e4e9ee
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/flavor.rng
@@ -0,0 +1,10 @@
+<element name="flavor" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <attribute name="name"> <text/> </attribute>
+ <attribute name="id"> <text/> </attribute>
+ <attribute name="ram"> <text/> </attribute>
+ <attribute name="disk"> <text/> </attribute>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/flavors.rng b/nova/api/openstack/schemas/v1.1/flavors.rng
new file mode 100644
index 000000000..b7a3acc01
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/flavors.rng
@@ -0,0 +1,6 @@
+<element name="flavors" xmlns="http://relaxng.org/ns/structure/1.0"
+ ns="http://docs.openstack.org/compute/api/v1.1">
+ <zeroOrMore>
+ <externalRef href="flavor.rng"/>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/flavors_index.rng b/nova/api/openstack/schemas/v1.1/flavors_index.rng
new file mode 100644
index 000000000..d1a4fedb1
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/flavors_index.rng
@@ -0,0 +1,12 @@
+<element name="flavors" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <zeroOrMore>
+ <element name="flavor">
+ <attribute name="name"> <text/> </attribute>
+ <attribute name="id"> <text/> </attribute>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
+ </element>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/image.rng b/nova/api/openstack/schemas/v1.1/image.rng
new file mode 100644
index 000000000..887f76751
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/image.rng
@@ -0,0 +1,30 @@
+<element name="image" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <attribute name="name"> <text/> </attribute>
+ <attribute name="id"> <text/> </attribute>
+ <attribute name="updated"> <text/> </attribute>
+ <attribute name="created"> <text/> </attribute>
+ <attribute name="status"> <text/> </attribute>
+ <optional>
+ <attribute name="progress"> <text/> </attribute>
+ </optional>
+ <optional>
+ <element name="server">
+ <attribute name="id"> <text/> </attribute>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
+ </element>
+ </optional>
+ <element name="metadata">
+ <zeroOrMore>
+ <element name="meta">
+ <attribute name="key"> <text/> </attribute>
+ <text/>
+ </element>
+ </zeroOrMore>
+ </element>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/images.rng b/nova/api/openstack/schemas/v1.1/images.rng
new file mode 100644
index 000000000..064d4d9cc
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/images.rng
@@ -0,0 +1,6 @@
+<element name="images" xmlns="http://relaxng.org/ns/structure/1.0"
+ ns="http://docs.openstack.org/compute/api/v1.1">
+ <zeroOrMore>
+ <externalRef href="image.rng"/>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/images_index.rng b/nova/api/openstack/schemas/v1.1/images_index.rng
new file mode 100644
index 000000000..81af19cb5
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/images_index.rng
@@ -0,0 +1,12 @@
+<element name="images" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <zeroOrMore>
+ <element name="image">
+ <attribute name="name"> <text/> </attribute>
+ <attribute name="id"> <text/> </attribute>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
+ </element>
+ </zeroOrMore>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/limits.rng b/nova/api/openstack/schemas/v1.1/limits.rng
new file mode 100644
index 000000000..1af8108ec
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/limits.rng
@@ -0,0 +1,28 @@
+<element name="limits" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <element name="rates">
+ <zeroOrMore>
+ <element name="rate">
+ <attribute name="uri"> <text/> </attribute>
+ <attribute name="regex"> <text/> </attribute>
+ <zeroOrMore>
+ <element name="limit">
+ <attribute name="value"> <text/> </attribute>
+ <attribute name="verb"> <text/> </attribute>
+ <attribute name="remaining"> <text/> </attribute>
+ <attribute name="unit"> <text/> </attribute>
+ <attribute name="next-available"> <text/> </attribute>
+ </element>
+ </zeroOrMore>
+ </element>
+ </zeroOrMore>
+ </element>
+ <element name="absolute">
+ <zeroOrMore>
+ <element name="limit">
+ <attribute name="name"> <text/> </attribute>
+ <attribute name="value"> <text/> </attribute>
+ </element>
+ </zeroOrMore>
+ </element>
+</element>
diff --git a/nova/api/openstack/schemas/v1.1/metadata.rng b/nova/api/openstack/schemas/v1.1/metadata.rng
new file mode 100644
index 000000000..b2f5d702a
--- /dev/null
+++ b/nova/api/openstack/schemas/v1.1/metadata.rng
@@ -0,0 +1,9 @@
+ <element name="metadata" ns="http://docs.openstack.org/compute/api/v1.1"
+ xmlns="http://relaxng.org/ns/structure/1.0">
+ <zeroOrMore>
+ <element name="meta">
+ <attribute name="key"> <text/> </attribute>
+ <text/>
+ </element>
+ </zeroOrMore>
+ </element>
diff --git a/nova/api/openstack/schemas/v1.1/server.rng b/nova/api/openstack/schemas/v1.1/server.rng
index ef835e408..4eb1a0b85 100644
--- a/nova/api/openstack/schemas/v1.1/server.rng
+++ b/nova/api/openstack/schemas/v1.1/server.rng
@@ -17,9 +17,6 @@
<optional>
<attribute name="adminPass"> <text/> </attribute>
</optional>
- <zeroOrMore>
- <externalRef href="../atom-link.rng"/>
- </zeroOrMore>
<element name="image">
<attribute name="id"> <text/> </attribute>
<externalRef href="../atom-link.rng"/>
@@ -49,4 +46,7 @@
</element>
</zeroOrMore>
</element>
+ <zeroOrMore>
+ <externalRef href="../atom-link.rng"/>
+ </zeroOrMore>
</element>
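Note: the .rng files added above can be used to check the new lxml serializers directly. A rough sketch, assuming the schema paths are resolvable from the working directory and that ../atom-link.rng already exists in the schemas tree:

    from lxml import etree

    from nova.api.openstack.flavors import FlavorXMLSerializer

    schema = etree.RelaxNG(
        etree.parse('nova/api/openstack/schemas/v1.1/flavor.rng'))
    xml = FlavorXMLSerializer().show({'flavor': {'id': 1, 'name': 'm1.tiny',
                                                 'ram': 512, 'disk': 0,
                                                 'links': []}})
    assert schema.validate(etree.fromstring(xml))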
diff --git a/nova/api/openstack/servers.py b/nova/api/openstack/servers.py
index f5447edc5..5affd1f33 100644
--- a/nova/api/openstack/servers.py
+++ b/nova/api/openstack/servers.py
@@ -17,8 +17,8 @@ import base64
import os
import traceback
+from lxml import etree
from webob import exc
-from xml.dom import minidom
import webob
from nova import compute
@@ -38,6 +38,7 @@ import nova.api.openstack.views.addresses
import nova.api.openstack.views.flavors
import nova.api.openstack.views.images
import nova.api.openstack.views.servers
+from nova.api.openstack import xmlutil
LOG = logging.getLogger('nova.api.openstack.servers')
@@ -850,130 +851,113 @@ class HeadersSerializer(wsgi.ResponseHeadersSerializer):
class ServerXMLSerializer(wsgi.XMLDictSerializer):
- xmlns = wsgi.XMLNS_V11
+ NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
def __init__(self):
self.metadata_serializer = common.MetadataXMLSerializer()
self.addresses_serializer = ips.IPXMLSerializer()
- def _create_basic_entity_node(self, xml_doc, id, links, name):
- basic_node = xml_doc.createElement(name)
- basic_node.setAttribute('id', str(id))
- link_nodes = self._create_link_nodes(xml_doc, links)
- for link_node in link_nodes:
- basic_node.appendChild(link_node)
- return basic_node
-
- def _create_metadata_node(self, xml_doc, metadata):
- return self.metadata_serializer.meta_list_to_xml(xml_doc, metadata)
-
- def _create_addresses_node(self, xml_doc, addresses):
- return self.addresses_serializer.networks_to_xml(xml_doc, addresses)
-
- def _add_server_attributes(self, node, server):
- node.setAttribute('id', str(server['id']))
- node.setAttribute('userId', str(server['user_id']))
- node.setAttribute('tenantId', str(server['tenant_id']))
- node.setAttribute('uuid', str(server['uuid']))
- node.setAttribute('hostId', str(server['hostId']))
- node.setAttribute('name', server['name'])
- node.setAttribute('created', str(server['created']))
- node.setAttribute('updated', str(server['updated']))
- node.setAttribute('status', server['status'])
- if 'accessIPv4' in server:
- node.setAttribute('accessIPv4', str(server['accessIPv4']))
- if 'accessIPv6' in server:
- node.setAttribute('accessIPv6', str(server['accessIPv6']))
- if 'progress' in server:
- node.setAttribute('progress', str(server['progress']))
-
- def _server_to_xml(self, xml_doc, server):
- server_node = xml_doc.createElement('server')
- server_node.setAttribute('id', str(server['id']))
- server_node.setAttribute('name', server['name'])
- link_nodes = self._create_link_nodes(xml_doc,
- server['links'])
- for link_node in link_nodes:
- server_node.appendChild(link_node)
- return server_node
-
- def _server_to_xml_detailed(self, xml_doc, server):
- server_node = xml_doc.createElement('server')
- self._add_server_attributes(server_node, server)
-
- link_nodes = self._create_link_nodes(xml_doc,
- server['links'])
- for link_node in link_nodes:
- server_node.appendChild(link_node)
-
- if 'image' in server:
- image_node = self._create_basic_entity_node(xml_doc,
- server['image']['id'],
- server['image']['links'],
- 'image')
- server_node.appendChild(image_node)
-
- if 'flavor' in server:
- flavor_node = self._create_basic_entity_node(xml_doc,
- server['flavor']['id'],
- server['flavor']['links'],
- 'flavor')
- server_node.appendChild(flavor_node)
-
- metadata = server.get('metadata', {}).items()
- if len(metadata) > 0:
- metadata_node = self._create_metadata_node(xml_doc, metadata)
- server_node.appendChild(metadata_node)
-
- addresses_node = self._create_addresses_node(xml_doc,
- server['addresses'])
- server_node.appendChild(addresses_node)
-
- if 'security_groups' in server:
- security_groups_node = self._create_security_groups_node(xml_doc,
- server['security_groups'])
- server_node.appendChild(security_groups_node)
-
- return server_node
-
- def _server_list_to_xml(self, xml_doc, servers, detailed):
- container_node = xml_doc.createElement('servers')
+ def _create_metadata_node(self, metadata_dict):
+ metadata_elem = etree.Element('metadata', nsmap=self.NSMAP)
+ self.metadata_serializer.populate_metadata(metadata_elem,
+ metadata_dict)
+ return metadata_elem
+
+ def _create_image_node(self, image_dict):
+ image_elem = etree.Element('image', nsmap=self.NSMAP)
+ image_elem.set('id', str(image_dict['id']))
+ for link in image_dict.get('links', []):
+ elem = etree.SubElement(image_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return image_elem
+
+ def _create_flavor_node(self, flavor_dict):
+ flavor_elem = etree.Element('flavor', nsmap=self.NSMAP)
+ flavor_elem.set('id', str(flavor_dict['id']))
+ for link in flavor_dict.get('links', []):
+ elem = etree.SubElement(flavor_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return flavor_elem
+
+ def _create_addresses_node(self, addresses_dict):
+ addresses_elem = etree.Element('addresses', nsmap=self.NSMAP)
+ self.addresses_serializer.populate_addresses_node(addresses_elem,
+ addresses_dict)
+ return addresses_elem
+
+ def _populate_server(self, server_elem, server_dict, detailed=False):
+ """Populate a server xml element from a dict."""
+
+ server_elem.set('name', server_dict['name'])
+ server_elem.set('id', str(server_dict['id']))
if detailed:
- server_to_xml = self._server_to_xml_detailed
- else:
- server_to_xml = self._server_to_xml
-
- for server in servers:
- item_node = server_to_xml(xml_doc, server)
- container_node.appendChild(item_node)
- return container_node
+ server_elem.set('uuid', str(server_dict['uuid']))
+ server_elem.set('userId', str(server_dict['user_id']))
+ server_elem.set('tenantId', str(server_dict['tenant_id']))
+ server_elem.set('updated', str(server_dict['updated']))
+ server_elem.set('created', str(server_dict['created']))
+ server_elem.set('hostId', str(server_dict['hostId']))
+ server_elem.set('accessIPv4', str(server_dict['accessIPv4']))
+ server_elem.set('accessIPv6', str(server_dict['accessIPv6']))
+ server_elem.set('status', str(server_dict['status']))
+ if 'progress' in server_dict:
+ server_elem.set('progress', str(server_dict['progress']))
+ image_elem = self._create_image_node(server_dict['image'])
+ server_elem.append(image_elem)
+
+ flavor_elem = self._create_flavor_node(server_dict['flavor'])
+ server_elem.append(flavor_elem)
+
+ meta_elem = self._create_metadata_node(
+ server_dict.get('metadata', {}))
+ server_elem.append(meta_elem)
+
+ addresses_elem = self._create_addresses_node(
+ server_dict.get('addresses', {}))
+ server_elem.append(addresses_elem)
+ groups = server_dict.get('security_groups')
+ if groups:
+ groups_elem = etree.SubElement(server_elem, 'security_groups')
+ for group in groups:
+ group_elem = etree.SubElement(groups_elem,
+ 'security_group')
+ group_elem.set('name', group['name'])
+
+ for link in server_dict.get('links', []):
+ elem = etree.SubElement(server_elem,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ return server_elem
def index(self, servers_dict):
- xml_doc = minidom.Document()
- node = self._server_list_to_xml(xml_doc,
- servers_dict['servers'],
- detailed=False)
- return self.to_xml_string(node, True)
+ servers = etree.Element('servers', nsmap=self.NSMAP)
+ for server_dict in servers_dict['servers']:
+ server = etree.SubElement(servers, 'server')
+ self._populate_server(server, server_dict, False)
+ return self._to_xml(servers)
def detail(self, servers_dict):
- xml_doc = minidom.Document()
- node = self._server_list_to_xml(xml_doc,
- servers_dict['servers'],
- detailed=True)
- return self.to_xml_string(node, True)
+ servers = etree.Element('servers', nsmap=self.NSMAP)
+ for server_dict in servers_dict['servers']:
+ server = etree.SubElement(servers, 'server')
+ self._populate_server(server, server_dict, True)
+ return self._to_xml(servers)
def show(self, server_dict):
- xml_doc = minidom.Document()
- node = self._server_to_xml_detailed(xml_doc,
- server_dict['server'])
- return self.to_xml_string(node, True)
+ server = etree.Element('server', nsmap=self.NSMAP)
+ self._populate_server(server, server_dict['server'], True)
+ return self._to_xml(server)
def create(self, server_dict):
- xml_doc = minidom.Document()
- node = self._server_to_xml_detailed(xml_doc,
- server_dict['server'])
- node.setAttribute('adminPass', server_dict['server']['adminPass'])
- return self.to_xml_string(node, True)
+ server = etree.Element('server', nsmap=self.NSMAP)
+ self._populate_server(server, server_dict['server'], True)
+ server.set('adminPass', server_dict['server']['adminPass'])
+ return self._to_xml(server)
def action(self, server_dict):
#NOTE(bcwaldon): We need a way to serialize actions individually. This
@@ -981,23 +965,9 @@ class ServerXMLSerializer(wsgi.XMLDictSerializer):
return self.create(server_dict)
def update(self, server_dict):
- xml_doc = minidom.Document()
- node = self._server_to_xml_detailed(xml_doc,
- server_dict['server'])
- return self.to_xml_string(node, True)
-
- def _security_group_to_xml(self, doc, security_group):
- node = doc.createElement('security_group')
- node.setAttribute('name', str(security_group.get('name')))
- return node
-
- def _create_security_groups_node(self, xml_doc, security_groups):
- security_groups_node = xml_doc.createElement('security_groups')
- if security_groups:
- for security_group in security_groups:
- node = self._security_group_to_xml(xml_doc, security_group)
- security_groups_node.appendChild(node)
- return security_groups_node
+ server = etree.Element('server', nsmap=self.NSMAP)
+ self._populate_server(server, server_dict['server'], True)
+ return self._to_xml(server)
def create_resource(version='1.0'):
diff --git a/nova/api/openstack/versions.py b/nova/api/openstack/versions.py
index e2f892fb6..31dd9dc11 100644
--- a/nova/api/openstack/versions.py
+++ b/nova/api/openstack/versions.py
@@ -16,12 +16,13 @@
# under the License.
from datetime import datetime
+from lxml import etree
import webob
import webob.dec
-from xml.dom import minidom
import nova.api.openstack.views.versions
from nova.api.openstack import wsgi
+from nova.api.openstack import xmlutil
VERSIONS = {
@@ -159,83 +160,51 @@ class VersionsRequestDeserializer(wsgi.RequestDeserializer):
class VersionsXMLSerializer(wsgi.XMLDictSerializer):
- #TODO(wwolf): this is temporary until we get rid of toprettyxml
- # in the base class (XMLDictSerializer), which I plan to do in
- # another branch
- def to_xml_string(self, node, has_atom=False):
- self._add_xmlns(node, has_atom)
- return node.toxml(encoding='UTF-8')
-
- def _versions_to_xml(self, versions, name="versions", xmlns=None):
- root = self._xml_doc.createElement(name)
- root.setAttribute("xmlns", wsgi.XMLNS_V11)
- root.setAttribute("xmlns:atom", wsgi.XMLNS_ATOM)
- for version in versions:
- root.appendChild(self._create_version_node(version))
-
- return root
-
- def _create_media_types(self, media_types):
- base = self._xml_doc.createElement('media-types')
- for type in media_types:
- node = self._xml_doc.createElement('media-type')
- node.setAttribute('base', type['base'])
- node.setAttribute('type', type['type'])
- base.appendChild(node)
-
- return base
-
- def _create_version_node(self, version, create_ns=False):
- version_node = self._xml_doc.createElement('version')
- if create_ns:
- xmlns = wsgi.XMLNS_V11
- xmlns_atom = wsgi.XMLNS_ATOM
- version_node.setAttribute('xmlns', xmlns)
- version_node.setAttribute('xmlns:atom', xmlns_atom)
-
- version_node.setAttribute('id', version['id'])
- version_node.setAttribute('status', version['status'])
+ def _populate_version(self, version_node, version):
+ version_node.set('id', version['id'])
+ version_node.set('status', version['status'])
if 'updated' in version:
- version_node.setAttribute('updated', version['updated'])
-
+ version_node.set('updated', version['updated'])
if 'media-types' in version:
- media_types = self._create_media_types(version['media-types'])
- version_node.appendChild(media_types)
-
- link_nodes = self._create_link_nodes(self._xml_doc, version['links'])
- for link in link_nodes:
- version_node.appendChild(link)
-
- return version_node
+ media_types = etree.SubElement(version_node, 'media-types')
+ for mtype in version['media-types']:
+ elem = etree.SubElement(media_types, 'media-type')
+ elem.set('base', mtype['base'])
+ elem.set('type', mtype['type'])
+ for link in version.get('links', []):
+ elem = etree.SubElement(version_node,
+ '{%s}link' % xmlutil.XMLNS_ATOM)
+ elem.set('rel', link['rel'])
+ elem.set('href', link['href'])
+ if 'type' in link:
+ elem.set('type', link['type'])
+
+ NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
def index(self, data):
- self._xml_doc = minidom.Document()
- node = self._versions_to_xml(data['versions'])
-
- return self.to_xml_string(node)
+ root = etree.Element('versions', nsmap=self.NSMAP)
+ for version in data['versions']:
+ version_elem = etree.SubElement(root, 'version')
+ self._populate_version(version_elem, version)
+ return self._to_xml(root)
def show(self, data):
- self._xml_doc = minidom.Document()
- node = self._create_version_node(data['version'], True)
-
- return self.to_xml_string(node)
+ root = etree.Element('version', nsmap=self.NSMAP)
+ self._populate_version(root, data['version'])
+ return self._to_xml(root)
def multi(self, data):
- self._xml_doc = minidom.Document()
- node = self._versions_to_xml(data['choices'], 'choices',
- xmlns=wsgi.XMLNS_V11)
-
- return self.to_xml_string(node)
+ root = etree.Element('choices', nsmap=self.NSMAP)
+ for version in data['choices']:
+ version_elem = etree.SubElement(root, 'version')
+ self._populate_version(version_elem, version)
+ return self._to_xml(root)
class VersionsAtomSerializer(wsgi.XMLDictSerializer):
- #TODO(wwolf): this is temporary until we get rid of toprettyxml
- # in the base class (XMLDictSerializer), which I plan to do in
- # another branch
- def to_xml_string(self, node, has_atom=False):
- self._add_xmlns(node, has_atom)
- return node.toxml(encoding='UTF-8')
+
+ NSMAP = {None: xmlutil.XMLNS_ATOM}
def __init__(self, metadata=None, xmlns=None):
self.metadata = metadata or {}
@@ -244,14 +213,6 @@ class VersionsAtomSerializer(wsgi.XMLDictSerializer):
else:
self.xmlns = xmlns
- def _create_text_elem(self, name, text, type=None):
- elem = self._xml_doc.createElement(name)
- if type:
- elem.setAttribute('type', type)
- elem_text = self._xml_doc.createTextNode(text)
- elem.appendChild(elem_text)
- return elem
-
def _get_most_recent_update(self, versions):
recent = None
for version in versions:
@@ -269,105 +230,64 @@ class VersionsAtomSerializer(wsgi.XMLDictSerializer):
link_href = link_href.rstrip('/')
return link_href.rsplit('/', 1)[0] + '/'
- def _create_detail_meta(self, root, version):
- title = self._create_text_elem('title', "About This Version",
- type='text')
-
- updated = self._create_text_elem('updated', version['updated'])
-
- uri = version['links'][0]['href']
- id = self._create_text_elem('id', uri)
-
- link = self._xml_doc.createElement('link')
- link.setAttribute('rel', 'self')
- link.setAttribute('href', uri)
+ def _create_feed(self, versions, feed_title, feed_id):
+ feed = etree.Element('feed', nsmap=self.NSMAP)
+ title = etree.SubElement(feed, 'title')
+ title.set('type', 'text')
+ title.text = feed_title
- author = self._xml_doc.createElement('author')
- author_name = self._create_text_elem('name', 'Rackspace')
- author_uri = self._create_text_elem('uri', 'http://www.rackspace.com/')
- author.appendChild(author_name)
- author.appendChild(author_uri)
-
- root.appendChild(title)
- root.appendChild(updated)
- root.appendChild(id)
- root.appendChild(author)
- root.appendChild(link)
-
- def _create_list_meta(self, root, versions):
- title = self._create_text_elem('title', "Available API Versions",
- type='text')
# Set this updated to the most recently updated version
recent = self._get_most_recent_update(versions)
- updated = self._create_text_elem('updated', recent)
-
- base_url = self._get_base_url(versions[0]['links'][0]['href'])
- id = self._create_text_elem('id', base_url)
+ etree.SubElement(feed, 'updated').text = recent
- link = self._xml_doc.createElement('link')
- link.setAttribute('rel', 'self')
- link.setAttribute('href', base_url)
+ etree.SubElement(feed, 'id').text = feed_id
- author = self._xml_doc.createElement('author')
- author_name = self._create_text_elem('name', 'Rackspace')
- author_uri = self._create_text_elem('uri', 'http://www.rackspace.com/')
- author.appendChild(author_name)
- author.appendChild(author_uri)
+ link = etree.SubElement(feed, 'link')
+ link.set('rel', 'self')
+ link.set('href', feed_id)
- root.appendChild(title)
- root.appendChild(updated)
- root.appendChild(id)
- root.appendChild(author)
- root.appendChild(link)
+ author = etree.SubElement(feed, 'author')
+ etree.SubElement(author, 'name').text = 'Rackspace'
+ etree.SubElement(author, 'uri').text = 'http://www.rackspace.com/'
- def _create_version_entries(self, root, versions):
for version in versions:
- entry = self._xml_doc.createElement('entry')
-
- id = self._create_text_elem('id', version['links'][0]['href'])
- title = self._create_text_elem('title',
- 'Version %s' % version['id'],
- type='text')
- updated = self._create_text_elem('updated', version['updated'])
-
- entry.appendChild(id)
- entry.appendChild(title)
- entry.appendChild(updated)
-
- for link in version['links']:
- link_node = self._xml_doc.createElement('link')
- link_node.setAttribute('rel', link['rel'])
- link_node.setAttribute('href', link['href'])
- if 'type' in link:
- link_node.setAttribute('type', link['type'])
-
- entry.appendChild(link_node)
-
- content = self._create_text_elem('content',
- 'Version %s %s (%s)' %
- (version['id'],
- version['status'],
- version['updated']),
- type='text')
-
- entry.appendChild(content)
- root.appendChild(entry)
+ feed.append(self._create_version_entry(version))
+
+ return feed
+
+ def _create_version_entry(self, version):
+ entry = etree.Element('entry')
+ etree.SubElement(entry, 'id').text = version['links'][0]['href']
+ title = etree.SubElement(entry, 'title')
+ title.set('type', 'text')
+ title.text = 'Version %s' % version['id']
+ etree.SubElement(entry, 'updated').text = version['updated']
+
+ for link in version['links']:
+ link_elem = etree.SubElement(entry, 'link')
+ link_elem.set('rel', link['rel'])
+ link_elem.set('href', link['href'])
+ if 'type' in link:
+ link_elem.set('type', link['type'])
+
+ content = etree.SubElement(entry, 'content')
+ content.set('type', 'text')
+ content.text = 'Version %s %s (%s)' % (version['id'],
+ version['status'],
+ version['updated'])
+ return entry
def index(self, data):
- self._xml_doc = minidom.Document()
- node = self._xml_doc.createElementNS(self.xmlns, 'feed')
- self._create_list_meta(node, data['versions'])
- self._create_version_entries(node, data['versions'])
-
- return self.to_xml_string(node)
+ versions = data['versions']
+ feed_id = self._get_base_url(versions[0]['links'][0]['href'])
+ feed = self._create_feed(versions, 'Available API Versions', feed_id)
+ return self._to_xml(feed)
def show(self, data):
- self._xml_doc = minidom.Document()
- node = self._xml_doc.createElementNS(self.xmlns, 'feed')
- self._create_detail_meta(node, data['version'])
- self._create_version_entries(node, [data['version']])
-
- return self.to_xml_string(node)
+ version = data['version']
+ feed_id = version['links'][0]['href']
+ feed = self._create_feed([version], 'About This Version', feed_id)
+ return self._to_xml(feed)
class VersionsHeadersSerializer(wsgi.ResponseHeadersSerializer):
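Note: for orientation, the Atom document that _create_feed and _create_version_entry assemble has roughly the following shape. Values are hypothetical, element order follows the code above, and the default namespace assumes XMLNS_ATOM is the standard Atom namespace:

    <feed xmlns="http://www.w3.org/2005/Atom">
      <title type="text">Available API Versions</title>
      <updated>2011-01-21T11:33:21Z</updated>
      <id>http://servers.api.openstack.org/</id>
      <link rel="self" href="http://servers.api.openstack.org/"/>
      <author>
        <name>Rackspace</name>
        <uri>http://www.rackspace.com/</uri>
      </author>
      <entry>
        <id>http://servers.api.openstack.org/v1.1/</id>
        <title type="text">Version v1.1</title>
        <updated>2011-01-21T11:33:21Z</updated>
        <link rel="self" href="http://servers.api.openstack.org/v1.1/"/>
        <content type="text">Version v1.1 CURRENT (2011-01-21T11:33:21Z)</content>
      </entry>
    </feed>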
diff --git a/nova/api/openstack/views/versions.py b/nova/api/openstack/views/versions.py
index 03da80818..1ac398706 100644
--- a/nova/api/openstack/views/versions.py
+++ b/nova/api/openstack/views/versions.py
@@ -52,7 +52,7 @@ class ViewBuilder(object):
def build_versions(self, versions):
version_objs = []
- for version in versions:
+ for version in sorted(versions.keys()):
version = versions[version]
version_objs.append({
"id": version['id'],
diff --git a/nova/api/openstack/wsgi.py b/nova/api/openstack/wsgi.py
index 8641e960a..bdcadcb99 100644
--- a/nova/api/openstack/wsgi.py
+++ b/nova/api/openstack/wsgi.py
@@ -1,5 +1,6 @@
import json
+from lxml import etree
import webob
from xml.dom import minidom
from xml.parsers import expat
@@ -392,6 +393,10 @@ class XMLDictSerializer(DictSerializer):
link_nodes.append(link_node)
return link_nodes
+ def _to_xml(self, root):
+ """Convert the xml object to an xml string."""
+ return etree.tostring(root, encoding='UTF-8', xml_declaration=True)
+
class ResponseHeadersSerializer(ActionDispatcher):
"""Default response headers serialization"""
diff --git a/nova/compute/api.py b/nova/compute/api.py
index 4da79cd3c..2a65ff042 100644
--- a/nova/compute/api.py
+++ b/nova/compute/api.py
@@ -1048,7 +1048,7 @@ class API(base.Base):
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING)
self._cast_compute_message('reboot_instance', context, instance_id,
- reboot_type)
+ params={'reboot_type': reboot_type})
@scheduler_api.reroute_compute("rebuild")
def rebuild(self, context, instance_id, image_href, admin_password,
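Note: the reboot cast now wraps reboot_type in a params dict instead of passing it positionally. Purely as an illustration (this toy dispatcher is not Nova's _cast_compute_message), a params mapping keeps extra arguments from being mistaken for other positional parameters:

    def toy_cast(method, context, instance_id, host=None, params=None):
        # Toy only: pack the method name and its arguments into a message.
        args = dict(params or {})
        args['instance_id'] = instance_id
        return {'method': method, 'args': args}

    toy_cast('reboot_instance', None, 1, params={'reboot_type': 'HARD'})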
diff --git a/nova/db/sqlalchemy/migrate_repo/versions/046_add_instance_swap.py b/nova/db/sqlalchemy/migrate_repo/versions/046_add_instance_swap.py
new file mode 100644
index 000000000..63e7bc4f9
--- /dev/null
+++ b/nova/db/sqlalchemy/migrate_repo/versions/046_add_instance_swap.py
@@ -0,0 +1,48 @@
+# Copyright 2011 Isaku Yamahata
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from sqlalchemy import Column, Integer, MetaData, Table, String
+
+meta = MetaData()
+
+default_local_device = Column(
+ 'default_local_device',
+ String(length=255, convert_unicode=False, assert_unicode=None,
+ unicode_error=None, _warn_on_bytestring=False),
+ nullable=True)
+
+default_swap_device = Column(
+ 'default_swap_device',
+ String(length=255, convert_unicode=False, assert_unicode=None,
+ unicode_error=None, _warn_on_bytestring=False),
+ nullable=True)
+
+instances = Table('instances', meta,
+ Column('id', Integer(), primary_key=True, nullable=False),
+ )
+
+
+def upgrade(migrate_engine):
+ # Upgrade operations go here. Don't create your own engine;
+ # bind migrate_engine to your metadata
+ meta.bind = migrate_engine
+ instances.create_column(default_local_device)
+ instances.create_column(default_swap_device)
+
+
+def downgrade(migrate_engine):
+ # Operations to reverse the above upgrade go here.
+ meta.bind = migrate_engine
+ instances.drop_column('default_swap_device')
+ instances.drop_column('default_local_device')
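Note: the migration relies on sqlalchemy-migrate's create_column/drop_column Table extensions. Outside the normal `nova-manage db sync` flow, a rough standalone exercise against an in-memory SQLite database might look like this (assumes sqlalchemy-migrate is installed and that an `instances` table already exists, since the migration only alters it):

    import migrate.changeset  # adds create_column/drop_column to Table
    import sqlalchemy

    engine = sqlalchemy.create_engine('sqlite://')
    pre_meta = sqlalchemy.MetaData(bind=engine)
    sqlalchemy.Table('instances', pre_meta,
                     sqlalchemy.Column('id', sqlalchemy.Integer,
                                       primary_key=True)).create()

    upgrade(engine)    # adds default_local_device / default_swap_device
    downgrade(engine)  # drops them again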
diff --git a/nova/db/sqlalchemy/models.py b/nova/db/sqlalchemy/models.py
index bc1d70104..73aa7621b 100644
--- a/nova/db/sqlalchemy/models.py
+++ b/nova/db/sqlalchemy/models.py
@@ -232,6 +232,8 @@ class Instance(BASE, NovaBase):
uuid = Column(String(36))
root_device_name = Column(String(255))
+ default_local_device = Column(String(255), nullable=True)
+ default_swap_device = Column(String(255), nullable=True)
config_drive = Column(String(255))
# User editable field meant to represent what ip should be used
diff --git a/nova/image/fake.py b/nova/image/fake.py
index 97af81711..4eceabc11 100644
--- a/nova/image/fake.py
+++ b/nova/image/fake.py
@@ -24,7 +24,6 @@ import random
from nova import exception
from nova import flags
from nova import log as logging
-from nova.image import service
LOG = logging.getLogger('nova.image.fake')
@@ -33,7 +32,7 @@ LOG = logging.getLogger('nova.image.fake')
FLAGS = flags.FLAGS
-class _FakeImageService(service.BaseImageService):
+class _FakeImageService(object):
"""Mock (fake) image service for unit testing."""
def __init__(self):
diff --git a/nova/image/glance.py b/nova/image/glance.py
index 13c8ff843..5ee1d2b8a 100644
--- a/nova/image/glance.py
+++ b/nova/image/glance.py
@@ -31,7 +31,6 @@ from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
-from nova.image import service
LOG = logging.getLogger('nova.image.glance')
@@ -114,17 +113,9 @@ def get_glance_client(context, image_href):
return (glance_client, image_id)
-class GlanceImageService(service.BaseImageService):
+class GlanceImageService(object):
"""Provides storage and retrieval of disk image objects within Glance."""
- GLANCE_ONLY_ATTRS = ['size', 'location', 'disk_format',
- 'container_format', 'checksum']
-
- # NOTE(sirp): Overriding to use _translate_to_service provided by
- # BaseImageService
- SERVICE_IMAGE_ATTRS = service.BaseImageService.BASE_IMAGE_ATTRS +\
- GLANCE_ONLY_ATTRS
-
def __init__(self, client=None):
self._client = client
@@ -160,7 +151,7 @@ class GlanceImageService(service.BaseImageService):
images = []
for image_meta in image_metas:
if self._is_image_available(context, image_meta):
- base_image_meta = self._translate_to_base(image_meta)
+ base_image_meta = self._translate_from_glance(image_meta)
images.append(base_image_meta)
return images
@@ -224,7 +215,7 @@ class GlanceImageService(service.BaseImageService):
if not self._is_image_available(context, image_meta):
raise exception.ImageNotFound(image_id=image_id)
- base_image_meta = self._translate_to_base(image_meta)
+ base_image_meta = self._translate_from_glance(image_meta)
return base_image_meta
def show_by_name(self, context, name):
@@ -248,7 +239,7 @@ class GlanceImageService(service.BaseImageService):
for chunk in image_chunks:
data.write(chunk)
- base_image_meta = self._translate_to_base(image_meta)
+ base_image_meta = self._translate_from_glance(image_meta)
return base_image_meta
def create(self, context, image_meta, data=None):
@@ -260,7 +251,7 @@ class GlanceImageService(service.BaseImageService):
# Translate Base -> Service
LOG.debug(_('Creating image in Glance. Metadata passed in %s'),
image_meta)
- sent_service_image_meta = self._translate_to_service(image_meta)
+ sent_service_image_meta = self._translate_to_glance(image_meta)
LOG.debug(_('Metadata after formatting for Glance %s'),
sent_service_image_meta)
@@ -268,7 +259,7 @@ class GlanceImageService(service.BaseImageService):
sent_service_image_meta, data)
# Translate Service -> Base
- base_image_meta = self._translate_to_base(recv_service_image_meta)
+ base_image_meta = self._translate_from_glance(recv_service_image_meta)
LOG.debug(_('Metadata returned from Glance formatted for Base %s'),
base_image_meta)
return base_image_meta
@@ -281,14 +272,14 @@ class GlanceImageService(service.BaseImageService):
"""
# NOTE(vish): show is to check if image is available
self.show(context, image_id)
- image_meta = _convert_to_string(image_meta)
+ image_meta = self._translate_to_glance(image_meta)
try:
client = self._get_client(context)
image_meta = client.update_image(image_id, image_meta, data)
except glance_exception.NotFound:
raise exception.ImageNotFound(image_id=image_id)
- base_image_meta = self._translate_to_base(image_meta)
+ base_image_meta = self._translate_from_glance(image_meta)
return base_image_meta
def delete(self, context, image_id):
@@ -310,17 +301,14 @@ class GlanceImageService(service.BaseImageService):
pass
@classmethod
- def _translate_to_service(cls, image_meta):
- image_meta = super(GlanceImageService,
- cls)._translate_to_service(image_meta)
+ def _translate_to_glance(cls, image_meta):
image_meta = _convert_to_string(image_meta)
+ image_meta = _remove_read_only(image_meta)
return image_meta
@classmethod
- def _translate_to_base(cls, image_meta):
- """Override translation to handle conversion to datetime objects."""
- image_meta = service.BaseImageService._propertify_metadata(
- image_meta, cls.SERVICE_IMAGE_ATTRS)
+ def _translate_from_glance(cls, image_meta):
+ image_meta = _limit_attributes(image_meta)
image_meta = _convert_timestamps_to_datetimes(image_meta)
image_meta = _convert_from_string(image_meta)
return image_meta
@@ -330,14 +318,26 @@ class GlanceImageService(service.BaseImageService):
"""Check image availability.
Under Glance, images are always available if the context has
- an auth_token. Otherwise, we fall back to the superclass
- method.
+        an auth_token; otherwise ownership and publicity are checked.
"""
if hasattr(context, 'auth_token') and context.auth_token:
return True
- return service.BaseImageService._is_image_available(context,
- image_meta)
+
+ if image_meta['is_public'] or context.is_admin:
+ return True
+
+ properties = image_meta['properties']
+
+ if context.project_id and ('project_id' in properties):
+ return str(properties['project_id']) == str(context.project_id)
+
+ try:
+ user_id = properties['user_id']
+ except KeyError:
+ return False
+
+ return str(user_id) == str(context.user_id)
# utility functions
@@ -397,3 +397,27 @@ def _convert_from_string(metadata):
def _convert_to_string(metadata):
return _convert(_json_dumps, metadata)
+
+
+def _limit_attributes(image_meta):
+ IMAGE_ATTRIBUTES = ['size', 'location', 'disk_format',
+ 'container_format', 'checksum', 'id',
+ 'name', 'created_at', 'updated_at',
+ 'deleted_at', 'deleted', 'status',
+ 'is_public']
+ output = {}
+ for attr in IMAGE_ATTRIBUTES:
+ output[attr] = image_meta.get(attr)
+
+ output['properties'] = image_meta.get('properties', {})
+
+ return output
+
+
+def _remove_read_only(image_meta):
+ IMAGE_ATTRIBUTES = ['updated_at', 'created_at', 'deleted_at']
+ output = copy.deepcopy(image_meta)
+ for attr in IMAGE_ATTRIBUTES:
+ if attr in output:
+ del output[attr]
+ return output
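Note: with the BaseImageService helpers gone, the two module-level functions above carry the whole translation. Roughly, _translate_to_glance JSON-encodes complex values and strips the read-only timestamps before sending, while _translate_from_glance whitelists the first-class attributes (anything else at the top level is dropped; only the existing 'properties' dict is carried over). A small illustration with made-up values:

    sample = {'id': 123, 'name': 'public image', 'status': 'active',
              'is_public': True, 'created_at': '2010-10-11T10:30:22',
              'owner': 'someone-else',           # not whitelisted: dropped
              'properties': {'key1': 'value1'}}
    base_meta = _limit_attributes(sample)
    # base_meta contains exactly the whitelisted keys (missing ones become
    # None) plus the original 'properties' dict.

    update = _remove_read_only({'name': 'renamed',
                                'created_at': '2010-10-11T10:30:22'})
    # update == {'name': 'renamed'}; timestamps never go back to Glance.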
diff --git a/nova/image/s3.py b/nova/image/s3.py
index abf01a942..343555887 100644
--- a/nova/image/s3.py
+++ b/nova/image/s3.py
@@ -34,7 +34,6 @@ from nova import flags
from nova import image
from nova import log as logging
from nova import utils
-from nova.image import service
from nova.api.ec2 import ec2utils
@@ -48,7 +47,7 @@ flags.DEFINE_string('s3_secret_key', 'notchecked',
'secret key to use for s3 server for images')
-class S3ImageService(service.BaseImageService):
+class S3ImageService(object):
"""Wraps an existing image service to support s3 based register."""
def __init__(self, service=None, *args, **kwargs):
diff --git a/nova/image/service.py b/nova/image/service.py
deleted file mode 100644
index 5361cfc89..000000000
--- a/nova/image/service.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from nova import utils
-
-
-class BaseImageService(object):
- """Base class for providing image search and retrieval services.
-
- ImageService exposes two concepts of metadata:
-
- 1. First-class attributes: This is metadata that is common to all
- ImageService subclasses and is shared across all hypervisors. These
-       attributes are defined by BASE_IMAGE_ATTRS.
-
-    2. Properties: This is metadata that is specific to an ImageService,
-       an Image, or a particular hypervisor. Any attribute not present in
- BASE_IMAGE_ATTRS should be considered an image property.
-
- This means that ImageServices will return BASE_IMAGE_ATTRS as keys in the
-    metadata dict; all other attributes will be returned as keys in the nested
- 'properties' dict.
-
- """
-
- BASE_IMAGE_ATTRS = ['id', 'name', 'created_at', 'updated_at',
- 'deleted_at', 'deleted', 'status', 'is_public']
-
- # NOTE(sirp): ImageService subclasses may override this to aid translation
- # between BaseImageService attributes and additional metadata stored by
- # the ImageService subclass
- SERVICE_IMAGE_ATTRS = []
-
- def index(self, context, *args, **kwargs):
- """List images.
-
- :returns: a sequence of mappings with the following signature
- {'id': opaque id of image, 'name': name of image}
-
- """
- raise NotImplementedError
-
- def detail(self, context, *args, **kwargs):
- """Detailed information about an images.
-
- :returns: a sequence of mappings with the following signature
- {'id': opaque id of image,
- 'name': name of image,
- 'created_at': creation datetime object,
- 'updated_at': modification datetime object,
- 'deleted_at': deletion datetime object or None,
- 'deleted': boolean indicating if image has been deleted,
- 'status': string description of image status,
- 'is_public': boolean indicating if image is public
- }
-
- If the service does not implement a method that provides a detailed
- set of information about images, then the method should raise
- NotImplementedError, in which case Nova will emulate this method
- with repeated calls to show() for each image received from the
- index() method.
-
- """
- raise NotImplementedError
-
- def show(self, context, image_id):
- """Detailed information about an image.
-
- :returns: a mapping with the following signature:
- {'id': opaque id of image,
- 'name': name of image,
- 'created_at': creation datetime object,
- 'updated_at': modification datetime object,
- 'deleted_at': deletion datetime object or None,
- 'deleted': boolean indicating if image has been deleted,
- 'status': string description of image status,
- 'is_public': boolean indicating if image is public
- }, ...
-
- :raises: NotFound if the image does not exist
-
- """
- raise NotImplementedError
-
- def get(self, context, data):
- """Get an image.
-
- :param data: a file-like object to hold binary image data
- :returns: a dict containing image metadata, writes image data to data.
- :raises: NotFound if the image does not exist
-
- """
- raise NotImplementedError
-
- def create(self, context, metadata, data=None):
- """Store the image metadata and data.
-
- :returns: the new image metadata.
-        :raises: AlreadyExists if the image already exists.
-
- """
- raise NotImplementedError
-
- def update(self, context, image_id, metadata, data=None):
- """Update the given image metadata and data and return the metadata.
-
- :raises: NotFound if the image does not exist.
-
- """
- raise NotImplementedError
-
- def delete(self, context, image_id):
- """Delete the given image.
-
- :raises: NotFound if the image does not exist.
-
- """
- raise NotImplementedError
-
- @staticmethod
- def _is_image_available(context, image_meta):
- """Check image availability.
-
- Images are always available if they are public or if the user is an
- admin.
-
-        Otherwise, we filter by project_id (if present) and then fall back to
-        images owned by the user.
-
- """
- # FIXME(sirp): We should be filtering by user_id on the Glance side
- # for security; however, we can't do that until we get authn/authz
- # sorted out. Until then, filtering in Nova.
- if image_meta['is_public'] or context.is_admin:
- return True
-
- properties = image_meta['properties']
-
- if context.project_id and ('project_id' in properties):
- return str(properties['project_id']) == str(context.project_id)
-
- try:
- user_id = properties['user_id']
- except KeyError:
- return False
-
- return str(user_id) == str(context.user_id)
-
- @classmethod
- def _translate_to_base(cls, metadata):
- """Return a metadata dictionary that is BaseImageService compliant.
-
- This is used by subclasses to expose only a metadata dictionary that
- is the same across ImageService implementations.
-
- """
- return cls._propertify_metadata(metadata, cls.BASE_IMAGE_ATTRS)
-
- @classmethod
- def _translate_to_service(cls, metadata):
- """Return a metadata dict that is usable by the ImageService subclass.
-
- As an example, Glance has additional attributes (like 'location'); the
- BaseImageService considers these properties, but we need to translate
- these back to first-class attrs for sending to Glance. This method
- handles this by allowing you to specify the attributes an ImageService
- considers first-class.
-
- """
- if not cls.SERVICE_IMAGE_ATTRS:
- raise NotImplementedError(_('Cannot use this without specifying '
- 'SERVICE_IMAGE_ATTRS for subclass'))
- return cls._propertify_metadata(metadata, cls.SERVICE_IMAGE_ATTRS)
-
- @staticmethod
- def _propertify_metadata(metadata, keys):
- """Move unknown keys to a nested 'properties' dict.
-
- :returns: a new dict with the keys moved.
-
- """
- flattened = utils.flatten_dict(metadata)
- attributes, properties = utils.partition_dict(flattened, keys)
- attributes['properties'] = properties
- return attributes
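Note: the removed _propertify_metadata was the piece that split a metadata dict into whitelisted first-class attributes plus a nested 'properties' dict (after flattening any existing 'properties'). A rough standalone sketch of that contract, without the nova.utils helpers:

    def propertify(metadata, keys):
        # Known keys stay top-level; everything else moves under
        # 'properties' (flattening of nested properties omitted here).
        attrs = dict((k, v) for k, v in metadata.items() if k in keys)
        attrs['properties'] = dict((k, v) for k, v in metadata.items()
                                   if k not in keys)
        return attrs

    propertify({'id': 1, 'name': 'img', 'location': 'file:///tmp/img'},
               ['id', 'name'])
    # => {'id': 1, 'name': 'img',
    #     'properties': {'location': 'file:///tmp/img'}}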
diff --git a/nova/tests/public_key/dummy.fingerprint b/nova/tests/api/ec2/public_key/dummy.fingerprint
index 715bca27a..715bca27a 100644
--- a/nova/tests/public_key/dummy.fingerprint
+++ b/nova/tests/api/ec2/public_key/dummy.fingerprint
diff --git a/nova/tests/public_key/dummy.pub b/nova/tests/api/ec2/public_key/dummy.pub
index d4cf2bc0d..d4cf2bc0d 100644
--- a/nova/tests/public_key/dummy.pub
+++ b/nova/tests/api/ec2/public_key/dummy.pub
diff --git a/nova/tests/test_cloud.py b/nova/tests/api/ec2/test_cloud.py
index 7fe353b3d..7bdae0552 100644
--- a/nova/tests/test_cloud.py
+++ b/nova/tests/api/ec2/test_cloud.py
@@ -1540,7 +1540,9 @@ class CloudTestCase(test.TestCase):
'ephemeral0': '/dev/sdb',
'swap': '/dev/sdc',
'ephemeral1': '/dev/sdd',
- 'ephemeral2': '/dev/sd3'}
+ 'ephemeral2': '/dev/sd3',
+ 'ebs0': '/dev/sdh',
+ 'ebs1': '/dev/sdi'}
self.assertEqual(self.cloud._format_instance_mapping(ctxt,
instance_ref0),
diff --git a/nova/tests/api/openstack/common.py b/nova/tests/api/openstack/common.py
index 74bb8729a..19515ca67 100644
--- a/nova/tests/api/openstack/common.py
+++ b/nova/tests/api/openstack/common.py
@@ -34,3 +34,25 @@ def webob_factory(url):
req.body = json.dumps(body)
return req
return web_request
+
+
+def compare_links(actual, expected):
+ """Compare xml atom links."""
+
+ return compare_tree_to_dict(actual, expected, ('rel', 'href', 'type'))
+
+
+def compare_media_types(actual, expected):
+ """Compare xml media types."""
+
+ return compare_tree_to_dict(actual, expected, ('base', 'type'))
+
+
+def compare_tree_to_dict(actual, expected, keys):
+ """Compare parts of lxml.etree objects to dicts."""
+
+ for elem, data in zip(actual, expected):
+ for key in keys:
+ if elem.get(key) != data.get(key):
+ return False
+ return True
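Note: these helpers let the lxml-based serializer tests elsewhere in this change compare parsed elements against their fixture dicts attribute by attribute. A small usage sketch with a hand-built document (the XML here is made up for illustration):

    from lxml import etree

    doc = etree.XML('<flavor xmlns:atom="http://www.w3.org/2005/Atom">'
                    '<atom:link rel="self" href="http://localhost/v1.1/"/>'
                    '</flavor>')
    links = doc.findall('{http://www.w3.org/2005/Atom}link')
    assert compare_links(links, [{'rel': 'self',
                                  'href': 'http://localhost/v1.1/'}])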
diff --git a/nova/tests/api/openstack/contrib/test_createserverext.py b/nova/tests/api/openstack/contrib/test_createserverext.py
index 078b72d67..03c7d1ec5 100644
--- a/nova/tests/api/openstack/contrib/test_createserverext.py
+++ b/nova/tests/api/openstack/contrib/test_createserverext.py
@@ -49,9 +49,13 @@ INSTANCE = {
"id": 1,
"display_name": "test_server",
"uuid": FAKE_UUID,
+ "user_id": 'fake_user_id',
+ "tenant_id": 'fake_tenant_id',
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
- "security_groups": [{"id": 1, "name": "test"}]
+ "security_groups": [{"id": 1, "name": "test"}],
+ "image_ref": 'http://foo.com/123',
+ "instance_type": {"flavorid": '124'},
}
diff --git a/nova/tests/api/openstack/fakes.py b/nova/tests/api/openstack/fakes.py
index 098b1e284..3a567f0cc 100644
--- a/nova/tests/api/openstack/fakes.py
+++ b/nova/tests/api/openstack/fakes.py
@@ -40,8 +40,8 @@ from nova.api.openstack import limits
from nova.auth.manager import User, Project
import nova.image.fake
from nova.image import glance
-from nova.image import service
from nova.tests import fake_flags
+from nova.tests.glance import stubs as glance_stubs
class Context(object):
@@ -83,7 +83,7 @@ def wsgi_app(inner_app10=None, inner_app11=None, fake_auth=True,
if fake_auth_context is not None:
ctxt = fake_auth_context
else:
- ctxt = context.RequestContext('fake', 'fake')
+ ctxt = context.RequestContext('fake', 'fake', auth_token=True)
api10 = openstack.FaultWrapper(api_auth.InjectContext(ctxt,
limits.RateLimitingMiddleware(inner_app10)))
api11 = openstack.FaultWrapper(api_auth.InjectContext(ctxt,
@@ -177,6 +177,39 @@ def stub_out_compute_api_backup(stubs):
stubs.Set(nova.compute.API, 'backup', backup)
+def _make_image_fixtures():
+ NOW_GLANCE_FORMAT = "2010-10-11T10:30:22"
+
+ image_id = 123
+ base_attrs = {'deleted': False}
+
+ fixtures = []
+
+ def add_fixture(**kwargs):
+ kwargs.update(base_attrs)
+ fixtures.append(kwargs)
+
+ # Public image
+ add_fixture(id=image_id, name='public image', is_public=True,
+ status='active', properties={'key1': 'value1'})
+ image_id += 1
+
+ # Snapshot for User 1
+ server_ref = 'http://localhost/v1.1/servers/42'
+ snapshot_properties = {'instance_ref': server_ref, 'user_id': 'fake'}
+ for status in ('queued', 'saving', 'active', 'killed',
+ 'deleted', 'pending_delete'):
+ add_fixture(id=image_id, name='%s snapshot' % status,
+ is_public=False, status=status,
+ properties=snapshot_properties)
+ image_id += 1
+
+ # Image without a name
+ add_fixture(id=image_id, is_public=True, status='active', properties={})
+
+ return fixtures
+
+
def stub_out_glance_add_image(stubs, sent_to_glance):
"""
We return the metadata sent to glance by modifying the sent_to_glance dict
@@ -192,91 +225,11 @@ def stub_out_glance_add_image(stubs, sent_to_glance):
stubs.Set(glance_client.Client, 'add_image', fake_add_image)
-def stub_out_glance(stubs, initial_fixtures=None):
-
- class FakeGlanceClient:
-
- def __init__(self, initial_fixtures):
- self.fixtures = initial_fixtures or []
-
- def _filter_images(self, filters=None, marker=None, limit=None):
- found = True
- if marker:
- found = False
- if limit == 0:
- limit = None
-
- fixtures = []
- count = 0
- for f in self.fixtures:
- if limit and count >= limit:
- break
- if found:
- fixtures.append(f)
- count = count + 1
- if f['id'] == marker:
- found = True
-
- return fixtures
-
- def fake_get_images(self, filters=None, marker=None, limit=None):
- fixtures = self._filter_images(filters, marker, limit)
- return [dict(id=f['id'], name=f['name'])
- for f in fixtures]
-
- def fake_get_images_detailed(self, filters=None,
- marker=None, limit=None):
- return self._filter_images(filters, marker, limit)
-
- def fake_get_image_meta(self, image_id):
- image = self._find_image(image_id)
- if image:
- return copy.deepcopy(image)
- raise glance_exc.NotFound
-
- def fake_add_image(self, image_meta, data=None):
- image_meta = copy.deepcopy(image_meta)
- image_id = ''.join(random.choice(string.letters)
- for _ in range(20))
- image_meta['id'] = image_id
- self.fixtures.append(image_meta)
- return copy.deepcopy(image_meta)
-
- def fake_update_image(self, image_id, image_meta, data=None):
- for attr in ('created_at', 'updated_at', 'deleted_at', 'deleted'):
- if attr in image_meta:
- del image_meta[attr]
-
- f = self._find_image(image_id)
- if not f:
- raise glance_exc.NotFound
-
- f.update(image_meta)
- return copy.deepcopy(f)
-
- def fake_delete_image(self, image_id):
- f = self._find_image(image_id)
- if not f:
- raise glance_exc.NotFound
-
- self.fixtures.remove(f)
-
- def _find_image(self, image_id):
- for f in self.fixtures:
- if str(f['id']) == str(image_id):
- return f
- return None
-
- GlanceClient = glance_client.Client
- fake = FakeGlanceClient(initial_fixtures)
-
- stubs.Set(GlanceClient, 'get_images', fake.fake_get_images)
- stubs.Set(GlanceClient, 'get_images_detailed',
- fake.fake_get_images_detailed)
- stubs.Set(GlanceClient, 'get_image_meta', fake.fake_get_image_meta)
- stubs.Set(GlanceClient, 'add_image', fake.fake_add_image)
- stubs.Set(GlanceClient, 'update_image', fake.fake_update_image)
- stubs.Set(GlanceClient, 'delete_image', fake.fake_delete_image)
+def stub_out_glance(stubs):
+ def fake_get_image_service():
+ client = glance_stubs.StubGlanceClient(_make_image_fixtures())
+ return nova.image.glance.GlanceImageService(client)
+ stubs.Set(nova.image, 'get_default_image_service', fake_get_image_service)
class FakeToken(object):
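Note: the hand-rolled FakeGlanceClient is gone; stub_out_glance now points nova.image.get_default_image_service at a GlanceImageService backed by StubGlanceClient and the fixtures above. A sketch of how a test case wires this in (mirroring the setUp/tearDown pattern used by the image tests later in this change):

    import stubout

    from nova import test
    from nova.tests.api.openstack import fakes

    class MyImagesTest(test.TestCase):
        def setUp(self):
            super(MyImagesTest, self).setUp()
            self.stubs = stubout.StubOutForTesting()
            fakes.stub_out_glance(self.stubs)

        def tearDown(self):
            self.stubs.UnsetAll()
            super(MyImagesTest, self).tearDown()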
diff --git a/nova/tests/api/openstack/test_common.py b/nova/tests/api/openstack/test_common.py
index b422bc4d1..867e9d446 100644
--- a/nova/tests/api/openstack/test_common.py
+++ b/nova/tests/api/openstack/test_common.py
@@ -19,6 +19,7 @@
Test suites for 'common' code used throughout the OpenStack HTTP API.
"""
+from lxml import etree
import webob.exc
import xml.dom.minidom as minidom
@@ -26,6 +27,11 @@ from webob import Request
from nova import test
from nova.api.openstack import common
+from nova.api.openstack import xmlutil
+
+
+NS = "{http://docs.openstack.org/compute/api/v1.1}"
+ATOMNS = "{http://www.w3.org/2005/Atom}"
class LimiterTest(test.TestCase):
@@ -314,7 +320,7 @@ class MetadataXMLDeserializationTest(test.TestCase):
class MetadataXMLSerializationTest(test.TestCase):
- def test_index(self):
+ def test_xml_declaration(self):
serializer = common.MetadataXMLSerializer()
fixture = {
'metadata': {
@@ -322,17 +328,31 @@ class MetadataXMLSerializationTest(test.TestCase):
'three': 'four',
},
}
- output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
- expected = minidom.parseString("""
- <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
- <meta key="three">four</meta>
- <meta key="one">two</meta>
- </metadata>
- """.replace(" ", "").replace("\n", ""))
+ output = serializer.serialize(fixture, 'index')
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
- self.assertEqual(expected.toxml(), actual.toxml())
+ def test_index(self):
+ serializer = common.MetadataXMLSerializer()
+ fixture = {
+ 'metadata': {
+ 'one': 'two',
+ 'three': 'four',
+ },
+ }
+ output = serializer.serialize(fixture, 'index')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'metadata')
+ metadata_dict = fixture['metadata']
+ metadata_elems = root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 2)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = metadata_dict.items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
def test_index_null(self):
serializer = common.MetadataXMLSerializer()
@@ -342,15 +362,16 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
- <meta key="None">None</meta>
- </metadata>
- """.replace(" ", "").replace("\n", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'metadata')
+ metadata_dict = fixture['metadata']
+ metadata_elems = root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 1)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = metadata_dict.items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
def test_index_unicode(self):
serializer = common.MetadataXMLSerializer()
@@ -360,15 +381,16 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString(u"""
- <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
- <meta key="three">Jos\xe9</meta>
- </metadata>
- """.encode("UTF-8").replace(" ", "").replace("\n", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'metadata')
+ metadata_dict = fixture['metadata']
+ metadata_elems = root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 1)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = metadata_dict.items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(metadata_elem.text.strip(), meta_value)
def test_show(self):
serializer = common.MetadataXMLSerializer()
@@ -378,14 +400,12 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <meta xmlns="http://docs.openstack.org/compute/api/v1.1"
- key="one">two</meta>
- """.replace(" ", "").replace("\n", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ meta_dict = fixture['meta']
+ (meta_key, meta_value) = meta_dict.items()[0]
+ self.assertEqual(str(root.get('key')), str(meta_key))
+ self.assertEqual(root.text.strip(), meta_value)
def test_update_all(self):
serializer = common.MetadataXMLSerializer()
@@ -396,16 +416,16 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'update_all')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <metadata xmlns="http://docs.openstack.org/compute/api/v1.1">
- <meta key="key6">value6</meta>
- <meta key="key4">value4</meta>
- </metadata>
- """.replace(" ", "").replace("\n", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'metadata')
+ metadata_dict = fixture['metadata']
+ metadata_elems = root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 2)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = metadata_dict.items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
def test_update_item(self):
serializer = common.MetadataXMLSerializer()
@@ -415,14 +435,12 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'update')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <meta xmlns="http://docs.openstack.org/compute/api/v1.1"
- key="one">two</meta>
- """.replace(" ", "").replace("\n", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ meta_dict = fixture['meta']
+ (meta_key, meta_value) = meta_dict.items()[0]
+ self.assertEqual(str(root.get('key')), str(meta_key))
+ self.assertEqual(root.text.strip(), meta_value)
def test_create(self):
serializer = common.MetadataXMLSerializer()
@@ -434,6 +452,16 @@ class MetadataXMLSerializationTest(test.TestCase):
},
}
output = serializer.serialize(fixture, 'create')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'metadata')
+ metadata_dict = fixture['metadata']
+ metadata_elems = root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 3)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = metadata_dict.items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
actual = minidom.parseString(output.replace(" ", ""))
expected = minidom.parseString("""
diff --git a/nova/tests/api/openstack/test_flavors.py b/nova/tests/api/openstack/test_flavors.py
index 812bece42..a3c5bd107 100644
--- a/nova/tests/api/openstack/test_flavors.py
+++ b/nova/tests/api/openstack/test_flavors.py
@@ -17,16 +17,21 @@
import json
import webob
-import xml.dom.minidom as minidom
+from lxml import etree
from nova.api.openstack import flavors
import nova.db.api
from nova import exception
from nova import test
+from nova.api.openstack import xmlutil
from nova.tests.api.openstack import fakes
from nova import wsgi
+NS = "{http://docs.openstack.org/compute/api/v1.1}"
+ATOMNS = "{http://www.w3.org/2005/Atom}"
+
+
def stub_flavor(flavorid, name, memory_mb="256", local_gb="10"):
return {
"flavorid": str(flavorid),
@@ -262,10 +267,37 @@ class FlavorsTest(test.TestCase):
class FlavorsXMLSerializationTest(test.TestCase):
+ def test_xml_declaration(self):
+ serializer = flavors.FlavorXMLSerializer()
+
+ fixture = {
+ "flavor": {
+ "id": "12",
+ "name": "asdf",
+ "ram": "256",
+ "disk": "10",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/flavors/12",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/flavors/12",
+ },
+ ],
+ },
+ }
+
+ output = serializer.serialize(fixture, 'show')
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
+
def test_show(self):
serializer = flavors.FlavorXMLSerializer()
- input = {
+ fixture = {
"flavor": {
"id": "12",
"name": "asdf",
@@ -284,29 +316,25 @@ class FlavorsXMLSerializationTest(test.TestCase):
},
}
- output = serializer.serialize(input, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <flavor xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- id="12"
- name="asdf"
- ram="256"
- disk="10">
- <atom:link href="http://localhost/v1.1/fake/flavors/12"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/12"
- rel="bookmark"/>
- </flavor>
- """.replace(" ", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = serializer.serialize(fixture, 'show')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'flavor')
+ flavor_dict = fixture['flavor']
+
+ for key in ['name', 'id', 'ram', 'disk']:
+ self.assertEqual(root.get(key), str(flavor_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(flavor_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_show_handles_integers(self):
serializer = flavors.FlavorXMLSerializer()
- input = {
+ fixture = {
"flavor": {
"id": 12,
"name": "asdf",
@@ -325,29 +353,25 @@ class FlavorsXMLSerializationTest(test.TestCase):
},
}
- output = serializer.serialize(input, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <flavor xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- id="12"
- name="asdf"
- ram="256"
- disk="10">
- <atom:link href="http://localhost/v1.1/fake/flavors/12"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/12"
- rel="bookmark"/>
- </flavor>
- """.replace(" ", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = serializer.serialize(fixture, 'show')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'flavor')
+ flavor_dict = fixture['flavor']
+
+ for key in ['name', 'id', 'ram', 'disk']:
+ self.assertEqual(root.get(key), str(flavor_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(flavor_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_detail(self):
serializer = flavors.FlavorXMLSerializer()
- input = {
+ fixture = {
"flavors": [
{
"id": "23",
@@ -383,39 +407,28 @@ class FlavorsXMLSerializationTest(test.TestCase):
],
}
- output = serializer.serialize(input, 'detail')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <flavors xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom">
- <flavor id="23"
- name="flavor 23"
- ram="512"
- disk="20">
- <atom:link href="http://localhost/v1.1/fake/flavors/23"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/23"
- rel="bookmark"/>
- </flavor>
- <flavor id="13"
- name="flavor 13"
- ram="256"
- disk="10">
- <atom:link href="http://localhost/v1.1/fake/flavors/13"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/13"
- rel="bookmark"/>
- </flavor>
- </flavors>
- """.replace(" ", "") % locals())
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = serializer.serialize(fixture, 'detail')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'flavors')
+ flavor_elems = root.findall('{0}flavor'.format(NS))
+ self.assertEqual(len(flavor_elems), 2)
+ for i, flavor_elem in enumerate(flavor_elems):
+ flavor_dict = fixture['flavors'][i]
+
+ for key in ['name', 'id', 'ram', 'disk']:
+ self.assertEqual(flavor_elem.get(key), str(flavor_dict[key]))
+
+ link_nodes = flavor_elem.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(flavor_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_index(self):
serializer = flavors.FlavorXMLSerializer()
- input = {
+ fixture = {
"flavors": [
{
"id": "23",
@@ -451,42 +464,34 @@ class FlavorsXMLSerializationTest(test.TestCase):
],
}
- output = serializer.serialize(input, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <flavors xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom">
- <flavor id="23" name="flavor 23">
- <atom:link href="http://localhost/v1.1/fake/flavors/23"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/23"
- rel="bookmark"/>
- </flavor>
- <flavor id="13" name="flavor 13">
- <atom:link href="http://localhost/v1.1/fake/flavors/13"
- rel="self"/>
- <atom:link href="http://localhost/fake/flavors/13"
- rel="bookmark"/>
- </flavor>
- </flavors>
- """.replace(" ", "") % locals())
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = serializer.serialize(fixture, 'index')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'flavors_index')
+ flavor_elems = root.findall('{0}flavor'.format(NS))
+ self.assertEqual(len(flavor_elems), 2)
+ for i, flavor_elem in enumerate(flavor_elems):
+ flavor_dict = fixture['flavors'][i]
+
+ for key in ['name', 'id']:
+ self.assertEqual(flavor_elem.get(key), str(flavor_dict[key]))
+
+ link_nodes = flavor_elem.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(flavor_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_index_empty(self):
serializer = flavors.FlavorXMLSerializer()
- input = {
+ fixture = {
"flavors": [],
}
- output = serializer.serialize(input, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <flavors xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom" />
- """.replace(" ", "") % locals())
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = serializer.serialize(fixture, 'index')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'flavors_index')
+ flavor_elems = root.findall('{0}flavor'.format(NS))
+ self.assertEqual(len(flavor_elems), 0)
diff --git a/nova/tests/api/openstack/test_image_metadata.py b/nova/tests/api/openstack/test_image_metadata.py
index fe42e35e5..314c3c38e 100644
--- a/nova/tests/api/openstack/test_image_metadata.py
+++ b/nova/tests/api/openstack/test_image_metadata.py
@@ -23,7 +23,6 @@ from nova import flags
from nova.api import openstack
from nova import test
from nova.tests.api.openstack import fakes
-import nova.wsgi
FLAGS = flags.FLAGS
@@ -31,76 +30,20 @@ FLAGS = flags.FLAGS
class ImageMetaDataTest(test.TestCase):
- IMAGE_FIXTURES = [
- {'status': 'active',
- 'name': 'image1',
- 'deleted': False,
- 'container_format': None,
- 'checksum': None,
- 'created_at': '2011-03-22T17:40:15',
- 'disk_format': None,
- 'updated_at': '2011-03-22T17:40:15',
- 'id': '1',
- 'location': 'file:///var/lib/glance/images/1',
- 'is_public': True,
- 'deleted_at': None,
- 'properties': {
- 'key1': 'value1',
- 'key2': 'value2'},
- 'size': 5882349},
- {'status': 'active',
- 'name': 'image2',
- 'deleted': False,
- 'container_format': None,
- 'checksum': None,
- 'created_at': '2011-03-22T17:40:15',
- 'disk_format': None,
- 'updated_at': '2011-03-22T17:40:15',
- 'id': '2',
- 'location': 'file:///var/lib/glance/images/2',
- 'is_public': True,
- 'deleted_at': None,
- 'properties': {
- 'key1': 'value1',
- 'key2': 'value2'},
- 'size': 5882349},
- {'status': 'active',
- 'name': 'image3',
- 'deleted': False,
- 'container_format': None,
- 'checksum': None,
- 'created_at': '2011-03-22T17:40:15',
- 'disk_format': None,
- 'updated_at': '2011-03-22T17:40:15',
- 'id': '3',
- 'location': 'file:///var/lib/glance/images/2',
- 'is_public': True,
- 'deleted_at': None,
- 'properties': {},
- 'size': 5882349},
- ]
-
def setUp(self):
super(ImageMetaDataTest, self).setUp()
- self.flags(image_service='nova.image.glance.GlanceImageService')
- # NOTE(dprince) max out properties/metadata in image 3 for testing
- img3 = self.IMAGE_FIXTURES[2]
- for num in range(FLAGS.quota_metadata_items):
- img3['properties']['key%i' % num] = "blah"
- fakes.stub_out_glance(self.stubs, self.IMAGE_FIXTURES)
+ fakes.stub_out_glance(self.stubs)
def test_index(self):
- req = webob.Request.blank('/v1.1/123/images/1/metadata')
+ req = webob.Request.blank('/v1.1/123/images/123/metadata')
res = req.get_response(fakes.wsgi_app())
res_dict = json.loads(res.body)
self.assertEqual(200, res.status_int)
- expected = self.IMAGE_FIXTURES[0]['properties']
- self.assertEqual(len(expected), len(res_dict['metadata']))
- for (key, value) in res_dict['metadata'].items():
- self.assertEqual(value, res_dict['metadata'][key])
+ expected = {'metadata': {'key1': 'value1'}}
+ self.assertEqual(res_dict, expected)
def test_show(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
res = req.get_response(fakes.wsgi_app())
res_dict = json.loads(res.body)
self.assertEqual(200, res.status_int)
@@ -109,32 +52,38 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual('value1', res_dict['meta']['key1'])
def test_show_not_found(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key9')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key9')
+ res = req.get_response(fakes.wsgi_app())
+ self.assertEqual(404, res.status_int)
+
+ def test_show_image_not_found(self):
+ req = webob.Request.blank('/v1.1/fake/images/100/metadata/key1')
res = req.get_response(fakes.wsgi_app())
self.assertEqual(404, res.status_int)
def test_create(self):
- req = webob.Request.blank('/v1.1/fake/images/2/metadata')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata')
req.method = 'POST'
- req.body = '{"metadata": {"key9": "value9"}}'
+ req.body = '{"metadata": {"key7": "value7"}}'
req.headers["content-type"] = "application/json"
res = req.get_response(fakes.wsgi_app())
self.assertEqual(200, res.status_int)
actual_output = json.loads(res.body)
+ expected_output = {'metadata': {'key1': 'value1', 'key7': 'value7'}}
+ self.assertEqual(expected_output, actual_output)
- expected_output = {
- 'metadata': {
- 'key1': 'value1',
- 'key2': 'value2',
- 'key9': 'value9',
- },
- }
+ def test_create_image_not_found(self):
+ req = webob.Request.blank('/v1.1/fake/images/100/metadata')
+ req.method = 'POST'
+ req.body = '{"metadata": {"key7": "value7"}}'
+ req.headers["content-type"] = "application/json"
+ res = req.get_response(fakes.wsgi_app())
- self.assertEqual(expected_output, actual_output)
+ self.assertEqual(404, res.status_int)
def test_update_all(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata')
req.method = 'PUT'
req.body = '{"metadata": {"key9": "value9"}}'
req.headers["content-type"] = "application/json"
@@ -142,17 +91,20 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(200, res.status_int)
actual_output = json.loads(res.body)
+ expected_output = {'metadata': {'key9': 'value9'}}
+ self.assertEqual(expected_output, actual_output)
- expected_output = {
- 'metadata': {
- 'key9': 'value9',
- },
- }
+ def test_update_all_image_not_found(self):
+ req = webob.Request.blank('/v1.1/fake/images/100/metadata')
+ req.method = 'PUT'
+ req.body = '{"metadata": {"key9": "value9"}}'
+ req.headers["content-type"] = "application/json"
+ res = req.get_response(fakes.wsgi_app())
- self.assertEqual(expected_output, actual_output)
+ self.assertEqual(404, res.status_int)
def test_update_item(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
req.method = 'PUT'
req.body = '{"meta": {"key1": "zz"}}'
req.headers["content-type"] = "application/json"
@@ -160,15 +112,20 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(200, res.status_int)
actual_output = json.loads(res.body)
- expected_output = {
- 'meta': {
- 'key1': 'zz',
- },
- }
+ expected_output = {'meta': {'key1': 'zz'}}
self.assertEqual(actual_output, expected_output)
+ def test_update_item_image_not_found(self):
+ req = webob.Request.blank('/v1.1/fake/images/100/metadata/key1')
+ req.method = 'PUT'
+ req.body = '{"meta": {"key1": "zz"}}'
+ req.headers["content-type"] = "application/json"
+ res = req.get_response(fakes.wsgi_app())
+
+ self.assertEqual(404, res.status_int)
+
def test_update_item_bad_body(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
req.method = 'PUT'
req.body = '{"key1": "zz"}'
req.headers["content-type"] = "application/json"
@@ -176,15 +133,18 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(400, res.status_int)
def test_update_item_too_many_keys(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
req.method = 'PUT'
- req.body = '{"meta": {"key1": "value1", "key2": "value2"}}'
+ overload = {}
+ for num in range(FLAGS.quota_metadata_items + 1):
+ overload['key%s' % num] = 'value%s' % num
+ req.body = json.dumps({'meta': overload})
req.headers["content-type"] = "application/json"
res = req.get_response(fakes.wsgi_app())
self.assertEqual(400, res.status_int)
def test_update_item_body_uri_mismatch(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/bad')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/bad')
req.method = 'PUT'
req.body = '{"meta": {"key1": "value1"}}'
req.headers["content-type"] = "application/json"
@@ -192,7 +152,7 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(400, res.status_int)
def test_update_item_xml(self):
- req = webob.Request.blank('/v1.1/fake/images/1/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
req.method = 'PUT'
req.body = '<meta key="key1">five</meta>'
req.headers["content-type"] = "application/xml"
@@ -200,22 +160,24 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(200, res.status_int)
actual_output = json.loads(res.body)
- expected_output = {
- 'meta': {
- 'key1': 'five',
- },
- }
+ expected_output = {'meta': {'key1': 'five'}}
self.assertEqual(actual_output, expected_output)
def test_delete(self):
- req = webob.Request.blank('/v1.1/fake/images/2/metadata/key1')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/key1')
req.method = 'DELETE'
res = req.get_response(fakes.wsgi_app())
self.assertEqual(204, res.status_int)
self.assertEqual('', res.body)
def test_delete_not_found(self):
- req = webob.Request.blank('/v1.1/fake/images/2/metadata/blah')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/blah')
+ req.method = 'DELETE'
+ res = req.get_response(fakes.wsgi_app())
+ self.assertEqual(404, res.status_int)
+
+ def test_delete_image_not_found(self):
+ req = webob.Request.blank('/v1.1/fake/images/100/metadata/key1')
req.method = 'DELETE'
res = req.get_response(fakes.wsgi_app())
self.assertEqual(404, res.status_int)
@@ -225,7 +187,7 @@ class ImageMetaDataTest(test.TestCase):
for num in range(FLAGS.quota_metadata_items + 1):
data['metadata']['key%i' % num] = "blah"
json_string = str(data).replace("\'", "\"")
- req = webob.Request.blank('/v1.1/fake/images/2/metadata')
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata')
req.method = 'POST'
req.body = json_string
req.headers["content-type"] = "application/json"
@@ -233,7 +195,8 @@ class ImageMetaDataTest(test.TestCase):
self.assertEqual(413, res.status_int)
def test_too_many_metadata_items_on_put(self):
- req = webob.Request.blank('/v1.1/fake/images/3/metadata/blah')
+ FLAGS.quota_metadata_items = 1
+ req = webob.Request.blank('/v1.1/fake/images/123/metadata/blah')
req.method = 'PUT'
req.body = '{"meta": {"blah": "blah"}}'
req.headers["content-type"] = "application/json"
diff --git a/nova/tests/api/openstack/test_images.py b/nova/tests/api/openstack/test_images.py
index 5856e635f..2aee1bc14 100644
--- a/nova/tests/api/openstack/test_images.py
+++ b/nova/tests/api/openstack/test_images.py
@@ -22,358 +22,76 @@ and as a WSGI layer
import copy
import json
-import os
-import shutil
-import tempfile
import xml.dom.minidom as minidom
+from lxml import etree
import mox
import stubout
import webob
-from glance import client as glance_client
from nova import context
-from nova import exception
-from nova import test
-from nova import utils
import nova.api.openstack
from nova.api.openstack import images
+from nova.api.openstack import xmlutil
+from nova import test
from nova.tests.api.openstack import fakes
-class _BaseImageServiceTests(test.TestCase):
- """Tasks to test for all image services"""
-
- def __init__(self, *args, **kwargs):
- super(_BaseImageServiceTests, self).__init__(*args, **kwargs)
- self.service = None
- self.context = None
-
- def test_create(self):
- fixture = self._make_fixture('test image')
- num_images = len(self.service.index(self.context))
-
- image_id = self.service.create(self.context, fixture)['id']
-
- self.assertNotEquals(None, image_id)
- self.assertEquals(num_images + 1,
- len(self.service.index(self.context)))
-
- def test_create_and_show_non_existing_image(self):
- fixture = self._make_fixture('test image')
- num_images = len(self.service.index(self.context))
-
- image_id = self.service.create(self.context, fixture)['id']
-
- self.assertNotEquals(None, image_id)
- self.assertRaises(exception.NotFound,
- self.service.show,
- self.context,
- 'bad image id')
-
- def test_create_and_show_non_existing_image_by_name(self):
- fixture = self._make_fixture('test image')
- num_images = len(self.service.index(self.context))
-
- image_id = self.service.create(self.context, fixture)['id']
-
- self.assertNotEquals(None, image_id)
- self.assertRaises(exception.ImageNotFound,
- self.service.show_by_name,
- self.context,
- 'bad image id')
-
- def test_update(self):
- fixture = self._make_fixture('test image')
- image_id = self.service.create(self.context, fixture)['id']
- fixture['status'] = 'in progress'
-
- self.service.update(self.context, image_id, fixture)
-
- new_image_data = self.service.show(self.context, image_id)
- self.assertEquals('in progress', new_image_data['status'])
-
- def test_delete(self):
- fixture1 = self._make_fixture('test image 1')
- fixture2 = self._make_fixture('test image 2')
- fixtures = [fixture1, fixture2]
-
- num_images = len(self.service.index(self.context))
- self.assertEquals(0, num_images, str(self.service.index(self.context)))
-
- ids = []
- for fixture in fixtures:
- new_id = self.service.create(self.context, fixture)['id']
- ids.append(new_id)
-
- num_images = len(self.service.index(self.context))
- self.assertEquals(2, num_images, str(self.service.index(self.context)))
-
- self.service.delete(self.context, ids[0])
-
- num_images = len(self.service.index(self.context))
- self.assertEquals(1, num_images)
-
- def test_index(self):
- fixture = self._make_fixture('test image')
- image_id = self.service.create(self.context, fixture)['id']
- image_metas = self.service.index(self.context)
- expected = [{'id': 'DONTCARE', 'name': 'test image'}]
- self.assertDictListMatch(image_metas, expected)
-
- @staticmethod
- def _make_fixture(name):
- fixture = {'name': name,
- 'updated': None,
- 'created': None,
- 'status': None,
- 'is_public': True}
- return fixture
-
-
-class GlanceImageServiceTest(_BaseImageServiceTests):
-
- """Tests the Glance image service, in particular that metadata translation
- works properly.
-
- At a high level, the translations involved are:
+NS = "{http://docs.openstack.org/compute/api/v1.1}"
+ATOMNS = "{http://www.w3.org/2005/Atom}"
+NOW_API_FORMAT = "2010-10-11T10:30:22Z"
- 1. Glance -> ImageService - This is needed so we can support
-           multiple ImageServices (Glance, Local, etc.)
-        2. ImageService -> API - This is needed so we can support multiple
- APIs (OpenStack, EC2)
- """
- def setUp(self):
- super(GlanceImageServiceTest, self).setUp()
- self.stubs = stubout.StubOutForTesting()
- fakes.stub_out_glance(self.stubs)
- fakes.stub_out_compute_api_snapshot(self.stubs)
- service_class = 'nova.image.glance.GlanceImageService'
- self.service = utils.import_object(service_class)
- self.context = context.RequestContext('fake', 'fake')
- self.service.delete_all()
- self.sent_to_glance = {}
- fakes.stub_out_glance_add_image(self.stubs, self.sent_to_glance)
-
- def tearDown(self):
- self.stubs.UnsetAll()
- super(GlanceImageServiceTest, self).tearDown()
-
- def test_create_with_instance_id(self):
- """Ensure instance_id is persisted as an image-property"""
- fixture = {'name': 'test image',
- 'is_public': False,
- 'properties': {'instance_id': '42', 'user_id': 'fake'}}
-
- image_id = self.service.create(self.context, fixture)['id']
- expected = fixture
- self.assertDictMatch(self.sent_to_glance['metadata'], expected)
-
- image_meta = self.service.show(self.context, image_id)
- expected = {'id': image_id,
- 'name': 'test image',
- 'is_public': False,
- 'properties': {'instance_id': '42', 'user_id': 'fake'}}
- self.assertDictMatch(image_meta, expected)
-
- image_metas = self.service.detail(self.context)
- self.assertDictMatch(image_metas[0], expected)
-
- def test_create_without_instance_id(self):
- """
- Ensure we can create an image without having to specify an
- instance_id. Public images are an example of an image not tied to an
- instance.
- """
- fixture = {'name': 'test image'}
- image_id = self.service.create(self.context, fixture)['id']
-
- expected = {'name': 'test image', 'properties': {}}
- self.assertDictMatch(self.sent_to_glance['metadata'], expected)
-
- def test_index_default_limit(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.index(self.context)
- i = 0
- for meta in image_metas:
- expected = {'id': 'DONTCARE',
- 'name': 'TestImage %d' % (i)}
- self.assertDictMatch(meta, expected)
- i = i + 1
-
- def test_index_marker(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.index(self.context, marker=ids[1])
- self.assertEquals(len(image_metas), 8)
- i = 2
- for meta in image_metas:
- expected = {'id': 'DONTCARE',
- 'name': 'TestImage %d' % (i)}
- self.assertDictMatch(meta, expected)
- i = i + 1
-
- def test_index_limit(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.index(self.context, limit=3)
- self.assertEquals(len(image_metas), 3)
-
- def test_index_marker_and_limit(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.index(self.context, marker=ids[3], limit=1)
- self.assertEquals(len(image_metas), 1)
- i = 4
- for meta in image_metas:
- expected = {'id': 'DONTCARE',
- 'name': 'TestImage %d' % (i)}
- self.assertDictMatch(meta, expected)
- i = i + 1
-
- def test_detail_marker(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.detail(self.context, marker=ids[1])
- self.assertEquals(len(image_metas), 8)
- i = 2
- for meta in image_metas:
- expected = {
- 'id': 'DONTCARE',
- 'status': None,
- 'is_public': True,
- 'name': 'TestImage %d' % (i),
- 'properties': {
- 'updated': None,
- 'created': None,
- },
- }
-
- self.assertDictMatch(meta, expected)
- i = i + 1
-
- def test_detail_limit(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.detail(self.context, limit=3)
- self.assertEquals(len(image_metas), 3)
-
- def test_detail_marker_and_limit(self):
- fixtures = []
- ids = []
- for i in range(10):
- fixture = self._make_fixture('TestImage %d' % (i))
- fixtures.append(fixture)
- ids.append(self.service.create(self.context, fixture)['id'])
-
- image_metas = self.service.detail(self.context, marker=ids[3], limit=3)
- self.assertEquals(len(image_metas), 3)
- i = 4
- for meta in image_metas:
- expected = {
- 'id': 'DONTCARE',
- 'status': None,
- 'is_public': True,
- 'name': 'TestImage %d' % (i),
- 'properties': {
- 'updated': None, 'created': None},
- }
- self.assertDictMatch(meta, expected)
- i = i + 1
-
-
-class ImageControllerWithGlanceServiceTest(test.TestCase):
+class ImagesTest(test.TestCase):
"""
Test of the OpenStack API /images application controller w/Glance.
"""
- NOW_GLANCE_FORMAT = "2010-10-11T10:30:22"
- NOW_API_FORMAT = "2010-10-11T10:30:22Z"
def setUp(self):
"""Run before each test."""
- super(ImageControllerWithGlanceServiceTest, self).setUp()
- self.flags(image_service='nova.image.glance.GlanceImageService')
+ super(ImagesTest, self).setUp()
self.stubs = stubout.StubOutForTesting()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
- self.fixtures = self._make_image_fixtures()
- fakes.stub_out_glance(self.stubs, initial_fixtures=self.fixtures)
fakes.stub_out_compute_api_snapshot(self.stubs)
fakes.stub_out_compute_api_backup(self.stubs)
+ fakes.stub_out_glance(self.stubs)
def tearDown(self):
"""Run after each test."""
self.stubs.UnsetAll()
- super(ImageControllerWithGlanceServiceTest, self).tearDown()
+ super(ImagesTest, self).tearDown()
def _get_fake_context(self):
class Context(object):
project_id = 'fake'
+ auth_token = True
return Context()
- def _applicable_fixture(self, fixture, user_id):
- """Determine if this fixture is applicable for given user id."""
- is_public = fixture["is_public"]
- try:
- uid = fixture["properties"]["user_id"]
- except KeyError:
- uid = None
- return uid == user_id or is_public
-
def test_get_image_index(self):
request = webob.Request.blank('/v1.0/images')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
response_dict = json.loads(response.body)
response_list = response_dict["images"]
- expected = [{'id': 123, 'name': 'public image'},
- {'id': 124, 'name': 'queued snapshot'},
- {'id': 125, 'name': 'saving snapshot'},
- {'id': 126, 'name': 'active snapshot'},
- {'id': 127, 'name': 'killed snapshot'},
- {'id': 128, 'name': 'deleted snapshot'},
- {'id': 129, 'name': 'pending_delete snapshot'},
- {'id': 131, 'name': None}]
+ expected = [{'id': '123', 'name': 'public image'},
+ {'id': '124', 'name': 'queued snapshot'},
+ {'id': '125', 'name': 'saving snapshot'},
+ {'id': '126', 'name': 'active snapshot'},
+ {'id': '127', 'name': 'killed snapshot'},
+ {'id': '128', 'name': 'deleted snapshot'},
+ {'id': '129', 'name': 'pending_delete snapshot'},
+ {'id': '130', 'name': None}]
self.assertDictListMatch(response_list, expected)
def test_get_image(self):
request = webob.Request.blank('/v1.0/images/123')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
self.assertEqual(200, response.status_int)
@@ -381,20 +99,21 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
expected_image = {
"image": {
- "id": 123,
+ "id": "123",
"name": "public image",
- "updated": self.NOW_API_FORMAT,
- "created": self.NOW_API_FORMAT,
+ "updated": NOW_API_FORMAT,
+ "created": NOW_API_FORMAT,
"status": "ACTIVE",
"progress": 100,
},
}
- self.assertEqual(expected_image, actual_image)
+ self.assertDictMatch(expected_image, actual_image)
def test_get_image_v1_1(self):
request = webob.Request.blank('/v1.1/fake/images/124')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
actual_image = json.loads(response.body)
@@ -405,10 +124,10 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
expected_image = {
"image": {
- "id": 124,
+ "id": "124",
"name": "queued snapshot",
- "updated": self.NOW_API_FORMAT,
- "created": self.NOW_API_FORMAT,
+ "updated": NOW_API_FORMAT,
+ "created": NOW_API_FORMAT,
"status": "SAVING",
"progress": 0,
'server': {
@@ -442,11 +161,12 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
def test_get_image_xml(self):
request = webob.Request.blank('/v1.0/images/123')
request.accept = "application/xml"
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
actual_image = minidom.parseString(response.body.replace(" ", ""))
- expected_now = self.NOW_API_FORMAT
+ expected_now = NOW_API_FORMAT
expected_image = minidom.parseString("""
<image id="123"
name="public image"
@@ -460,15 +180,16 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
self.assertEqual(expected_image.toxml(), actual_image.toxml())
def test_get_image_xml_no_name(self):
- request = webob.Request.blank('/v1.0/images/131')
+ request = webob.Request.blank('/v1.0/images/130')
request.accept = "application/xml"
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
actual_image = minidom.parseString(response.body.replace(" ", ""))
- expected_now = self.NOW_API_FORMAT
+ expected_now = NOW_API_FORMAT
expected_image = minidom.parseString("""
- <image id="131"
+ <image id="130"
name="None"
updated="%(expected_now)s"
created="%(expected_now)s"
@@ -553,106 +274,198 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
def test_get_image_index_v1_1(self):
request = webob.Request.blank('/v1.1/fake/images')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
response_dict = json.loads(response.body)
response_list = response_dict["images"]
- fixtures = copy.copy(self.fixtures)
-
- for image in fixtures:
- if not self._applicable_fixture(image, "fake"):
- fixtures.remove(image)
- continue
-
- href = "http://localhost/v1.1/fake/images/%s" % image["id"]
- bookmark = "http://localhost/fake/images/%s" % image["id"]
- test_image = {
- "id": image["id"],
- "name": image["name"],
+ expected = [
+ {
+ "id": "123",
+ "name": "public image",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/123",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/123",
+ },
+ ],
+ },
+ {
+ "id": "124",
+ "name": "queued snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/124",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/124",
+ },
+ ],
+ },
+ {
+ "id": "125",
+ "name": "saving snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/125",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/125",
+ },
+ ],
+ },
+ {
+ "id": "126",
+ "name": "active snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/126",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/126",
+ },
+ ],
+ },
+ {
+ "id": "127",
+ "name": "killed snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/127",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/127",
+ },
+ ],
+ },
+ {
+ "id": "128",
+ "name": "deleted snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/128",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/128",
+ },
+ ],
+ },
+ {
+ "id": "129",
+ "name": "pending_delete snapshot",
+ "links": [
+ {
+ "rel": "self",
+ "href": "http://localhost/v1.1/fake/images/129",
+ },
+ {
+ "rel": "bookmark",
+ "href": "http://localhost/fake/images/129",
+ },
+ ],
+ },
+ {
+ "id": "130",
+ "name": None,
"links": [
{
"rel": "self",
- "href": href,
+ "href": "http://localhost/v1.1/fake/images/130",
},
{
"rel": "bookmark",
- "href": bookmark,
+ "href": "http://localhost/fake/images/130",
},
],
- }
- self.assertTrue(test_image in response_list)
+ },
+ ]
- self.assertEqual(len(response_list), len(fixtures))
+ self.assertDictListMatch(response_list, expected)
def test_get_image_details(self):
request = webob.Request.blank('/v1.0/images/detail')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
response_dict = json.loads(response.body)
response_list = response_dict["images"]
expected = [{
- 'id': 123,
+ 'id': '123',
'name': 'public image',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
},
{
- 'id': 124,
+ 'id': '124',
'name': 'queued snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 0,
},
{
- 'id': 125,
+ 'id': '125',
'name': 'saving snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 0,
},
{
- 'id': 126,
+ 'id': '126',
'name': 'active snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
},
{
- 'id': 127,
+ 'id': '127',
'name': 'killed snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ERROR',
'progress': 0,
},
{
- 'id': 128,
+ 'id': '128',
'name': 'deleted snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
},
{
- 'id': 129,
+ 'id': '129',
'name': 'pending_delete snapshot',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
},
{
- 'id': 131,
+ 'id': '130',
'name': None,
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
}]
@@ -661,7 +474,8 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
def test_get_image_details_v1_1(self):
request = webob.Request.blank('/v1.1/fake/images/detail')
- response = request.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ response = request.get_response(app)
response_dict = json.loads(response.body)
response_list = response_dict["images"]
@@ -669,11 +483,11 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
server_bookmark = "http://localhost/servers/42"
expected = [{
- 'id': 123,
+ 'id': '123',
'name': 'public image',
- 'metadata': {},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'metadata': {'key1': 'value1'},
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
"links": [{
@@ -686,14 +500,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 124,
+ 'id': '124',
'name': 'queued snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 0,
'server': {
@@ -717,14 +531,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 125,
+ 'id': '125',
'name': 'saving snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 0,
'server': {
@@ -748,14 +562,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 126,
+ 'id': '126',
'name': 'active snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
'server': {
@@ -779,14 +593,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 127,
+ 'id': '127',
'name': 'killed snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ERROR',
'progress': 0,
'server': {
@@ -810,14 +624,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 128,
+ 'id': '128',
'name': 'deleted snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
'server': {
@@ -841,14 +655,14 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 129,
+ 'id': '129',
'name': 'pending_delete snapshot',
'metadata': {
u'instance_ref': u'http://localhost/v1.1/servers/42',
u'user_id': u'fake',
},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
'server': {
@@ -872,20 +686,20 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
}],
},
{
- 'id': 131,
+ 'id': '130',
'name': None,
'metadata': {},
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT,
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
"links": [{
"rel": "self",
- "href": "http://localhost/v1.1/fake/images/131",
+ "href": "http://localhost/v1.1/fake/images/130",
},
{
"rel": "bookmark",
- "href": "http://localhost/fake/images/131",
+ "href": "http://localhost/fake/images/130",
}],
},
]
@@ -1097,11 +911,12 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
def test_get_image_found(self):
req = webob.Request.blank('/v1.0/images/123')
- res = req.get_response(fakes.wsgi_app())
+ app = fakes.wsgi_app(fake_auth_context=self._get_fake_context())
+ res = req.get_response(app)
image_meta = json.loads(res.body)['image']
- expected = {'id': 123, 'name': 'public image',
- 'updated': self.NOW_API_FORMAT,
- 'created': self.NOW_API_FORMAT, 'status': 'ACTIVE',
+ expected = {'id': '123', 'name': 'public image',
+ 'updated': NOW_API_FORMAT,
+ 'created': NOW_API_FORMAT, 'status': 'ACTIVE',
'progress': 100}
self.assertDictMatch(image_meta, expected)
@@ -1110,14 +925,6 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 404)
- def test_get_image_not_owned(self):
- """We should return a 404 if we request an image that doesn't belong
- to us
- """
- req = webob.Request.blank('/v1.0/images/130')
- res = req.get_response(fakes.wsgi_app())
- self.assertEqual(res.status_int, 404)
-
def test_create_image(self):
body = dict(image=dict(serverId='123', name='Snapshot 1'))
req = webob.Request.blank('/v1.0/images')
@@ -1160,51 +967,6 @@ class ImageControllerWithGlanceServiceTest(test.TestCase):
response = req.get_response(fakes.wsgi_app())
self.assertEqual(400, response.status_int)
- @classmethod
- def _make_image_fixtures(cls):
- image_id = 123
- base_attrs = {'created_at': cls.NOW_GLANCE_FORMAT,
- 'updated_at': cls.NOW_GLANCE_FORMAT,
- 'deleted_at': None,
- 'deleted': False}
-
- fixtures = []
-
- def add_fixture(**kwargs):
- kwargs.update(base_attrs)
- fixtures.append(kwargs)
-
- # Public image
- add_fixture(id=image_id, name='public image', is_public=True,
- status='active', properties={})
- image_id += 1
-
- # Snapshot for User 1
- server_ref = 'http://localhost/v1.1/servers/42'
- snapshot_properties = {'instance_ref': server_ref, 'user_id': 'fake'}
- statuses = ('queued', 'saving', 'active', 'killed', 'deleted',
- 'pending_delete')
- for status in statuses:
- add_fixture(id=image_id, name='%s snapshot' % status,
- is_public=False, status=status,
- properties=snapshot_properties)
- image_id += 1
-
- # Snapshot for User 2
- other_snapshot_properties = {'instance_id': '43', 'user_id': 'other'}
- add_fixture(id=image_id, name='someone elses snapshot',
- is_public=False, status='active',
- properties=other_snapshot_properties)
-
- image_id += 1
-
- # Image without a name
- add_fixture(id=image_id, is_public=True, status='active',
- properties={})
- image_id += 1
-
- return fixtures
-
class ImageXMLSerializationTest(test.TestCase):
@@ -1214,7 +976,7 @@ class ImageXMLSerializationTest(test.TestCase):
IMAGE_HREF = 'http://localhost/v1.1/fake/images/%s'
IMAGE_BOOKMARK = 'http://localhost/fake/images/%s'
- def test_show(self):
+ def test_xml_declaration(self):
serializer = images.ImageXMLSerializer()
fixture = {
@@ -1226,7 +988,7 @@ class ImageXMLSerializationTest(test.TestCase):
'status': 'ACTIVE',
'progress': 80,
'server': {
- 'id': 1,
+ 'id': '1',
'links': [
{
'href': self.SERVER_HREF,
@@ -1255,37 +1017,80 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
- expected_server_href = self.SERVER_HREF
- expected_server_bookmark = self.SERVER_BOOKMARK
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <image id="1"
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- name="Image1"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="ACTIVE"
- progress="80">
- <server id="1">
- <atom:link rel="self" href="%(expected_server_href)s"/>
- <atom:link rel="bookmark" href="%(expected_server_bookmark)s"/>
- </server>
- <metadata>
- <meta key="key1">
- value1
- </meta>
- </metadata>
- <atom:link href="%(expected_href)s" rel="self"/>
- <atom:link href="%(expected_bookmark)s" rel="bookmark"/>
- </image>
- """.replace(" ", "") % (locals()))
+ def test_show(self):
+ serializer = images.ImageXMLSerializer()
- self.assertEqual(expected.toxml(), actual.toxml())
+ fixture = {
+ 'image': {
+ 'id': 1,
+ 'name': 'Image1',
+ 'created': self.TIMESTAMP,
+ 'updated': self.TIMESTAMP,
+ 'status': 'ACTIVE',
+ 'progress': 80,
+ 'server': {
+ 'id': '1',
+ 'links': [
+ {
+ 'href': self.SERVER_HREF,
+ 'rel': 'self',
+ },
+ {
+ 'href': self.SERVER_BOOKMARK,
+ 'rel': 'bookmark',
+ },
+ ],
+ },
+ 'metadata': {
+ 'key1': 'value1',
+ },
+ 'links': [
+ {
+ 'href': self.IMAGE_HREF % 1,
+ 'rel': 'self',
+ },
+ {
+ 'href': self.IMAGE_BOOKMARK % 1,
+ 'rel': 'bookmark',
+ },
+ ],
+ },
+ }
+
+ output = serializer.serialize(fixture, 'show')
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'image')
+ image_dict = fixture['image']
+
+ for key in ['name', 'id', 'updated', 'created', 'status', 'progress']:
+ self.assertEqual(root.get(key), str(image_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
+
+ metadata_root = root.find('{0}metadata'.format(NS))
+ metadata_elems = metadata_root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 1)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = image_dict['metadata'].items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
+
+ server_root = root.find('{0}server'.format(NS))
+ self.assertEqual(server_root.get('id'), image_dict['server']['id'])
+ link_nodes = server_root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['server']['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_show_zero_metadata(self):
serializer = images.ImageXMLSerializer()
@@ -1298,7 +1103,7 @@ class ImageXMLSerializationTest(test.TestCase):
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
- 'id': 1,
+ 'id': '1',
'links': [
{
'href': self.SERVER_HREF,
@@ -1325,31 +1130,31 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected_server_href = self.SERVER_HREF
- expected_server_bookmark = self.SERVER_BOOKMARK
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <image id="1"
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- name="Image1"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="ACTIVE">
- <server id="1">
- <atom:link rel="self" href="%(expected_server_href)s"/>
- <atom:link rel="bookmark" href="%(expected_server_bookmark)s"/>
- </server>
- <atom:link href="%(expected_href)s" rel="self"/>
- <atom:link href="%(expected_bookmark)s" rel="bookmark"/>
- </image>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'image')
+ image_dict = fixture['image']
+
+ for key in ['name', 'id', 'updated', 'created', 'status']:
+ self.assertEqual(root.get(key), str(image_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
+
+ metadata_root = root.find('{0}metadata'.format(NS))
+ meta_nodes = root.findall('{0}meta'.format(ATOMNS))
+ self.assertEqual(len(meta_nodes), 0)
+
+ server_root = root.find('{0}server'.format(NS))
+ self.assertEqual(server_root.get('id'), image_dict['server']['id'])
+ link_nodes = server_root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['server']['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_show_image_no_metadata_key(self):
serializer = images.ImageXMLSerializer()
@@ -1362,7 +1167,7 @@ class ImageXMLSerializationTest(test.TestCase):
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
- 'id': 1,
+ 'id': '1',
'links': [
{
'href': self.SERVER_HREF,
@@ -1388,31 +1193,31 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected_server_href = self.SERVER_HREF
- expected_server_bookmark = self.SERVER_BOOKMARK
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <image id="1"
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- name="Image1"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="ACTIVE">
- <server id="1">
- <atom:link rel="self" href="%(expected_server_href)s"/>
- <atom:link rel="bookmark" href="%(expected_server_bookmark)s"/>
- </server>
- <atom:link href="%(expected_href)s" rel="self"/>
- <atom:link href="%(expected_bookmark)s" rel="bookmark"/>
- </image>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'image')
+ image_dict = fixture['image']
+
+ for key in ['name', 'id', 'updated', 'created', 'status']:
+ self.assertEqual(root.get(key), str(image_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
+
+ metadata_root = root.find('{0}metadata'.format(NS))
+ meta_nodes = root.findall('{0}meta'.format(ATOMNS))
+ self.assertEqual(len(meta_nodes), 0)
+
+ server_root = root.find('{0}server'.format(NS))
+ self.assertEqual(server_root.get('id'), image_dict['server']['id'])
+ link_nodes = server_root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['server']['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_show_no_server(self):
serializer = images.ImageXMLSerializer()
@@ -1441,30 +1246,30 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <image id="1"
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- name="Image1"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="ACTIVE">
- <metadata>
- <meta key="key1">
- value1
- </meta>
- </metadata>
- <atom:link href="%(expected_href)s" rel="self"/>
- <atom:link href="%(expected_bookmark)s" rel="bookmark"/>
- </image>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'image')
+ image_dict = fixture['image']
+
+ for key in ['name', 'id', 'updated', 'created', 'status']:
+ self.assertEqual(root.get(key), str(image_dict[key]))
+
+ link_nodes = root.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
+
+ metadata_root = root.find('{0}metadata'.format(NS))
+ metadata_elems = metadata_root.findall('{0}meta'.format(NS))
+ self.assertEqual(len(metadata_elems), 1)
+ for i, metadata_elem in enumerate(metadata_elems):
+ (meta_key, meta_value) = image_dict['metadata'].items()[i]
+ self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
+ self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
+
+ server_root = root.find('{0}server'.format(NS))
+ self.assertEqual(server_root, None)
def test_index(self):
serializer = images.ImageXMLSerializer()
@@ -1479,6 +1284,10 @@ class ImageXMLSerializationTest(test.TestCase):
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
+ {
+ 'href': self.IMAGE_BOOKMARK % 1,
+ 'rel': 'bookmark',
+ },
],
},
{
@@ -1489,35 +1298,32 @@ class ImageXMLSerializationTest(test.TestCase):
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
+ {
+ 'href': self.IMAGE_BOOKMARK % 2,
+ 'rel': 'bookmark',
+ },
],
},
]
}
output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected_server_href = self.SERVER_HREF
- expected_server_bookmark = self.SERVER_BOOKMARK
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_href_two = self.IMAGE_HREF % 2
- expected_bookmark_two = self.IMAGE_BOOKMARK % 2
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <images
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom">
- <image id="1" name="Image1">
- <atom:link href="%(expected_href)s" rel="self"/>
- </image>
- <image id="2" name="Image2">
- <atom:link href="%(expected_href_two)s" rel="self"/>
- </image>
- </images>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'images_index')
+ image_elems = root.findall('{0}image'.format(NS))
+ self.assertEqual(len(image_elems), 2)
+ for i, image_elem in enumerate(image_elems):
+ image_dict = fixture['images'][i]
+
+ for key in ['name', 'id']:
+ self.assertEqual(image_elem.get(key), str(image_dict[key]))
+
+ link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
def test_index_zero_images(self):
serializer = images.ImageXMLSerializer()
@@ -1527,15 +1333,11 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixtures, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <images
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom" />
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'images_index')
+ image_elems = root.findall('{0}image'.format(NS))
+ self.assertEqual(len(image_elems), 0)
def test_detail(self):
serializer = images.ImageXMLSerializer()
@@ -1549,7 +1351,7 @@ class ImageXMLSerializationTest(test.TestCase):
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
- 'id': 1,
+ 'id': '1',
'links': [
{
'href': self.SERVER_HREF,
@@ -1573,7 +1375,7 @@ class ImageXMLSerializationTest(test.TestCase):
],
},
{
- 'id': 2,
+ 'id': '2',
'name': 'Image2',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
@@ -1597,46 +1399,22 @@ class ImageXMLSerializationTest(test.TestCase):
}
output = serializer.serialize(fixture, 'detail')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected_server_href = self.SERVER_HREF
- expected_server_bookmark = self.SERVER_BOOKMARK
- expected_href = self.IMAGE_HREF % 1
- expected_bookmark = self.IMAGE_BOOKMARK % 1
- expected_href_two = self.IMAGE_HREF % 2
- expected_bookmark_two = self.IMAGE_BOOKMARK % 2
- expected_now = self.TIMESTAMP
- expected = minidom.parseString("""
- <images
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom">
- <image id="1"
- name="Image1"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="ACTIVE">
- <server id="1">
- <atom:link rel="self" href="%(expected_server_href)s"/>
- <atom:link rel="bookmark" href="%(expected_server_bookmark)s"/>
- </server>
- <atom:link href="%(expected_href)s" rel="self"/>
- <atom:link href="%(expected_bookmark)s" rel="bookmark"/>
- </image>
- <image id="2"
- name="Image2"
- updated="%(expected_now)s"
- created="%(expected_now)s"
- status="SAVING"
- progress="80">
- <metadata>
- <meta key="key1">
- value1
- </meta>
- </metadata>
- <atom:link href="%(expected_href_two)s" rel="self"/>
- <atom:link href="%(expected_bookmark_two)s" rel="bookmark"/>
- </image>
- </images>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'images')
+ image_elems = root.findall('{0}image'.format(NS))
+ self.assertEqual(len(image_elems), 2)
+ for i, image_elem in enumerate(image_elems):
+ image_dict = fixture['images'][i]
+
+ for key in ['name', 'id', 'updated', 'created', 'status']:
+ self.assertEqual(image_elem.get(key), str(image_dict[key]))
+
+ link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
+ self.assertEqual(len(link_nodes), 2)
+ for i, link in enumerate(image_dict['links']):
+ for key, value in link.items():
+ self.assertEqual(link_nodes[i].get(key), value)
+
+ metadata_root = image_elem.find('{0}metadata'.format(NS))
+ metadata_elems = metadata_root.findall('{0}meta'.format(NS))
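
The rewritten serializer tests above stop diffing whole XML strings and instead parse the output and assert on individual elements and attributes. A minimal standalone sketch of that pattern, with the schema-validation step (xmlutil.validate_schema) omitted and NS/ATOMNS assumed to be the Clark-notation namespace constants the test module defines:

from lxml import etree

NS = '{http://docs.openstack.org/compute/api/v1.1}'
ATOMNS = '{http://www.w3.org/2005/Atom}'

# Sample document shaped like the image serializer's 'show' output.
output = """<image xmlns="http://docs.openstack.org/compute/api/v1.1"
    xmlns:atom="http://www.w3.org/2005/Atom"
    id="1" name="Image1" status="ACTIVE">
  <metadata><meta key="key1">value1</meta></metadata>
  <atom:link rel="self" href="http://localhost/v1.1/fake/images/1"/>
  <atom:link rel="bookmark" href="http://localhost/fake/images/1"/>
</image>"""

root = etree.XML(output)

# Attribute-level checks replace whole-document string comparison.
assert root.get('id') == '1'
assert root.get('status') == 'ACTIVE'

# Namespaced children are looked up with Clark notation via findall().
links = root.findall('{0}link'.format(ATOMNS))
assert [link.get('rel') for link in links] == ['self', 'bookmark']

metadata = root.find('{0}metadata'.format(NS))
(meta,) = metadata.findall('{0}meta'.format(NS))
assert meta.get('key') == 'key1' and meta.text == 'value1'
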
diff --git a/nova/tests/api/openstack/test_limits.py b/nova/tests/api/openstack/test_limits.py
index 801b06230..3db57ee86 100644
--- a/nova/tests/api/openstack/test_limits.py
+++ b/nova/tests/api/openstack/test_limits.py
@@ -19,6 +19,7 @@ Tests dealing with HTTP rate-limiting.
import httplib
import json
+from lxml import etree
import StringIO
import stubout
import time
@@ -29,6 +30,7 @@ from xml.dom import minidom
import nova.context
from nova.api.openstack import limits
from nova.api.openstack import views
+from nova.api.openstack import xmlutil
from nova import test
@@ -39,6 +41,10 @@ TEST_LIMITS = [
limits.Limit("PUT", "*", "", 10, limits.PER_MINUTE),
limits.Limit("PUT", "/servers", "^/servers", 5, limits.PER_MINUTE),
]
+NS = {
+ 'atom': 'http://www.w3.org/2005/Atom',
+ 'ns': 'http://docs.openstack.org/compute/api/v1.1'
+}
class BaseLimitTestSuite(unittest.TestCase):
@@ -980,9 +986,22 @@ class LimitsXMLSerializationTest(test.TestCase):
def tearDown(self):
pass
- def test_index(self):
+ def test_xml_declaration(self):
serializer = limits.LimitsXMLSerializer()
+
fixture = {"limits": {
+ "rate": [],
+ "absolute": {}}}
+
+ output = serializer.serialize(fixture, 'index')
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
+
+ def test_index(self):
+ serializer = limits.LimitsXMLSerializer()
+ fixture = {
+ "limits": {
"rate": [{
"uri": "*",
"regex": ".*",
@@ -1006,32 +1025,32 @@ class LimitsXMLSerializationTest(test.TestCase):
"maxPersonalitySize": 10240}}}
output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <limits xmlns="http://docs.openstack.org/compute/api/v1.1">
- <rates>
- <rate uri="*" regex=".*">
- <limit value="10" verb="POST" remaining="2"
- unit="MINUTE"
- next-available="2011-12-15T22:42:45Z"/>
- </rate>
- <rate uri="*/servers" regex="^/servers">
- <limit value="50" verb="POST" remaining="10"
- unit="DAY"
- next-available="2011-12-15T22:42:45Z"/>
- </rate>
- </rates>
- <absolute>
- <limit name="maxServerMeta" value="1"/>
- <limit name="maxPersonality" value="5"/>
- <limit name="maxImageMeta" value="1"/>
- <limit name="maxPersonalitySize" value="10240"/>
- </absolute>
- </limits>
- """.replace(" ", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'limits')
+
+ #verify absolute limits
+ absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
+ self.assertEqual(len(absolutes), 4)
+ for limit in absolutes:
+ name = limit.get('name')
+ value = limit.get('value')
+ self.assertEqual(value, str(fixture['limits']['absolute'][name]))
+
+ #verify rate limits
+ rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
+ self.assertEqual(len(rates), 2)
+ for i, rate in enumerate(rates):
+ for key in ['uri', 'regex']:
+ self.assertEqual(rate.get(key),
+ str(fixture['limits']['rate'][i][key]))
+ rate_limits = rate.xpath('ns:limit', namespaces=NS)
+ self.assertEqual(len(rate_limits), 1)
+ for j, limit in enumerate(rate_limits):
+ for key in ['verb', 'value', 'remaining', 'unit',
+ 'next-available']:
+ self.assertEqual(limit.get(key),
+ str(fixture['limits']['rate'][i]['limit'][j][key]))
def test_index_no_limits(self):
serializer = limits.LimitsXMLSerializer()
@@ -1041,13 +1060,14 @@ class LimitsXMLSerializationTest(test.TestCase):
"absolute": {}}}
output = serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'limits')
- expected = minidom.parseString("""
- <limits xmlns="http://docs.openstack.org/compute/api/v1.1">
- <rates />
- <absolute />
- </limits>
- """.replace(" ", ""))
+ #verify absolute limits
+ absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
+ self.assertEqual(len(absolutes), 0)
- self.assertEqual(expected.toxml(), actual.toxml())
+ #verify rate limits
+ rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
+ self.assertEqual(len(rates), 0)
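
For nested structures the limits test switches from findall() to lxml's xpath(), which takes a prefix-to-URI map so elements in the default compute namespace can be addressed as ns:... (and Atom links, where present, as atom:link). A standalone sketch of the same style of check, using sample values only:

from lxml import etree

NS = {'atom': 'http://www.w3.org/2005/Atom',
      'ns': 'http://docs.openstack.org/compute/api/v1.1'}

output = """<limits xmlns="http://docs.openstack.org/compute/api/v1.1">
  <rates>
    <rate uri="*" regex=".*">
      <limit verb="POST" value="10" remaining="2" unit="MINUTE"
             next-available="2011-12-15T22:42:45Z"/>
    </rate>
  </rates>
  <absolute>
    <limit name="maxServerMeta" value="1"/>
  </absolute>
</limits>"""

root = etree.XML(output)

# Absolute limits: one <limit> element per name/value pair.
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
assert absolutes[0].get('name') == 'maxServerMeta'
assert absolutes[0].get('value') == '1'

# Rate limits: each <rate> wraps its per-verb <limit> children.
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
assert rates[0].get('regex') == '.*'
(limit,) = rates[0].xpath('ns:limit', namespaces=NS)
assert limit.get('verb') == 'POST' and limit.get('unit') == 'MINUTE'
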
diff --git a/nova/tests/api/openstack/test_servers.py b/nova/tests/api/openstack/test_servers.py
index f0a1c5ce5..ee7927c64 100644
--- a/nova/tests/api/openstack/test_servers.py
+++ b/nova/tests/api/openstack/test_servers.py
@@ -52,6 +52,10 @@ from nova.tests.api.openstack import fakes
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
+XPATH_NS = {
+ 'atom': 'http://www.w3.org/2005/Atom',
+ 'ns': 'http://docs.openstack.org/compute/api/v1.1'
+}
def fake_gen_uuid():
@@ -412,12 +416,7 @@ class ServersTest(test.TestCase):
def test_get_server_by_id_v1_1_xml(self):
image_bookmark = "http://localhost/fake/images/10"
- flavor_ref = "http://localhost/v1.1/fake/flavors/1"
- flavor_id = "1"
flavor_bookmark = "http://localhost/fake/flavors/1"
- server_href = "http://localhost/v1.1/fake/servers/1"
- server_bookmark = "http://localhost/fake/servers/1"
-
public_ip = '192.168.0.3'
private_ip = '172.19.0.1'
interfaces = [
@@ -441,50 +440,88 @@ class ServersTest(test.TestCase):
req = webob.Request.blank('/v1.1/fake/servers/1')
req.headers['Accept'] = 'application/xml'
res = req.get_response(fakes.wsgi_app())
- actual = minidom.parseString(res.body.replace(' ', ''))
- expected_uuid = FAKE_UUID
- expected_updated = "2010-11-11T11:00:00Z"
- expected_created = "2010-10-10T12:00:00Z"
- expected = minidom.parseString("""
- <server id="1"
- uuid="%(expected_uuid)s"
- userId="fake"
- tenantId="fake"
- xmlns="http://docs.openstack.org/compute/api/v1.1"
- xmlns:atom="http://www.w3.org/2005/Atom"
- name="server1"
- updated="%(expected_updated)s"
- created="%(expected_created)s"
- hostId=""
- status="BUILD"
- accessIPv4=""
- accessIPv6=""
- progress="0">
- <atom:link href="%(server_href)s" rel="self"/>
- <atom:link href="%(server_bookmark)s" rel="bookmark"/>
- <image id="10">
- <atom:link rel="bookmark" href="%(image_bookmark)s"/>
- </image>
- <flavor id="1">
- <atom:link rel="bookmark" href="%(flavor_bookmark)s"/>
- </flavor>
- <metadata>
- <meta key="seq">
- 1
- </meta>
- </metadata>
- <addresses>
- <network id="public">
- <ip version="4" addr="%(public_ip)s"/>
- </network>
- <network id="private">
- <ip version="4" addr="%(private_ip)s"/>
- </network>
- </addresses>
- </server>
- """.replace(" ", "") % (locals()))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ output = res.body
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'server')
+
+ expected = {
+ 'id': 1,
+ 'uuid': FAKE_UUID,
+ 'user_id': 'fake',
+ 'tenant_id': 'fake',
+ 'updated': '2010-11-11T11:00:00Z',
+ 'created': '2010-10-10T12:00:00Z',
+ 'progress': 0,
+ 'name': 'server1',
+ 'status': 'BUILD',
+ 'accessIPv4': '',
+ 'accessIPv6': '',
+ 'hostId': '',
+ 'key_name': '',
+ 'image': {
+ 'id': '10',
+ 'links': [{'rel': 'bookmark', 'href': image_bookmark}],
+ },
+ 'flavor': {
+ 'id': '1',
+ 'links': [{'rel': 'bookmark', 'href': flavor_bookmark}],
+ },
+ 'addresses': {
+ 'public': [{'version': 4, 'addr': public_ip}],
+ 'private': [{'version': 4, 'addr': private_ip}],
+ },
+ 'metadata': {'seq': '1'},
+ 'config_drive': None,
+ 'links': [
+ {
+ 'rel': 'self',
+ 'href': 'http://localhost/v1.1/fake/servers/1',
+ },
+ {
+ 'rel': 'bookmark',
+ 'href': 'http://localhost/fake/servers/1',
+ },
+ ],
+ }
+
+ self.assertTrue(root.xpath('/ns:server', namespaces=XPATH_NS))
+ for key in ['id', 'uuid', 'created', 'progress', 'name', 'status',
+ 'accessIPv4', 'accessIPv6', 'hostId']:
+ self.assertEqual(root.get(key), str(expected[key]))
+ self.assertEqual(root.get('userId'), str(expected['user_id']))
+ self.assertEqual(root.get('tenantId'), str(expected['tenant_id']))
+
+ (image,) = root.xpath('ns:image', namespaces=XPATH_NS)
+ self.assertEqual(image.get('id'), str(expected['image']['id']))
+
+ links = root.xpath('ns:image/atom:link', namespaces=XPATH_NS)
+ self.assertTrue(common.compare_links(links,
+ expected['image']['links']))
+
+ (flavor,) = root.xpath('ns:flavor', namespaces=XPATH_NS)
+ self.assertEqual(flavor.get('id'), str(expected['flavor']['id']))
+
+ (meta,) = root.xpath('ns:metadata/ns:meta', namespaces=XPATH_NS)
+ self.assertEqual(meta.get('key'), 'seq')
+ self.assertEqual(meta.text, '1')
+
+ (pub_network, priv_network) = root.xpath('ns:addresses/ns:network',
+ namespaces=XPATH_NS)
+ self.assertEqual(pub_network.get('id'), 'public')
+ (pub_ip,) = pub_network.xpath('ns:ip', namespaces=XPATH_NS)
+ (priv_ip,) = priv_network.xpath('ns:ip', namespaces=XPATH_NS)
+ self.assertEqual(pub_ip.get('version'),
+ str(expected['addresses']['public'][0]['version']))
+ self.assertEqual(pub_ip.get('addr'),
+ str(expected['addresses']['public'][0]['addr']))
+ self.assertEqual(priv_ip.get('version'),
+ str(expected['addresses']['private'][0]['version']))
+ self.assertEqual(priv_ip.get('addr'),
+ str(expected['addresses']['private'][0]['addr']))
+
+ links = root.xpath('atom:link', namespaces=XPATH_NS)
+ self.assertTrue(common.compare_links(links, expected['links']))
def test_get_server_with_active_status_by_id_v1_1(self):
image_bookmark = "http://localhost/fake/images/10"
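
The assertions in the server XML test above lean on common.compare_links() from nova.tests.api.openstack.common; the diff only shows it being called, not defined. A hypothetical stand-in that matches how it is used here (a list of lxml link elements checked, in order, against a list of expected attribute dicts, returning a boolean) might look like the following sketch; the real helper may differ:

def compare_links(link_nodes, expected_links):
    """Illustrative stand-in, not the nova implementation: True iff each
    element carries the expected attributes (e.g. rel/href), in order."""
    if len(link_nodes) != len(expected_links):
        return False
    for node, expected in zip(link_nodes, expected_links):
        for key, value in expected.items():
            if node.get(key) != value:
                return False
    return True

The (image,) = root.xpath(...) unpacking used in the test also serves as an implicit assertion that exactly one element matched; any other count raises a ValueError.
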
@@ -3285,7 +3322,7 @@ class TestAddressesXMLSerialization(test.TestCase):
serializer = nova.api.openstack.ips.IPXMLSerializer()
- def test_show(self):
+ def test_xml_declaration(self):
fixture = {
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
@@ -3293,17 +3330,29 @@ class TestAddressesXMLSerialization(test.TestCase):
],
}
output = self.serializer.serialize(fixture, 'show')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <network xmlns="http://docs.openstack.org/compute/api/v1.1"
- id="network_2">
- <ip version="4" addr="192.168.0.1"/>
- <ip version="6" addr="fe80::beef"/>
- </network>
- """.replace(" ", ""))
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
- self.assertEqual(expected.toxml(), actual.toxml())
+ def test_show(self):
+ fixture = {
+ 'network_2': [
+ {'addr': '192.168.0.1', 'version': 4},
+ {'addr': 'fe80::beef', 'version': 6},
+ ],
+ }
+ output = self.serializer.serialize(fixture, 'show')
+ print output
+ root = etree.XML(output)
+ network = fixture['network_2']
+ self.assertEqual(str(root.get('id')), 'network_2')
+ ip_elems = root.findall('{0}ip'.format(NS))
+ for z, ip_elem in enumerate(ip_elems):
+ ip = network[z]
+ self.assertEqual(str(ip_elem.get('version')),
+ str(ip['version']))
+ self.assertEqual(str(ip_elem.get('addr')),
+ str(ip['addr']))
def test_index(self):
fixture = {
@@ -3319,22 +3368,22 @@ class TestAddressesXMLSerialization(test.TestCase):
},
}
output = self.serializer.serialize(fixture, 'index')
- actual = minidom.parseString(output.replace(" ", ""))
-
- expected = minidom.parseString("""
- <addresses xmlns="http://docs.openstack.org/compute/api/v1.1">
- <network id="network_2">
- <ip version="4" addr="192.168.0.1"/>
- <ip version="6" addr="fe80::beef"/>
- </network>
- <network id="network_1">
- <ip version="4" addr="192.168.0.3"/>
- <ip version="4" addr="192.168.0.5"/>
- </network>
- </addresses>
- """.replace(" ", ""))
-
- self.assertEqual(expected.toxml(), actual.toxml())
+ print output
+ root = etree.XML(output)
+ xmlutil.validate_schema(root, 'addresses')
+ addresses_dict = fixture['addresses']
+ network_elems = root.findall('{0}network'.format(NS))
+ self.assertEqual(len(network_elems), 2)
+ for i, network_elem in enumerate(network_elems):
+ network = addresses_dict.items()[i]
+ self.assertEqual(str(network_elem.get('id')), str(network[0]))
+ ip_elems = network_elem.findall('{0}ip'.format(NS))
+ for z, ip_elem in enumerate(ip_elems):
+ ip = network[1][z]
+ self.assertEqual(str(ip_elem.get('version')),
+ str(ip['version']))
+ self.assertEqual(str(ip_elem.get('addr')),
+ str(ip['addr']))
class TestServerInstanceCreation(test.TestCase):
@@ -4059,6 +4108,85 @@ class ServerXMLSerializationTest(test.TestCase):
self.maxDiff = None
test.TestCase.setUp(self)
+ def test_xml_declaration(self):
+ serializer = servers.ServerXMLSerializer()
+
+ fixture = {
+ "server": {
+ 'id': 1,
+ 'uuid': FAKE_UUID,
+ 'user_id': 'fake_user_id',
+ 'tenant_id': 'fake_tenant_id',
+ 'created': self.TIMESTAMP,
+ 'updated': self.TIMESTAMP,
+ "progress": 0,
+ "name": "test_server",
+ "status": "BUILD",
+ "hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
+ "accessIPv4": "1.2.3.4",
+ "accessIPv6": "fead::1234",
+ "image": {
+ "id": "5",
+ "links": [
+ {
+ "rel": "bookmark",
+ "href": self.IMAGE_BOOKMARK,
+ },
+ ],
+ },
+ "flavor": {
+ "id": "1",
+ "links": [
+ {
+ "rel": "bookmark",
+ "href": self.FLAVOR_BOOKMARK,
+ },
+ ],
+ },
+ "addresses": {
+ "network_one": [
+ {
+ "version": 4,
+ "addr": "67.23.10.138",
+ },
+ {
+ "version": 6,
+ "addr": "::babe:67.23.10.138",
+ },
+ ],
+ "network_two": [
+ {
+ "version": 4,
+ "addr": "67.23.10.139",
+ },
+ {
+ "version": 6,
+ "addr": "::babe:67.23.10.139",
+ },
+ ],
+ },
+ "metadata": {
+ "Open": "Stack",
+ "Number": "1",
+ },
+ 'links': [
+ {
+ 'href': self.SERVER_HREF,
+ 'rel': 'self',
+ },
+ {
+ 'href': self.SERVER_BOOKMARK,
+ 'rel': 'bookmark',
+ },
+ ],
+ }
+ }
+
+ output = serializer.serialize(fixture, 'show')
+ print output
+ has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
+ self.assertTrue(has_dec)
+
def test_show(self):
serializer = servers.ServerXMLSerializer()
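
The test_xml_declaration cases added across these modules only assert on the prolog. The expected prefix matches the declaration lxml emits when one is requested (note the single quotes), which a tiny sketch can confirm:

from lxml import etree

elem = etree.Element('server', id='1')
output = etree.tostring(elem, encoding='UTF-8', xml_declaration=True)
# lxml writes the prolog with single quotes, hence the exact
# startswith() prefix used in the tests.
assert output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
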
diff --git a/nova/tests/api/openstack/test_versions.py b/nova/tests/api/openstack/test_versions.py
index 1269f13c9..f69dbd316 100644
--- a/nova/tests/api/openstack/test_versions.py
+++ b/nova/tests/api/openstack/test_versions.py
@@ -15,19 +15,24 @@
# License for the specific language governing permissions and limitations
# under the License.
+import feedparser
import json
import stubout
import webob
-import xml.etree.ElementTree
-
+from lxml import etree
from nova import context
from nova import test
-from nova.tests.api.openstack import fakes
from nova.api.openstack import versions
from nova.api.openstack import views
from nova.api.openstack import wsgi
+from nova.tests.api.openstack import common
+from nova.tests.api.openstack import fakes
+NS = {
+ 'atom': 'http://www.w3.org/2005/Atom',
+ 'ns': 'http://docs.openstack.org/compute/api/v1.1'
+}
VERSIONS = {
"v1.0": {
"id": "v1.0",
@@ -113,23 +118,23 @@ class VersionsTest(test.TestCase):
versions = json.loads(res.body)["versions"]
expected = [
{
- "id": "v1.1",
- "status": "CURRENT",
+ "id": "v1.0",
+ "status": "DEPRECATED",
"updated": "2011-01-21T11:33:21Z",
"links": [
{
"rel": "self",
- "href": "http://localhost/v1.1/",
+ "href": "http://localhost/v1.0/",
}],
},
{
- "id": "v1.0",
- "status": "DEPRECATED",
+ "id": "v1.1",
+ "status": "CURRENT",
"updated": "2011-01-21T11:33:21Z",
"links": [
{
"rel": "self",
- "href": "http://localhost/v1.0/",
+ "href": "http://localhost/v1.1/",
}],
},
]
@@ -233,48 +238,20 @@ class VersionsTest(test.TestCase):
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, "application/xml")
- root = xml.etree.ElementTree.XML(res.body)
- self.assertEqual(root.tag.split('}')[1], "version")
- self.assertEqual(root.tag.split('}')[0].strip('{'), wsgi.XMLNS_V11)
- children = list(root)
- media_types = children[0]
- media_type_nodes = list(media_types)
- links = (children[1], children[2], children[3])
-
- self.assertEqual(media_types.tag.split('}')[1], 'media-types')
- for media_node in media_type_nodes:
- self.assertEqual(media_node.tag.split('}')[1], 'media-type')
-
- expected = """
- <version id="v1.0" status="DEPRECATED"
- updated="2011-01-21T11:33:21Z"
- xmlns="%s"
- xmlns:atom="http://www.w3.org/2005/Atom">
-
- <media-types>
- <media-type base="application/xml"
- type="application/vnd.openstack.compute-v1.0+xml"/>
- <media-type base="application/json"
- type="application/vnd.openstack.compute-v1.0+json"/>
- </media-types>
-
- <atom:link href="http://localhost/v1.0/"
- rel="self"/>
-
- <atom:link href="http://docs.rackspacecloud.com/servers/
- api/v1.0/cs-devguide-20110125.pdf"
- rel="describedby"
- type="application/pdf"/>
-
- <atom:link href="http://docs.rackspacecloud.com/servers/
- api/v1.0/application.wadl"
- rel="describedby"
- type="application/vnd.sun.wadl+xml"/>
- </version>""".replace(" ", "").replace("\n", "") % wsgi.XMLNS_V11
-
- actual = res.body.replace(" ", "").replace("\n", "")
- self.assertEqual(expected, actual)
+ version = etree.XML(res.body)
+ expected = VERSIONS['v1.0']
+ self.assertTrue(version.xpath('/ns:version', namespaces=NS))
+ media_types = version.xpath('ns:media-types/ns:media-type',
+ namespaces=NS)
+ self.assertTrue(common.compare_media_types(media_types,
+ expected['media-types']))
+ for key in ['id', 'status', 'updated']:
+ self.assertEqual(version.get(key), expected[key])
+ links = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(links,
+ [{'rel': 'self', 'href': 'http://localhost/v1.0/'}]
+ + expected['links']))
def test_get_version_1_1_detail_xml(self):
req = webob.Request.blank('/v1.1/')
@@ -282,35 +259,20 @@ class VersionsTest(test.TestCase):
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, "application/xml")
- expected = """
- <version id="v1.1" status="CURRENT"
- updated="2011-01-21T11:33:21Z"
- xmlns="%s"
- xmlns:atom="http://www.w3.org/2005/Atom">
-
- <media-types>
- <media-type base="application/xml"
- type="application/vnd.openstack.compute-v1.1+xml"/>
- <media-type base="application/json"
- type="application/vnd.openstack.compute-v1.1+json"/>
- </media-types>
-
- <atom:link href="http://localhost/v1.1/"
- rel="self"/>
-
- <atom:link href="http://docs.rackspacecloud.com/servers/
- api/v1.1/cs-devguide-20110125.pdf"
- rel="describedby"
- type="application/pdf"/>
-
- <atom:link href="http://docs.rackspacecloud.com/servers/
- api/v1.1/application.wadl"
- rel="describedby"
- type="application/vnd.sun.wadl+xml"/>
- </version>""".replace(" ", "").replace("\n", "") % wsgi.XMLNS_V11
-
- actual = res.body.replace(" ", "").replace("\n", "")
- self.assertEqual(expected, actual)
+
+ version = etree.XML(res.body)
+ expected = VERSIONS['v1.1']
+ self.assertTrue(version.xpath('/ns:version', namespaces=NS))
+ media_types = version.xpath('ns:media-types/ns:media-type',
+ namespaces=NS)
+ self.assertTrue(common.compare_media_types(media_types,
+ expected['media-types']))
+ for key in ['id', 'status', 'updated']:
+ self.assertEqual(version.get(key), expected[key])
+ links = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(links,
+ [{'rel': 'self', 'href': 'http://localhost/v1.1/'}]
+ + expected['links']))
def test_get_version_list_xml(self):
req = webob.Request.blank('/')
@@ -319,21 +281,19 @@ class VersionsTest(test.TestCase):
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, "application/xml")
- expected = """
- <versions xmlns="%s" xmlns:atom="%s">
- <version id="v1.1" status="CURRENT" updated="2011-01-21T11:33:21Z">
- <atom:link href="http://localhost/v1.1/" rel="self"/>
- </version>
- <version id="v1.0" status="DEPRECATED"
- updated="2011-01-21T11:33:21Z">
- <atom:link href="http://localhost/v1.0/" rel="self"/>
- </version>
- </versions>""".replace(" ", "").replace("\n", "") % (wsgi.XMLNS_V11,
- wsgi.XMLNS_ATOM)
+ root = etree.XML(res.body)
+ self.assertTrue(root.xpath('/ns:versions', namespaces=NS))
+ versions = root.xpath('ns:version', namespaces=NS)
+ self.assertEqual(len(versions), 2)
- actual = res.body.replace(" ", "").replace("\n", "")
-
- self.assertEqual(expected, actual)
+ for i, v in enumerate(['v1.0', 'v1.1']):
+ version = versions[i]
+ expected = VERSIONS[v]
+ for key in ['id', 'status', 'updated']:
+ self.assertEqual(version.get(key), expected[key])
+ (link,) = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(link,
+ [{'rel': 'self', 'href': 'http://localhost/%s/' % v}]))
def test_get_version_1_0_detail_atom(self):
req = webob.Request.blank('/v1.0/')
@@ -341,36 +301,38 @@ class VersionsTest(test.TestCase):
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual("application/atom+xml", res.content_type)
- expected = """
- <feed xmlns="http://www.w3.org/2005/Atom">
- <title type="text">About This Version</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <id>http://localhost/v1.0/</id>
- <author>
- <name>Rackspace</name>
- <uri>http://www.rackspace.com/</uri>
- </author>
- <link href="http://localhost/v1.0/" rel="self"/>
- <entry>
- <id>http://localhost/v1.0/</id>
- <title type="text">Version v1.0</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <link href="http://localhost/v1.0/"
- rel="self"/>
- <link href="http://docs.rackspacecloud.com/servers/
- api/v1.0/cs-devguide-20110125.pdf"
- rel="describedby" type="application/pdf"/>
- <link href="http://docs.rackspacecloud.com/servers/
- api/v1.0/application.wadl"
- rel="describedby" type="application/vnd.sun.wadl+xml"/>
- <content type="text">
- Version v1.0 DEPRECATED (2011-01-21T11:33:21Z)
- </content>
- </entry>
- </feed>""".replace(" ", "").replace("\n", "")
-
- actual = res.body.replace(" ", "").replace("\n", "")
- self.assertEqual(expected, actual)
+
+ f = feedparser.parse(res.body)
+ self.assertEqual(f.feed.title, 'About This Version')
+ self.assertEqual(f.feed.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(f.feed.id, 'http://localhost/v1.0/')
+ self.assertEqual(f.feed.author, 'Rackspace')
+ self.assertEqual(f.feed.author_detail.href,
+ 'http://www.rackspace.com/')
+ self.assertEqual(f.feed.links[0]['href'], 'http://localhost/v1.0/')
+ self.assertEqual(f.feed.links[0]['rel'], 'self')
+
+ self.assertEqual(len(f.entries), 1)
+ entry = f.entries[0]
+ self.assertEqual(entry.id, 'http://localhost/v1.0/')
+ self.assertEqual(entry.title, 'Version v1.0')
+ self.assertEqual(entry.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version v1.0 DEPRECATED (2011-01-21T11:33:21Z)')
+ self.assertEqual(len(entry.links), 3)
+ self.assertEqual(entry.links[0]['href'], 'http://localhost/v1.0/')
+ self.assertEqual(entry.links[0]['rel'], 'self')
+ self.assertEqual(entry.links[1], {
+ 'href': 'http://docs.rackspacecloud.com/servers/api/v1.0/'\
+ 'cs-devguide-20110125.pdf',
+ 'type': 'application/pdf',
+ 'rel': 'describedby'})
+ self.assertEqual(entry.links[2], {
+ 'href': 'http://docs.rackspacecloud.com/servers/api/v1.0/'\
+ 'application.wadl',
+ 'type': 'application/vnd.sun.wadl+xml',
+ 'rel': 'describedby'})
def test_get_version_1_1_detail_atom(self):
req = webob.Request.blank('/v1.1/')
@@ -378,36 +340,38 @@ class VersionsTest(test.TestCase):
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual("application/atom+xml", res.content_type)
- expected = """
- <feed xmlns="http://www.w3.org/2005/Atom">
- <title type="text">About This Version</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <id>http://localhost/v1.1/</id>
- <author>
- <name>Rackspace</name>
- <uri>http://www.rackspace.com/</uri>
- </author>
- <link href="http://localhost/v1.1/" rel="self"/>
- <entry>
- <id>http://localhost/v1.1/</id>
- <title type="text">Version v1.1</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <link href="http://localhost/v1.1/"
- rel="self"/>
- <link href="http://docs.rackspacecloud.com/servers/
- api/v1.1/cs-devguide-20110125.pdf"
- rel="describedby" type="application/pdf"/>
- <link href="http://docs.rackspacecloud.com/servers/
- api/v1.1/application.wadl"
- rel="describedby" type="application/vnd.sun.wadl+xml"/>
- <content type="text">
- Version v1.1 CURRENT (2011-01-21T11:33:21Z)
- </content>
- </entry>
- </feed>""".replace(" ", "").replace("\n", "")
-
- actual = res.body.replace(" ", "").replace("\n", "")
- self.assertEqual(expected, actual)
+
+ f = feedparser.parse(res.body)
+ self.assertEqual(f.feed.title, 'About This Version')
+ self.assertEqual(f.feed.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(f.feed.id, 'http://localhost/v1.1/')
+ self.assertEqual(f.feed.author, 'Rackspace')
+ self.assertEqual(f.feed.author_detail.href,
+ 'http://www.rackspace.com/')
+ self.assertEqual(f.feed.links[0]['href'], 'http://localhost/v1.1/')
+ self.assertEqual(f.feed.links[0]['rel'], 'self')
+
+ self.assertEqual(len(f.entries), 1)
+ entry = f.entries[0]
+ self.assertEqual(entry.id, 'http://localhost/v1.1/')
+ self.assertEqual(entry.title, 'Version v1.1')
+ self.assertEqual(entry.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version v1.1 CURRENT (2011-01-21T11:33:21Z)')
+ self.assertEqual(len(entry.links), 3)
+ self.assertEqual(entry.links[0]['href'], 'http://localhost/v1.1/')
+ self.assertEqual(entry.links[0]['rel'], 'self')
+ self.assertEqual(entry.links[1], {
+ 'href': 'http://docs.rackspacecloud.com/servers/api/v1.1/'\
+ 'cs-devguide-20110125.pdf',
+ 'type': 'application/pdf',
+ 'rel': 'describedby'})
+ self.assertEqual(entry.links[2], {
+ 'href': 'http://docs.rackspacecloud.com/servers/api/v1.1/'\
+ 'application.wadl',
+ 'type': 'application/vnd.sun.wadl+xml',
+ 'rel': 'describedby'})
def test_get_version_list_atom(self):
req = webob.Request.blank('/')
@@ -416,40 +380,37 @@ class VersionsTest(test.TestCase):
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, "application/atom+xml")
- expected = """
- <feed xmlns="http://www.w3.org/2005/Atom">
- <title type="text">Available API Versions</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <id>http://localhost/</id>
- <author>
- <name>Rackspace</name>
- <uri>http://www.rackspace.com/</uri>
- </author>
- <link href="http://localhost/" rel="self"/>
- <entry>
- <id>http://localhost/v1.1/</id>
- <title type="text">Version v1.1</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <link href="http://localhost/v1.1/" rel="self"/>
- <content type="text">
- Version v1.1 CURRENT (2011-01-21T11:33:21Z)
- </content>
- </entry>
- <entry>
- <id>http://localhost/v1.0/</id>
- <title type="text">Version v1.0</title>
- <updated>2011-01-21T11:33:21Z</updated>
- <link href="http://localhost/v1.0/" rel="self"/>
- <content type="text">
- Version v1.0 DEPRECATED (2011-01-21T11:33:21Z)
- </content>
- </entry>
- </feed>
- """.replace(" ", "").replace("\n", "")
-
- actual = res.body.replace(" ", "").replace("\n", "")
-
- self.assertEqual(expected, actual)
+ f = feedparser.parse(res.body)
+ self.assertEqual(f.feed.title, 'Available API Versions')
+ self.assertEqual(f.feed.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(f.feed.id, 'http://localhost/')
+ self.assertEqual(f.feed.author, 'Rackspace')
+ self.assertEqual(f.feed.author_detail.href,
+ 'http://www.rackspace.com/')
+ self.assertEqual(f.feed.links[0]['href'], 'http://localhost/')
+ self.assertEqual(f.feed.links[0]['rel'], 'self')
+
+ self.assertEqual(len(f.entries), 2)
+ entry = f.entries[0]
+ self.assertEqual(entry.id, 'http://localhost/v1.0/')
+ self.assertEqual(entry.title, 'Version v1.0')
+ self.assertEqual(entry.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version v1.0 DEPRECATED (2011-01-21T11:33:21Z)')
+ self.assertEqual(len(entry.links), 1)
+ self.assertEqual(entry.links[0]['href'], 'http://localhost/v1.0/')
+ self.assertEqual(entry.links[0]['rel'], 'self')
+ entry = f.entries[1]
+ self.assertEqual(entry.id, 'http://localhost/v1.1/')
+ self.assertEqual(entry.title, 'Version v1.1')
+ self.assertEqual(entry.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version v1.1 CURRENT (2011-01-21T11:33:21Z)')
+ self.assertEqual(len(entry.links), 1)
+ self.assertEqual(entry.links[0]['href'], 'http://localhost/v1.1/')
+ self.assertEqual(entry.links[0]['rel'], 'self')
def test_multi_choice_image(self):
req = webob.Request.blank('/images/1')
@@ -511,28 +472,32 @@ class VersionsTest(test.TestCase):
self.assertEqual(res.status_int, 300)
self.assertEqual(res.content_type, "application/xml")
- expected = """
- <choices xmlns="%s" xmlns:atom="%s">
- <version id="v1.1" status="CURRENT">
- <media-types>
- <media-type base="application/xml"
- type="application/vnd.openstack.compute-v1.1+xml"/>
- <media-type base="application/json"
- type="application/vnd.openstack.compute-v1.1+json"/>
- </media-types>
- <atom:link href="http://localhost/v1.1/images/1" rel="self"/>
- </version>
- <version id="v1.0" status="DEPRECATED">
- <media-types>
- <media-type base="application/xml"
- type="application/vnd.openstack.compute-v1.0+xml"/>
- <media-type base="application/json"
- type="application/vnd.openstack.compute-v1.0+json"/>
- </media-types>
- <atom:link href="http://localhost/v1.0/images/1" rel="self"/>
- </version>
- </choices>""".replace(" ", "").replace("\n", "") % (wsgi.XMLNS_V11,
- wsgi.XMLNS_ATOM)
+ root = etree.XML(res.body)
+ self.assertTrue(root.xpath('/ns:choices', namespaces=NS))
+ versions = root.xpath('ns:version', namespaces=NS)
+ self.assertEqual(len(versions), 2)
+
+ version = versions[0]
+ self.assertEqual(version.get('id'), 'v1.1')
+ self.assertEqual(version.get('status'), 'CURRENT')
+ media_types = version.xpath('ns:media-types/ns:media-type',
+ namespaces=NS)
+ self.assertTrue(common.compare_media_types(media_types,
+ VERSIONS['v1.1']['media-types']))
+ links = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(links,
+ [{'rel': 'self', 'href': 'http://localhost/v1.1/images/1'}]))
+
+ version = versions[1]
+ self.assertEqual(version.get('id'), 'v1.0')
+ self.assertEqual(version.get('status'), 'DEPRECATED')
+ media_types = version.xpath('ns:media-types/ns:media-type',
+ namespaces=NS)
+ self.assertTrue(common.compare_media_types(media_types,
+ VERSIONS['v1.0']['media-types']))
+ links = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(links,
+ [{'rel': 'self', 'href': 'http://localhost/v1.0/images/1'}]))
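The assertions above rely on namespace-qualified XPath lookups. For illustration only (not part of the patch), a self-contained sketch of the pattern; the namespace URIs and NS mapping here are made up, not the constants the tests actually use:

    from lxml import etree

    NS = {'ns': 'http://example.com/compute/v1.1',
          'atom': 'http://www.w3.org/2005/Atom'}
    body = ('<choices xmlns="http://example.com/compute/v1.1" '
            'xmlns:atom="http://www.w3.org/2005/Atom">'
            '<version id="v1.1" status="CURRENT">'
            '<atom:link href="http://localhost/v1.1/images/1" rel="self"/>'
            '</version></choices>')

    root = etree.XML(body)
    # prefixes in the XPath expression are resolved through the NS mapping
    (version,) = root.xpath('/ns:choices/ns:version', namespaces=NS)
    (link,) = version.xpath('atom:link', namespaces=NS)
    assert version.get('status') == 'CURRENT'
    assert link.get('href') == 'http://localhost/v1.1/images/1'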
def test_multi_choice_server_atom(self):
"""
@@ -665,22 +630,20 @@ class VersionsSerializerTests(test.TestCase):
serializer = versions.VersionsXMLSerializer()
response = serializer.index(versions_data)
- root = xml.etree.ElementTree.XML(response)
- self.assertEqual(root.tag.split('}')[1], "versions")
- self.assertEqual(root.tag.split('}')[0].strip('{'), wsgi.XMLNS_V11)
- version = list(root)[0]
- self.assertEqual(version.tag.split('}')[1], "version")
- self.assertEqual(version.get('id'),
- versions_data['versions'][0]['id'])
+ root = etree.XML(response)
+ self.assertTrue(root.xpath('/ns:versions', namespaces=NS))
+ version_elems = root.xpath('ns:version', namespaces=NS)
+ self.assertEqual(len(version_elems), 1)
+ version = version_elems[0]
+ self.assertEqual(version.get('id'), versions_data['versions'][0]['id'])
self.assertEqual(version.get('status'),
versions_data['versions'][0]['status'])
- link = list(version)[0]
-
- self.assertEqual(link.tag.split('}')[1], "link")
- self.assertEqual(link.tag.split('}')[0].strip('{'), wsgi.XMLNS_ATOM)
- for key, val in versions_data['versions'][0]['links'][0].items():
- self.assertEqual(link.get(key), val)
+ (link,) = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(link, [{
+ 'rel': 'self',
+ 'href': 'http://test/2.7.1',
+ 'type': 'application/atom+xml'}]))
def test_versions_multi_xml_serializer(self):
versions_data = {
@@ -703,11 +666,9 @@ class VersionsSerializerTests(test.TestCase):
serializer = versions.VersionsXMLSerializer()
response = serializer.multi(versions_data)
- root = xml.etree.ElementTree.XML(response)
- self.assertEqual(root.tag.split('}')[1], "choices")
- self.assertEqual(root.tag.split('}')[0].strip('{'), wsgi.XMLNS_V11)
- version = list(root)[0]
- self.assertEqual(version.tag.split('}')[1], "version")
+ root = etree.XML(response)
+ self.assertTrue(root.xpath('/ns:choices', namespaces=NS))
+ (version,) = root.xpath('ns:version', namespaces=NS)
self.assertEqual(version.get('id'), versions_data['choices'][0]['id'])
self.assertEqual(version.get('status'),
versions_data['choices'][0]['status'])
@@ -716,19 +677,14 @@ class VersionsSerializerTests(test.TestCase):
media_type_nodes = list(media_types)
self.assertEqual(media_types.tag.split('}')[1], "media-types")
- set_types = versions_data['choices'][0]['media-types']
- for i, type in enumerate(set_types):
- node = media_type_nodes[i]
- self.assertEqual(node.tag.split('}')[1], "media-type")
- for key, val in set_types[i].items():
- self.assertEqual(node.get(key), val)
-
- link = list(version)[1]
+ media_types = version.xpath('ns:media-types/ns:media-type',
+ namespaces=NS)
+ self.assertTrue(common.compare_media_types(media_types,
+ versions_data['choices'][0]['media-types']))
- self.assertEqual(link.tag.split('}')[1], "link")
- self.assertEqual(link.tag.split('}')[0].strip('{'), wsgi.XMLNS_ATOM)
- for key, val in versions_data['choices'][0]['links'][0].items():
- self.assertEqual(link.get(key), val)
+ (link,) = version.xpath('atom:link', namespaces=NS)
+ self.assertTrue(common.compare_links(link,
+ versions_data['choices'][0]['links']))
def test_version_detail_xml_serializer(self):
version_data = {
@@ -770,7 +726,7 @@ class VersionsSerializerTests(test.TestCase):
serializer = versions.VersionsXMLSerializer()
response = serializer.show(version_data)
- root = xml.etree.ElementTree.XML(response)
+ root = etree.XML(response)
self.assertEqual(root.tag.split('}')[1], "version")
self.assertEqual(root.tag.split('}')[0].strip('{'), wsgi.XMLNS_V11)
@@ -811,59 +767,28 @@ class VersionsSerializerTests(test.TestCase):
serializer = versions.VersionsAtomSerializer()
response = serializer.index(versions_data)
-
- root = xml.etree.ElementTree.XML(response)
- self.assertEqual(root.tag.split('}')[1], "feed")
- self.assertEqual(root.tag.split('}')[0].strip('{'),
- "http://www.w3.org/2005/Atom")
-
- children = list(root)
- title = children[0]
- updated = children[1]
- id = children[2]
- author = children[3]
- link = children[4]
- entry = children[5]
-
- self.assertEqual(title.tag.split('}')[1], 'title')
- self.assertEqual(title.text, 'Available API Versions')
- self.assertEqual(updated.tag.split('}')[1], 'updated')
- self.assertEqual(updated.text, '2011-07-20T11:40:00Z')
- self.assertEqual(id.tag.split('}')[1], 'id')
- self.assertEqual(id.text, 'http://test/')
-
- self.assertEqual(author.tag.split('}')[1], 'author')
- author_name = list(author)[0]
- author_uri = list(author)[1]
- self.assertEqual(author_name.tag.split('}')[1], 'name')
- self.assertEqual(author_name.text, 'Rackspace')
- self.assertEqual(author_uri.tag.split('}')[1], 'uri')
- self.assertEqual(author_uri.text, 'http://www.rackspace.com/')
-
- self.assertEqual(link.get('href'), 'http://test/')
- self.assertEqual(link.get('rel'), 'self')
-
- self.assertEqual(entry.tag.split('}')[1], 'entry')
- entry_children = list(entry)
- entry_id = entry_children[0]
- entry_title = entry_children[1]
- entry_updated = entry_children[2]
- entry_link = entry_children[3]
- entry_content = entry_children[4]
- self.assertEqual(entry_id.tag.split('}')[1], "id")
- self.assertEqual(entry_id.text, "http://test/2.9.8")
- self.assertEqual(entry_title.tag.split('}')[1], "title")
- self.assertEqual(entry_title.get('type'), "text")
- self.assertEqual(entry_title.text, "Version 2.9.8")
- self.assertEqual(entry_updated.tag.split('}')[1], "updated")
- self.assertEqual(entry_updated.text, "2011-07-20T11:40:00Z")
- self.assertEqual(entry_link.tag.split('}')[1], "link")
- self.assertEqual(entry_link.get('href'), "http://test/2.9.8")
- self.assertEqual(entry_link.get('rel'), "self")
- self.assertEqual(entry_content.tag.split('}')[1], "content")
- self.assertEqual(entry_content.get('type'), "text")
- self.assertEqual(entry_content.text,
- "Version 2.9.8 CURRENT (2011-07-20T11:40:00Z)")
+ f = feedparser.parse(response)
+
+ self.assertEqual(f.feed.title, 'Available API Versions')
+ self.assertEqual(f.feed.updated, '2011-07-20T11:40:00Z')
+ self.assertEqual(f.feed.id, 'http://test/')
+ self.assertEqual(f.feed.author, 'Rackspace')
+ self.assertEqual(f.feed.author_detail.href,
+ 'http://www.rackspace.com/')
+ self.assertEqual(f.feed.links[0]['href'], 'http://test/')
+ self.assertEqual(f.feed.links[0]['rel'], 'self')
+
+ self.assertEqual(len(f.entries), 1)
+ entry = f.entries[0]
+ self.assertEqual(entry.id, 'http://test/2.9.8')
+ self.assertEqual(entry.title, 'Version 2.9.8')
+ self.assertEqual(entry.updated, '2011-07-20T11:40:00Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version 2.9.8 CURRENT (2011-07-20T11:40:00Z)')
+ self.assertEqual(len(entry.links), 1)
+ self.assertEqual(entry.links[0]['href'], 'http://test/2.9.8')
+ self.assertEqual(entry.links[0]['rel'], 'self')
def test_version_detail_atom_serializer(self):
versions_data = {
@@ -904,63 +829,36 @@ class VersionsSerializerTests(test.TestCase):
serializer = versions.VersionsAtomSerializer()
response = serializer.show(versions_data)
-
- root = xml.etree.ElementTree.XML(response)
- self.assertEqual(root.tag.split('}')[1], "feed")
- self.assertEqual(root.tag.split('}')[0].strip('{'),
- "http://www.w3.org/2005/Atom")
-
- children = list(root)
- title = children[0]
- updated = children[1]
- id = children[2]
- author = children[3]
- link = children[4]
- entry = children[5]
-
- self.assertEqual(root.tag.split('}')[1], 'feed')
- self.assertEqual(title.tag.split('}')[1], 'title')
- self.assertEqual(title.text, 'About This Version')
- self.assertEqual(updated.tag.split('}')[1], 'updated')
- self.assertEqual(updated.text, '2011-01-21T11:33:21Z')
- self.assertEqual(id.tag.split('}')[1], 'id')
- self.assertEqual(id.text, 'http://localhost/v1.1/')
-
- self.assertEqual(author.tag.split('}')[1], 'author')
- author_name = list(author)[0]
- author_uri = list(author)[1]
- self.assertEqual(author_name.tag.split('}')[1], 'name')
- self.assertEqual(author_name.text, 'Rackspace')
- self.assertEqual(author_uri.tag.split('}')[1], 'uri')
- self.assertEqual(author_uri.text, 'http://www.rackspace.com/')
-
- self.assertEqual(link.get('href'),
- 'http://localhost/v1.1/')
- self.assertEqual(link.get('rel'), 'self')
-
- self.assertEqual(entry.tag.split('}')[1], 'entry')
- entry_children = list(entry)
- entry_id = entry_children[0]
- entry_title = entry_children[1]
- entry_updated = entry_children[2]
- entry_links = (entry_children[3], entry_children[4], entry_children[5])
- entry_content = entry_children[6]
-
- self.assertEqual(entry_id.tag.split('}')[1], "id")
- self.assertEqual(entry_id.text,
- "http://localhost/v1.1/")
- self.assertEqual(entry_title.tag.split('}')[1], "title")
- self.assertEqual(entry_title.get('type'), "text")
- self.assertEqual(entry_title.text, "Version v1.1")
- self.assertEqual(entry_updated.tag.split('}')[1], "updated")
- self.assertEqual(entry_updated.text, "2011-01-21T11:33:21Z")
-
- for i, link in enumerate(versions_data["version"]["links"]):
- self.assertEqual(entry_links[i].tag.split('}')[1], "link")
- for key, val in versions_data["version"]["links"][i].items():
- self.assertEqual(entry_links[i].get(key), val)
-
- self.assertEqual(entry_content.tag.split('}')[1], "content")
- self.assertEqual(entry_content.get('type'), "text")
- self.assertEqual(entry_content.text,
- "Version v1.1 CURRENT (2011-01-21T11:33:21Z)")
+ f = feedparser.parse(response)
+
+ self.assertEqual(f.feed.title, 'About This Version')
+ self.assertEqual(f.feed.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(f.feed.id, 'http://localhost/v1.1/')
+ self.assertEqual(f.feed.author, 'Rackspace')
+ self.assertEqual(f.feed.author_detail.href,
+ 'http://www.rackspace.com/')
+ self.assertEqual(f.feed.links[0]['href'], 'http://localhost/v1.1/')
+ self.assertEqual(f.feed.links[0]['rel'], 'self')
+
+ self.assertEqual(len(f.entries), 1)
+ entry = f.entries[0]
+ self.assertEqual(entry.id, 'http://localhost/v1.1/')
+ self.assertEqual(entry.title, 'Version v1.1')
+ self.assertEqual(entry.updated, '2011-01-21T11:33:21Z')
+ self.assertEqual(len(entry.content), 1)
+ self.assertEqual(entry.content[0].value,
+ 'Version v1.1 CURRENT (2011-01-21T11:33:21Z)')
+ self.assertEqual(len(entry.links), 3)
+ self.assertEqual(entry.links[0]['href'], 'http://localhost/v1.1/')
+ self.assertEqual(entry.links[0]['rel'], 'self')
+ self.assertEqual(entry.links[1], {
+ 'rel': 'describedby',
+ 'type': 'application/pdf',
+ 'href': 'http://docs.rackspacecloud.com/'
+ 'servers/api/v1.1/cs-devguide-20110125.pdf'})
+ self.assertEqual(entry.links[2], {
+ 'rel': 'describedby',
+ 'type': 'application/vnd.sun.wadl+xml',
+ 'href': 'http://docs.rackspacecloud.com/'
+ 'servers/api/v1.1/application.wadl',
+ })
diff --git a/nova/tests/glance/stubs.py b/nova/tests/glance/stubs.py
index 6b74e671c..1567393e3 100644
--- a/nova/tests/glance/stubs.py
+++ b/nova/tests/glance/stubs.py
@@ -16,6 +16,7 @@
import StringIO
+from nova import exception
from nova.image import glance
@@ -78,3 +79,70 @@ class FakeGlance(object):
def get_image(self, image_id):
image = self.IMAGE_FIXTURES[int(image_id)]
return image['image_meta'], image['image_data']
+
+
+NOW_GLANCE_FORMAT = "2010-10-11T10:30:22"
+
+
+class StubGlanceClient(object):
+
+    def __init__(self, images=None):
+        self.images = []
+        # add each seed image through add_image() so ids get assigned
+        for image in images or []:
+            self.add_image(image, None)
+
+ def set_auth_token(self, auth_tok):
+ pass
+
+ def get_image_meta(self, image_id):
+ for image in self.images:
+ if image['id'] == str(image_id):
+ return image
+ raise exception.ImageNotFound(image_id=image_id)
+
+ #TODO(bcwaldon): implement filters
+ def get_images_detailed(self, filters=None, marker=None, limit=3):
+ if marker is None:
+ index = 0
+ else:
+ for index, image in enumerate(self.images):
+ if image['id'] == str(marker):
+ index += 1
+ break
+
+ return self.images[index:index + limit]
+
+ def get_image(self, image_id):
+ return self.get_image_meta(image_id), []
+
+ def add_image(self, metadata, data):
+ metadata['created_at'] = NOW_GLANCE_FORMAT
+ metadata['updated_at'] = NOW_GLANCE_FORMAT
+
+ self.images.append(metadata)
+
+ try:
+ image_id = str(metadata['id'])
+ except KeyError:
+ # auto-generate an id if one wasn't provided
+ image_id = str(len(self.images))
+
+ self.images[-1]['id'] = image_id
+
+ return self.images[-1]
+
+ def update_image(self, image_id, metadata, data):
+ for i, image in enumerate(self.images):
+ if image['id'] == str(image_id):
+ if 'id' in metadata:
+ metadata['id'] = str(metadata['id'])
+ self.images[i].update(metadata)
+ return self.images[i]
+ raise exception.ImageNotFound(image_id=image_id)
+
+ def delete_image(self, image_id):
+ for i, image in enumerate(self.images):
+ if image['id'] == image_id:
+ del self.images[i]
+ return
+ raise exception.ImageNotFound(image_id=image_id)
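For illustration only (not part of the patch): a minimal sketch of how the stub's marker/limit pagination is expected to behave, assuming ids are auto-generated by add_image() as described above.

    from nova.tests.glance.stubs import StubGlanceClient

    client = StubGlanceClient([{'name': 'a'}, {'name': 'b'},
                               {'name': 'c'}, {'name': 'd'}])
    # no explicit ids were supplied, so add_image() assigns '1'..'4'
    page = client.get_images_detailed(marker='2', limit=2)
    # the page starts just past the marker id
    assert [image['id'] for image in page] == ['3', '4']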
diff --git a/nova/tests/image/test_glance.py b/nova/tests/image/test_glance.py
index b1ebd8436..290c9a04a 100644
--- a/nova/tests/image/test_glance.py
+++ b/nova/tests/image/test_glance.py
@@ -17,47 +17,14 @@
import datetime
-import unittest
+import stubout
+from nova.tests.api.openstack import fakes
from nova import context
from nova import exception
-from nova import test
from nova.image import glance
-
-
-class StubGlanceClient(object):
-
- def __init__(self, images, add_response=None, update_response=None):
- self.images = images
- self.add_response = add_response
- self.update_response = update_response
-
- def set_auth_token(self, auth_tok):
- pass
-
- def get_image_meta(self, image_id):
- return self.images[image_id]
-
- def get_images_detailed(self, filters=None, marker=None, limit=None):
- images = self.images.values()
- if marker is None:
- index = 0
- else:
- for index, image in enumerate(images):
- if image['id'] == marker:
- index += 1
- break
- # default to a page size of 3 to ensure we flex the pagination code
- return images[index:index + 3]
-
- def get_image(self, image_id):
- return self.images[image_id], []
-
- def add_image(self, metadata, data):
- return self.add_response
-
- def update_image(self, image_id, metadata, data):
- return self.update_response
+from nova import test
+from nova.tests.glance import stubs as glance_stubs
class NullWriter(object):
@@ -67,218 +34,7 @@ class NullWriter(object):
pass
-class BaseGlanceTest(unittest.TestCase):
- NOW_GLANCE_OLD_FORMAT = "2010-10-11T10:30:22"
- NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
- NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22)
-
- def setUp(self):
- self.client = StubGlanceClient(None)
- self.service = glance.GlanceImageService(client=self.client)
- self.context = context.RequestContext(None, None)
-
- def assertDateTimesFilled(self, image_meta):
- self.assertEqual(image_meta['created_at'], self.NOW_DATETIME)
- self.assertEqual(image_meta['updated_at'], self.NOW_DATETIME)
- self.assertEqual(image_meta['deleted_at'], self.NOW_DATETIME)
-
- def assertDateTimesEmpty(self, image_meta):
- self.assertEqual(image_meta['updated_at'], None)
- self.assertEqual(image_meta['deleted_at'], None)
-
- def assertDateTimesBlank(self, image_meta):
- self.assertEqual(image_meta['updated_at'], '')
- self.assertEqual(image_meta['deleted_at'], '')
-
-
-class TestGlanceImageServiceProperties(BaseGlanceTest):
- def test_show_passes_through_to_client(self):
- """Ensure attributes which aren't BASE_IMAGE_ATTRS are stored in the
- properties dict
- """
- fixtures = {'image1': {'id': '1', 'name': 'image1', 'is_public': True,
- 'foo': 'bar',
- 'properties': {'prop1': 'propvalue1'}}}
- self.client.images = fixtures
- image_meta = self.service.show(self.context, 'image1')
-
- expected = {'id': '1', 'name': 'image1', 'is_public': True,
- 'properties': {'prop1': 'propvalue1', 'foo': 'bar'}}
- self.assertEqual(image_meta, expected)
-
- def test_show_raises_when_no_authtoken_in_the_context(self):
- fixtures = {'image1': {'name': 'image1', 'is_public': False,
- 'foo': 'bar',
- 'properties': {'prop1': 'propvalue1'}}}
- self.client.images = fixtures
- self.context.auth_token = False
-
- expected = {'name': 'image1', 'is_public': True,
- 'properties': {'prop1': 'propvalue1', 'foo': 'bar'}}
- self.assertRaises(exception.ImageNotFound,
- self.service.show, self.context, 'image1')
-
- def test_show_passes_through_to_client_with_authtoken_in_context(self):
- fixtures = {'image1': {'name': 'image1', 'is_public': False,
- 'foo': 'bar',
- 'properties': {'prop1': 'propvalue1'}}}
- self.client.images = fixtures
- self.context.auth_token = True
-
- expected = {'name': 'image1', 'is_public': False,
- 'properties': {'prop1': 'propvalue1', 'foo': 'bar'}}
-
- image_meta = self.service.show(self.context, 'image1')
- self.assertEqual(image_meta, expected)
-
- def test_detail_passes_through_to_client(self):
- fixtures = {'image1': {'id': '1', 'name': 'image1', 'is_public': True,
- 'foo': 'bar',
- 'properties': {'prop1': 'propvalue1'}}}
- self.client.images = fixtures
- image_meta = self.service.detail(self.context)
- expected = [{'id': '1', 'name': 'image1', 'is_public': True,
- 'properties': {'prop1': 'propvalue1', 'foo': 'bar'}}]
- self.assertEqual(image_meta, expected)
-
-
-class TestGetterDateTimeNoneTests(BaseGlanceTest):
-
- def test_show_handles_none_datetimes(self):
- self.client.images = self._make_none_datetime_fixtures()
- image_meta = self.service.show(self.context, 'image1')
- self.assertDateTimesEmpty(image_meta)
-
- def test_show_handles_blank_datetimes(self):
- self.client.images = self._make_blank_datetime_fixtures()
- image_meta = self.service.show(self.context, 'image1')
- self.assertDateTimesBlank(image_meta)
-
- def test_detail_handles_none_datetimes(self):
- self.client.images = self._make_none_datetime_fixtures()
- image_meta = self.service.detail(self.context)[0]
- self.assertDateTimesEmpty(image_meta)
-
- def test_detail_handles_blank_datetimes(self):
- self.client.images = self._make_blank_datetime_fixtures()
- image_meta = self.service.detail(self.context)[0]
- self.assertDateTimesBlank(image_meta)
-
- def test_get_handles_none_datetimes(self):
- self.client.images = self._make_none_datetime_fixtures()
- writer = NullWriter()
- image_meta = self.service.get(self.context, 'image1', writer)
- self.assertDateTimesEmpty(image_meta)
-
- def test_get_handles_blank_datetimes(self):
- self.client.images = self._make_blank_datetime_fixtures()
- writer = NullWriter()
- image_meta = self.service.get(self.context, 'image1', writer)
- self.assertDateTimesBlank(image_meta)
-
- def test_show_makes_datetimes(self):
- self.client.images = self._make_datetime_fixtures()
- image_meta = self.service.show(self.context, 'image1')
- self.assertDateTimesFilled(image_meta)
- image_meta = self.service.show(self.context, 'image2')
- self.assertDateTimesFilled(image_meta)
-
- def test_detail_makes_datetimes(self):
- self.client.images = self._make_datetime_fixtures()
- image_meta = self.service.detail(self.context)[0]
- self.assertDateTimesFilled(image_meta)
- image_meta = self.service.detail(self.context)[1]
- self.assertDateTimesFilled(image_meta)
-
- def test_get_makes_datetimes(self):
- self.client.images = self._make_datetime_fixtures()
- writer = NullWriter()
- image_meta = self.service.get(self.context, 'image1', writer)
- self.assertDateTimesFilled(image_meta)
- image_meta = self.service.get(self.context, 'image2', writer)
- self.assertDateTimesFilled(image_meta)
-
- def _make_datetime_fixtures(self):
- fixtures = {
- 'image1': {
- 'id': '1',
- 'name': 'image1',
- 'is_public': True,
- 'created_at': self.NOW_GLANCE_FORMAT,
- 'updated_at': self.NOW_GLANCE_FORMAT,
- 'deleted_at': self.NOW_GLANCE_FORMAT,
- },
- 'image2': {
- 'id': '2',
- 'name': 'image2',
- 'is_public': True,
- 'created_at': self.NOW_GLANCE_OLD_FORMAT,
- 'updated_at': self.NOW_GLANCE_OLD_FORMAT,
- 'deleted_at': self.NOW_GLANCE_OLD_FORMAT,
- },
- }
- return fixtures
-
- def _make_none_datetime_fixtures(self):
- fixtures = {'image1': {'id': '1',
- 'name': 'image1',
- 'is_public': True,
- 'updated_at': None,
- 'deleted_at': None}}
- return fixtures
-
- def _make_blank_datetime_fixtures(self):
- fixtures = {'image1': {'id': '1',
- 'name': 'image1',
- 'is_public': True,
- 'updated_at': '',
- 'deleted_at': ''}}
- return fixtures
-
-
-class TestMutatorDateTimeTests(BaseGlanceTest):
- """Tests create(), update()"""
-
- def test_create_handles_datetimes(self):
- self.client.add_response = self._make_datetime_fixture()
- image_meta = self.service.create(self.context, {})
- self.assertDateTimesFilled(image_meta)
-
- def test_create_handles_none_datetimes(self):
- self.client.add_response = self._make_none_datetime_fixture()
- dummy_meta = {}
- image_meta = self.service.create(self.context, dummy_meta)
- self.assertDateTimesEmpty(image_meta)
-
- def test_update_handles_datetimes(self):
- self.client.images = {'image1': self._make_datetime_fixture()}
- self.client.update_response = self._make_datetime_fixture()
- dummy_meta = {}
- image_meta = self.service.update(self.context, 'image1', dummy_meta)
- self.assertDateTimesFilled(image_meta)
-
- def test_update_handles_none_datetimes(self):
- self.client.images = {'image1': self._make_datetime_fixture()}
- self.client.update_response = self._make_none_datetime_fixture()
- dummy_meta = {}
- image_meta = self.service.update(self.context, 'image1', dummy_meta)
- self.assertDateTimesEmpty(image_meta)
-
- def _make_datetime_fixture(self):
- fixture = {'id': 'image1', 'name': 'image1', 'is_public': True,
- 'created_at': self.NOW_GLANCE_FORMAT,
- 'updated_at': self.NOW_GLANCE_FORMAT,
- 'deleted_at': self.NOW_GLANCE_FORMAT}
- return fixture
-
- def _make_none_datetime_fixture(self):
- fixture = {'id': 'image1', 'name': 'image1', 'is_public': True,
- 'updated_at': None,
- 'deleted_at': None}
- return fixture
-
-
-class TestGlanceSerializer(unittest.TestCase):
+class TestGlanceSerializer(test.TestCase):
def test_serialize(self):
metadata = {'name': 'image1',
'is_public': True,
@@ -312,3 +68,386 @@ class TestGlanceSerializer(unittest.TestCase):
converted = glance._convert_to_string(metadata)
self.assertEqual(converted, converted_expected)
self.assertEqual(glance._convert_from_string(converted), metadata)
+
+
+class TestGlanceImageService(test.TestCase):
+ """
+ Tests the Glance image service.
+
+ At a high level, the translations involved are:
+
+ 1. Glance -> ImageService - This is needed so we can support
+       multiple ImageServices (Glance, Local, etc)
+
+    2. ImageService -> API - This is needed so we can support multiple
+ APIs (OpenStack, EC2)
+
+ """
+ NOW_GLANCE_OLD_FORMAT = "2010-10-11T10:30:22"
+ NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
+ NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22)
+
+ def setUp(self):
+ super(TestGlanceImageService, self).setUp()
+ self.stubs = stubout.StubOutForTesting()
+ fakes.stub_out_compute_api_snapshot(self.stubs)
+ client = glance_stubs.StubGlanceClient()
+ self.service = glance.GlanceImageService(client=client)
+ self.context = context.RequestContext('fake', 'fake', auth_token=True)
+ self.service.delete_all()
+
+ def tearDown(self):
+ self.stubs.UnsetAll()
+ super(TestGlanceImageService, self).tearDown()
+
+ @staticmethod
+ def _make_fixture(**kwargs):
+ fixture = {'name': None,
+ 'properties': {},
+ 'status': None,
+ 'is_public': None}
+ fixture.update(kwargs)
+ return fixture
+
+ def _make_datetime_fixture(self):
+ return self._make_fixture(created_at=self.NOW_GLANCE_FORMAT,
+ updated_at=self.NOW_GLANCE_FORMAT,
+ deleted_at=self.NOW_GLANCE_FORMAT)
+
+ def test_create_with_instance_id(self):
+ """Ensure instance_id is persisted as an image-property"""
+ fixture = {'name': 'test image',
+ 'is_public': False,
+ 'properties': {'instance_id': '42', 'user_id': 'fake'}}
+
+ image_id = self.service.create(self.context, fixture)['id']
+ image_meta = self.service.show(self.context, image_id)
+ expected = {
+ 'id': image_id,
+ 'name': 'test image',
+ 'is_public': False,
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {'instance_id': '42', 'user_id': 'fake'},
+ }
+ self.assertDictMatch(image_meta, expected)
+
+ image_metas = self.service.detail(self.context)
+ self.assertDictMatch(image_metas[0], expected)
+
+ def test_create_without_instance_id(self):
+ """
+ Ensure we can create an image without having to specify an
+ instance_id. Public images are an example of an image not tied to an
+ instance.
+ """
+ fixture = {'name': 'test image', 'is_public': False}
+ image_id = self.service.create(self.context, fixture)['id']
+
+ expected = {
+ 'id': image_id,
+ 'name': 'test image',
+ 'is_public': False,
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ }
+ actual = self.service.show(self.context, image_id)
+ self.assertDictMatch(actual, expected)
+
+ def test_create(self):
+ fixture = self._make_fixture(name='test image')
+ num_images = len(self.service.index(self.context))
+ image_id = self.service.create(self.context, fixture)['id']
+
+ self.assertNotEquals(None, image_id)
+ self.assertEquals(num_images + 1,
+ len(self.service.index(self.context)))
+
+ def test_create_and_show_non_existing_image(self):
+ fixture = self._make_fixture(name='test image')
+ image_id = self.service.create(self.context, fixture)['id']
+
+ self.assertNotEquals(None, image_id)
+ self.assertRaises(exception.NotFound,
+ self.service.show,
+ self.context,
+ 'bad image id')
+
+ def test_create_and_show_non_existing_image_by_name(self):
+ fixture = self._make_fixture(name='test image')
+ image_id = self.service.create(self.context, fixture)['id']
+
+ self.assertNotEquals(None, image_id)
+ self.assertRaises(exception.ImageNotFound,
+ self.service.show_by_name,
+ self.context,
+ 'bad image id')
+
+ def test_index(self):
+ fixture = self._make_fixture(name='test image')
+ image_id = self.service.create(self.context, fixture)['id']
+ image_metas = self.service.index(self.context)
+ expected = [{'id': image_id, 'name': 'test image'}]
+ self.assertDictListMatch(image_metas, expected)
+
+ def test_index_default_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.index(self.context)
+ i = 0
+ for meta in image_metas:
+ expected = {'id': 'DONTCARE',
+ 'name': 'TestImage %d' % (i)}
+ self.assertDictMatch(meta, expected)
+ i = i + 1
+
+ def test_index_marker(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.index(self.context, marker=ids[1])
+ self.assertEquals(len(image_metas), 8)
+ i = 2
+ for meta in image_metas:
+ expected = {'id': 'DONTCARE',
+ 'name': 'TestImage %d' % (i)}
+ self.assertDictMatch(meta, expected)
+ i = i + 1
+
+ def test_index_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.index(self.context, limit=5)
+ self.assertEquals(len(image_metas), 5)
+
+ def test_index_marker_and_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.index(self.context, marker=ids[3], limit=1)
+ self.assertEquals(len(image_metas), 1)
+ i = 4
+ for meta in image_metas:
+ expected = {'id': ids[i],
+ 'name': 'TestImage %d' % (i)}
+ self.assertDictMatch(meta, expected)
+ i = i + 1
+
+ def test_detail_marker(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.detail(self.context, marker=ids[1])
+ self.assertEquals(len(image_metas), 8)
+ i = 2
+ for meta in image_metas:
+ expected = {
+ 'id': ids[i],
+ 'status': None,
+ 'is_public': None,
+ 'name': 'TestImage %d' % (i),
+ 'properties': {},
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None
+ }
+
+ self.assertDictMatch(meta, expected)
+ i = i + 1
+
+ def test_detail_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.detail(self.context, limit=5)
+ self.assertEquals(len(image_metas), 5)
+
+ def test_detail_marker_and_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(self.context, fixture)['id'])
+
+ image_metas = self.service.detail(self.context, marker=ids[3], limit=5)
+ self.assertEquals(len(image_metas), 5)
+ i = 4
+ for meta in image_metas:
+ expected = {
+ 'id': ids[i],
+ 'status': None,
+ 'is_public': None,
+ 'name': 'TestImage %d' % (i),
+ 'properties': {},
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None
+ }
+ self.assertDictMatch(meta, expected)
+ i = i + 1
+
+ def test_update(self):
+ fixture = self._make_fixture(name='test image')
+ image_id = self.service.create(self.context, fixture)['id']
+ fixture['name'] = 'new image name'
+ self.service.update(self.context, image_id, fixture)
+
+ new_image_data = self.service.show(self.context, image_id)
+ self.assertEquals('new image name', new_image_data['name'])
+
+ def test_delete(self):
+ fixture1 = self._make_fixture(name='test image 1')
+ fixture2 = self._make_fixture(name='test image 2')
+ fixtures = [fixture1, fixture2]
+
+ num_images = len(self.service.index(self.context))
+ self.assertEquals(0, num_images, str(self.service.index(self.context)))
+
+ ids = []
+ for fixture in fixtures:
+ new_id = self.service.create(self.context, fixture)['id']
+ ids.append(new_id)
+
+ num_images = len(self.service.index(self.context))
+ self.assertEquals(2, num_images, str(self.service.index(self.context)))
+
+ self.service.delete(self.context, ids[0])
+
+ num_images = len(self.service.index(self.context))
+ self.assertEquals(1, num_images)
+
+ def test_show_passes_through_to_client(self):
+ fixture = self._make_fixture(name='image1', is_public=True)
+ image_id = self.service.create(self.context, fixture)['id']
+
+ image_meta = self.service.show(self.context, image_id)
+ expected = {
+ 'id': image_id,
+ 'name': 'image1',
+ 'is_public': True,
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ }
+ self.assertEqual(image_meta, expected)
+
+ def test_show_raises_when_no_authtoken_in_the_context(self):
+ fixture = self._make_fixture(name='image1',
+ is_public=False,
+ properties={'one': 'two'})
+ image_id = self.service.create(self.context, fixture)['id']
+ self.context.auth_token = False
+ self.assertRaises(exception.ImageNotFound,
+ self.service.show,
+ self.context,
+ image_id)
+
+ def test_detail_passes_through_to_client(self):
+ fixture = self._make_fixture(name='image10', is_public=True)
+ image_id = self.service.create(self.context, fixture)['id']
+ image_metas = self.service.detail(self.context)
+ expected = [
+ {
+ 'id': image_id,
+ 'name': 'image10',
+ 'is_public': True,
+ 'size': None,
+ 'location': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ },
+ ]
+ self.assertEqual(image_metas, expected)
+
+ def test_show_makes_datetimes(self):
+ fixture = self._make_datetime_fixture()
+ image_id = self.service.create(self.context, fixture)['id']
+ image_meta = self.service.show(self.context, image_id)
+ self.assertEqual(image_meta['created_at'], self.NOW_DATETIME)
+ self.assertEqual(image_meta['updated_at'], self.NOW_DATETIME)
+
+ def test_detail_makes_datetimes(self):
+ fixture = self._make_datetime_fixture()
+ self.service.create(self.context, fixture)
+ image_meta = self.service.detail(self.context)[0]
+ self.assertEqual(image_meta['created_at'], self.NOW_DATETIME)
+ self.assertEqual(image_meta['updated_at'], self.NOW_DATETIME)
+
+ def test_get_makes_datetimes(self):
+ fixture = self._make_datetime_fixture()
+ image_id = self.service.create(self.context, fixture)['id']
+ writer = NullWriter()
+ image_meta = self.service.get(self.context, image_id, writer)
+ self.assertEqual(image_meta['created_at'], self.NOW_DATETIME)
+ self.assertEqual(image_meta['updated_at'], self.NOW_DATETIME)
diff --git a/nova/tests/integrated/test_xml.py b/nova/tests/integrated/test_xml.py
index 74baaacc2..cf013da1d 100644
--- a/nova/tests/integrated/test_xml.py
+++ b/nova/tests/integrated/test_xml.py
@@ -15,6 +15,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+from lxml import etree
+
from nova.log import logging
from nova.tests.integrated import integrated_helpers
from nova.api.openstack import common
@@ -34,9 +36,8 @@ class XmlTests(integrated_helpers._IntegratedTestBase):
response = self.api.api_request('/limits', headers=headers)
data = response.read()
LOG.debug("data: %s" % data)
-
- prefix = '<limits xmlns="%s"' % common.XML_NS_V11
- self.assertTrue(data.startswith(prefix))
+ root = etree.XML(data)
+ self.assertEqual(root.nsmap.get(None), common.XML_NS_V11)
def test_namespace_servers(self):
"""/servers should have v1.1 namespace (has changed in 1.1)."""
@@ -46,6 +47,5 @@ class XmlTests(integrated_helpers._IntegratedTestBase):
response = self.api.api_request('/servers', headers=headers)
data = response.read()
LOG.debug("data: %s" % data)
-
- prefix = '<servers xmlns="%s"' % common.XML_NS_V11
- self.assertTrue(data.startswith(prefix))
+ root = etree.XML(data)
+ self.assertEqual(root.nsmap.get(None), common.XML_NS_V11)
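A quick illustration of the lxml nsmap check used above (the namespace URI here is made up; the tests compare against common.XML_NS_V11):

    from lxml import etree

    data = '<servers xmlns="http://example.com/ns/v1.1"><server/></servers>'
    root = etree.XML(data)
    # nsmap maps prefixes to namespace URIs; the key None is the default namespace
    assert root.nsmap.get(None) == 'http://example.com/ns/v1.1'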
diff --git a/nova/tests/test_direct.py b/nova/tests/test_direct.py
index 4ed0c2aa5..8d856dc4b 100644
--- a/nova/tests/test_direct.py
+++ b/nova/tests/test_direct.py
@@ -30,7 +30,7 @@ from nova import test
from nova import volume
from nova import utils
from nova.api import direct
-from nova.tests import test_cloud
+from nova.tests.api.ec2 import test_cloud
class ArbitraryObject(object):
diff --git a/nova/tests/test_libvirt.py b/nova/tests/test_libvirt.py
index 233ee14de..d4e7f6b6b 100644
--- a/nova/tests/test_libvirt.py
+++ b/nova/tests/test_libvirt.py
@@ -35,6 +35,7 @@ from nova import utils
from nova.api.ec2 import cloud
from nova.compute import power_state
from nova.compute import vm_states
+from nova.virt import driver
from nova.virt.libvirt import connection
from nova.virt.libvirt import firewall
from nova.tests import fake_network
@@ -840,6 +841,50 @@ class LibvirtConnTestCase(test.TestCase):
_assert_volume_in_mapping('sdg', False)
_assert_volume_in_mapping('sdh1', False)
+ def test_reboot_signature(self):
+ """Test that libvirt driver method sig matches interface"""
+ def fake_reboot_with_correct_sig(ignore, instance,
+ network_info, reboot_type):
+ pass
+
+ def fake_destroy(instance, network_info, cleanup=False):
+ pass
+
+ def fake_plug_vifs(instance, network_info):
+ pass
+
+ def fake_create_new_domain(xml):
+ return
+
+ def fake_none(self, instance):
+ return
+
+ instance = db.instance_create(self.context, self.test_instance)
+ network_info = _fake_network_info(self.stubs, 1)
+
+ self.mox.StubOutWithMock(connection.LibvirtConnection, '_conn')
+ connection.LibvirtConnection._conn.lookupByName = self.fake_lookup
+
+ conn = connection.LibvirtConnection(False)
+ self.stubs.Set(conn, 'destroy', fake_destroy)
+ self.stubs.Set(conn, 'plug_vifs', fake_plug_vifs)
+ self.stubs.Set(conn.firewall_driver,
+ 'setup_basic_filtering',
+ fake_none)
+ self.stubs.Set(conn.firewall_driver,
+ 'prepare_instance_filter',
+ fake_none)
+ self.stubs.Set(conn, '_create_new_domain', fake_create_new_domain)
+ self.stubs.Set(conn.firewall_driver,
+ 'apply_instance_filter',
+ fake_none)
+
+ args = [instance, network_info, 'SOFT']
+ conn.reboot(*args)
+
+ compute_driver = driver.ComputeDriver()
+ self.assertRaises(NotImplementedError, compute_driver.reboot, *args)
+
class NWFilterFakes:
def __init__(self):
diff --git a/nova/tests/test_xenapi.py b/nova/tests/test_xenapi.py
index 4a83d139e..47c6a3c95 100644
--- a/nova/tests/test_xenapi.py
+++ b/nova/tests/test_xenapi.py
@@ -364,7 +364,7 @@ class XenAPIVMTestCase(test.TestCase):
def _test_spawn(self, image_ref, kernel_id, ramdisk_id,
instance_type_id="3", os_type="linux",
- architecture="x86-64", instance_id=1,
+ hostname="test", architecture="x86-64", instance_id=1,
check_injection=False,
create_record=True, empty_dns=False):
stubs.stubout_loopingcall_start(self.stubs)
@@ -377,6 +377,7 @@ class XenAPIVMTestCase(test.TestCase):
'ramdisk_id': ramdisk_id,
'instance_type_id': instance_type_id,
'os_type': os_type,
+ 'hostname': hostname,
'architecture': architecture}
instance = db.instance_create(self.context, values)
else:
diff --git a/nova/virt/disk.py b/nova/virt/disk.py
index cd3422829..9fe164cfb 100644
--- a/nova/virt/disk.py
+++ b/nova/virt/disk.py
@@ -52,6 +52,47 @@ flags.DEFINE_integer('timeout_nbd', 10,
flags.DEFINE_integer('max_nbd_devices', 16,
'maximum number of possible nbd devices')
+# NOTE(yamahata): DEFINE_list() doesn't work because the command may
+# include ','. For example,
+# mkfs.ext3 -O dir_index,extent -E stride=8,stripe-width=16
+# --label %(fs_label)s %(target)s
+#
+# DEFINE_list() parses its argument by
+# [s.strip() for s in argument.split(self._token)]
+# where self._token = ','
+# No escaping or special handling for ','.
+# DEFINE_list() doesn't give us what we need.
+flags.DEFINE_multistring('virt_mkfs',
+ ['windows=mkfs.ntfs --fast --label %(fs_label)s '
+ '%(target)s',
+ # NOTE(yamahata): vfat case
+ #'windows=mkfs.vfat -n %(fs_label)s %(target)s',
+ 'linux=mkfs.ext3 -L %(fs_label)s -F %(target)s',
+ 'default=mkfs.ext3 -L %(fs_label)s -F %(target)s'],
+                     'mkfs commands for ephemeral device. The format is '
+                     '<os_type>=<mkfs command>')
+
+
+_MKFS_COMMAND = {}
+_DEFAULT_MKFS_COMMAND = None
+
+
+for s in FLAGS.virt_mkfs:
+    # NOTE(yamahata): mkfs command may include '=' in its options.
+ # So item.partition('=') doesn't work here
+ os_type, mkfs_command = s.split('=', 1)
+ if os_type:
+ _MKFS_COMMAND[os_type] = mkfs_command
+ if os_type == 'default':
+ _DEFAULT_MKFS_COMMAND = mkfs_command
+
+
+def mkfs(os_type, fs_label, target):
+ mkfs_command = (_MKFS_COMMAND.get(os_type, _DEFAULT_MKFS_COMMAND) or
+ '') % locals()
+ if mkfs_command:
+ utils.execute(*mkfs_command.split())
+
def extend(image, size):
"""Increase image to size"""
diff --git a/nova/virt/libvirt/connection.py b/nova/virt/libvirt/connection.py
index f591ce02c..18e643ea8 100644
--- a/nova/virt/libvirt/connection.py
+++ b/nova/virt/libvirt/connection.py
@@ -38,6 +38,7 @@ Supports KVM, LXC, QEMU, UML, and XEN.
"""
import hashlib
+import functools
import multiprocessing
import netaddr
import os
@@ -466,7 +467,7 @@ class LibvirtConnection(driver.ComputeDriver):
shutil.rmtree(temp_dir)
@exception.wrap_exception()
- def reboot(self, instance, network_info):
+ def reboot(self, instance, network_info, reboot_type):
"""Reboot a virtual machine, given an instance reference.
This method actually destroys and re-creates the domain to ensure the
@@ -778,6 +779,10 @@ class LibvirtConnection(driver.ComputeDriver):
if fs_format:
utils.execute('mkfs', '-t', fs_format, target)
+ def _create_ephemeral(self, target, local_size, fs_label, os_type):
+ self._create_local(target, local_size)
+ disk.mkfs(os_type, fs_label, target)
+
def _create_swap(self, target, swap_gb):
"""Create a swap file of specified size"""
self._create_local(target, swap_gb)
@@ -866,9 +871,13 @@ class LibvirtConnection(driver.ComputeDriver):
local_size=local_gb)
for eph in driver.block_device_info_get_ephemerals(block_device_info):
- self._cache_image(fn=self._create_local,
+ fn = functools.partial(self._create_ephemeral,
+ fs_label='ephemeral%d' % eph['num'],
+ os_type=inst.os_type)
+ self._cache_image(fn=fn,
target=basepath(_get_eph_disk(eph)),
- fname="local_%s" % eph['size'],
+ fname="ephemeral_%s_%s_%s" %
+ (eph['num'], eph['size'], inst.os_type),
cow=FLAGS.use_cow_images,
local_size=eph['size'])
@@ -1102,6 +1111,11 @@ class LibvirtConnection(driver.ComputeDriver):
nova_context.get_admin_context(), instance['id'],
{'root_device_name': '/dev/' + self.default_root_device})
+ if local_device:
+ db.instance_update(
+ nova_context.get_admin_context(), instance['id'],
+ {'default_local_device': '/dev/' + self.default_local_device})
+
swap = driver.block_device_info_get_swap(block_device_info)
if driver.swap_is_usable(swap):
xml_info['swap_device'] = block_device.strip_dev(
@@ -1110,6 +1124,9 @@ class LibvirtConnection(driver.ComputeDriver):
not self._volume_in_mapping(self.default_swap_device,
block_device_info)):
xml_info['swap_device'] = self.default_swap_device
+ db.instance_update(
+ nova_context.get_admin_context(), instance['id'],
+ {'default_swap_device': '/dev/' + self.default_swap_device})
config_drive = False
if instance.get('config_drive') or instance.get('config_drive_id'):
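For context, a minimal sketch of the functools.partial pattern introduced above: the per-instance arguments are bound up front so the image-caching helper only has to supply target and local_size (the caching helper itself is assumed here, not shown):

    import functools

    def create_ephemeral(target, local_size, fs_label, os_type):
        # stand-in for LibvirtConnection._create_ephemeral
        print('formatting %s (%sG) as %s for %s'
              % (target, local_size, fs_label, os_type))

    fn = functools.partial(create_ephemeral,
                           fs_label='ephemeral0', os_type='linux')
    fn(target='/var/lib/nova/instance-1/disk.local', local_size=20)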
diff --git a/nova/virt/xenapi/vmops.py b/nova/virt/xenapi/vmops.py
index fb9c602d9..6b56d668e 100644
--- a/nova/virt/xenapi/vmops.py
+++ b/nova/virt/xenapi/vmops.py
@@ -253,6 +253,8 @@ class VMOps(object):
self.create_vifs(vm_ref, instance, network_info)
self.inject_network_info(instance, network_info, vm_ref)
+ self.inject_hostname(instance, vm_ref, instance['hostname'])
+
return vm_ref
def _attach_disks(self, instance, disk_image_type, vm_ref, first_vdi_ref,
@@ -1158,6 +1160,16 @@ class VMOps(object):
resp = self._make_plugin_call('agent', 'resetnetwork', instance, '',
args, vm_ref)
+ def inject_hostname(self, instance, vm_ref, hostname):
+ """Inject the hostname of the instance into the xenstore."""
+ if instance.os_type == "windows":
+ # NOTE(jk0): Windows hostnames can only be <= 15 chars.
+ hostname = hostname[:15]
+
+ logging.debug(_("injecting hostname to xs for vm: |%s|"), vm_ref)
+ self._session.call_xenapi_request("VM.add_to_xenstore_data",
+ (vm_ref, "vm-data/hostname", hostname))
+
def list_from_xenstore(self, vm, path):
"""
Runs the xenstore-ls command to get a listing of all records
diff --git a/nova/volume/driver.py b/nova/volume/driver.py
index 35e3ea8d0..e5bb498ed 100644
--- a/nova/volume/driver.py
+++ b/nova/volume/driver.py
@@ -221,7 +221,14 @@ class VolumeDriver(object):
class AOEDriver(VolumeDriver):
- """Implements AOE specific volume commands."""
+ """WARNING! Deprecated. This driver will be removed in Essex. Its use
+ is not recommended.
+
+ Implements AOE specific volume commands."""
+
+ def __init__(self, *args, **kwargs):
+ LOG.warn(_("AOEDriver is deprecated and will be removed in Essex"))
+ super(AOEDriver, self).__init__(*args, **kwargs)
def ensure_export(self, context, volume):
# NOTE(vish): we depend on vblade-persist for recreating exports
diff --git a/tools/pip-requires b/tools/pip-requires
index 66d6a48d9..a4af326dc 100644
--- a/tools/pip-requires
+++ b/tools/pip-requires
@@ -35,3 +35,4 @@ coverage
nosexcover
GitPython
paramiko
+feedparser
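feedparser is the new test-only dependency used by the Atom assertions above; a minimal sketch of how it exposes a parsed feed (illustrative document, not one of the test fixtures):

    import feedparser

    atom = ('<feed xmlns="http://www.w3.org/2005/Atom">'
            '<title type="text">Available API Versions</title>'
            '<updated>2011-01-21T11:33:21Z</updated>'
            '<id>http://localhost/</id>'
            '<entry>'
            '<id>http://localhost/v1.1/</id>'
            '<title type="text">Version v1.1</title>'
            '<link href="http://localhost/v1.1/" rel="self"/>'
            '</entry></feed>')

    f = feedparser.parse(atom)
    # feed-level and entry-level fields come back as simple attributes/dicts
    assert f.feed.title == 'Available API Versions'
    assert f.entries[0].links[0]['href'] == 'http://localhost/v1.1/'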