Diffstat (limited to 'cloudinit')
-rw-r--r--  cloudinit/apport.py                       |   1
-rw-r--r--  cloudinit/settings.py                     |   1
-rw-r--r--  cloudinit/sources/DataSourceIBMCloud.py   |  13
-rw-r--r--  cloudinit/sources/DataSourceOpenStack.py  |  12
-rw-r--r--  cloudinit/sources/DataSourceOracle.py     | 233
-rw-r--r--  cloudinit/sources/__init__.py             |   4
-rw-r--r--  cloudinit/sources/helpers/openstack.py    |   6
-rw-r--r--  cloudinit/sources/tests/test_oracle.py    | 331
8 files changed, 585 insertions, 16 deletions
diff --git a/cloudinit/apport.py b/cloudinit/apport.py
index 130ff269..22cb7fde 100644
--- a/cloudinit/apport.py
+++ b/cloudinit/apport.py
@@ -30,6 +30,7 @@ KNOWN_CLOUD_NAMES = [
'NoCloud',
'OpenNebula',
'OpenStack',
+ 'Oracle',
'OVF',
'OpenTelekomCloud',
'Scaleway',
diff --git a/cloudinit/settings.py b/cloudinit/settings.py
index dde5749d..ea367cb7 100644
--- a/cloudinit/settings.py
+++ b/cloudinit/settings.py
@@ -38,6 +38,7 @@ CFG_BUILTIN = {
'Scaleway',
'Hetzner',
'IBMCloud',
+ 'Oracle',
# At the end to act as a 'catch' when none of the above work...
'None',
],
diff --git a/cloudinit/sources/DataSourceIBMCloud.py b/cloudinit/sources/DataSourceIBMCloud.py
index 01106ec0..a5358148 100644
--- a/cloudinit/sources/DataSourceIBMCloud.py
+++ b/cloudinit/sources/DataSourceIBMCloud.py
@@ -295,7 +295,7 @@ def read_md():
results = metadata_from_dir(path)
else:
results = util.mount_cb(path, metadata_from_dir)
- except BrokenMetadata as e:
+ except sources.BrokenMetadata as e:
raise RuntimeError(
"Failed reading IBM config disk (platform=%s path=%s): %s" %
(platform, path, e))
@@ -304,10 +304,6 @@ def read_md():
return ret
-class BrokenMetadata(IOError):
- pass
-
-
def metadata_from_dir(source_dir):
"""Walk source_dir extracting standardized metadata.
@@ -352,12 +348,13 @@ def metadata_from_dir(source_dir):
try:
data = transl(raw)
except Exception as e:
- raise BrokenMetadata("Failed decoding %s: %s" % (path, e))
+ raise sources.BrokenMetadata(
+ "Failed decoding %s: %s" % (path, e))
results[name] = data
if results.get('metadata_raw') is None:
- raise BrokenMetadata(
+ raise sources.BrokenMetadata(
"%s missing required file 'meta_data.json'" % source_dir)
results['metadata'] = {}
@@ -368,7 +365,7 @@ def metadata_from_dir(source_dir):
try:
md['random_seed'] = base64.b64decode(md_raw['random_seed'])
except (ValueError, TypeError) as e:
- raise BrokenMetadata(
+ raise sources.BrokenMetadata(
"Badly formatted metadata random_seed entry: %s" % e)
renames = (
diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py
index b9ade90d..4a015240 100644
--- a/cloudinit/sources/DataSourceOpenStack.py
+++ b/cloudinit/sources/DataSourceOpenStack.py
@@ -13,6 +13,7 @@ from cloudinit import url_helper
from cloudinit import util
from cloudinit.sources.helpers import openstack
+from cloudinit.sources import DataSourceOracle as oracle
LOG = logging.getLogger(__name__)
@@ -28,8 +29,7 @@ DMI_PRODUCT_NOVA = 'OpenStack Nova'
DMI_PRODUCT_COMPUTE = 'OpenStack Compute'
VALID_DMI_PRODUCT_NAMES = [DMI_PRODUCT_NOVA, DMI_PRODUCT_COMPUTE]
DMI_ASSET_TAG_OPENTELEKOM = 'OpenTelekomCloud'
-DMI_ASSET_TAG_ORACLE_CLOUD = 'OracleCloud.com'
-VALID_DMI_ASSET_TAGS = [DMI_ASSET_TAG_OPENTELEKOM, DMI_ASSET_TAG_ORACLE_CLOUD]
+VALID_DMI_ASSET_TAGS = [DMI_ASSET_TAG_OPENTELEKOM]
class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
@@ -122,8 +122,10 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
False when unable to contact metadata service or when metadata
format is invalid or disabled.
"""
- if not detect_openstack():
+ oracle_considered = 'Oracle' in self.sys_cfg.get('datasource_list')
+ if not detect_openstack(accept_oracle=not oracle_considered):
return False
+
if self.perform_dhcp_setup: # Setup networking in init-local stage.
try:
with EphemeralDHCPv4(self.fallback_interface):
@@ -215,7 +217,7 @@ def read_metadata_service(base_url, ssl_details=None,
return reader.read_v2()
-def detect_openstack():
+def detect_openstack(accept_oracle=False):
"""Return True when a potential OpenStack platform is detected."""
if not util.is_x86():
return True # Non-Intel cpus don't properly report dmi product names
@@ -224,6 +226,8 @@ def detect_openstack():
return True
elif util.read_dmi_data('chassis-asset-tag') in VALID_DMI_ASSET_TAGS:
return True
+ elif accept_oracle and oracle._is_platform_viable():
+ return True
elif util.get_proc_env(1).get('product_name') == DMI_PRODUCT_NOVA:
return True
return False
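
Taken together, the change above means the OpenStack datasource only claims Oracle hardware when no dedicated 'Oracle' entry appears in the configured datasource_list. A minimal sketch of that decision follows; the function name and hand-rolled sys_cfg dict are illustrative, not cloud-init code, and the real detect_openstack() also checks DMI product names, the PID 1 environment and non-x86 architectures.

# Illustrative sketch only: a trimmed stand-in for the accept_oracle logic.
def openstack_claims_platform(sys_cfg, chassis_asset_tag):
    oracle_considered = 'Oracle' in sys_cfg.get('datasource_list', [])
    if chassis_asset_tag == 'OracleCloud.com':
        # Claim Oracle hardware only when no dedicated Oracle datasource
        # is configured (i.e. accept_oracle=True in the real code).
        return not oracle_considered
    return chassis_asset_tag == 'OpenTelekomCloud'


print(openstack_claims_platform(
    {'datasource_list': ['OpenStack', 'Oracle', 'None']}, 'OracleCloud.com'))
# -> False: defer to DataSourceOracle
print(openstack_claims_platform(
    {'datasource_list': ['OpenStack', 'None']}, 'OracleCloud.com'))
# -> True: legacy behavior, OpenStack datasource still handles Oracle
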
diff --git a/cloudinit/sources/DataSourceOracle.py b/cloudinit/sources/DataSourceOracle.py
new file mode 100644
index 00000000..fab39af3
--- /dev/null
+++ b/cloudinit/sources/DataSourceOracle.py
@@ -0,0 +1,233 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+"""Datasource for Oracle (OCI/Oracle Cloud Infrastructure)
+
+OCI provides a OpenStack like metadata service which provides only
+'2013-10-17' and 'latest' versions..
+
+Notes:
+ * This datasource does not support the OCI-Classic. OCI-Classic
+ provides an EC2 lookalike metadata service.
+ * The uuid provided in DMI data is not the same as the meta-data provided
+ instance-id, but has an equivalent lifespan.
+ * We do need to support upgrade from an instance that cloud-init
+ identified as OpenStack.
+ * Both bare-metal and vms use iscsi root
+ * Both bare-metal and vms provide chassis-asset-tag of OracleCloud.com
+"""
+
+from cloudinit.url_helper import combine_url, readurl, UrlError
+from cloudinit.net import dhcp
+from cloudinit import net
+from cloudinit import sources
+from cloudinit import util
+from cloudinit.net import cmdline
+from cloudinit import log as logging
+
+import json
+import re
+
+LOG = logging.getLogger(__name__)
+
+CHASSIS_ASSET_TAG = "OracleCloud.com"
+METADATA_ENDPOINT = "http://169.254.169.254/openstack/"
+
+
+class DataSourceOracle(sources.DataSource):
+
+ dsname = 'Oracle'
+ system_uuid = None
+ vendordata_pure = None
+ _network_config = sources.UNSET
+
+ def _is_platform_viable(self):
+ """Check platform environment to report if this datasource may run."""
+ return _is_platform_viable()
+
+ def _get_data(self):
+ if not self._is_platform_viable():
+ return False
+
+ # network may be configured if iscsi root. If that is the case
+ # then read_kernel_cmdline_config will return non-None.
+ if _is_iscsi_root():
+ data = self.crawl_metadata()
+ else:
+ with dhcp.EphemeralDHCPv4(net.find_fallback_nic()):
+ data = self.crawl_metadata()
+
+ self._crawled_metadata = data
+ vdata = data['2013-10-17']
+
+ self.userdata_raw = vdata.get('user_data')
+ self.system_uuid = vdata['system_uuid']
+
+ vd = vdata.get('vendor_data')
+ if vd:
+ self.vendordata_pure = vd
+ try:
+ self.vendordata_raw = sources.convert_vendordata(vd)
+ except ValueError as e:
+ LOG.warning("Invalid content in vendor-data: %s", e)
+ self.vendordata_raw = None
+
+ mdcopies = ('public_keys',)
+ md = dict([(k, vdata['meta_data'].get(k))
+ for k in mdcopies if k in vdata['meta_data']])
+
+ mdtrans = (
+ # oracle meta_data.json name, cloudinit.datasource.metadata name
+ ('availability_zone', 'availability-zone'),
+ ('hostname', 'local-hostname'),
+ ('launch_index', 'launch-index'),
+ ('uuid', 'instance-id'),
+ )
+ for dsname, ciname in mdtrans:
+ if dsname in vdata['meta_data']:
+ md[ciname] = vdata['meta_data'][dsname]
+
+ self.metadata = md
+ return True
+
+ def crawl_metadata(self):
+ return read_metadata()
+
+ def check_instance_id(self, sys_cfg):
+ """quickly check (local only) if self.instance_id is still valid
+
+ On Oracle, the dmi-provided system uuid differs from the instance-id
+ but has the same life-span."""
+ return sources.instance_id_matches_system_uuid(self.system_uuid)
+
+ def get_public_ssh_keys(self):
+ return sources.normalize_pubkey_data(self.metadata.get('public_keys'))
+
+ @property
+ def network_config(self):
+ """Network config is read from initramfs provided files
+ If none is present, then we fall back to fallback configuration.
+
+ One thing to note here is that this method is not currently
+ considered at all if there is is kernel/initramfs provided
+ data. In that case, stages considers that the cmdline data
+ overrides datasource provided data and does not consult here.
+
+ We nonetheless return cmdline provided config if present
+ and fallback to generate fallback."""
+ if self._network_config == sources.UNSET:
+ cmdline_cfg = cmdline.read_kernel_cmdline_config()
+ if cmdline_cfg:
+ self._network_config = cmdline_cfg
+ else:
+ self._network_config = self.distro.generate_fallback_config()
+ return self._network_config
+
+
+def _read_system_uuid():
+ sys_uuid = util.read_dmi_data('system-uuid')
+ return None if sys_uuid is None else sys_uuid.lower()
+
+
+def _is_platform_viable():
+ asset_tag = util.read_dmi_data('chassis-asset-tag')
+ return asset_tag == CHASSIS_ASSET_TAG
+
+
+def _is_iscsi_root():
+ return bool(cmdline.read_kernel_cmdline_config())
+
+
+def _load_index(content):
+ """Return a list entries parsed from content.
+
+ OpenStack's metadata service returns a newline delimited list
+ of items. Oracle's implementation has html formatted list of links.
+ The parser here just grabs targets from <a href="target">
+ and throws away "../".
+
+ Oracle has accepted that to be buggy and may fix in the future
+ to instead return a '\n' delimited plain text list. This function
+ will continue to work if that change is made."""
+ if not content.lower().startswith("<html>"):
+ return content.splitlines()
+ items = re.findall(
+ r'href="(?P<target>[^"]*)"', content, re.MULTILINE | re.IGNORECASE)
+ return [i for i in items if not i.startswith(".")]
+
+
+def read_metadata(endpoint_base=METADATA_ENDPOINT, sys_uuid=None,
+ version='2013-10-17'):
+ """Read metadata, return a dictionary.
+
+ Each path listed in the index will be represented in the dictionary.
+ If the path ends in .json, then the content will be decoded and
+ populated into the dictionary.
+
+ The system uuid (/sys/class/dmi/id/product_uuid) is also populated.
+ Example: given paths = ('user_data', 'meta_data.json')
+ This would return:
+ {version: {'user_data': b'blob', 'meta_data': json.loads(blob.decode()),
+ 'system_uuid': '3b54f2e0-3ab2-458d-b770-af9926eee3b2'}}
+ """
+ endpoint = combine_url(endpoint_base, version) + "/"
+ if sys_uuid is None:
+ sys_uuid = _read_system_uuid()
+ if not sys_uuid:
+ raise sources.BrokenMetadata("Failed to read system uuid.")
+
+ try:
+ resp = readurl(endpoint)
+ if not resp.ok():
+ raise sources.BrokenMetadata(
+ "Bad response from %s: %s" % (endpoint, resp.code))
+ except UrlError as e:
+ raise sources.BrokenMetadata(
+ "Failed to read index at %s: %s" % (endpoint, e))
+
+ entries = _load_index(resp.contents.decode('utf-8'))
+ LOG.debug("index url %s contained: %s", endpoint, entries)
+
+ # meta_data.json is required.
+ mdj = 'meta_data.json'
+ if mdj not in entries:
+ raise sources.BrokenMetadata(
+ "Required field '%s' missing in index at %s" % (mdj, endpoint))
+
+ ret = {'system_uuid': sys_uuid}
+ for path in entries:
+ response = readurl(combine_url(endpoint, path))
+ if path.endswith(".json"):
+ ret[path.rpartition(".")[0]] = (
+ json.loads(response.contents.decode('utf-8')))
+ else:
+ ret[path] = response.contents
+
+ return {version: ret}
+
+
+# Used to match classes to dependencies
+datasources = [
+ (DataSourceOracle, (sources.DEP_FILESYSTEM,)),
+]
+
+
+# Return a list of data sources that match this set of dependencies
+def get_datasource_list(depends):
+ return sources.list_from_depends(depends, datasources)
+
+
+if __name__ == "__main__":
+ import argparse
+ import os
+
+ parser = argparse.ArgumentParser(description='Query Oracle Cloud Metadata')
+ parser.add_argument("--endpoint", metavar="URL",
+ help="The url of the metadata service.",
+ default=METADATA_ENDPOINT)
+ args = parser.parse_args()
+ sys_uuid = "uuid-not-available-not-root" if os.geteuid() != 0 else None
+
+ data = read_metadata(endpoint_base=args.endpoint, sys_uuid=sys_uuid)
+ data['is_platform_viable'] = _is_platform_viable()
+ print(util.json_dumps(data))
+
+# vi: ts=4 expandtab
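
As a usage sketch (assuming the code runs on an OCI instance, or that endpoint_base points at a mock of the metadata service described above), the new module can be exercised directly:

# Sketch only; requires an OCI instance or a mocked metadata endpoint.
from cloudinit.sources import DataSourceOracle as oracle

if oracle._is_platform_viable():            # DMI chassis-asset-tag check
    crawled = oracle.read_metadata()        # {'2013-10-17': {...}}
    ver = crawled['2013-10-17']
    print(ver['system_uuid'], ver['meta_data'].get('hostname'))

The __main__ block above offers the same from the command line (e.g. python3 -m cloudinit.sources.DataSourceOracle --endpoint URL), printing the crawled data plus the platform-viability result as JSON.
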
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 06e613f8..41fde9ba 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -671,6 +671,10 @@ def convert_vendordata(data, recurse=True):
raise ValueError("Unknown data type for vendordata: %s" % type(data))
+class BrokenMetadata(IOError):
+ pass
+
+
# 'depends' is a list of dependencies (DEP_FILESYSTEM)
# ds_list is a list of 2 item lists
# ds_list = [
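
Since BrokenMetadata now lives in cloudinit.sources, datasources and their helpers share one exception type instead of per-module copies. A small sketch of the resulting pattern; the decode helper below is illustrative, not part of the tree:

# Sketch only: shared BrokenMetadata raising/handling across datasources.
import json

from cloudinit import sources


def decode_meta_data(blob):  # illustrative helper, not in cloud-init
    try:
        return json.loads(blob.decode('utf-8'))
    except (ValueError, UnicodeDecodeError) as e:
        raise sources.BrokenMetadata("Failed decoding meta_data.json: %s" % e)


try:
    decode_meta_data(b'not json')
except sources.BrokenMetadata as e:
    # callers such as IBMCloud's read_md() re-raise with platform context
    print("platform metadata unusable: %s" % e)
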
diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py
index a4cf0667..8f9c1441 100644
--- a/cloudinit/sources/helpers/openstack.py
+++ b/cloudinit/sources/helpers/openstack.py
@@ -21,6 +21,8 @@ from cloudinit import sources
from cloudinit import url_helper
from cloudinit import util
+from cloudinit.sources import BrokenMetadata
+
# See https://docs.openstack.org/user-guide/cli-config-drive.html
LOG = logging.getLogger(__name__)
@@ -68,10 +70,6 @@ class NonReadable(IOError):
pass
-class BrokenMetadata(IOError):
- pass
-
-
class SourceMixin(object):
def _ec2_name_to_device(self, name):
if not self.ec2_metadata:
diff --git a/cloudinit/sources/tests/test_oracle.py b/cloudinit/sources/tests/test_oracle.py
new file mode 100644
index 00000000..7599126c
--- /dev/null
+++ b/cloudinit/sources/tests/test_oracle.py
@@ -0,0 +1,331 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.sources import DataSourceOracle as oracle
+from cloudinit.sources import BrokenMetadata
+from cloudinit import helpers
+
+from cloudinit.tests import helpers as test_helpers
+
+from textwrap import dedent
+import argparse
+import httpretty
+import json
+import mock
+import os
+import six
+import uuid
+
+DS_PATH = "cloudinit.sources.DataSourceOracle"
+MD_VER = "2013-10-17"
+
+
+class TestDataSourceOracle(test_helpers.CiTestCase):
+ """Test datasource DataSourceOracle."""
+
+ ds_class = oracle.DataSourceOracle
+
+ my_uuid = str(uuid.uuid4())
+ my_md = {"uuid": "ocid1.instance.oc1.phx.abyhqlj",
+ "name": "ci-vm1", "availability_zone": "phx-ad-3",
+ "hostname": "ci-vm1hostname",
+ "launch_index": 0, "files": [],
+ "public_keys": {"0": "ssh-rsa AAAAB3N...== user@host"},
+ "meta": {}}
+
+ def _patch_instance(self, inst, patches):
+ """Patch an instance of a class 'inst'.
+ for each name, kwargs in patches:
+ inst.name = mock.Mock(**kwargs)
+ returns a namespace object that has
+ namespace.name = mock.Mock(**kwargs)
+ Do not bother with cleanup as instance is assumed transient."""
+ mocks = argparse.Namespace()
+ for name, kwargs in patches.items():
+ imock = mock.Mock(name=name, spec=getattr(inst, name), **kwargs)
+ setattr(mocks, name, imock)
+ setattr(inst, name, imock)
+ return mocks
+
+ def _get_ds(self, sys_cfg=None, distro=None, paths=None, ud_proc=None,
+ patches=None):
+ if sys_cfg is None:
+ sys_cfg = {}
+ if patches is None:
+ patches = {}
+ if paths is None:
+ tmpd = self.tmp_dir()
+ dirs = {'cloud_dir': self.tmp_path('cloud_dir', tmpd),
+ 'run_dir': self.tmp_path('run_dir')}
+ for d in dirs.values():
+ os.mkdir(d)
+ paths = helpers.Paths(dirs)
+
+ ds = self.ds_class(sys_cfg=sys_cfg, distro=distro,
+ paths=paths, ud_proc=ud_proc)
+
+ return ds, self._patch_instance(ds, patches)
+
+ def test_platform_not_viable_returns_false(self):
+ ds, mocks = self._get_ds(
+ patches={'_is_platform_viable': {'return_value': False}})
+ self.assertFalse(ds._get_data())
+ mocks._is_platform_viable.assert_called_once_with()
+
+ @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True)
+ def test_without_userdata(self, m_is_iscsi_root):
+ """If no user-data is provided, it should not be in return dict."""
+ ds, mocks = self._get_ds(patches={
+ '_is_platform_viable': {'return_value': True},
+ 'crawl_metadata': {
+ 'return_value': {
+ MD_VER: {'system_uuid': self.my_uuid,
+ 'meta_data': self.my_md}}}})
+ self.assertTrue(ds._get_data())
+ mocks._is_platform_viable.assert_called_once_with()
+ mocks.crawl_metadata.assert_called_once_with()
+ self.assertEqual(self.my_uuid, ds.system_uuid)
+ self.assertEqual(self.my_md['availability_zone'], ds.availability_zone)
+ self.assertIn(self.my_md["public_keys"]["0"], ds.get_public_ssh_keys())
+ self.assertEqual(self.my_md['uuid'], ds.get_instance_id())
+ self.assertIsNone(ds.userdata_raw)
+
+ @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True)
+ def test_with_vendordata(self, m_is_iscsi_root):
+ """Test with vendor data."""
+ vd = {'cloud-init': '#cloud-config\nkey: value'}
+ ds, mocks = self._get_ds(patches={
+ '_is_platform_viable': {'return_value': True},
+ 'crawl_metadata': {
+ 'return_value': {
+ MD_VER: {'system_uuid': self.my_uuid,
+ 'meta_data': self.my_md,
+ 'vendor_data': vd}}}})
+ self.assertTrue(ds._get_data())
+ mocks._is_platform_viable.assert_called_once_with()
+ mocks.crawl_metadata.assert_called_once_with()
+ self.assertEqual(vd, ds.vendordata_pure)
+ self.assertEqual(vd['cloud-init'], ds.vendordata_raw)
+
+ @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True)
+ def test_with_userdata(self, m_is_iscsi_root):
+ """Ensure user-data is populated if present and is binary."""
+ my_userdata = b'abcdefg'
+ ds, mocks = self._get_ds(patches={
+ '_is_platform_viable': {'return_value': True},
+ 'crawl_metadata': {
+ 'return_value': {
+ MD_VER: {'system_uuid': self.my_uuid,
+ 'meta_data': self.my_md,
+ 'user_data': my_userdata}}}})
+ self.assertTrue(ds._get_data())
+ mocks._is_platform_viable.assert_called_once_with()
+ mocks.crawl_metadata.assert_called_once_with()
+ self.assertEqual(self.my_uuid, ds.system_uuid)
+ self.assertIn(self.my_md["public_keys"]["0"], ds.get_public_ssh_keys())
+ self.assertEqual(self.my_md['uuid'], ds.get_instance_id())
+ self.assertEqual(my_userdata, ds.userdata_raw)
+
+ @mock.patch(DS_PATH + ".cmdline.read_kernel_cmdline_config")
+ @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True)
+ def test_network_cmdline(self, m_is_iscsi_root, m_cmdline_config):
+ """network_config should read kernel cmdline."""
+ distro = mock.MagicMock()
+ ds, _ = self._get_ds(distro=distro, patches={
+ '_is_platform_viable': {'return_value': True},
+ 'crawl_metadata': {
+ 'return_value': {
+ MD_VER: {'system_uuid': self.my_uuid,
+ 'meta_data': self.my_md}}}})
+ ncfg = {'version': 1, 'config': [{'a': 'b'}]}
+ m_cmdline_config.return_value = ncfg
+ self.assertTrue(ds._get_data())
+ self.assertEqual(ncfg, ds.network_config)
+ m_cmdline_config.assert_called_once_with()
+ self.assertFalse(distro.generate_fallback_config.called)
+
+ @mock.patch(DS_PATH + ".cmdline.read_kernel_cmdline_config")
+ @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True)
+ def test_network_fallback(self, m_is_iscsi_root, m_cmdline_config):
+ """test that fallback network is generated if no kernel cmdline."""
+ distro = mock.MagicMock()
+ ds, _ = self._get_ds(distro=distro, patches={
+ '_is_platform_viable': {'return_value': True},
+ 'crawl_metadata': {
+ 'return_value': {
+ MD_VER: {'system_uuid': self.my_uuid,
+ 'meta_data': self.my_md}}}})
+ ncfg = {'version': 1, 'config': [{'a': 'b'}]}
+ m_cmdline_config.return_value = None
+ self.assertTrue(ds._get_data())
+ ncfg = {'version': 1, 'config': [{'distro1': 'value'}]}
+ distro.generate_fallback_config.return_value = ncfg
+ self.assertEqual(ncfg, ds.network_config)
+ m_cmdline_config.assert_called_once_with()
+ distro.generate_fallback_config.assert_called_once_with()
+ self.assertEqual(1, m_cmdline_config.call_count)
+
+ # test that the result got cached, and the methods not re-called.
+ self.assertEqual(ncfg, ds.network_config)
+ self.assertEqual(1, m_cmdline_config.call_count)
+
+
+@mock.patch(DS_PATH + "._read_system_uuid", return_value=str(uuid.uuid4()))
+class TestReadMetaData(test_helpers.HttprettyTestCase):
+ """Test the read_metadata which interacts with http metadata service."""
+
+ mdurl = oracle.METADATA_ENDPOINT
+ my_md = {"uuid": "ocid1.instance.oc1.phx.abyhqlj",
+ "name": "ci-vm1", "availability_zone": "phx-ad-3",
+ "hostname": "ci-vm1hostname",
+ "launch_index": 0, "files": [],
+ "public_keys": {"0": "ssh-rsa AAAAB3N...== user@host"},
+ "meta": {}}
+
+ def populate_md(self, data):
+ """call httppretty.register_url for each item dict 'data',
+ including valid indexes. Text values converted to bytes."""
+ httpretty.register_uri(
+ httpretty.GET, self.mdurl + MD_VER + "/",
+ '\n'.join(data.keys()).encode('utf-8'))
+ for k, v in data.items():
+ httpretty.register_uri(
+ httpretty.GET, self.mdurl + MD_VER + "/" + k,
+ v if not isinstance(v, six.text_type) else v.encode('utf-8'))
+
+ def test_broken_no_sys_uuid(self, m_read_system_uuid):
+ """Datasource requires ability to read system_uuid and true return."""
+ m_read_system_uuid.return_value = None
+ self.assertRaises(BrokenMetadata, oracle.read_metadata)
+
+ def test_broken_no_metadata_json(self, m_read_system_uuid):
+ """Datasource requires meta_data.json."""
+ httpretty.register_uri(
+ httpretty.GET, self.mdurl + MD_VER + "/",
+ '\n'.join(['user_data']).encode('utf-8'))
+ with self.assertRaises(BrokenMetadata) as cm:
+ oracle.read_metadata()
+ self.assertIn("Required field 'meta_data.json' missing",
+ str(cm.exception))
+
+ def test_with_userdata(self, m_read_system_uuid):
+ data = {'user_data': b'#!/bin/sh\necho hi world\n',
+ 'meta_data.json': json.dumps(self.my_md)}
+ self.populate_md(data)
+ result = oracle.read_metadata()[MD_VER]
+ self.assertEqual(data['user_data'], result['user_data'])
+ self.assertEqual(self.my_md, result['meta_data'])
+
+ def test_without_userdata(self, m_read_system_uuid):
+ data = {'meta_data.json': json.dumps(self.my_md)}
+ self.populate_md(data)
+ result = oracle.read_metadata()[MD_VER]
+ self.assertNotIn('user_data', result)
+ self.assertEqual(self.my_md, result['meta_data'])
+
+ def test_unknown_fields_included(self, m_read_system_uuid):
+ """Unknown fields listed in index should be included.
+ And those ending in .json should be decoded."""
+ some_data = {'key1': 'data1', 'subk1': {'subd1': 'subv'}}
+ some_vendor_data = {'cloud-init': 'foo'}
+ data = {'meta_data.json': json.dumps(self.my_md),
+ 'some_data.json': json.dumps(some_data),
+ 'vendor_data.json': json.dumps(some_vendor_data),
+ 'other_blob': b'this is blob'}
+ self.populate_md(data)
+ result = oracle.read_metadata()[MD_VER]
+ self.assertNotIn('user_data', result)
+ self.assertEqual(self.my_md, result['meta_data'])
+ self.assertEqual(some_data, result['some_data'])
+ self.assertEqual(some_vendor_data, result['vendor_data'])
+ self.assertEqual(data['other_blob'], result['other_blob'])
+
+
+class TestIsPlatformViable(test_helpers.CiTestCase):
+ @mock.patch(DS_PATH + ".util.read_dmi_data",
+ return_value=oracle.CHASSIS_ASSET_TAG)
+ def test_expected_viable(self, m_read_dmi_data):
+ """System with known chassis tag is viable."""
+ self.assertTrue(oracle._is_platform_viable())
+ m_read_dmi_data.assert_has_calls([mock.call('chassis-asset-tag')])
+
+ @mock.patch(DS_PATH + ".util.read_dmi_data", return_value=None)
+ def test_expected_not_viable_dmi_data_none(self, m_read_dmi_data):
+ """System without known chassis tag is not viable."""
+ self.assertFalse(oracle._is_platform_viable())
+ m_read_dmi_data.assert_has_calls([mock.call('chassis-asset-tag')])
+
+ @mock.patch(DS_PATH + ".util.read_dmi_data", return_value="LetsGoCubs")
+ def test_expected_not_viable_other(self, m_read_dmi_data):
+ """System with unnown chassis tag is not viable."""
+ self.assertFalse(oracle._is_platform_viable())
+ m_read_dmi_data.assert_has_calls([mock.call('chassis-asset-tag')])
+
+
+class TestLoadIndex(test_helpers.CiTestCase):
+ """_load_index handles parsing of an index into a proper list.
+ The tests here guarantee correct parsing of html version or
+ a fixed version. See the function docstring for more doc."""
+
+ _known_html_api_versions = dedent("""\
+ <html>
+ <head><title>Index of /openstack/</title></head>
+ <body bgcolor="white">
+ <h1>Index of /openstack/</h1><hr><pre><a href="../">../</a>
+ <a href="2013-10-17/">2013-10-17/</a> 27-Jun-2018 12:22 -
+ <a href="latest/">latest/</a> 27-Jun-2018 12:22 -
+ </pre><hr></body>
+ </html>""")
+
+ _known_html_contents = dedent("""\
+ <html>
+ <head><title>Index of /openstack/2013-10-17/</title></head>
+ <body bgcolor="white">
+ <h1>Index of /openstack/2013-10-17/</h1><hr><pre><a href="../">../</a>
+ <a href="meta_data.json">meta_data.json</a> 27-Jun-2018 12:22 679
+ <a href="user_data">user_data</a> 27-Jun-2018 12:22 146
+ </pre><hr></body>
+ </html>""")
+
+ def test_parse_html(self):
+ """Test parsing of lower case html."""
+ self.assertEqual(
+ ['2013-10-17/', 'latest/'],
+ oracle._load_index(self._known_html_api_versions))
+ self.assertEqual(
+ ['meta_data.json', 'user_data'],
+ oracle._load_index(self._known_html_contents))
+
+ def test_parse_html_upper(self):
+ """Test parsing of upper case html, although known content is lower."""
+ def _toupper(data):
+ return data.replace("<a", "<A").replace("html>", "HTML>")
+
+ self.assertEqual(
+ ['2013-10-17/', 'latest/'],
+ oracle._load_index(_toupper(self._known_html_api_versions)))
+ self.assertEqual(
+ ['meta_data.json', 'user_data'],
+ oracle._load_index(_toupper(self._known_html_contents)))
+
+ def test_parse_newline_list_with_endl(self):
+ """Test parsing of newline separated list with ending newline."""
+ self.assertEqual(
+ ['2013-10-17/', 'latest/'],
+ oracle._load_index("\n".join(["2013-10-17/", "latest/", ""])))
+ self.assertEqual(
+ ['meta_data.json', 'user_data'],
+ oracle._load_index("\n".join(["meta_data.json", "user_data", ""])))
+
+ def test_parse_newline_list_without_endl(self):
+ """Test parsing of newline separated list with no ending newline.
+
+ The actual OpenStack implementation does not include a trailing newline."""
+ self.assertEqual(
+ ['2013-10-17/', 'latest/'],
+ oracle._load_index("\n".join(["2013-10-17/", "latest/"])))
+ self.assertEqual(
+ ['meta_data.json', 'user_data'],
+ oracle._load_index("\n".join(["meta_data.json", "user_data"])))
+
+
+# vi: ts=4 expandtab