path: root/tests/unittests/test_datasource
author    Scott Moser <smoser@brickies.net>  2017-07-31 14:46:00 -0400
committer Scott Moser <smoser@brickies.net>  2017-07-31 14:46:00 -0400
commit    19c248d009af6a7cff26fbb2febf5c958987084d (patch)
tree      521cc4c8cd303fd7a9eb56bc4eb5975c48996298 /tests/unittests/test_datasource
parent    f47c7ac027fc905ca7f6bee776007e2a922c117e (diff)
parent    e586fe35a692b7519000005c8024ebd2bcbc82e0 (diff)
merge from master at 0.7.9-233-ge586fe35
Diffstat (limited to 'tests/unittests/test_datasource')
-rw-r--r--  tests/unittests/test_datasource/test_aliyun.py     51
-rw-r--r--  tests/unittests/test_datasource/test_azure.py     250
-rw-r--r--  tests/unittests/test_datasource/test_common.py      5
-rw-r--r--  tests/unittests/test_datasource/test_ec2.py       202
-rw-r--r--  tests/unittests/test_datasource/test_gce.py        17
-rw-r--r--  tests/unittests/test_datasource/test_scaleway.py  262
6 files changed, 743 insertions, 44 deletions
diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py
index c16d1a6e..990bff2c 100644
--- a/tests/unittests/test_datasource/test_aliyun.py
+++ b/tests/unittests/test_datasource/test_aliyun.py
@@ -2,6 +2,7 @@
import functools
import httpretty
+import mock
import os
from .. import helpers as test_helpers
@@ -111,15 +112,29 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase):
self.assertEqual(self.default_metadata['hostname'],
self.ds.get_hostname())
+ @mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun")
@httpretty.activate
- def test_with_mock_server(self):
+ def test_with_mock_server(self, m_is_aliyun):
+ m_is_aliyun.return_value = True
self.regist_default_server()
- self.ds.get_data()
+ ret = self.ds.get_data()
+ self.assertEqual(True, ret)
+ self.assertEqual(1, m_is_aliyun.call_count)
self._test_get_data()
self._test_get_sshkey()
self._test_get_iid()
self._test_host_name()
+ @mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun")
+ @httpretty.activate
+ def test_returns_false_when_not_on_aliyun(self, m_is_aliyun):
+ """If is_aliyun returns false, then get_data should return False."""
+ m_is_aliyun.return_value = False
+ self.regist_default_server()
+ ret = self.ds.get_data()
+ self.assertEqual(1, m_is_aliyun.call_count)
+ self.assertEqual(False, ret)
+
def test_parse_public_keys(self):
public_keys = {}
self.assertEqual(ay.parse_public_keys(public_keys), [])
@@ -149,4 +164,36 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase):
self.assertEqual(ay.parse_public_keys(public_keys),
public_keys['key-pair-0']['openssh-key'])
+
+class TestIsAliYun(test_helpers.CiTestCase):
+ ALIYUN_PRODUCT = 'Alibaba Cloud ECS'
+ read_dmi_data_expected = [mock.call('system-product-name')]
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_true_on_aliyun_product(self, m_read_dmi_data):
+ """Should return true if the dmi product data has expected value."""
+ m_read_dmi_data.return_value = self.ALIYUN_PRODUCT
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(True, ret)
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_false_on_empty_string(self, m_read_dmi_data):
+ """Should return false on empty value returned."""
+ m_read_dmi_data.return_value = ""
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(False, ret)
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_false_on_unknown_string(self, m_read_dmi_data):
+ """Should return false on an unrelated string."""
+ m_read_dmi_data.return_value = "cubs win"
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(False, ret)
+
# vi: ts=4 expandtab
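
The new TestIsAliYun cases above exercise a DMI-based platform check. As a rough, hedged sketch of what such a check looks like (the helper name and return handling here are illustrative assumptions, not the code under review; see cloudinit/sources/DataSourceAliYun.py for the real implementation):

    # Illustrative sketch only -- not the actual cloud-init implementation.
    from cloudinit import util

    ALIYUN_PRODUCT = 'Alibaba Cloud ECS'   # system-product-name the tests expect on ECS guests

    def _is_aliyun():
        # util.read_dmi_data() returns None/'' when the key is unavailable, so a
        # plain equality test also covers the empty-string and unrelated-string cases.
        return util.read_dmi_data('system-product-name') == ALIYUN_PRODUCT
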
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 852ec703..20e70fb7 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -76,7 +76,9 @@ def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None):
return content
-class TestAzureDataSource(TestCase):
+class TestAzureDataSource(CiTestCase):
+
+ with_logs = True
def setUp(self):
super(TestAzureDataSource, self).setUp()
@@ -160,6 +162,12 @@ scbus-1 on xpt0 bus 0
self.instance_id = 'test-instance-id'
+ def _dmi_mocks(key):
+ if key == 'system-uuid':
+ return self.instance_id
+ elif key == 'chassis-asset-tag':
+ return '7783-7084-3265-9085-8269-3286-77'
+
self.apply_patches([
(dsaz, 'list_possible_azure_ds_devs', dsdevs),
(dsaz, 'invoke_agent', _invoke_agent),
@@ -170,16 +178,22 @@ scbus-1 on xpt0 bus 0
(dsaz, 'set_hostname', mock.MagicMock()),
(dsaz, 'get_metadata_from_fabric', self.get_metadata_from_fabric),
(dsaz.util, 'read_dmi_data', mock.MagicMock(
- return_value=self.instance_id)),
+ side_effect=_dmi_mocks)),
])
- dsrc = dsaz.DataSourceAzureNet(
+ dsrc = dsaz.DataSourceAzure(
data.get('sys_cfg', {}), distro=None, paths=self.paths)
if agent_command is not None:
dsrc.ds_cfg['agent_command'] = agent_command
return dsrc
+ def _get_and_setup(self, dsrc):
+ ret = dsrc.get_data()
+ if ret:
+ dsrc.setup(True)
+ return ret
+
def xml_equals(self, oxml, nxml):
"""Compare two sets of XML to make sure they are equal"""
@@ -241,6 +255,24 @@ fdescfs /dev/fd fdescfs rw 0 0
res = get_path_dev_freebsd('/etc', mnt_list)
self.assertIsNotNone(res)
+ @mock.patch('cloudinit.sources.DataSourceAzure.util.read_dmi_data')
+ def test_non_azure_dmi_chassis_asset_tag(self, m_read_dmi_data):
+ """Report non-azure when DMI's chassis asset tag doesn't match.
+
+ Return False when the asset tag doesn't match Azure's static
+ AZURE_CHASSIS_ASSET_TAG.
+ """
+ # Return a non-matching asset tag value
+ nonazure_tag = dsaz.AZURE_CHASSIS_ASSET_TAG + 'X'
+ m_read_dmi_data.return_value = nonazure_tag
+ dsrc = dsaz.DataSourceAzure(
+ {}, distro=None, paths=self.paths)
+ self.assertFalse(dsrc.get_data())
+ self.assertEqual(
+ "DEBUG: Non-Azure DMI asset tag '{0}' discovered.\n".format(
+ nonazure_tag),
+ self.logs.getvalue())
+
def test_basic_seed_dir(self):
odata = {'HostName': "myhost", 'UserName': "myuser"}
data = {'ovfcontent': construct_valid_ovf_env(data=odata),
@@ -273,7 +305,7 @@ fdescfs /dev/fd fdescfs rw 0 0
data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], cfg['agent_command'])
@@ -286,7 +318,7 @@ fdescfs /dev/fd fdescfs rw 0 0
data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], cfg['agent_command'])
@@ -296,7 +328,7 @@ fdescfs /dev/fd fdescfs rw 0 0
'sys_cfg': sys_cfg}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], '_COMMAND')
@@ -368,7 +400,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
self.assertIn(mypk, dsrc.cfg['_pubkeys'])
@@ -383,7 +415,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
@@ -399,7 +431,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
@@ -493,18 +525,20 @@ fdescfs /dev/fd fdescfs rw 0 0
dsrc.get_data()
def test_exception_fetching_fabric_data_doesnt_propagate(self):
- ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
- ds.ds_cfg['agent_command'] = '__builtin__'
+ """Errors communicating with fabric should warn, but return True."""
+ dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ dsrc.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.side_effect = Exception
- self.assertFalse(ds.get_data())
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
def test_fabric_data_included_in_metadata(self):
- ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
- ds.ds_cfg['agent_command'] = '__builtin__'
+ dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ dsrc.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.return_value = {'test': 'value'}
- ret = ds.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
- self.assertEqual('value', ds.metadata['test'])
+ self.assertEqual('value', dsrc.metadata['test'])
def test_instance_id_from_dmidecode_used(self):
ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
@@ -517,6 +551,95 @@ fdescfs /dev/fd fdescfs rw 0 0
ds.get_data()
self.assertEqual(self.instance_id, ds.metadata['instance-id'])
+ @mock.patch("cloudinit.sources.DataSourceAzure.util.is_FreeBSD")
+ @mock.patch("cloudinit.sources.DataSourceAzure._check_freebsd_cdrom")
+ def test_list_possible_azure_ds_devs(self, m_check_fbsd_cdrom,
+ m_is_FreeBSD):
+ """On FreeBSD, possible devs should show /dev/cd0."""
+ m_is_FreeBSD.return_value = True
+ m_check_fbsd_cdrom.return_value = True
+ self.assertEqual(dsaz.list_possible_azure_ds_devs(), ['/dev/cd0'])
+ self.assertEqual(
+ [mock.call("/dev/cd0")], m_check_fbsd_cdrom.call_args_list)
+
+ @mock.patch('cloudinit.net.get_interface_mac')
+ @mock.patch('cloudinit.net.get_devicelist')
+ @mock.patch('cloudinit.net.device_driver')
+ @mock.patch('cloudinit.net.generate_fallback_config')
+ def test_network_config(self, mock_fallback, mock_dd,
+ mock_devlist, mock_get_mac):
+ odata = {'HostName': "myhost", 'UserName': "myuser"}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+ 'sys_cfg': {}}
+
+ fallback_config = {
+ 'version': 1,
+ 'config': [{
+ 'type': 'physical', 'name': 'eth0',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'hv_netsvc'},
+ 'subnets': [{'type': 'dhcp'}],
+ }]
+ }
+ mock_fallback.return_value = fallback_config
+
+ mock_devlist.return_value = ['eth0']
+ mock_dd.return_value = ['hv_netsvc']
+ mock_get_mac.return_value = '00:11:22:33:44:55'
+
+ dsrc = self._get_ds(data)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+
+ netconfig = dsrc.network_config
+ self.assertEqual(netconfig, fallback_config)
+ mock_fallback.assert_called_with(blacklist_drivers=['mlx4_core'],
+ config_driver=True)
+
+ @mock.patch('cloudinit.net.get_interface_mac')
+ @mock.patch('cloudinit.net.get_devicelist')
+ @mock.patch('cloudinit.net.device_driver')
+ @mock.patch('cloudinit.net.generate_fallback_config')
+ def test_network_config_blacklist(self, mock_fallback, mock_dd,
+ mock_devlist, mock_get_mac):
+ odata = {'HostName': "myhost", 'UserName': "myuser"}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+ 'sys_cfg': {}}
+
+ fallback_config = {
+ 'version': 1,
+ 'config': [{
+ 'type': 'physical', 'name': 'eth0',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'hv_netsvc'},
+ 'subnets': [{'type': 'dhcp'}],
+ }]
+ }
+ blacklist_config = {
+ 'type': 'physical',
+ 'name': 'eth1',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'mlx4_core'}
+ }
+ mock_fallback.return_value = fallback_config
+
+ mock_devlist.return_value = ['eth0', 'eth1']
+ mock_dd.side_effect = [
+ 'hv_netsvc', # list composition, skipped
+ 'mlx4_core', # list composition, match
+ 'mlx4_core', # config get driver name
+ ]
+ mock_get_mac.return_value = '00:11:22:33:44:55'
+
+ dsrc = self._get_ds(data)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+
+ netconfig = dsrc.network_config
+ expected_config = fallback_config
+ expected_config['config'].append(blacklist_config)
+ self.assertEqual(netconfig, expected_config)
+
class TestAzureBounce(TestCase):
@@ -531,9 +654,17 @@ class TestAzureBounce(TestCase):
self.patches.enter_context(
mock.patch.object(dsaz, 'get_metadata_from_fabric',
mock.MagicMock(return_value={})))
+
+ def _dmi_mocks(key):
+ if key == 'system-uuid':
+ return 'test-instance-id'
+ elif key == 'chassis-asset-tag':
+ return '7783-7084-3265-9085-8269-3286-77'
+ raise RuntimeError('should not get here')
+
self.patches.enter_context(
mock.patch.object(dsaz.util, 'read_dmi_data',
- mock.MagicMock(return_value='test-instance-id')))
+ mock.MagicMock(side_effect=_dmi_mocks)))
def setUp(self):
super(TestAzureBounce, self).setUp()
@@ -558,12 +689,18 @@ class TestAzureBounce(TestCase):
if ovfcontent is not None:
populate_dir(os.path.join(self.paths.seed_dir, "azure"),
{'ovf-env.xml': ovfcontent})
- dsrc = dsaz.DataSourceAzureNet(
+ dsrc = dsaz.DataSourceAzure(
{}, distro=None, paths=self.paths)
if agent_command is not None:
dsrc.ds_cfg['agent_command'] = agent_command
return dsrc
+ def _get_and_setup(self, dsrc):
+ ret = dsrc.get_data()
+ if ret:
+ dsrc.setup(True)
+ return ret
+
def get_ovf_env_with_dscfg(self, hostname, cfg):
odata = {
'HostName': hostname,
@@ -607,17 +744,20 @@ class TestAzureBounce(TestCase):
host_name = 'unchanged-host-name'
self.get_hostname.return_value = host_name
cfg = {'hostname_bounce': {'policy': 'force'}}
- self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg),
- agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg),
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, perform_hostname_bounce.call_count)
def test_different_hostnames_sets_hostname(self):
expected_hostname = 'azure-expected-host-name'
self.get_hostname.return_value = 'default-host-name'
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg(expected_hostname, {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(expected_hostname,
self.set_hostname.call_args_list[0][0][0])
@@ -626,19 +766,21 @@ class TestAzureBounce(TestCase):
self, perform_hostname_bounce):
expected_hostname = 'azure-expected-host-name'
self.get_hostname.return_value = 'default-host-name'
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg(expected_hostname, {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, perform_hostname_bounce.call_count)
def test_different_hostnames_sets_hostname_back(self):
initial_host_name = 'default-host-name'
self.get_hostname.return_value = initial_host_name
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg('some-host-name', {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(initial_host_name,
self.set_hostname.call_args_list[-1][0][0])
@@ -648,10 +790,11 @@ class TestAzureBounce(TestCase):
perform_hostname_bounce.side_effect = Exception
initial_host_name = 'default-host-name'
self.get_hostname.return_value = initial_host_name
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg('some-host-name', {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(initial_host_name,
self.set_hostname.call_args_list[-1][0][0])
@@ -662,7 +805,9 @@ class TestAzureBounce(TestCase):
self.get_hostname.return_value = old_hostname
cfg = {'hostname_bounce': {'interface': interface, 'policy': 'force'}}
data = self.get_ovf_env_with_dscfg(hostname, cfg)
- self._get_ds(data, agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, self.subp.call_count)
bounce_env = self.subp.call_args[1]['env']
self.assertEqual(interface, bounce_env['interface'])
@@ -674,7 +819,9 @@ class TestAzureBounce(TestCase):
dsaz.BUILTIN_DS_CONFIG['hostname_bounce']['command'] = cmd
cfg = {'hostname_bounce': {'policy': 'force'}}
data = self.get_ovf_env_with_dscfg('some-hostname', cfg)
- self._get_ds(data, agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, self.subp.call_count)
bounce_args = self.subp.call_args[1]['args']
self.assertEqual(cmd, bounce_args)
@@ -696,6 +843,33 @@ class TestAzureBounce(TestCase):
self.assertEqual(0, self.set_hostname.call_count)
+class TestLoadAzureDsDir(CiTestCase):
+ """Tests for load_azure_ds_dir."""
+
+ def setUp(self):
+ self.source_dir = self.tmp_dir()
+ super(TestLoadAzureDsDir, self).setUp()
+
+ def test_missing_ovf_env_xml_raises_non_azure_datasource_error(self):
+ """load_azure_ds_dir raises an error When ovf-env.xml doesn't exit."""
+ with self.assertRaises(dsaz.NonAzureDataSource) as context_manager:
+ dsaz.load_azure_ds_dir(self.source_dir)
+ self.assertEqual(
+ 'No ovf-env file found',
+ str(context_manager.exception))
+
+ def test_wb_invalid_ovf_env_xml_calls_read_azure_ovf(self):
+ """load_azure_ds_dir calls read_azure_ovf to parse the xml."""
+ ovf_path = os.path.join(self.source_dir, 'ovf-env.xml')
+ with open(ovf_path, 'wb') as stream:
+ stream.write(b'invalid xml')
+ with self.assertRaises(dsaz.BrokenAzureDataSource) as context_manager:
+ dsaz.load_azure_ds_dir(self.source_dir)
+ self.assertEqual(
+ 'Invalid ovf-env.xml: syntax error: line 1, column 0',
+ str(context_manager.exception))
+
+
class TestReadAzureOvf(TestCase):
def test_invalid_xml_raises_non_azure_ds(self):
invalid_xml = "<foo>" + construct_valid_ovf_env(data={})
@@ -903,4 +1077,12 @@ class TestCanDevBeReformatted(CiTestCase):
self.assertEqual(False, value)
self.assertIn("3 or more", msg.lower())
+
+class TestAzureNetExists(CiTestCase):
+ def test_azure_net_must_exist_for_legacy_objpkl(self):
+ """DataSourceAzureNet must exist for old obj.pkl files
+ that reference it."""
+ self.assertTrue(hasattr(dsaz, "DataSourceAzureNet"))
+
+
# vi: ts=4 expandtab
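
For context on the DMI mocking added above: DataSourceAzure now reads two DMI keys, which the tests drive through a side_effect callable plus Azure's fixed chassis asset tag. A minimal, hedged sketch of the gate that test_non_azure_dmi_chassis_asset_tag covers (an illustration, not the actual DataSourceAzure code):

    # Illustrative sketch; the constant value matches the one mocked in the tests above.
    AZURE_CHASSIS_ASSET_TAG = '7783-7084-3265-9085-8269-3286-77'

    def is_azure_chassis(read_dmi_data):
        """Return True only when DMI reports Azure's well-known chassis asset tag."""
        return read_dmi_data('chassis-asset-tag') == AZURE_CHASSIS_ASSET_TAG

    # e.g. with a side_effect-style fake such as _dmi_mocks above:
    assert is_azure_chassis(lambda key: '7783-7084-3265-9085-8269-3286-77')
    assert not is_azure_chassis(lambda key: '7783-7084-3265-9085-8269-3286-77' + 'X')
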
diff --git a/tests/unittests/test_datasource/test_common.py b/tests/unittests/test_datasource/test_common.py
index c08717f3..413e87ac 100644
--- a/tests/unittests/test_datasource/test_common.py
+++ b/tests/unittests/test_datasource/test_common.py
@@ -19,6 +19,7 @@ from cloudinit.sources import (
DataSourceOpenNebula as OpenNebula,
DataSourceOpenStack as OpenStack,
DataSourceOVF as OVF,
+ DataSourceScaleway as Scaleway,
DataSourceSmartOS as SmartOS,
)
from cloudinit.sources import DataSourceNone as DSNone
@@ -26,6 +27,7 @@ from cloudinit.sources import DataSourceNone as DSNone
from .. import helpers as test_helpers
DEFAULT_LOCAL = [
+ Azure.DataSourceAzure,
CloudSigma.DataSourceCloudSigma,
ConfigDrive.DataSourceConfigDrive,
DigitalOcean.DataSourceDigitalOcean,
@@ -36,8 +38,8 @@ DEFAULT_LOCAL = [
]
DEFAULT_NETWORK = [
+ AliYun.DataSourceAliYun,
AltCloud.DataSourceAltCloud,
- Azure.DataSourceAzureNet,
Bigstep.DataSourceBigstep,
CloudStack.DataSourceCloudStack,
DSNone.DataSourceNone,
@@ -47,6 +49,7 @@ DEFAULT_NETWORK = [
NoCloud.DataSourceNoCloudNet,
OpenStack.DataSourceOpenStack,
OVF.DataSourceOVFNet,
+ Scaleway.DataSourceScaleway,
]
diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py
new file mode 100644
index 00000000..12230ae2
--- /dev/null
+++ b/tests/unittests/test_datasource/test_ec2.py
@@ -0,0 +1,202 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import httpretty
+import mock
+
+from .. import helpers as test_helpers
+from cloudinit import helpers
+from cloudinit.sources import DataSourceEc2 as ec2
+
+
+# collected from api version 2009-04-04/ with
+# python3 -c 'import json
+# from cloudinit.ec2_utils import get_instance_metadata as gm
+# print(json.dumps(gm("2009-04-04"), indent=1, sort_keys=True))'
+DEFAULT_METADATA = {
+ "ami-id": "ami-80861296",
+ "ami-launch-index": "0",
+ "ami-manifest-path": "(unknown)",
+ "block-device-mapping": {"ami": "/dev/sda1", "root": "/dev/sda1"},
+ "hostname": "ip-10-0-0-149",
+ "instance-action": "none",
+ "instance-id": "i-0052913950685138c",
+ "instance-type": "t2.micro",
+ "local-hostname": "ip-10-0-0-149",
+ "local-ipv4": "10.0.0.149",
+ "placement": {"availability-zone": "us-east-1b"},
+ "profile": "default-hvm",
+ "public-hostname": "",
+ "public-ipv4": "107.23.188.247",
+ "public-keys": {"brickies": ["ssh-rsa AAAAB3Nz....w== brickies"]},
+ "reservation-id": "r-00a2c173fb5782a08",
+ "security-groups": "wide-open"
+}
+
+
+def _register_ssh_keys(rfunc, base_url, keys_data):
+ """handle ssh key inconsistencies.
+
+ public-keys in the ec2 metadata is inconsistently formatted compared
+ to other entries.
+ Given keys_data of {name1: pubkey1, name2: pubkey2}
+
+ This registers the following urls:
+ base_url 0={name1}\n1={name2} # (for each name)
+ base_url/ 0={name1}\n1={name2} # (for each name)
+ base_url/0 openssh-key
+ base_url/0/ openssh-key
+ base_url/0/openssh-key {pubkey1}
+ base_url/0/openssh-key/ {pubkey1}
+ ...
+ """
+
+ base_url = base_url.rstrip("/")
+ odd_index = '\n'.join(
+ ["{0}={1}".format(n, name)
+ for n, name in enumerate(sorted(keys_data))])
+
+ rfunc(base_url, odd_index)
+ rfunc(base_url + "/", odd_index)
+
+ for n, name in enumerate(sorted(keys_data)):
+ val = keys_data[name]
+ if isinstance(val, list):
+ val = '\n'.join(val)
+ burl = base_url + "/%s" % n
+ rfunc(burl, "openssh-key")
+ rfunc(burl + "/", "openssh-key")
+ rfunc(burl + "/%s/openssh-key" % name, val)
+ rfunc(burl + "/%s/openssh-key/" % name, val)
+
+
+def register_mock_metaserver(base_url, data):
+ """Register with httpretty a ec2 metadata like service serving 'data'.
+
+ If given a dictionary, it will populate urls under base_url for
+ that dictionary. For example, input of
+ {"instance-id": "i-abc", "mac": "00:16:3e:00:00:00"}
+ populates
+ base_url with 'instance-id\nmac'
+ base_url/ with 'instance-id\nmac'
+ base_url/instance-id with i-abc
+ base_url/mac with 00:16:3e:00:00:00
+ In the index, references to lists or dictionaries have a trailing /.
+ """
+ def register_helper(register, base_url, body):
+ base_url = base_url.rstrip("/")
+ if isinstance(body, str):
+ register(base_url, body)
+ elif isinstance(body, list):
+ register(base_url, '\n'.join(body) + '\n')
+ register(base_url + '/', '\n'.join(body) + '\n')
+ elif isinstance(body, dict):
+ vals = []
+ for k, v in body.items():
+ if k == 'public-keys':
+ _register_ssh_keys(
+ register, base_url + '/public-keys/', v)
+ continue
+ suffix = k.rstrip("/")
+ if not isinstance(v, (str, list)):
+ suffix += "/"
+ vals.append(suffix)
+ url = base_url + '/' + suffix
+ register_helper(register, url, v)
+ register(base_url, '\n'.join(vals) + '\n')
+ register(base_url + '/', '\n'.join(vals) + '\n')
+ elif body is None:
+ register(base_url, 'not found', status_code=404)
+
+ def myreg(*argc, **kwargs):
+ # print("register_url(%s, %s)" % (argc, kwargs))
+ return httpretty.register_uri(httpretty.GET, *argc, **kwargs)
+
+ register_helper(myreg, base_url, data)
+
+
+class TestEc2(test_helpers.HttprettyTestCase):
+ valid_platform_data = {
+ 'uuid': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412',
+ 'uuid_source': 'dmi',
+ 'serial': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412',
+ }
+
+ def setUp(self):
+ super(TestEc2, self).setUp()
+ self.metadata_addr = ec2.DataSourceEc2.metadata_urls[0]
+ self.api_ver = '2009-04-04'
+
+ @property
+ def metadata_url(self):
+ return '/'.join([self.metadata_addr, self.api_ver, 'meta-data', ''])
+
+ @property
+ def userdata_url(self):
+ return '/'.join([self.metadata_addr, self.api_ver, 'user-data'])
+
+ def _patch_add_cleanup(self, mpath, *args, **kwargs):
+ p = mock.patch(mpath, *args, **kwargs)
+ p.start()
+ self.addCleanup(p.stop)
+
+ def _setup_ds(self, sys_cfg, platform_data, md, ud=None):
+ distro = {}
+ paths = helpers.Paths({})
+ if sys_cfg is None:
+ sys_cfg = {}
+ ds = ec2.DataSourceEc2(sys_cfg=sys_cfg, distro=distro, paths=paths)
+ if platform_data is not None:
+ self._patch_add_cleanup(
+ "cloudinit.sources.DataSourceEc2._collect_platform_data",
+ return_value=platform_data)
+
+ if md:
+ register_mock_metaserver(self.metadata_url, md)
+ register_mock_metaserver(self.userdata_url, ud)
+
+ return ds
+
+ @httpretty.activate
+ def test_valid_platform_with_strict_true(self):
+ """Valid platform data should return true with strict_id true."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+ @httpretty.activate
+ def test_valid_platform_with_strict_false(self):
+ """Valid platform data should return true with strict_id false."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+ @httpretty.activate
+ def test_unknown_platform_with_strict_true(self):
+ """Unknown platform data with strict_id true should return False."""
+ uuid = 'ab439480-72bf-11d3-91fc-b8aded755F9a'
+ ds = self._setup_ds(
+ platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
+ sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(False, ret)
+
+ @httpretty.activate
+ def test_unknown_platform_with_strict_false(self):
+ """Unknown platform data with strict_id false should return True."""
+ uuid = 'ab439480-72bf-11d3-91fc-b8aded755F9a'
+ ds = self._setup_ds(
+ platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+
+# vi: ts=4 expandtab
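
The four EC2 cases boil down to one decision table: a recognized platform proceeds regardless of strict_id, while an unrecognized platform proceeds only when strict_id is false. A hedged sketch of that gating (names are illustrative, not the DataSourceEc2 implementation):

    # Illustrative sketch of the behaviour asserted by the strict_id tests above.
    def should_fetch_metadata(platform_is_ec2, strict_id):
        if platform_is_ec2:
            return True            # valid platform data: always proceed
        return not strict_id       # unknown platform: proceed only when not strict

    assert should_fetch_metadata(True, True) and should_fetch_metadata(True, False)
    assert not should_fetch_metadata(False, True)
    assert should_fetch_metadata(False, False)
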
diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py
index 6fd1341d..ad608bec 100644
--- a/tests/unittests/test_datasource/test_gce.py
+++ b/tests/unittests/test_datasource/test_gce.py
@@ -72,11 +72,11 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
self.ds = DataSourceGCE.DataSourceGCE(
settings.CFG_BUILTIN, None,
helpers.Paths({}))
- self.m_platform_reports_gce = mock.patch(
- 'cloudinit.sources.DataSourceGCE.platform_reports_gce',
- return_value=True)
- self.m_platform_reports_gce.start()
- self.addCleanup(self.m_platform_reports_gce.stop)
+ ppatch = self.m_platform_reports_gce = mock.patch(
+ 'cloudinit.sources.DataSourceGCE.platform_reports_gce')
+ self.m_platform_reports_gce = ppatch.start()
+ self.m_platform_reports_gce.return_value = True
+ self.addCleanup(ppatch.stop)
super(TestDataSourceGCE, self).setUp()
def test_connection(self):
@@ -163,9 +163,12 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
self.assertEqual(True, r)
self.assertEqual('bar', self.ds.availability_zone)
- def test_get_data_returns_false_if_not_on_gce(self):
+ @mock.patch("cloudinit.sources.DataSourceGCE.GoogleMetadataFetcher")
+ def test_get_data_returns_false_if_not_on_gce(self, m_fetcher):
self.m_platform_reports_gce.return_value = False
- self.assertEqual(False, self.ds.get_data())
+ ret = self.ds.get_data()
+ self.assertEqual(False, ret)
+ m_fetcher.assert_not_called()
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_datasource/test_scaleway.py b/tests/unittests/test_datasource/test_scaleway.py
new file mode 100644
index 00000000..65d83ad7
--- /dev/null
+++ b/tests/unittests/test_datasource/test_scaleway.py
@@ -0,0 +1,262 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import json
+
+import httpretty
+import requests
+
+from cloudinit import helpers
+from cloudinit import settings
+from cloudinit.sources import DataSourceScaleway
+
+from ..helpers import mock, HttprettyTestCase, TestCase
+
+
+class DataResponses(object):
+ """
+ Possible responses of the API endpoint
+ 169.254.42.42/user_data/cloud-init and
+ 169.254.42.42/vendor_data/cloud-init.
+ """
+
+ FAKE_USER_DATA = '#!/bin/bash\necho "user-data"'
+
+ @staticmethod
+ def rate_limited(method, uri, headers):
+ return 429, headers, ''
+
+ @staticmethod
+ def api_error(method, uri, headers):
+ return 500, headers, ''
+
+ @classmethod
+ def get_ok(cls, method, uri, headers):
+ return 200, headers, cls.FAKE_USER_DATA
+
+ @staticmethod
+ def empty(method, uri, headers):
+ """
+ No user data for this server.
+ """
+ return 404, headers, ''
+
+
+class MetadataResponses(object):
+ """
+ Possible responses of the metadata API.
+ """
+
+ FAKE_METADATA = {
+ 'id': '00000000-0000-0000-0000-000000000000',
+ 'hostname': 'scaleway.host',
+ 'ssh_public_keys': [{
+ 'key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA',
+ 'fingerprint': '2048 06:ae:... login (RSA)'
+ }, {
+ 'key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABCCCCC',
+ 'fingerprint': '2048 06:ff:... login2 (RSA)'
+ }]
+ }
+
+ @classmethod
+ def get_ok(cls, method, uri, headers):
+ return 200, headers, json.dumps(cls.FAKE_METADATA)
+
+
+class TestOnScaleway(TestCase):
+
+ def install_mocks(self, fake_dmi, fake_file_exists, fake_cmdline):
+ mock, faked = fake_dmi
+ mock.return_value = 'Scaleway' if faked else 'Whatever'
+
+ mock, faked = fake_file_exists
+ mock.return_value = faked
+
+ mock, faked = fake_cmdline
+ mock.return_value = \
+ 'initrd=initrd showopts scaleway nousb' if faked \
+ else 'BOOT_IMAGE=/vmlinuz-3.11.0-26-generic'
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_not_on_scaleway(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertFalse(DataSourceScaleway.on_scaleway())
+
+ # When not on Scaleway, get_data() returns False.
+ datasource = DataSourceScaleway.DataSourceScaleway(
+ settings.CFG_BUILTIN, None, helpers.Paths({})
+ )
+ self.assertFalse(datasource.get_data())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_dmi(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ dmidecode returns "Scaleway".
+ """
+ # dmidecode returns "Scaleway"
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, True),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_var_run_scaleway(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ /var/run/scaleway exists.
+ """
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, True),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_cmdline(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ "scaleway" in /proc/cmdline.
+ """
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, True)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+
+def get_source_address_adapter(*args, **kwargs):
+ """
+    The Scaleway user/vendor data API requires calls to come from a privileged port.
+
+ If the unittests are run as non-root, the user doesn't have the permission
+ to bind on ports below 1024.
+
+    This function removes the bind to a privileged source address, since the
+    HTTP call is mocked by httpretty anyway.
+ """
+ kwargs.pop('source_address')
+ return requests.adapters.HTTPAdapter(*args, **kwargs)
+
+
+class TestDataSourceScaleway(HttprettyTestCase):
+
+ def setUp(self):
+ self.datasource = DataSourceScaleway.DataSourceScaleway(
+ settings.CFG_BUILTIN, None, helpers.Paths({})
+ )
+ super(TestDataSourceScaleway, self).setUp()
+
+ self.metadata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['metadata_url']
+ self.userdata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['userdata_url']
+ self.vendordata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['vendordata_url']
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_ok(self, sleep, m_get_cmdline):
+ """
+ get_data() returns metadata, user data and vendor data.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ # Make user data API return a valid response
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.userdata_url,
+ body=DataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.get_ok)
+ self.datasource.get_data()
+
+ self.assertEqual(self.datasource.get_instance_id(),
+ MetadataResponses.FAKE_METADATA['id'])
+ self.assertEqual(self.datasource.get_public_ssh_keys(), [
+ elem['key'] for elem in
+ MetadataResponses.FAKE_METADATA['ssh_public_keys']
+ ])
+ self.assertEqual(self.datasource.get_hostname(),
+ MetadataResponses.FAKE_METADATA['hostname'])
+ self.assertEqual(self.datasource.get_userdata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertEqual(self.datasource.get_vendordata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertIsNone(self.datasource.availability_zone)
+ self.assertIsNone(self.datasource.region)
+ self.assertEqual(sleep.call_count, 0)
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_404(self, sleep, m_get_cmdline):
+ """
+ get_data() returns metadata, but no user data nor vendor data.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ # Make user and vendor data APIs return HTTP/404, which means there is
+ # no user / vendor data for the server.
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.userdata_url,
+ body=DataResponses.empty)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.empty)
+ self.datasource.get_data()
+ self.assertIsNone(self.datasource.get_userdata_raw())
+ self.assertIsNone(self.datasource.get_vendordata_raw())
+ self.assertEqual(sleep.call_count, 0)
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_rate_limit(self, sleep, m_get_cmdline):
+ """
+ get_data() is rate limited two times by the metadata API when fetching
+ user data.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.empty)
+
+ httpretty.register_uri(
+ httpretty.GET, self.userdata_url,
+ responses=[
+ httpretty.Response(body=DataResponses.rate_limited),
+ httpretty.Response(body=DataResponses.rate_limited),
+ httpretty.Response(body=DataResponses.get_ok),
+ ]
+ )
+ self.datasource.get_data()
+ self.assertEqual(self.datasource.get_userdata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertEqual(sleep.call_count, 2)
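
test_metadata_rate_limit registers two HTTP 429 responses before a 200 and asserts that time.sleep() was called twice, i.e. the datasource retries with a back-off when rate limited. A hedged sketch of such a retry loop (an illustration of the behaviour under test, not the actual DataSourceScaleway helper):

    # Illustrative retry sketch matching what test_metadata_rate_limit asserts.
    import time
    import requests

    def fetch_with_retries(url, retries=5, wait=2):
        for _ in range(retries):
            resp = requests.get(url)
            if resp.status_code == 429:   # rate limited: back off, then retry
                time.sleep(wait)
                continue
            if resp.status_code == 404:   # no user/vendor data for this server
                return None
            resp.raise_for_status()
            return resp.text
        raise RuntimeError('still rate limited after %d attempts' % retries)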