author     zsdc <taras@vyos.io>  2020-09-15 17:05:20 +0300
committer  zsdc <taras@vyos.io>  2020-09-15 17:05:20 +0300
commit     7cd260b313267dc7123cb99a75d4555e24909cca (patch)
tree       f57f3db085a724df237ffa64b589c6bb6dd3b28f /tests/unittests
parent     1a790ee102fd405e5c3a20a17a69ba0c118ed874 (diff)
parent     948bd9c1fcd08346cf8ec0551d7f6c2b234e896b (diff)
T2117: Cloud-init updated to 20.3
Merged with 20.3 tag from the upstream Cloud-init repository
Diffstat (limited to 'tests/unittests')
-rw-r--r--  tests/unittests/test_builtin_handlers.py | 9
-rw-r--r--  tests/unittests/test_cli.py | 6
-rw-r--r--  tests/unittests/test_cs_util.py | 2
-rw-r--r--  tests/unittests/test_data.py | 61
-rw-r--r--  tests/unittests/test_datasource/test_aliyun.py | 2
-rw-r--r--  tests/unittests/test_datasource/test_altcloud.py | 7
-rw-r--r--  tests/unittests/test_datasource/test_azure.py | 114
-rw-r--r--  tests/unittests/test_datasource/test_azure_helper.py | 406
-rw-r--r--  tests/unittests/test_datasource/test_cloudsigma.py | 6
-rw-r--r--  tests/unittests/test_datasource/test_cloudstack.py | 2
-rw-r--r--  tests/unittests/test_datasource/test_ec2.py | 395
-rw-r--r--  tests/unittests/test_datasource/test_gce.py | 3
-rw-r--r--  tests/unittests/test_datasource/test_hetzner.py | 23
-rw-r--r--  tests/unittests/test_datasource/test_ibmcloud.py | 7
-rw-r--r--  tests/unittests/test_datasource/test_maas.py | 1
-rw-r--r--  tests/unittests/test_datasource/test_nocloud.py | 17
-rw-r--r--  tests/unittests/test_datasource/test_opennebula.py | 157
-rw-r--r--  tests/unittests/test_datasource/test_openstack.py | 20
-rw-r--r--  tests/unittests/test_datasource/test_ovf.py | 93
-rw-r--r--  tests/unittests/test_datasource/test_rbx.py | 34
-rw-r--r--  tests/unittests/test_datasource/test_scaleway.py | 81
-rw-r--r--  tests/unittests/test_datasource/test_smartos.py | 20
-rw-r--r--  tests/unittests/test_distros/test_bsd_utils.py | 67
-rw-r--r--  tests/unittests/test_distros/test_create_users.py | 8
-rw-r--r--  tests/unittests/test_distros/test_debian.py | 2
-rw-r--r--  tests/unittests/test_distros/test_freebsd.py | 4
-rw-r--r--  tests/unittests/test_distros/test_generic.py | 190
-rw-r--r--  tests/unittests/test_distros/test_netbsd.py | 17
-rw-r--r--  tests/unittests/test_distros/test_netconfig.py | 7
-rw-r--r--  tests/unittests/test_distros/test_user_data_normalize.py | 6
-rw-r--r--  tests/unittests/test_ds_identify.py | 59
-rw-r--r--  tests/unittests/test_handler/test_handler_apk_configure.py | 299
-rw-r--r--  tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py | 11
-rw-r--r--  tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py | 5
-rw-r--r--  tests/unittests/test_handler/test_handler_apt_source_v1.py | 20
-rw-r--r--  tests/unittests/test_handler/test_handler_apt_source_v3.py | 58
-rw-r--r--  tests/unittests/test_handler/test_handler_bootcmd.py | 6
-rw-r--r--  tests/unittests/test_handler/test_handler_ca_certs.py | 44
-rw-r--r--  tests/unittests/test_handler/test_handler_chef.py | 16
-rw-r--r--  tests/unittests/test_handler/test_handler_disk_setup.py | 8
-rw-r--r--  tests/unittests/test_handler/test_handler_etc_hosts.py | 8
-rw-r--r--  tests/unittests/test_handler/test_handler_growpart.py | 16
-rw-r--r--  tests/unittests/test_handler/test_handler_landscape.py | 6
-rw-r--r--  tests/unittests/test_handler/test_handler_locale.py | 4
-rw-r--r--  tests/unittests/test_handler/test_handler_lxd.py | 28
-rw-r--r--  tests/unittests/test_handler/test_handler_mcollective.py | 7
-rw-r--r--  tests/unittests/test_handler/test_handler_mounts.py | 138
-rw-r--r--  tests/unittests/test_handler/test_handler_ntp.py | 180
-rw-r--r--  tests/unittests/test_handler/test_handler_power_state.py | 29
-rw-r--r--  tests/unittests/test_handler/test_handler_puppet.py | 51
-rw-r--r--  tests/unittests/test_handler/test_handler_runcmd.py | 4
-rw-r--r--  tests/unittests/test_handler/test_handler_seed_random.py | 5
-rw-r--r--  tests/unittests/test_handler/test_handler_spacewalk.py | 20
-rw-r--r--  tests/unittests/test_handler/test_handler_write_files.py | 85
-rw-r--r--  tests/unittests/test_handler/test_handler_yum_add_repo.py | 17
-rw-r--r--  tests/unittests/test_handler/test_handler_zypper_add_repo.py | 11
-rw-r--r--  tests/unittests/test_handler/test_schema.py | 112
-rw-r--r--  tests/unittests/test_net.py | 168
-rw-r--r--  tests/unittests/test_net_freebsd.py | 2
-rw-r--r--  tests/unittests/test_render_cloudcfg.py | 59
-rw-r--r--  tests/unittests/test_reporting.py | 1
-rw-r--r--  tests/unittests/test_reporting_hyperv.py | 93
-rw-r--r--  tests/unittests/test_rh_subscription.py | 18
-rw-r--r--  tests/unittests/test_sshutil.py | 271
-rw-r--r--  tests/unittests/test_templating.py | 2
-rw-r--r--  tests/unittests/test_util.py | 408
-rw-r--r--  tests/unittests/test_vmware/test_guestcust_util.py | 46
-rw-r--r--  tests/unittests/test_vmware_config_file.py | 22
68 files changed, 3086 insertions, 1018 deletions
diff --git a/tests/unittests/test_builtin_handlers.py b/tests/unittests/test_builtin_handlers.py
index b92ffc79..c5675249 100644
--- a/tests/unittests/test_builtin_handlers.py
+++ b/tests/unittests/test_builtin_handlers.py
@@ -15,6 +15,7 @@ from cloudinit.tests.helpers import (
from cloudinit import handlers
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.handlers.cloud_config import CloudConfigPartHandler
@@ -66,7 +67,7 @@ class TestUpstartJobPartHandler(FilesystemMockingTestCase):
util.ensure_dir("/etc/upstart")
with mock.patch(self.mpath + 'SUITABLE_UPSTART', return_value=True):
- with mock.patch.object(util, 'subp') as m_subp:
+ with mock.patch.object(subp, 'subp') as m_subp:
h = UpstartJobPartHandler(paths)
h.handle_part('', handlers.CONTENT_START,
None, None, None)
@@ -109,7 +110,7 @@ class TestJinjaTemplatePartHandler(CiTestCase):
cloudconfig_handler = CloudConfigPartHandler(self.paths)
h = JinjaTemplatePartHandler(
self.paths, sub_handlers=[script_handler, cloudconfig_handler])
- self.assertItemsEqual(
+ self.assertCountEqual(
['text/cloud-config', 'text/cloud-config-jsonp',
'text/x-shellscript'],
h.sub_handlers)
@@ -120,7 +121,7 @@ class TestJinjaTemplatePartHandler(CiTestCase):
cloudconfig_handler = CloudConfigPartHandler(self.paths)
h = JinjaTemplatePartHandler(
self.paths, sub_handlers=[script_handler, cloudconfig_handler])
- self.assertItemsEqual(
+ self.assertCountEqual(
['text/cloud-config', 'text/cloud-config-jsonp',
'text/x-shellscript'],
h.sub_handlers)
@@ -302,7 +303,7 @@ class TestConvertJinjaInstanceData(CiTestCase):
expected_data.update({'v1key1': 'v1.1', 'v2key1': 'v2.1'})
converted_data = convert_jinja_instance_data(data=data)
- self.assertItemsEqual(
+ self.assertCountEqual(
['ds', 'v1', 'v2', 'v1key1', 'v2key1'], converted_data.keys())
self.assertEqual(
expected_data,
diff --git a/tests/unittests/test_cli.py b/tests/unittests/test_cli.py
index e57c15d1..dcf0fe5a 100644
--- a/tests/unittests/test_cli.py
+++ b/tests/unittests/test_cli.py
@@ -214,17 +214,17 @@ class TestCLI(test_helpers.FilesystemMockingTestCase):
self.assertEqual(1, exit_code)
# Known whitebox output from schema subcommand
self.assertEqual(
- 'Expected either --config-file argument or --doc\n',
+ 'Expected either --config-file argument or --docs\n',
self.stderr.getvalue())
def test_wb_devel_schema_subcommand_doc_content(self):
"""Validate that doc content is sane from known examples."""
stdout = io.StringIO()
self.patchStdoutAndStderr(stdout=stdout)
- self._call_main(['cloud-init', 'devel', 'schema', '--doc'])
+ self._call_main(['cloud-init', 'devel', 'schema', '--docs', 'all'])
expected_doc_sections = [
'**Supported distros:** all',
- '**Supported distros:** centos, debian, fedora',
+ '**Supported distros:** alpine, centos, debian, fedora',
'**Config schema**:\n **resize_rootfs:** (true/false/noblock)',
'**Examples**::\n\n runcmd:\n - [ ls, -l, / ]\n'
]
diff --git a/tests/unittests/test_cs_util.py b/tests/unittests/test_cs_util.py
index 2a1095b9..bfd07ecf 100644
--- a/tests/unittests/test_cs_util.py
+++ b/tests/unittests/test_cs_util.py
@@ -1,7 +1,5 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from __future__ import print_function
-
from cloudinit.tests import helpers as test_helpers
from cloudinit.cs_utils import Cepko
diff --git a/tests/unittests/test_data.py b/tests/unittests/test_data.py
index 74cc26ec..fb2b55e8 100644
--- a/tests/unittests/test_data.py
+++ b/tests/unittests/test_data.py
@@ -213,6 +213,40 @@ c: d
self.assertEqual(1, len(cc))
self.assertEqual('c', cc['a'])
+ def test_cloud_config_as_x_shell_script(self):
+ blob_cc = '''
+#cloud-config
+a: b
+c: d
+'''
+ message_cc = MIMEBase("text", "x-shellscript")
+ message_cc.set_payload(blob_cc)
+
+ blob_jp = '''
+#cloud-config-jsonp
+[
+ { "op": "replace", "path": "/a", "value": "c" },
+ { "op": "remove", "path": "/c" }
+]
+'''
+
+ message_jp = MIMEBase('text', "cloud-config-jsonp")
+ message_jp.set_payload(blob_jp)
+
+ message = MIMEMultipart()
+ message.attach(message_cc)
+ message.attach(message_jp)
+
+ self.reRoot()
+ ci = stages.Init()
+ ci.datasource = FakeDataSource(str(message))
+ ci.fetch()
+ ci.consume_data()
+ cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
+ cc = util.load_yaml(cc_contents)
+ self.assertEqual(1, len(cc))
+ self.assertEqual('c', cc['a'])
+
def test_vendor_user_yaml_cloud_config(self):
vendor_blob = '''
#cloud-config
@@ -605,6 +639,33 @@ class TestConsumeUserDataHttp(TestConsumeUserData, helpers.HttprettyTestCase):
self.reRoot()
ci = stages.Init()
ci.datasource = FakeDataSource(blob)
+ ci.fetch()
+ with self.assertRaises(Exception) as context:
+ ci.consume_data()
+ self.assertIn('403', str(context.exception))
+
+ with self.assertRaises(FileNotFoundError):
+ util.load_file(ci.paths.get_ipath("cloud_config"))
+
+ @mock.patch('cloudinit.url_helper.time.sleep')
+ @mock.patch(
+ "cloudinit.user_data.features.ERROR_ON_USER_DATA_FAILURE", False
+ )
+ def test_include_bad_url_no_fail(self, mock_sleep):
+ """Test #include with a bad URL and failure disabled"""
+ bad_url = 'http://bad/forbidden'
+ bad_data = '#cloud-config\nbad: true\n'
+ httpretty.register_uri(httpretty.GET, bad_url, bad_data, status=403)
+
+ included_url = 'http://hostname/path'
+ included_data = '#cloud-config\nincluded: true\n'
+ httpretty.register_uri(httpretty.GET, included_url, included_data)
+
+ blob = '#include\n%s\n%s' % (bad_url, included_url)
+
+ self.reRoot()
+ ci = stages.Init()
+ ci.datasource = FakeDataSource(blob)
log_file = self.capture_log(logging.WARNING)
ci.fetch()
ci.consume_data()
diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py
index 1e66fcdb..b626229e 100644
--- a/tests/unittests/test_datasource/test_aliyun.py
+++ b/tests/unittests/test_datasource/test_aliyun.py
@@ -143,7 +143,7 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase):
self.assertEqual('aliyun', self.ds.cloud_name)
self.assertEqual('ec2', self.ds.platform)
self.assertEqual(
- 'metadata (http://100.100.100.200)', self.ds.subplatform)
+ 'metadata (http://100.100.100.200)', self.ds.subplatform)
@mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun")
def test_returns_false_when_not_on_aliyun(self, m_is_aliyun):
diff --git a/tests/unittests/test_datasource/test_altcloud.py b/tests/unittests/test_datasource/test_altcloud.py
index 3119bfac..fc59d1d5 100644
--- a/tests/unittests/test_datasource/test_altcloud.py
+++ b/tests/unittests/test_datasource/test_altcloud.py
@@ -15,6 +15,7 @@ import shutil
import tempfile
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.tests.helpers import CiTestCase, mock
@@ -286,7 +287,7 @@ class TestUserDataRhevm(CiTestCase):
def test_modprobe_fails(self):
'''Test user_data_rhevm() where modprobe fails.'''
- self.m_modprobe_floppy.side_effect = util.ProcessExecutionError(
+ self.m_modprobe_floppy.side_effect = subp.ProcessExecutionError(
"Failed modprobe")
dsrc = dsac.DataSourceAltCloud({}, None, self.paths)
self.assertEqual(False, dsrc.user_data_rhevm())
@@ -294,7 +295,7 @@ class TestUserDataRhevm(CiTestCase):
def test_no_modprobe_cmd(self):
'''Test user_data_rhevm() with no modprobe command.'''
- self.m_modprobe_floppy.side_effect = util.ProcessExecutionError(
+ self.m_modprobe_floppy.side_effect = subp.ProcessExecutionError(
"No such file or dir")
dsrc = dsac.DataSourceAltCloud({}, None, self.paths)
self.assertEqual(False, dsrc.user_data_rhevm())
@@ -302,7 +303,7 @@ class TestUserDataRhevm(CiTestCase):
def test_udevadm_fails(self):
'''Test user_data_rhevm() where udevadm fails.'''
- self.m_udevadm_settle.side_effect = util.ProcessExecutionError(
+ self.m_udevadm_settle.side_effect = subp.ProcessExecutionError(
"Failed settle.")
dsrc = dsac.DataSourceAltCloud({}, None, self.paths)
self.assertEqual(False, dsrc.user_data_rhevm())
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index a809fd87..47e03bd1 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -114,14 +114,14 @@ NETWORK_METADATA = {
"ipv4": {
"subnet": [
{
- "prefix": "24",
- "address": "10.0.0.0"
+ "prefix": "24",
+ "address": "10.0.0.0"
}
],
"ipAddress": [
{
- "privateIpAddress": "10.0.0.4",
- "publicIpAddress": "104.46.124.81"
+ "privateIpAddress": "10.0.0.4",
+ "publicIpAddress": "104.46.124.81"
}
]
}
@@ -278,6 +278,23 @@ class TestParseNetworkConfig(CiTestCase):
}
self.assertEqual(expected, dsaz.parse_network_config(imds_data))
+ @mock.patch('cloudinit.sources.DataSourceAzure.device_driver',
+ return_value='hv_netvsc')
+ def test_match_driver_for_netvsc(self, m_driver):
+ """parse_network_config emits driver when using netvsc."""
+ expected = {'ethernets': {
+ 'eth0': {
+ 'dhcp4': True,
+ 'dhcp4-overrides': {'route-metric': 100},
+ 'dhcp6': False,
+ 'match': {
+ 'macaddress': '00:0d:3a:04:75:98',
+ 'driver': 'hv_netvsc',
+ },
+ 'set-name': 'eth0'
+ }}, 'version': 2}
+ self.assertEqual(expected, dsaz.parse_network_config(NETWORK_METADATA))
+
class TestGetMetadataFromIMDS(HttprettyTestCase):
@@ -383,8 +400,6 @@ class TestGetMetadataFromIMDS(HttprettyTestCase):
class TestAzureDataSource(CiTestCase):
- with_logs = True
-
def setUp(self):
super(TestAzureDataSource, self).setUp()
self.tmp = self.tmp_dir()
@@ -493,7 +508,7 @@ scbus-1 on xpt0 bus 0
(dsaz, 'get_hostname', mock.MagicMock()),
(dsaz, 'set_hostname', mock.MagicMock()),
(dsaz, 'get_metadata_from_fabric', self.get_metadata_from_fabric),
- (dsaz.util, 'which', lambda x: True),
+ (dsaz.subp, 'which', lambda x: True),
(dsaz.util, 'read_dmi_data', mock.MagicMock(
side_effect=_dmi_mocks)),
(dsaz.util, 'wait_for_files', mock.MagicMock(
@@ -530,14 +545,14 @@ scbus-1 on xpt0 bus 0
def tags_exists(x, y):
for tag in x.keys():
- self.assertIn(tag, y)
+ assert tag in y
for tag in y.keys():
- self.assertIn(tag, x)
+ assert tag in x
def tags_equal(x, y):
for x_val in x.values():
y_val = y.get(x_val.tag)
- self.assertEqual(x_val.text, y_val.text)
+ assert x_val.text == y_val.text
old_cnt = create_tag_index(oxml)
new_cnt = create_tag_index(nxml)
@@ -651,7 +666,7 @@ scbus-1 on xpt0 bus 0
crawled_metadata = dsrc.crawl_metadata()
- self.assertItemsEqual(
+ self.assertCountEqual(
crawled_metadata.keys(),
['cfg', 'files', 'metadata', 'userdata_raw'])
self.assertEqual(crawled_metadata['cfg'], expected_cfg)
@@ -685,15 +700,17 @@ scbus-1 on xpt0 bus 0
'cloudinit.sources.DataSourceAzure.DataSourceAzure._report_ready')
@mock.patch('cloudinit.sources.DataSourceAzure.DataSourceAzure._poll_imds')
def test_crawl_metadata_on_reprovision_reports_ready(
- self, poll_imds_func,
- report_ready_func,
- m_write, m_dhcp):
+ self, poll_imds_func, report_ready_func, m_write, m_dhcp
+ ):
"""If reprovisioning, report ready at the end"""
ovfenv = construct_valid_ovf_env(
- platform_settings={"PreprovisionedVm": "True"})
+ platform_settings={"PreprovisionedVm": "True"}
+ )
- data = {'ovfcontent': ovfenv,
- 'sys_cfg': {}}
+ data = {
+ 'ovfcontent': ovfenv,
+ 'sys_cfg': {}
+ }
dsrc = self._get_ds(data)
poll_imds_func.return_value = ovfenv
dsrc.crawl_metadata()
@@ -708,15 +725,18 @@ scbus-1 on xpt0 bus 0
@mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery')
@mock.patch('cloudinit.sources.DataSourceAzure.readurl')
def test_crawl_metadata_on_reprovision_reports_ready_using_lease(
- self, m_readurl, m_dhcp,
- m_net, report_ready_func,
- m_media_switch, m_write):
+ self, m_readurl, m_dhcp, m_net, report_ready_func,
+ m_media_switch, m_write
+ ):
"""If reprovisioning, report ready using the obtained lease"""
ovfenv = construct_valid_ovf_env(
- platform_settings={"PreprovisionedVm": "True"})
+ platform_settings={"PreprovisionedVm": "True"}
+ )
- data = {'ovfcontent': ovfenv,
- 'sys_cfg': {}}
+ data = {
+ 'ovfcontent': ovfenv,
+ 'sys_cfg': {}
+ }
dsrc = self._get_ds(data)
lease = {
@@ -1269,20 +1289,20 @@ scbus-1 on xpt0 bus 0
expected_config['config'].append(blacklist_config)
self.assertEqual(netconfig, expected_config)
- @mock.patch(MOCKPATH + 'util.subp')
- def test_get_hostname_with_no_args(self, subp):
+ @mock.patch(MOCKPATH + 'subp.subp')
+ def test_get_hostname_with_no_args(self, m_subp):
dsaz.get_hostname()
- subp.assert_called_once_with(("hostname",), capture=True)
+ m_subp.assert_called_once_with(("hostname",), capture=True)
- @mock.patch(MOCKPATH + 'util.subp')
- def test_get_hostname_with_string_arg(self, subp):
+ @mock.patch(MOCKPATH + 'subp.subp')
+ def test_get_hostname_with_string_arg(self, m_subp):
dsaz.get_hostname(hostname_command="hostname")
- subp.assert_called_once_with(("hostname",), capture=True)
+ m_subp.assert_called_once_with(("hostname",), capture=True)
- @mock.patch(MOCKPATH + 'util.subp')
- def test_get_hostname_with_iterable_arg(self, subp):
+ @mock.patch(MOCKPATH + 'subp.subp')
+ def test_get_hostname_with_iterable_arg(self, m_subp):
dsaz.get_hostname(hostname_command=("hostname",))
- subp.assert_called_once_with(("hostname",), capture=True)
+ m_subp.assert_called_once_with(("hostname",), capture=True)
class TestAzureBounce(CiTestCase):
@@ -1304,7 +1324,7 @@ class TestAzureBounce(CiTestCase):
mock.patch.object(dsaz, 'get_metadata_from_imds',
mock.MagicMock(return_value={})))
self.patches.enter_context(
- mock.patch.object(dsaz.util, 'which', lambda x: True))
+ mock.patch.object(dsaz.subp, 'which', lambda x: True))
self.patches.enter_context(mock.patch.object(
dsaz, '_get_random_seed', return_value='wild'))
@@ -1333,7 +1353,7 @@ class TestAzureBounce(CiTestCase):
self.set_hostname = self.patches.enter_context(
mock.patch.object(dsaz, 'set_hostname'))
self.subp = self.patches.enter_context(
- mock.patch(MOCKPATH + 'util.subp'))
+ mock.patch(MOCKPATH + 'subp.subp'))
self.find_fallback_nic = self.patches.enter_context(
mock.patch('cloudinit.net.find_fallback_nic', return_value='eth9'))
@@ -1416,7 +1436,7 @@ class TestAzureBounce(CiTestCase):
cfg = {'hostname_bounce': {'policy': 'force'}}
dsrc = self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg),
agent_command=['not', '__builtin__'])
- patch_path = MOCKPATH + 'util.which'
+ patch_path = MOCKPATH + 'subp.which'
with mock.patch(patch_path) as m_which:
m_which.return_value = None
ret = self._get_and_setup(dsrc)
@@ -1521,6 +1541,17 @@ class TestAzureBounce(CiTestCase):
self.assertEqual(0, self.set_hostname.call_count)
+ @mock.patch(MOCKPATH + 'perform_hostname_bounce')
+ def test_set_hostname_failed_disable_bounce(
+ self, perform_hostname_bounce):
+ cfg = {'set_hostname': True, 'hostname_bounce': {'policy': 'force'}}
+ self.get_hostname.return_value = "old-hostname"
+ self.set_hostname.side_effect = Exception
+ data = self.get_ovf_env_with_dscfg('some-hostname', cfg)
+ self._get_ds(data).get_data()
+
+ self.assertEqual(0, perform_hostname_bounce.call_count)
+
class TestLoadAzureDsDir(CiTestCase):
"""Tests for load_azure_ds_dir."""
@@ -1953,11 +1984,12 @@ class TestPreprovisioningPollIMDS(CiTestCase):
self.tries += 1
if self.tries == 1:
raise requests.Timeout('Fake connection timeout')
- elif self.tries == 2:
+ elif self.tries in (2, 3):
response = requests.Response()
- response.status_code = 404
+ response.status_code = 404 if self.tries == 2 else 410
raise requests.exceptions.HTTPError(
- "fake 404", response=response)
+ "fake {}".format(response.status_code), response=response
+ )
# Third try should succeed and stop retries or redhcp
return mock.MagicMock(status_code=200, text="good", content="good")
@@ -1969,7 +2001,7 @@ class TestPreprovisioningPollIMDS(CiTestCase):
self.assertEqual(report_ready_func.call_count, 1)
report_ready_func.assert_called_with(lease=lease)
self.assertEqual(3, m_dhcpv4.call_count, 'Expected 3 DHCP calls')
- self.assertEqual(3, self.tries, 'Expected 3 total reads from IMDS')
+ self.assertEqual(4, self.tries, 'Expected 4 total reads from IMDS')
def test_poll_imds_report_ready_false(self,
report_ready_func, fake_resp,
@@ -1989,7 +2021,7 @@ class TestPreprovisioningPollIMDS(CiTestCase):
self.assertEqual(report_ready_func.call_count, 0)
-@mock.patch(MOCKPATH + 'util.subp')
+@mock.patch(MOCKPATH + 'subp.subp')
@mock.patch(MOCKPATH + 'util.write_file')
@mock.patch(MOCKPATH + 'util.is_FreeBSD')
@mock.patch('cloudinit.sources.helpers.netlink.'
@@ -2160,7 +2192,7 @@ class TestWBIsPlatformViable(CiTestCase):
{'os.path.exists': False,
# Non-matching Azure chassis-asset-tag
'util.read_dmi_data': dsaz.AZURE_CHASSIS_ASSET_TAG + 'X',
- 'util.which': None},
+ 'subp.which': None},
dsaz._is_platform_viable, 'doesnotmatter'))
self.assertIn(
"DEBUG: Non-Azure DMI asset tag '{0}' discovered.\n".format(
diff --git a/tests/unittests/test_datasource/test_azure_helper.py b/tests/unittests/test_datasource/test_azure_helper.py
index 007df09f..5e6d3d2d 100644
--- a/tests/unittests/test_datasource/test_azure_helper.py
+++ b/tests/unittests/test_datasource/test_azure_helper.py
@@ -1,8 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
import os
-import unittest2
+import re
+import unittest
from textwrap import dedent
+from xml.etree import ElementTree
from cloudinit.sources.helpers import azure as azure_helper
from cloudinit.tests.helpers import CiTestCase, ExitStack, mock, populate_dir
@@ -48,6 +50,30 @@ GOAL_STATE_TEMPLATE = """\
</GoalState>
"""
+HEALTH_REPORT_XML_TEMPLATE = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<Health xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema">
+ <GoalStateIncarnation>{incarnation}</GoalStateIncarnation>
+ <Container>
+ <ContainerId>{container_id}</ContainerId>
+ <RoleInstanceList>
+ <Role>
+ <InstanceId>{instance_id}</InstanceId>
+ <Health>
+ <State>{health_status}</State>
+ {health_detail_subsection}
+ </Health>
+ </Role>
+ </RoleInstanceList>
+ </Container>
+</Health>
+'''
+
+
+class SentinelException(Exception):
+ pass
+
class TestFindEndpoint(CiTestCase):
@@ -140,9 +166,7 @@ class TestGoalStateParsing(CiTestCase):
'certificates_url': 'MyCertificatesUrl',
}
- def _get_goal_state(self, http_client=None, **kwargs):
- if http_client is None:
- http_client = mock.MagicMock()
+ def _get_formatted_goal_state_xml_string(self, **kwargs):
parameters = self.default_parameters.copy()
parameters.update(kwargs)
xml = GOAL_STATE_TEMPLATE.format(**parameters)
@@ -153,7 +177,13 @@ class TestGoalStateParsing(CiTestCase):
continue
new_xml_lines.append(line)
xml = '\n'.join(new_xml_lines)
- return azure_helper.GoalState(xml, http_client)
+ return xml
+
+ def _get_goal_state(self, m_azure_endpoint_client=None, **kwargs):
+ if m_azure_endpoint_client is None:
+ m_azure_endpoint_client = mock.MagicMock()
+ xml = self._get_formatted_goal_state_xml_string(**kwargs)
+ return azure_helper.GoalState(xml, m_azure_endpoint_client)
def test_incarnation_parsed_correctly(self):
incarnation = '123'
@@ -190,25 +220,55 @@ class TestGoalStateParsing(CiTestCase):
azure_helper.is_byte_swapped(previous_iid, current_iid))
def test_certificates_xml_parsed_and_fetched_correctly(self):
- http_client = mock.MagicMock()
+ m_azure_endpoint_client = mock.MagicMock()
certificates_url = 'TestCertificatesUrl'
goal_state = self._get_goal_state(
- http_client=http_client, certificates_url=certificates_url)
+ m_azure_endpoint_client=m_azure_endpoint_client,
+ certificates_url=certificates_url)
certificates_xml = goal_state.certificates_xml
- self.assertEqual(1, http_client.get.call_count)
- self.assertEqual(certificates_url, http_client.get.call_args[0][0])
- self.assertTrue(http_client.get.call_args[1].get('secure', False))
- self.assertEqual(http_client.get.return_value.contents,
- certificates_xml)
+ self.assertEqual(1, m_azure_endpoint_client.get.call_count)
+ self.assertEqual(
+ certificates_url,
+ m_azure_endpoint_client.get.call_args[0][0])
+ self.assertTrue(
+ m_azure_endpoint_client.get.call_args[1].get(
+ 'secure', False))
+ self.assertEqual(
+ m_azure_endpoint_client.get.return_value.contents,
+ certificates_xml)
def test_missing_certificates_skips_http_get(self):
- http_client = mock.MagicMock()
+ m_azure_endpoint_client = mock.MagicMock()
goal_state = self._get_goal_state(
- http_client=http_client, certificates_url=None)
+ m_azure_endpoint_client=m_azure_endpoint_client,
+ certificates_url=None)
certificates_xml = goal_state.certificates_xml
- self.assertEqual(0, http_client.get.call_count)
+ self.assertEqual(0, m_azure_endpoint_client.get.call_count)
self.assertIsNone(certificates_xml)
+ def test_invalid_goal_state_xml_raises_parse_error(self):
+ xml = 'random non-xml data'
+ with self.assertRaises(ElementTree.ParseError):
+ azure_helper.GoalState(xml, mock.MagicMock())
+
+ def test_missing_container_id_in_goal_state_xml_raises_exc(self):
+ xml = self._get_formatted_goal_state_xml_string()
+ xml = re.sub('<ContainerId>.*</ContainerId>', '', xml)
+ with self.assertRaises(azure_helper.InvalidGoalStateXMLException):
+ azure_helper.GoalState(xml, mock.MagicMock())
+
+ def test_missing_instance_id_in_goal_state_xml_raises_exc(self):
+ xml = self._get_formatted_goal_state_xml_string()
+ xml = re.sub('<InstanceId>.*</InstanceId>', '', xml)
+ with self.assertRaises(azure_helper.InvalidGoalStateXMLException):
+ azure_helper.GoalState(xml, mock.MagicMock())
+
+ def test_missing_incarnation_in_goal_state_xml_raises_exc(self):
+ xml = self._get_formatted_goal_state_xml_string()
+ xml = re.sub('<Incarnation>.*</Incarnation>', '', xml)
+ with self.assertRaises(azure_helper.InvalidGoalStateXMLException):
+ azure_helper.GoalState(xml, mock.MagicMock())
+
class TestAzureEndpointHttpClient(CiTestCase):
@@ -222,61 +282,95 @@ class TestAzureEndpointHttpClient(CiTestCase):
patches = ExitStack()
self.addCleanup(patches.close)
- self.read_file_or_url = patches.enter_context(
- mock.patch.object(azure_helper.url_helper, 'read_file_or_url'))
+ self.readurl = patches.enter_context(
+ mock.patch.object(azure_helper.url_helper, 'readurl'))
+ patches.enter_context(
+ mock.patch.object(azure_helper.time, 'sleep', mock.MagicMock()))
def test_non_secure_get(self):
client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
url = 'MyTestUrl'
response = client.get(url, secure=False)
- self.assertEqual(1, self.read_file_or_url.call_count)
- self.assertEqual(self.read_file_or_url.return_value, response)
+ self.assertEqual(1, self.readurl.call_count)
+ self.assertEqual(self.readurl.return_value, response)
self.assertEqual(
- mock.call(url, headers=self.regular_headers, retries=10,
- timeout=5),
- self.read_file_or_url.call_args)
+ mock.call(url, headers=self.regular_headers,
+ timeout=5, retries=10, sec_between=5),
+ self.readurl.call_args)
+
+ def test_non_secure_get_raises_exception(self):
+ client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
+ self.readurl.side_effect = SentinelException
+ url = 'MyTestUrl'
+ with self.assertRaises(SentinelException):
+ client.get(url, secure=False)
def test_secure_get(self):
url = 'MyTestUrl'
- certificate = mock.MagicMock()
+ m_certificate = mock.MagicMock()
expected_headers = self.regular_headers.copy()
expected_headers.update({
"x-ms-cipher-name": "DES_EDE3_CBC",
- "x-ms-guest-agent-public-x509-cert": certificate,
+ "x-ms-guest-agent-public-x509-cert": m_certificate,
})
- client = azure_helper.AzureEndpointHttpClient(certificate)
+ client = azure_helper.AzureEndpointHttpClient(m_certificate)
response = client.get(url, secure=True)
- self.assertEqual(1, self.read_file_or_url.call_count)
- self.assertEqual(self.read_file_or_url.return_value, response)
+ self.assertEqual(1, self.readurl.call_count)
+ self.assertEqual(self.readurl.return_value, response)
self.assertEqual(
- mock.call(url, headers=expected_headers, retries=10,
- timeout=5),
- self.read_file_or_url.call_args)
+ mock.call(url, headers=expected_headers,
+ timeout=5, retries=10, sec_between=5),
+ self.readurl.call_args)
+
+ def test_secure_get_raises_exception(self):
+ url = 'MyTestUrl'
+ client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
+ self.readurl.side_effect = SentinelException
+ with self.assertRaises(SentinelException):
+ client.get(url, secure=True)
def test_post(self):
- data = mock.MagicMock()
+ m_data = mock.MagicMock()
url = 'MyTestUrl'
client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
- response = client.post(url, data=data)
- self.assertEqual(1, self.read_file_or_url.call_count)
- self.assertEqual(self.read_file_or_url.return_value, response)
+ response = client.post(url, data=m_data)
+ self.assertEqual(1, self.readurl.call_count)
+ self.assertEqual(self.readurl.return_value, response)
self.assertEqual(
- mock.call(url, data=data, headers=self.regular_headers, retries=10,
- timeout=5),
- self.read_file_or_url.call_args)
+ mock.call(url, data=m_data, headers=self.regular_headers,
+ timeout=5, retries=10, sec_between=5),
+ self.readurl.call_args)
+
+ def test_post_raises_exception(self):
+ m_data = mock.MagicMock()
+ url = 'MyTestUrl'
+ client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
+ self.readurl.side_effect = SentinelException
+ with self.assertRaises(SentinelException):
+ client.post(url, data=m_data)
def test_post_with_extra_headers(self):
url = 'MyTestUrl'
client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
extra_headers = {'test': 'header'}
client.post(url, extra_headers=extra_headers)
- self.assertEqual(1, self.read_file_or_url.call_count)
expected_headers = self.regular_headers.copy()
expected_headers.update(extra_headers)
+ self.assertEqual(1, self.readurl.call_count)
self.assertEqual(
mock.call(mock.ANY, data=mock.ANY, headers=expected_headers,
- retries=10, timeout=5),
- self.read_file_or_url.call_args)
+ timeout=5, retries=10, sec_between=5),
+ self.readurl.call_args)
+
+ def test_post_with_sleep_with_extra_headers_raises_exception(self):
+ m_data = mock.MagicMock()
+ url = 'MyTestUrl'
+ extra_headers = {'test': 'header'}
+ client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
+ self.readurl.side_effect = SentinelException
+ with self.assertRaises(SentinelException):
+ client.post(
+ url, data=m_data, extra_headers=extra_headers)
class TestOpenSSLManager(CiTestCase):
@@ -287,7 +381,7 @@ class TestOpenSSLManager(CiTestCase):
self.addCleanup(patches.close)
self.subp = patches.enter_context(
- mock.patch.object(azure_helper.util, 'subp'))
+ mock.patch.object(azure_helper.subp, 'subp'))
try:
self.open = patches.enter_context(
mock.patch('__builtin__.open'))
@@ -332,7 +426,7 @@ class TestOpenSSLManagerActions(CiTestCase):
path = 'tests/data/azure'
return os.path.join(path, name)
- @unittest2.skip("todo move to cloud_test")
+ @unittest.skip("todo move to cloud_test")
def test_pubkey_extract(self):
cert = load_file(self._data_file('pubkey_extract_cert'))
good_key = load_file(self._data_file('pubkey_extract_ssh_key'))
@@ -344,7 +438,7 @@ class TestOpenSSLManagerActions(CiTestCase):
fingerprint = sslmgr._get_fingerprint_from_cert(cert)
self.assertEqual(good_fingerprint, fingerprint)
- @unittest2.skip("todo move to cloud_test")
+ @unittest.skip("todo move to cloud_test")
@mock.patch.object(azure_helper.OpenSSLManager, '_decrypt_certs_from_xml')
def test_parse_certificates(self, mock_decrypt_certs):
"""Azure control plane puts private keys as well as certificates
@@ -365,6 +459,131 @@ class TestOpenSSLManagerActions(CiTestCase):
self.assertIn(fp, keys_by_fp)
+class TestGoalStateHealthReporter(CiTestCase):
+
+ default_parameters = {
+ 'incarnation': 1634,
+ 'container_id': 'MyContainerId',
+ 'instance_id': 'MyInstanceId'
+ }
+
+ test_endpoint = 'TestEndpoint'
+ test_url = 'http://{0}/machine?comp=health'.format(test_endpoint)
+ test_default_headers = {'Content-Type': 'text/xml; charset=utf-8'}
+
+ provisioning_success_status = 'Ready'
+
+ def setUp(self):
+ super(TestGoalStateHealthReporter, self).setUp()
+ patches = ExitStack()
+ self.addCleanup(patches.close)
+
+ patches.enter_context(
+ mock.patch.object(azure_helper.time, 'sleep', mock.MagicMock()))
+ self.read_file_or_url = patches.enter_context(
+ mock.patch.object(azure_helper.url_helper, 'read_file_or_url'))
+
+ self.post = patches.enter_context(
+ mock.patch.object(azure_helper.AzureEndpointHttpClient,
+ 'post'))
+
+ self.GoalState = patches.enter_context(
+ mock.patch.object(azure_helper, 'GoalState'))
+ self.GoalState.return_value.container_id = \
+ self.default_parameters['container_id']
+ self.GoalState.return_value.instance_id = \
+ self.default_parameters['instance_id']
+ self.GoalState.return_value.incarnation = \
+ self.default_parameters['incarnation']
+
+ def _get_formatted_health_report_xml_string(self, **kwargs):
+ return HEALTH_REPORT_XML_TEMPLATE.format(**kwargs)
+
+ def _get_report_ready_health_document(self):
+ return self._get_formatted_health_report_xml_string(
+ incarnation=self.default_parameters['incarnation'],
+ container_id=self.default_parameters['container_id'],
+ instance_id=self.default_parameters['instance_id'],
+ health_status=self.provisioning_success_status,
+ health_detail_subsection='')
+
+ def test_send_ready_signal_sends_post_request(self):
+ with mock.patch.object(
+ azure_helper.GoalStateHealthReporter,
+ 'build_report') as m_build_report:
+ client = azure_helper.AzureEndpointHttpClient(mock.MagicMock())
+ reporter = azure_helper.GoalStateHealthReporter(
+ azure_helper.GoalState(mock.MagicMock(), mock.MagicMock()),
+ client, self.test_endpoint)
+ reporter.send_ready_signal()
+
+ self.assertEqual(1, self.post.call_count)
+ self.assertEqual(
+ mock.call(
+ self.test_url,
+ data=m_build_report.return_value,
+ extra_headers=self.test_default_headers),
+ self.post.call_args)
+
+ def test_build_report_for_health_document(self):
+ health_document = self._get_report_ready_health_document()
+ reporter = azure_helper.GoalStateHealthReporter(
+ azure_helper.GoalState(mock.MagicMock(), mock.MagicMock()),
+ azure_helper.AzureEndpointHttpClient(mock.MagicMock()),
+ self.test_endpoint)
+ generated_health_document = reporter.build_report(
+ incarnation=self.default_parameters['incarnation'],
+ container_id=self.default_parameters['container_id'],
+ instance_id=self.default_parameters['instance_id'],
+ status=self.provisioning_success_status)
+ self.assertEqual(health_document, generated_health_document)
+ self.assertIn(
+ '<GoalStateIncarnation>{}</GoalStateIncarnation>'.format(
+ str(self.default_parameters['incarnation'])),
+ generated_health_document)
+ self.assertIn(
+ ''.join([
+ '<ContainerId>',
+ self.default_parameters['container_id'],
+ '</ContainerId>']),
+ generated_health_document)
+ self.assertIn(
+ ''.join([
+ '<InstanceId>',
+ self.default_parameters['instance_id'],
+ '</InstanceId>']),
+ generated_health_document)
+ self.assertIn(
+ ''.join([
+ '<State>',
+ self.provisioning_success_status,
+ '</State>']),
+ generated_health_document
+ )
+ self.assertNotIn('<Details>', generated_health_document)
+ self.assertNotIn('<SubStatus>', generated_health_document)
+ self.assertNotIn('<Description>', generated_health_document)
+
+ def test_send_ready_signal_calls_build_report(self):
+ with mock.patch.object(
+ azure_helper.GoalStateHealthReporter, 'build_report'
+ ) as m_build_report:
+ reporter = azure_helper.GoalStateHealthReporter(
+ azure_helper.GoalState(mock.MagicMock(), mock.MagicMock()),
+ azure_helper.AzureEndpointHttpClient(mock.MagicMock()),
+ self.test_endpoint)
+ reporter.send_ready_signal()
+
+ self.assertEqual(1, m_build_report.call_count)
+ self.assertEqual(
+ mock.call(
+ incarnation=self.default_parameters['incarnation'],
+ container_id=self.default_parameters['container_id'],
+ instance_id=self.default_parameters['instance_id'],
+ status=self.provisioning_success_status),
+ m_build_report.call_args)
+
+
class TestWALinuxAgentShim(CiTestCase):
def setUp(self):
@@ -383,14 +602,21 @@ class TestWALinuxAgentShim(CiTestCase):
patches.enter_context(
mock.patch.object(azure_helper.time, 'sleep', mock.MagicMock()))
- def test_http_client_uses_certificate(self):
+ self.test_incarnation = 'TestIncarnation'
+ self.test_container_id = 'TestContainerId'
+ self.test_instance_id = 'TestInstanceId'
+ self.GoalState.return_value.incarnation = self.test_incarnation
+ self.GoalState.return_value.container_id = self.test_container_id
+ self.GoalState.return_value.instance_id = self.test_instance_id
+
+ def test_azure_endpoint_client_uses_certificate_during_report_ready(self):
shim = wa_shim()
shim.register_with_azure_and_fetch_data()
self.assertEqual(
[mock.call(self.OpenSSLManager.return_value.certificate)],
self.AzureEndpointHttpClient.call_args_list)
- def test_correct_url_used_for_goalstate(self):
+ def test_correct_url_used_for_goalstate_during_report_ready(self):
self.find_endpoint.return_value = 'test_endpoint'
shim = wa_shim()
shim.register_with_azure_and_fetch_data()
@@ -404,11 +630,10 @@ class TestWALinuxAgentShim(CiTestCase):
self.GoalState.call_args_list)
def test_certificates_used_to_determine_public_keys(self):
+ # if register_with_azure_and_fetch_data() isn't passed some info about
+ # the user's public keys, there's no point in even trying to parse the
+ # certificates
shim = wa_shim()
- """if register_with_azure_and_fetch_data() isn't passed some info about
- the user's public keys, there's no point in even trying to parse
- the certificates
- """
mypk = [{'fingerprint': 'fp1', 'path': 'path1'},
{'fingerprint': 'fp3', 'path': 'path3', 'value': ''}]
certs = {'fp1': 'expected-key',
@@ -439,43 +664,67 @@ class TestWALinuxAgentShim(CiTestCase):
expected_url = 'http://test_endpoint/machine?comp=health'
self.assertEqual(
[mock.call(expected_url, data=mock.ANY, extra_headers=mock.ANY)],
- self.AzureEndpointHttpClient.return_value.post.call_args_list)
+ self.AzureEndpointHttpClient.return_value.post
+ .call_args_list)
def test_goal_state_values_used_for_report_ready(self):
- self.GoalState.return_value.incarnation = 'TestIncarnation'
- self.GoalState.return_value.container_id = 'TestContainerId'
- self.GoalState.return_value.instance_id = 'TestInstanceId'
shim = wa_shim()
shim.register_with_azure_and_fetch_data()
posted_document = (
- self.AzureEndpointHttpClient.return_value.post.call_args[1]['data']
+ self.AzureEndpointHttpClient.return_value.post
+ .call_args[1]['data']
)
- self.assertIn('TestIncarnation', posted_document)
- self.assertIn('TestContainerId', posted_document)
- self.assertIn('TestInstanceId', posted_document)
+ self.assertIn(self.test_incarnation, posted_document)
+ self.assertIn(self.test_container_id, posted_document)
+ self.assertIn(self.test_instance_id, posted_document)
+
+ def test_xml_elems_in_report_ready(self):
+ shim = wa_shim()
+ shim.register_with_azure_and_fetch_data()
+ health_document = HEALTH_REPORT_XML_TEMPLATE.format(
+ incarnation=self.test_incarnation,
+ container_id=self.test_container_id,
+ instance_id=self.test_instance_id,
+ health_status='Ready',
+ health_detail_subsection='')
+ posted_document = (
+ self.AzureEndpointHttpClient.return_value.post
+ .call_args[1]['data'])
+ self.assertEqual(health_document, posted_document)
def test_clean_up_can_be_called_at_any_time(self):
shim = wa_shim()
shim.clean_up()
- def test_clean_up_will_clean_up_openssl_manager_if_instantiated(self):
+ def test_clean_up_after_report_ready(self):
shim = wa_shim()
shim.register_with_azure_and_fetch_data()
shim.clean_up()
self.assertEqual(
1, self.OpenSSLManager.return_value.clean_up.call_count)
- def test_failure_to_fetch_goalstate_bubbles_up(self):
- class SentinelException(Exception):
- pass
- self.AzureEndpointHttpClient.return_value.get.side_effect = (
- SentinelException)
+ def test_fetch_goalstate_during_report_ready_raises_exc_on_get_exc(self):
+ self.AzureEndpointHttpClient.return_value.get \
+ .side_effect = (SentinelException)
shim = wa_shim()
self.assertRaises(SentinelException,
shim.register_with_azure_and_fetch_data)
+ def test_fetch_goalstate_during_report_ready_raises_exc_on_parse_exc(self):
+ self.GoalState.side_effect = SentinelException
+ shim = wa_shim()
+ self.assertRaises(SentinelException,
+ shim.register_with_azure_and_fetch_data)
-class TestGetMetadataFromFabric(CiTestCase):
+ def test_failure_to_send_report_ready_health_doc_bubbles_up(self):
+ self.AzureEndpointHttpClient.return_value.post \
+ .side_effect = SentinelException
+ shim = wa_shim()
+ self.assertRaises(SentinelException,
+ shim.register_with_azure_and_fetch_data)
+
+
+class TestGetMetadataGoalStateXMLAndReportReadyToFabric(CiTestCase):
@mock.patch.object(azure_helper, 'WALinuxAgentShim')
def test_data_from_shim_returned(self, shim):
@@ -491,14 +740,39 @@ class TestGetMetadataFromFabric(CiTestCase):
@mock.patch.object(azure_helper, 'WALinuxAgentShim')
def test_failure_in_registration_calls_clean_up(self, shim):
- class SentinelException(Exception):
- pass
shim.return_value.register_with_azure_and_fetch_data.side_effect = (
SentinelException)
self.assertRaises(SentinelException,
azure_helper.get_metadata_from_fabric)
self.assertEqual(1, shim.return_value.clean_up.call_count)
+ @mock.patch.object(azure_helper, 'WALinuxAgentShim')
+ def test_calls_shim_register_with_azure_and_fetch_data(self, shim):
+ m_pubkey_info = mock.MagicMock()
+ azure_helper.get_metadata_from_fabric(pubkey_info=m_pubkey_info)
+ self.assertEqual(
+ 1,
+ shim.return_value
+ .register_with_azure_and_fetch_data.call_count)
+ self.assertEqual(
+ mock.call(pubkey_info=m_pubkey_info),
+ shim.return_value
+ .register_with_azure_and_fetch_data.call_args)
+
+ @mock.patch.object(azure_helper, 'WALinuxAgentShim')
+ def test_instantiates_shim_with_kwargs(self, shim):
+ m_fallback_lease_file = mock.MagicMock()
+ m_dhcp_options = mock.MagicMock()
+ azure_helper.get_metadata_from_fabric(
+ fallback_lease_file=m_fallback_lease_file,
+ dhcp_opts=m_dhcp_options)
+ self.assertEqual(1, shim.call_count)
+ self.assertEqual(
+ mock.call(
+ fallback_lease_file=m_fallback_lease_file,
+ dhcp_options=m_dhcp_options),
+ shim.call_args)
+
class TestExtractIpAddressFromNetworkd(CiTestCase):
diff --git a/tests/unittests/test_datasource/test_cloudsigma.py b/tests/unittests/test_datasource/test_cloudsigma.py
index d62d542b..7aa3b1d1 100644
--- a/tests/unittests/test_datasource/test_cloudsigma.py
+++ b/tests/unittests/test_datasource/test_cloudsigma.py
@@ -3,6 +3,7 @@
import copy
from cloudinit.cs_utils import Cepko
+from cloudinit import distros
from cloudinit import helpers
from cloudinit import sources
from cloudinit.sources import DataSourceCloudSigma
@@ -47,8 +48,11 @@ class DataSourceCloudSigmaTest(test_helpers.CiTestCase):
self.paths = helpers.Paths({'run_dir': self.tmp_dir()})
self.add_patch(DS_PATH + '.is_running_in_cloudsigma',
"m_is_container", return_value=True)
+
+ distro_cls = distros.fetch("ubuntu")
+ distro = distro_cls("ubuntu", cfg={}, paths=self.paths)
self.datasource = DataSourceCloudSigma.DataSourceCloudSigma(
- "", "", paths=self.paths)
+ sys_cfg={}, distro=distro, paths=self.paths)
self.datasource.cepko = CepkoMock(SERVER_CONTEXT)
def test_get_hostname(self):
diff --git a/tests/unittests/test_datasource/test_cloudstack.py b/tests/unittests/test_datasource/test_cloudstack.py
index 83c2f753..e68168f2 100644
--- a/tests/unittests/test_datasource/test_cloudstack.py
+++ b/tests/unittests/test_datasource/test_cloudstack.py
@@ -41,7 +41,7 @@ class TestCloudStackPasswordFetching(CiTestCase):
def _set_password_server_response(self, response_string):
subp = mock.MagicMock(return_value=(response_string, ''))
self.patches.enter_context(
- mock.patch('cloudinit.sources.DataSourceCloudStack.util.subp',
+ mock.patch('cloudinit.sources.DataSourceCloudStack.subp.subp',
subp))
return subp
diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py
index 2a96122f..a93f2195 100644
--- a/tests/unittests/test_datasource/test_ec2.py
+++ b/tests/unittests/test_datasource/test_ec2.py
@@ -3,6 +3,7 @@
import copy
import httpretty
import json
+import requests
from unittest import mock
from cloudinit import helpers
@@ -37,6 +38,8 @@ DYNAMIC_METADATA = {
# python3 -c 'import json
# from cloudinit.ec2_utils import get_instance_metadata as gm
# print(json.dumps(gm("2016-09-02"), indent=1, sort_keys=True))'
+# Note that the MAC addresses have been modified to sort in the opposite order
+# to the device-number attribute, to test LP: #1876312
DEFAULT_METADATA = {
"ami-id": "ami-8b92b4ee",
"ami-launch-index": "0",
@@ -76,7 +79,7 @@ DEFAULT_METADATA = {
"vpc-ipv4-cidr-blocks": "172.31.0.0/16",
"vpc-ipv6-cidr-blocks": "2600:1f16:aeb:b200::/56"
},
- "06:17:04:d7:26:0A": {
+ "06:17:04:d7:26:08": {
"device-number": "1", # Only IPv4 local config
"interface-id": "eni-e44ef49f",
"ipv4-associations": {"": "172.3.3.16"},
@@ -84,7 +87,7 @@ DEFAULT_METADATA = {
"local-hostname": ("ip-172-3-3-16.us-east-2."
"compute.internal"),
"local-ipv4s": "172.3.3.16",
- "mac": "06:17:04:d7:26:0A",
+ "mac": "06:17:04:d7:26:08",
"owner-id": "950047163771",
"public-hostname": ("ec2-172-3-3-16.us-east-2."
"compute.amazonaws.com"),
@@ -112,6 +115,122 @@ DEFAULT_METADATA = {
"services": {"domain": "amazonaws.com", "partition": "aws"},
}
+# collected from api version 2018-09-24/ with
+# python3 -c 'import json
+# from cloudinit.ec2_utils import get_instance_metadata as gm
+# print(json.dumps(gm("2018-09-24"), indent=1, sort_keys=True))'
+
+NIC1_MD_IPV4_IPV6_MULTI_IP = {
+ "device-number": "0",
+ "interface-id": "eni-0d6335689899ce9cc",
+ "ipv4-associations": {
+ "18.218.219.181": "172.31.44.13"
+ },
+ "ipv6s": [
+ "2600:1f16:292:100:c187:593c:4349:136",
+ "2600:1f16:292:100:f153:12a3:c37c:11f9",
+ "2600:1f16:292:100:f152:2222:3333:4444"
+ ],
+ "local-hostname": ("ip-172-31-44-13.us-east-2."
+ "compute.internal"),
+ "local-ipv4s": [
+ "172.31.44.13",
+ "172.31.45.70"
+ ],
+ "mac": "0a:07:84:3d:6e:38",
+ "owner-id": "329910648901",
+ "public-hostname": ("ec2-18-218-219-181.us-east-2."
+ "compute.amazonaws.com"),
+ "public-ipv4s": "18.218.219.181",
+ "security-group-ids": "sg-0c387755222ba8d2e",
+ "security-groups": "launch-wizard-4",
+ "subnet-id": "subnet-9d7ba0d1",
+ "subnet-ipv4-cidr-block": "172.31.32.0/20",
+ "subnet_ipv6_cidr_blocks": "2600:1f16:292:100::/64",
+ "vpc-id": "vpc-a07f62c8",
+ "vpc-ipv4-cidr-block": "172.31.0.0/16",
+ "vpc-ipv4-cidr-blocks": "172.31.0.0/16",
+ "vpc_ipv6_cidr_blocks": "2600:1f16:292:100::/56"
+}
+
+NIC2_MD = {
+ "device-number": "1",
+ "interface-id": "eni-043cdce36ded5e79f",
+ "local-hostname": "ip-172-31-47-221.us-east-2.compute.internal",
+ "local-ipv4s": "172.31.47.221",
+ "mac": "0a:75:69:92:e2:16",
+ "owner-id": "329910648901",
+ "security-group-ids": "sg-0d68fef37d8cc9b77",
+ "security-groups": "launch-wizard-17",
+ "subnet-id": "subnet-9d7ba0d1",
+ "subnet-ipv4-cidr-block": "172.31.32.0/20",
+ "vpc-id": "vpc-a07f62c8",
+ "vpc-ipv4-cidr-block": "172.31.0.0/16",
+ "vpc-ipv4-cidr-blocks": "172.31.0.0/16"
+}
+
+SECONDARY_IP_METADATA_2018_09_24 = {
+ "ami-id": "ami-0986c2ac728528ac2",
+ "ami-launch-index": "0",
+ "ami-manifest-path": "(unknown)",
+ "block-device-mapping": {
+ "ami": "/dev/sda1",
+ "root": "/dev/sda1"
+ },
+ "events": {
+ "maintenance": {
+ "history": "[]",
+ "scheduled": "[]"
+ }
+ },
+ "hostname": "ip-172-31-44-13.us-east-2.compute.internal",
+ "identity-credentials": {
+ "ec2": {
+ "info": {
+ "AccountId": "329910648901",
+ "Code": "Success",
+ "LastUpdated": "2019-07-06T14:22:56Z"
+ }
+ }
+ },
+ "instance-action": "none",
+ "instance-id": "i-069e01e8cc43732f8",
+ "instance-type": "t2.micro",
+ "local-hostname": "ip-172-31-44-13.us-east-2.compute.internal",
+ "local-ipv4": "172.31.44.13",
+ "mac": "0a:07:84:3d:6e:38",
+ "metrics": {
+ "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ },
+ "network": {
+ "interfaces": {
+ "macs": {
+ "0a:07:84:3d:6e:38": NIC1_MD_IPV4_IPV6_MULTI_IP,
+ }
+ }
+ },
+ "placement": {
+ "availability-zone": "us-east-2c"
+ },
+ "profile": "default-hvm",
+ "public-hostname": (
+ "ec2-18-218-219-181.us-east-2.compute.amazonaws.com"),
+ "public-ipv4": "18.218.219.181",
+ "public-keys": {
+ "yourkeyname,e": [
+ "ssh-rsa AAAAW...DZ yourkeyname"
+ ]
+ },
+ "reservation-id": "r-09b4917135cdd33be",
+ "security-groups": "launch-wizard-4",
+ "services": {
+ "domain": "amazonaws.com",
+ "partition": "aws"
+ }
+}
+
+M_PATH_NET = 'cloudinit.sources.DataSourceEc2.net.'
+
def _register_ssh_keys(rfunc, base_url, keys_data):
"""handle ssh key inconsistencies.
@@ -200,6 +319,7 @@ def register_mock_metaserver(base_url, data):
class TestEc2(test_helpers.HttprettyTestCase):
with_logs = True
+ maxDiff = None
valid_platform_data = {
'uuid': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412',
@@ -265,30 +385,23 @@ class TestEc2(test_helpers.HttprettyTestCase):
register_mock_metaserver(instance_id_url, None)
return ds
- def test_network_config_property_returns_version_1_network_data(self):
- """network_config property returns network version 1 for metadata.
-
- Only one device is configured even when multiple exist in metadata.
- """
+ def test_network_config_property_returns_version_2_network_data(self):
+ """network_config property returns network version 2 for metadata"""
ds = self._setup_ds(
platform_data=self.valid_platform_data,
sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
md={'md': DEFAULT_METADATA})
- find_fallback_path = (
- 'cloudinit.sources.DataSourceEc2.net.find_fallback_nic')
+ find_fallback_path = M_PATH_NET + 'find_fallback_nic'
with mock.patch(find_fallback_path) as m_find_fallback:
m_find_fallback.return_value = 'eth9'
ds.get_data()
mac1 = '06:17:04:d7:26:09' # Defined in DEFAULT_METADATA
- expected = {'version': 1, 'config': [
- {'mac_address': '06:17:04:d7:26:09', 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}],
- 'type': 'physical'}]}
- patch_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interfaces_by_mac')
- get_interface_mac_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interface_mac')
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': '06:17:04:d7:26:09'}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': True}}}
+ patch_path = M_PATH_NET + 'get_interfaces_by_mac'
+ get_interface_mac_path = M_PATH_NET + 'get_interface_mac'
with mock.patch(patch_path) as m_get_interfaces_by_mac:
with mock.patch(find_fallback_path) as m_find_fallback:
with mock.patch(get_interface_mac_path) as m_get_mac:
@@ -297,30 +410,59 @@ class TestEc2(test_helpers.HttprettyTestCase):
m_get_mac.return_value = mac1
self.assertEqual(expected, ds.network_config)
- def test_network_config_property_set_dhcp4_on_private_ipv4(self):
- """network_config property configures dhcp4 on private ipv4 nics.
+ def test_network_config_property_set_dhcp4(self):
+ """network_config property configures dhcp4 on nics with local-ipv4s.
- Only one device is configured even when multiple exist in metadata.
+ Only one device is configured based on get_interfaces_by_mac even when
+ multiple MACs exist in metadata.
"""
ds = self._setup_ds(
platform_data=self.valid_platform_data,
sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
md={'md': DEFAULT_METADATA})
- find_fallback_path = (
- 'cloudinit.sources.DataSourceEc2.net.find_fallback_nic')
+ find_fallback_path = M_PATH_NET + 'find_fallback_nic'
with mock.patch(find_fallback_path) as m_find_fallback:
m_find_fallback.return_value = 'eth9'
ds.get_data()
- mac1 = '06:17:04:d7:26:0A' # IPv4 only in DEFAULT_METADATA
- expected = {'version': 1, 'config': [
- {'mac_address': '06:17:04:d7:26:0A', 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}],
- 'type': 'physical'}]}
- patch_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interfaces_by_mac')
- get_interface_mac_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interface_mac')
+ mac1 = '06:17:04:d7:26:08' # IPv4 only in DEFAULT_METADATA
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': mac1.lower()}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': False}}}
+ patch_path = M_PATH_NET + 'get_interfaces_by_mac'
+ get_interface_mac_path = M_PATH_NET + 'get_interface_mac'
+ with mock.patch(patch_path) as m_get_interfaces_by_mac:
+ with mock.patch(find_fallback_path) as m_find_fallback:
+ with mock.patch(get_interface_mac_path) as m_get_mac:
+ m_get_interfaces_by_mac.return_value = {mac1: 'eth9'}
+ m_find_fallback.return_value = 'eth9'
+ m_get_mac.return_value = mac1
+ self.assertEqual(expected, ds.network_config)
+
+ def test_network_config_property_secondary_private_ips(self):
+ """network_config property configures any secondary ipv4 addresses.
+
+ Only one device is configured based on get_interfaces_by_mac even when
+ multiple MACs exist in metadata.
+ """
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
+ md={'md': SECONDARY_IP_METADATA_2018_09_24})
+ find_fallback_path = M_PATH_NET + 'find_fallback_nic'
+ with mock.patch(find_fallback_path) as m_find_fallback:
+ m_find_fallback.return_value = 'eth9'
+ ds.get_data()
+
+ mac1 = '0a:07:84:3d:6e:38' # 1 secondary IPv4 and 2 secondary IPv6
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': mac1}, 'set-name': 'eth9',
+ 'addresses': ['172.31.45.70/20',
+ '2600:1f16:292:100:f152:2222:3333:4444/128',
+ '2600:1f16:292:100:f153:12a3:c37c:11f9/128'],
+ 'dhcp4': True, 'dhcp6': True}}}
+ patch_path = M_PATH_NET + 'get_interfaces_by_mac'
+ get_interface_mac_path = M_PATH_NET + 'get_interface_mac'
with mock.patch(patch_path) as m_get_interfaces_by_mac:
with mock.patch(find_fallback_path) as m_find_fallback:
with mock.patch(get_interface_mac_path) as m_get_mac:
@@ -356,21 +498,18 @@ class TestEc2(test_helpers.HttprettyTestCase):
register_mock_metaserver(
'http://169.254.169.254/2009-04-04/meta-data/', DEFAULT_METADATA)
mac1 = '06:17:04:d7:26:09' # Defined in DEFAULT_METADATA
- get_interface_mac_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interface_mac')
+ get_interface_mac_path = M_PATH_NET + 'get_interfaces_by_mac'
ds.fallback_nic = 'eth9'
- with mock.patch(get_interface_mac_path) as m_get_interface_mac:
- m_get_interface_mac.return_value = mac1
+ with mock.patch(get_interface_mac_path) as m_get_interfaces_by_mac:
+ m_get_interfaces_by_mac.return_value = {mac1: 'eth9'}
nc = ds.network_config # Will re-crawl network metadata
self.assertIsNotNone(nc)
self.assertIn(
'Refreshing stale metadata from prior to upgrade',
self.logs.getvalue())
- expected = {'version': 1, 'config': [
- {'mac_address': '06:17:04:d7:26:09',
- 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}],
- 'type': 'physical'}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': True}}}
self.assertEqual(expected, ds.network_config)
def test_ec2_get_instance_id_refreshes_identity_on_upgrade(self):
@@ -429,6 +568,55 @@ class TestEc2(test_helpers.HttprettyTestCase):
self.assertTrue(ds.get_data())
self.assertFalse(ds.is_classic_instance())
+ def test_aws_inaccessible_imds_service_fails_with_retries(self):
+ """Inaccessibility of http://169.254.169.254 are retried."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=None)
+
+ conn_error = requests.exceptions.ConnectionError(
+ '[Errno 113] no route to host'
+ )
+
+ mock_success = mock.MagicMock(contents=b'fakesuccess')
+ mock_success.ok.return_value = True
+
+ with mock.patch('cloudinit.url_helper.readurl') as m_readurl:
+ m_readurl.side_effect = (conn_error, conn_error, mock_success)
+ with mock.patch('cloudinit.url_helper.time.sleep'):
+ self.assertTrue(ds.wait_for_metadata_service())
+
+ # Just one /latest/api/token request
+ self.assertEqual(3, len(m_readurl.call_args_list))
+ for readurl_call in m_readurl.call_args_list:
+ self.assertIn('latest/api/token', readurl_call[0][0])
+
+ def test_aws_token_403_fails_without_retries(self):
+ """Verify that 403s fetching AWS tokens are not retried."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=None)
+ token_url = self.data_url('latest', data_item='api/token')
+ httpretty.register_uri(httpretty.PUT, token_url, body={}, status=403)
+ self.assertFalse(ds.get_data())
+ # Just one /latest/api/token request
+ logs = self.logs.getvalue()
+ failed_put_log = '"PUT /latest/api/token HTTP/1.1" 403 0'
+ expected_logs = [
+ 'WARNING: Ec2 IMDS endpoint returned a 403 error. HTTP endpoint is'
+ ' disabled. Aborting.',
+ "WARNING: IMDS's HTTP endpoint is probably disabled",
+ failed_put_log
+ ]
+ for log in expected_logs:
+ self.assertIn(log, logs)
+ self.assertEqual(
+ 1,
+ len([line for line in logs.splitlines() if failed_put_log in line])
+ )
+
def test_aws_token_redacted(self):
"""Verify that aws tokens are redacted when logged."""
ds = self._setup_ds(
@@ -443,7 +631,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
logs_with_redacted = [log for log in all_logs if REDACT_TOK in log]
logs_with_token = [log for log in all_logs if 'API-TOKEN' in log]
self.assertEqual(1, len(logs_with_redacted_ttl))
- self.assertEqual(79, len(logs_with_redacted))
+ self.assertEqual(81, len(logs_with_redacted))
self.assertEqual(0, len(logs_with_token))
@mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery')
@@ -556,7 +744,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
ret = ds.get_data()
self.assertTrue(ret)
- m_dhcp.assert_called_once_with('eth9')
+ m_dhcp.assert_called_once_with('eth9', None)
m_net.assert_called_once_with(
broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9',
prefix_or_mask='255.255.255.0', router='192.168.2.1',
@@ -564,23 +752,64 @@ class TestEc2(test_helpers.HttprettyTestCase):
self.assertIn('Crawl of metadata service took', self.logs.getvalue())
+class TestGetSecondaryAddresses(test_helpers.CiTestCase):
+
+ mac = '06:17:04:d7:26:ff'
+ with_logs = True
+
+ def test_md_with_no_secondary_addresses(self):
+        """Empty list is returned when nic metadata has no secondary IPs"""
+ self.assertEqual([], ec2.get_secondary_addresses(NIC2_MD, self.mac))
+
+ def test_md_with_secondary_v4_and_v6_addresses(self):
+ """All secondary addresses are returned from nic metadata"""
+ self.assertEqual(
+ ['172.31.45.70/20', '2600:1f16:292:100:f152:2222:3333:4444/128',
+ '2600:1f16:292:100:f153:12a3:c37c:11f9/128'],
+ ec2.get_secondary_addresses(NIC1_MD_IPV4_IPV6_MULTI_IP, self.mac))
+
+ def test_invalid_ipv4_ipv6_cidr_metadata_logged_with_defaults(self):
+ """Any invalid subnet-ipv(4|6)-cidr-block values use defaults"""
+ invalid_cidr_md = copy.deepcopy(NIC1_MD_IPV4_IPV6_MULTI_IP)
+ invalid_cidr_md['subnet-ipv4-cidr-block'] = "something-unexpected"
+ invalid_cidr_md['subnet-ipv6-cidr-block'] = "not/sure/what/this/is"
+ self.assertEqual(
+ ['172.31.45.70/24', '2600:1f16:292:100:f152:2222:3333:4444/128',
+ '2600:1f16:292:100:f153:12a3:c37c:11f9/128'],
+ ec2.get_secondary_addresses(invalid_cidr_md, self.mac))
+ expected_logs = [
+ "WARNING: Could not parse subnet-ipv4-cidr-block"
+ " something-unexpected for mac 06:17:04:d7:26:ff."
+ " ipv4 network config prefix defaults to /24",
+ "WARNING: Could not parse subnet-ipv6-cidr-block"
+ " not/sure/what/this/is for mac 06:17:04:d7:26:ff."
+ " ipv6 network config prefix defaults to /128"
+ ]
+ logs = self.logs.getvalue()
+ for log in expected_logs:
+ self.assertIn(log, logs)
+
+
class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
def setUp(self):
super(TestConvertEc2MetadataNetworkConfig, self).setUp()
self.mac1 = '06:17:04:d7:26:09'
+ interface_dict = copy.deepcopy(
+ DEFAULT_METADATA['network']['interfaces']['macs'][self.mac1])
+ # These tests are written assuming the base interface doesn't have IPv6
+ interface_dict.pop('ipv6s')
self.network_metadata = {
- 'interfaces': {'macs': {
- self.mac1: {'public-ipv4s': '172.31.2.16'}}}}
+ 'interfaces': {'macs': {self.mac1: interface_dict}}}
def test_convert_ec2_metadata_network_config_skips_absent_macs(self):
"""Any mac absent from metadata is skipped by network config."""
macs_to_nics = {self.mac1: 'eth9', 'DE:AD:BE:EF:FF:FF': 'vitualnic2'}
# DE:AD:BE:EF:FF:FF represented by OS but not in metadata
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9', 'subnets': [{'type': 'dhcp4'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': False}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
@@ -594,15 +823,15 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
network_metadata_ipv6['interfaces']['macs'][self.mac1])
nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64'
nic1_metadata.pop('public-ipv4s')
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9', 'subnets': [{'type': 'dhcp6'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': True}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
network_metadata_ipv6, macs_to_nics))
- def test_convert_ec2_metadata_network_config_handles_local_dhcp4(self):
+ def test_convert_ec2_metadata_network_config_local_only_dhcp4(self):
"""Config dhcp4 when there are no public addresses in public-ipv4s."""
macs_to_nics = {self.mac1: 'eth9'}
network_metadata_ipv6 = copy.deepcopy(self.network_metadata)
@@ -610,9 +839,9 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
network_metadata_ipv6['interfaces']['macs'][self.mac1])
nic1_metadata['local-ipv4s'] = '172.3.3.15'
nic1_metadata.pop('public-ipv4s')
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9', 'subnets': [{'type': 'dhcp4'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': False}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
@@ -627,16 +856,16 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
nic1_metadata['public-ipv4s'] = ''
# When no ipv4 or ipv6 content but fallback_nic set, set dhcp4 config.
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9', 'subnets': [{'type': 'dhcp4'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': False}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
network_metadata_ipv6, macs_to_nics, fallback_nic='eth9'))
def test_convert_ec2_metadata_network_config_handles_local_v4_and_v6(self):
- """When dhcp6 is public and dhcp4 is set to local enable both."""
+ """When ipv6s and local-ipv4s are non-empty, enable dhcp6 and dhcp4."""
macs_to_nics = {self.mac1: 'eth9'}
network_metadata_both = copy.deepcopy(self.network_metadata)
nic1_metadata = (
@@ -644,10 +873,35 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64'
nic1_metadata.pop('public-ipv4s')
nic1_metadata['local-ipv4s'] = '10.0.0.42' # Local ipv4 only on vpc
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': True}}}
+ self.assertEqual(
+ expected,
+ ec2.convert_ec2_metadata_network_config(
+ network_metadata_both, macs_to_nics))
+
+ def test_convert_ec2_metadata_network_config_handles_multiple_nics(self):
+ """DHCP route-metric increases on secondary NICs for IPv4 and IPv6."""
+ mac2 = '06:17:04:d7:26:08'
+ macs_to_nics = {self.mac1: 'eth9', mac2: 'eth10'}
+ network_metadata_both = copy.deepcopy(self.network_metadata)
+ # Add 2nd nic info
+ network_metadata_both['interfaces']['macs'][mac2] = NIC2_MD
+ nic1_metadata = (
+ network_metadata_both['interfaces']['macs'][self.mac1])
+ nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64'
+ nic1_metadata.pop('public-ipv4s') # No public-ipv4 IPs in cfg
+ nic1_metadata['local-ipv4s'] = '10.0.0.42' # Local ipv4 only on vpc
+ expected = {'version': 2, 'ethernets': {
+ 'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp4-overrides': {'route-metric': 100},
+ 'dhcp6': True, 'dhcp6-overrides': {'route-metric': 100}},
+ 'eth10': {
+ 'match': {'macaddress': mac2}, 'set-name': 'eth10',
+ 'dhcp4': True, 'dhcp4-overrides': {'route-metric': 200},
+ 'dhcp6': False}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
@@ -660,10 +914,9 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
nic1_metadata = (
network_metadata_both['interfaces']['macs'][self.mac1])
nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64'
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}]}]}
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1}, 'set-name': 'eth9',
+ 'dhcp4': True, 'dhcp6': True}}}
self.assertEqual(
expected,
ec2.convert_ec2_metadata_network_config(
@@ -671,12 +924,10 @@ class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase):
def test_convert_ec2_metadata_gets_macs_from_get_interfaces_by_mac(self):
"""Convert Ec2 Metadata calls get_interfaces_by_mac by default."""
- expected = {'version': 1, 'config': [
- {'mac_address': self.mac1, 'type': 'physical',
- 'name': 'eth9',
- 'subnets': [{'type': 'dhcp4'}]}]}
- patch_path = (
- 'cloudinit.sources.DataSourceEc2.net.get_interfaces_by_mac')
+ expected = {'version': 2, 'ethernets': {'eth9': {
+ 'match': {'macaddress': self.mac1},
+ 'set-name': 'eth9', 'dhcp4': True, 'dhcp6': False}}}
+ patch_path = M_PATH_NET + 'get_interfaces_by_mac'
with mock.patch(patch_path) as m_get_interfaces_by_mac:
m_get_interfaces_by_mac.return_value = {self.mac1: 'eth9'}
self.assertEqual(
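
Aside, not part of the patch: the rewritten EC2 assertions above all expect a netplan-style version 2 dict keyed by device name instead of the old version 1 list. A rough, hypothetical sketch of the shape those expected dicts imply follows; route metrics, secondary addresses and the fallback-NIC handling done by the real cloudinit.sources.DataSourceEc2.convert_ec2_metadata_network_config are deliberately omitted.

    # Illustrative sketch only; not the cloud-init implementation.
    def sketch_convert_ec2_nic_metadata(network_md, macs_to_nics):
        """Build a netplan-style 'version 2' dict from EC2 NIC metadata.

        network_md: the EC2 'network' metadata ({'interfaces': {'macs': ...}}).
        macs_to_nics: mapping of MAC address -> local device name.
        """
        ethernets = {}
        macs_metadata = network_md['interfaces']['macs']
        for mac, nic_name in macs_to_nics.items():
            nic_md = macs_metadata.get(mac)
            if nic_md is None:
                continue  # MAC visible to the OS but absent from metadata
            ethernets[nic_name] = {
                'match': {'macaddress': mac.lower()},
                'set-name': nic_name,
                # every NIC in the expected dicts gets dhcp4; dhcp6 only
                # appears when the NIC metadata reports IPv6 addresses
                'dhcp4': True,
                'dhcp6': bool(nic_md.get('ipv6s')),
            }
        return {'version': 2, 'ethernets': ethernets}

Fed the DEFAULT_METADATA-style input used in these tests, this yields the {'version': 2, 'ethernets': {...}} shape asserted throughout TestConvertEc2MetadataNetworkConfig.
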
diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py
index 4afbccff..01f4cbd1 100644
--- a/tests/unittests/test_datasource/test_gce.py
+++ b/tests/unittests/test_datasource/test_gce.py
@@ -114,7 +114,8 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
self.assertTrue(success)
req_header = httpretty.last_request().headers
- self.assertDictContainsSubset(HEADERS, req_header)
+ for header_name, expected_value in HEADERS.items():
+ self.assertEqual(expected_value, req_header.get(header_name))
def test_metadata(self):
# UnicodeDecodeError if set to ds.userdata instead of userdata_raw
diff --git a/tests/unittests/test_datasource/test_hetzner.py b/tests/unittests/test_datasource/test_hetzner.py
index a9c12597..d0879545 100644
--- a/tests/unittests/test_datasource/test_hetzner.py
+++ b/tests/unittests/test_datasource/test_hetzner.py
@@ -5,10 +5,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
from cloudinit.sources import DataSourceHetzner
+import cloudinit.sources.helpers.hetzner as hc_helper
from cloudinit import util, settings, helpers
from cloudinit.tests.helpers import mock, CiTestCase
+import base64
+import pytest
+
METADATA = util.load_yaml("""
hostname: cloudinit-test
instance-id: 123456
@@ -115,3 +119,22 @@ class TestDataSourceHetzner(CiTestCase):
# These are a white box attempt to ensure it did not search.
m_find_fallback.assert_not_called()
m_read_md.assert_not_called()
+
+
+class TestMaybeB64Decode:
+ """Test the maybe_b64decode helper function."""
+
+ @pytest.mark.parametrize("invalid_input", (str("not bytes"), int(4)))
+ def test_raises_error_on_non_bytes(self, invalid_input):
+ """maybe_b64decode should raise error if data is not bytes."""
+ with pytest.raises(TypeError):
+ hc_helper.maybe_b64decode(invalid_input)
+
+ @pytest.mark.parametrize("in_data,expected", [
+ # If data is not b64 encoded, then return value should be the same.
+ (b"this is my data", b"this is my data"),
+ # If data is b64 encoded, then return value should be decoded.
+ (base64.b64encode(b"data"), b"data"),
+ ])
+ def test_happy_path(self, in_data, expected):
+ assert expected == hc_helper.maybe_b64decode(in_data)
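
Aside, not part of the patch: the new TestMaybeB64Decode cases pin the helper's contract -- bytes in, bytes out, TypeError on anything else, and pass-through when the input is not valid base64. A minimal sketch satisfying exactly those cases (not the actual cloudinit.sources.helpers.hetzner code):

    import base64
    import binascii


    def sketch_maybe_b64decode(data):
        """Return base64-decoded data when possible, else data unchanged."""
        if not isinstance(data, bytes):
            raise TypeError("data is '%s', expected bytes" % type(data))
        try:
            # validate=True rejects input containing non-base64 characters,
            # e.g. the spaces in b"this is my data" from the test above
            return base64.b64decode(data, validate=True)
        except binascii.Error:
            return data
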
diff --git a/tests/unittests/test_datasource/test_ibmcloud.py b/tests/unittests/test_datasource/test_ibmcloud.py
index 0b54f585..9013ae9f 100644
--- a/tests/unittests/test_datasource/test_ibmcloud.py
+++ b/tests/unittests/test_datasource/test_ibmcloud.py
@@ -15,13 +15,6 @@ mock = test_helpers.mock
D_PATH = "cloudinit.sources.DataSourceIBMCloud."
-class TestIBMCloud(test_helpers.CiTestCase):
- """Test the datasource."""
- def setUp(self):
- super(TestIBMCloud, self).setUp()
- pass
-
-
@mock.patch(D_PATH + "_is_xen", return_value=True)
@mock.patch(D_PATH + "_is_ibm_provisioning")
@mock.patch(D_PATH + "util.blkid")
diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py
index 2a81d3f5..41b6c27b 100644
--- a/tests/unittests/test_datasource/test_maas.py
+++ b/tests/unittests/test_datasource/test_maas.py
@@ -158,7 +158,6 @@ class TestMAASDataSource(CiTestCase):
@mock.patch("cloudinit.sources.DataSourceMAAS.url_helper.OauthUrlHelper")
class TestGetOauthHelper(CiTestCase):
- with_logs = True
base_cfg = {'consumer_key': 'FAKE_CONSUMER_KEY',
'token_key': 'FAKE_TOKEN_KEY',
'token_secret': 'FAKE_TOKEN_SECRET',
diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py
index 18bea0b9..2e6b53ff 100644
--- a/tests/unittests/test_datasource/test_nocloud.py
+++ b/tests/unittests/test_datasource/test_nocloud.py
@@ -288,8 +288,23 @@ class TestNoCloudDataSource(CiTestCase):
self.mocks.enter_context(
mock.patch.object(util, 'is_FreeBSD', return_value=True))
+ def _mfind_devs_with_freebsd(
+ criteria=None, oformat='device',
+ tag=None, no_cache=False, path=None):
+ if not criteria:
+ return ["/dev/msdosfs/foo", "/dev/iso9660/foo"]
+ if criteria.startswith("LABEL="):
+ return ["/dev/msdosfs/foo", "/dev/iso9660/foo"]
+ elif criteria == "TYPE=vfat":
+ return ["/dev/msdosfs/foo"]
+ elif criteria == "TYPE=iso9660":
+ return ["/dev/iso9660/foo"]
+ return []
+
self.mocks.enter_context(
- mock.patch.object(os.path, 'exists', return_value=True))
+ mock.patch.object(
+ util, 'find_devs_with_freebsd',
+ side_effect=_mfind_devs_with_freebsd))
dsrc = dsNoCloud(sys_cfg=sys_cfg, distro=None, paths=self.paths)
ret = dsrc._get_devices('foo')
diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py
index bb399f6d..9c6070a5 100644
--- a/tests/unittests/test_datasource/test_opennebula.py
+++ b/tests/unittests/test_datasource/test_opennebula.py
@@ -9,6 +9,8 @@ import os
import pwd
import unittest
+import pytest
+
TEST_VARS = {
'VAR1': 'single',
@@ -130,18 +132,18 @@ class TestOpenNebulaDataSource(CiTestCase):
def test_seed_dir_non_contextdisk(self):
self.assertRaises(ds.NonContextDiskDir, ds.read_context_disk_dir,
- self.seed_dir)
+ self.seed_dir, mock.Mock())
def test_seed_dir_empty1_context(self):
populate_dir(self.seed_dir, {'context.sh': ''})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertIsNone(results['userdata'])
self.assertEqual(results['metadata'], {})
def test_seed_dir_empty2_context(self):
populate_context_dir(self.seed_dir, {})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertIsNone(results['userdata'])
self.assertEqual(results['metadata'], {})
@@ -151,11 +153,11 @@ class TestOpenNebulaDataSource(CiTestCase):
self.assertRaises(ds.BrokenContextDiskDir,
ds.read_context_disk_dir,
- self.seed_dir)
+ self.seed_dir, mock.Mock())
def test_context_parser(self):
populate_context_dir(self.seed_dir, TEST_VARS)
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('metadata' in results)
self.assertEqual(TEST_VARS, results['metadata'])
@@ -166,7 +168,7 @@ class TestOpenNebulaDataSource(CiTestCase):
for k in ('SSH_KEY', 'SSH_PUBLIC_KEY'):
my_d = os.path.join(self.tmp, "%s-%i" % (k, c))
populate_context_dir(my_d, {k: '\n'.join(public_keys)})
- results = ds.read_context_disk_dir(my_d)
+ results = ds.read_context_disk_dir(my_d, mock.Mock())
self.assertTrue('metadata' in results)
self.assertTrue('public-keys' in results['metadata'])
@@ -180,7 +182,7 @@ class TestOpenNebulaDataSource(CiTestCase):
my_d = os.path.join(self.tmp, k)
populate_context_dir(my_d, {k: USER_DATA,
'USERDATA_ENCODING': ''})
- results = ds.read_context_disk_dir(my_d)
+ results = ds.read_context_disk_dir(my_d, mock.Mock())
self.assertTrue('userdata' in results)
self.assertEqual(USER_DATA, results['userdata'])
@@ -190,7 +192,7 @@ class TestOpenNebulaDataSource(CiTestCase):
for k in ('USER_DATA', 'USERDATA'):
my_d = os.path.join(self.tmp, k)
populate_context_dir(my_d, {k: b64userdata})
- results = ds.read_context_disk_dir(my_d)
+ results = ds.read_context_disk_dir(my_d, mock.Mock())
self.assertTrue('userdata' in results)
self.assertEqual(b64userdata, results['userdata'])
@@ -200,7 +202,7 @@ class TestOpenNebulaDataSource(CiTestCase):
my_d = os.path.join(self.tmp, k)
populate_context_dir(my_d, {k: util.b64e(USER_DATA),
'USERDATA_ENCODING': 'base64'})
- results = ds.read_context_disk_dir(my_d)
+ results = ds.read_context_disk_dir(my_d, mock.Mock())
self.assertTrue('userdata' in results)
self.assertEqual(USER_DATA, results['userdata'])
@@ -212,7 +214,7 @@ class TestOpenNebulaDataSource(CiTestCase):
for k in ('HOSTNAME', 'PUBLIC_IP', 'IP_PUBLIC', 'ETH0_IP'):
my_d = os.path.join(self.tmp, k)
populate_context_dir(my_d, {k: PUBLIC_IP})
- results = ds.read_context_disk_dir(my_d)
+ results = ds.read_context_disk_dir(my_d, mock.Mock())
self.assertTrue('metadata' in results)
self.assertTrue('local-hostname' in results['metadata'])
@@ -227,7 +229,7 @@ class TestOpenNebulaDataSource(CiTestCase):
# without ETH0_MAC
# for Older OpenNebula?
populate_context_dir(self.seed_dir, {'ETH0_IP': IP_BY_MACADDR})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -237,7 +239,7 @@ class TestOpenNebulaDataSource(CiTestCase):
# ETH0_IP and ETH0_MAC
populate_context_dir(
self.seed_dir, {'ETH0_IP': IP_BY_MACADDR, 'ETH0_MAC': MACADDR})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -249,7 +251,7 @@ class TestOpenNebulaDataSource(CiTestCase):
# "AR = [ TYPE = ETHER ]"
populate_context_dir(
self.seed_dir, {'ETH0_IP': '', 'ETH0_MAC': MACADDR})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -263,7 +265,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_MAC': MACADDR,
'ETH0_MASK': '255.255.0.0'
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -277,7 +279,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_MAC': MACADDR,
'ETH0_MASK': ''
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -290,7 +292,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_IP6': IP6_GLOBAL,
'ETH0_MAC': MACADDR,
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -303,7 +305,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_IP6_ULA': IP6_ULA,
'ETH0_MAC': MACADDR,
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -317,7 +319,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_IP6_PREFIX_LENGTH': IP6_PREFIX,
'ETH0_MAC': MACADDR,
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -331,7 +333,7 @@ class TestOpenNebulaDataSource(CiTestCase):
'ETH0_IP6_PREFIX_LENGTH': '',
'ETH0_MAC': MACADDR,
})
- results = ds.read_context_disk_dir(self.seed_dir)
+ results = ds.read_context_disk_dir(self.seed_dir, mock.Mock())
self.assertTrue('network-interfaces' in results)
self.assertTrue(
@@ -355,6 +357,7 @@ class TestOpenNebulaDataSource(CiTestCase):
util.find_devs_with = orig_find_devs_with
+@mock.patch(DS_PATH + '.net.get_interfaces_by_mac', mock.Mock(return_value={}))
class TestOpenNebulaNetwork(unittest.TestCase):
system_nics = ('eth0', 'ens3')
@@ -367,7 +370,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
expected = {
'02:00:0a:12:01:01': 'ETH0',
'02:00:0a:12:0f:0f': 'ETH1', }
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(expected, net.context_devname)
def test_get_nameservers(self):
@@ -382,21 +385,21 @@ class TestOpenNebulaNetwork(unittest.TestCase):
expected = {
'addresses': ['1.2.3.6', '1.2.3.7', '1.2.3.8'],
'search': ['example.com', 'example.org']}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_nameservers('eth0')
self.assertEqual(expected, val)
def test_get_mtu(self):
"""Verify get_mtu('device') correctly returns MTU size."""
context = {'ETH0_MTU': '1280'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_mtu('eth0')
self.assertEqual('1280', val)
def test_get_ip(self):
"""Verify get_ip('device') correctly returns IPv4 address."""
context = {'ETH0_IP': PUBLIC_IP}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip('eth0', MACADDR)
self.assertEqual(PUBLIC_IP, val)
@@ -407,7 +410,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
string.
"""
context = {'ETH0_IP': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip('eth0', MACADDR)
self.assertEqual(IP_BY_MACADDR, val)
@@ -420,7 +423,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH0_IP6': IP6_GLOBAL,
'ETH0_IP6_ULA': '', }
expected = [IP6_GLOBAL]
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip6('eth0')
self.assertEqual(expected, val)
@@ -433,7 +436,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH0_IP6': '',
'ETH0_IP6_ULA': IP6_ULA, }
expected = [IP6_ULA]
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip6('eth0')
self.assertEqual(expected, val)
@@ -446,7 +449,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH0_IP6': IP6_GLOBAL,
'ETH0_IP6_ULA': IP6_ULA, }
expected = [IP6_GLOBAL, IP6_ULA]
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip6('eth0')
self.assertEqual(expected, val)
@@ -455,7 +458,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
Verify get_ip6_prefix('device') correctly returns IPv6 prefix.
"""
context = {'ETH0_IP6_PREFIX_LENGTH': IP6_PREFIX}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip6_prefix('eth0')
self.assertEqual(IP6_PREFIX, val)
@@ -466,7 +469,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
string.
"""
context = {'ETH0_IP6_PREFIX_LENGTH': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_ip6_prefix('eth0')
self.assertEqual('64', val)
@@ -476,7 +479,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
address.
"""
context = {'ETH0_GATEWAY': '1.2.3.5'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_gateway('eth0')
self.assertEqual('1.2.3.5', val)
@@ -486,7 +489,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
address.
"""
context = {'ETH0_GATEWAY6': IP6_GW}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_gateway6('eth0')
self.assertEqual(IP6_GW, val)
@@ -495,7 +498,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
Verify get_mask('device') correctly returns IPv4 subnet mask.
"""
context = {'ETH0_MASK': '255.255.0.0'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_mask('eth0')
self.assertEqual('255.255.0.0', val)
@@ -505,7 +508,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
It returns default value '255.255.255.0' if ETH0_MASK has empty string.
"""
context = {'ETH0_MASK': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_mask('eth0')
self.assertEqual('255.255.255.0', val)
@@ -514,7 +517,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
Verify get_network('device') correctly returns IPv4 network address.
"""
context = {'ETH0_NETWORK': '1.2.3.0'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_network('eth0', MACADDR)
self.assertEqual('1.2.3.0', val)
@@ -525,7 +528,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
empty string.
"""
context = {'ETH0_NETWORK': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_network('eth0', MACADDR)
self.assertEqual('10.18.1.0', val)
@@ -534,7 +537,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
Verify get_field('device', 'name') returns *context* value.
"""
context = {'ETH9_DUMMY': 'DUMMY_VALUE'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_field('eth9', 'dummy')
self.assertEqual('DUMMY_VALUE', val)
@@ -544,7 +547,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
value.
"""
context = {'ETH9_DUMMY': 'DUMMY_VALUE'}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_field('eth9', 'dummy', 'DEFAULT_VALUE')
self.assertEqual('DUMMY_VALUE', val)
@@ -554,7 +557,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
value if context value is empty string.
"""
context = {'ETH9_DUMMY': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_field('eth9', 'dummy', 'DEFAULT_VALUE')
self.assertEqual('DEFAULT_VALUE', val)
@@ -564,7 +567,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
empty string.
"""
context = {'ETH9_DUMMY': ''}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_field('eth9', 'dummy')
self.assertEqual(None, val)
@@ -574,7 +577,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
None.
"""
context = {'ETH9_DUMMY': None}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
val = net.get_field('eth9', 'dummy')
self.assertEqual(None, val)
@@ -594,7 +597,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
# set ETH0_GATEWAY
@@ -610,7 +613,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
@@ -629,7 +632,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
# set ETH0_GATEWAY6
@@ -645,7 +648,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
@@ -666,7 +669,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
# set ETH0_IP6, ETH0_IP6_ULA, ETH0_IP6_PREFIX_LENGTH
@@ -686,7 +689,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
IP6_GLOBAL + '/' + IP6_PREFIX,
IP6_ULA + '/' + IP6_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
@@ -707,7 +710,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
# set DNS, ETH0_DNS, ETH0_SEARCH_DOMAIN
@@ -727,7 +730,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
@@ -746,7 +749,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
# set ETH0_MTU
@@ -762,14 +765,14 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'match': {'macaddress': MACADDR},
'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork(context)
+ net = ds.OpenNebulaNetwork(context, mock.Mock())
self.assertEqual(net.gen_conf(), expected)
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
def test_eth0(self, m_get_phys_by_mac):
for nic in self.system_nics:
m_get_phys_by_mac.return_value = {MACADDR: nic}
- net = ds.OpenNebulaNetwork({})
+ net = ds.OpenNebulaNetwork({}, mock.Mock())
expected = {
'version': 2,
'ethernets': {
@@ -779,6 +782,14 @@ class TestOpenNebulaNetwork(unittest.TestCase):
self.assertEqual(net.gen_conf(), expected)
+ @mock.patch(DS_PATH + ".get_physical_nics_by_mac")
+ def test_distro_passed_through(self, m_get_physical_nics_by_mac):
+ ds.OpenNebulaNetwork({}, mock.sentinel.distro)
+ self.assertEqual(
+ [mock.call(mock.sentinel.distro)],
+ m_get_physical_nics_by_mac.call_args_list,
+ )
+
def test_eth0_override(self):
self.maxDiff = None
context = {
@@ -797,7 +808,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH0_SEARCH_DOMAIN': '',
}
for nic in self.system_nics:
- net = ds.OpenNebulaNetwork(context,
+ net = ds.OpenNebulaNetwork(context, mock.Mock(),
system_nics_by_mac={MACADDR: nic})
expected = {
'version': 2,
@@ -829,7 +840,7 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH0_SEARCH_DOMAIN': 'example.com example.org',
}
for nic in self.system_nics:
- net = ds.OpenNebulaNetwork(context,
+ net = ds.OpenNebulaNetwork(context, mock.Mock(),
system_nics_by_mac={MACADDR: nic})
expected = {
@@ -883,7 +894,10 @@ class TestOpenNebulaNetwork(unittest.TestCase):
'ETH3_SEARCH_DOMAIN': 'third.example.com third.example.org',
}
net = ds.OpenNebulaNetwork(
- context, system_nics_by_mac={MAC_1: 'enp0s25', MAC_2: 'enp1s2'})
+ context,
+ mock.Mock(),
+ system_nics_by_mac={MAC_1: 'enp0s25', MAC_2: 'enp1s2'}
+ )
expected = {
'version': 2,
@@ -913,12 +927,43 @@ class TestOpenNebulaNetwork(unittest.TestCase):
self.assertEqual(expected, net.gen_conf())
-class TestParseShellConfig(unittest.TestCase):
+class TestParseShellConfig:
+ @pytest.mark.allow_subp_for("bash")
def test_no_seconds(self):
cfg = '\n'.join(["foo=bar", "SECONDS=2", "xx=foo"])
# we could test 'sleep 2', but that would make the test run slower.
ret = ds.parse_shell_config(cfg)
- self.assertEqual(ret, {"foo": "bar", "xx": "foo"})
+ assert ret == {"foo": "bar", "xx": "foo"}
+
+
+class TestGetPhysicalNicsByMac:
+ @pytest.mark.parametrize(
+ "interfaces_by_mac,physical_devs,expected_return",
+ [
+ # No interfaces => empty return
+ ({}, [], {}),
+ # Only virtual interface => empty return
+ ({"mac1": "virtual0"}, [], {}),
+ # Only physical interface => it is returned
+ ({"mac2": "physical0"}, ["physical0"], {"mac2": "physical0"}),
+ # Combination of physical and virtual => only physical returned
+ (
+ {"mac3": "physical1", "mac4": "virtual1"},
+ ["physical1"],
+ {"mac3": "physical1"},
+ ),
+ ],
+ )
+ def test(self, interfaces_by_mac, physical_devs, expected_return):
+ distro = mock.Mock()
+ distro.networking.is_physical.side_effect = (
+ lambda devname: devname in physical_devs
+ )
+ with mock.patch(
+ DS_PATH + ".net.get_interfaces_by_mac",
+ return_value=interfaces_by_mac,
+ ):
+ assert expected_return == ds.get_physical_nics_by_mac(distro)
def populate_context_dir(path, variables):
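
Aside, not part of the patch: TestGetPhysicalNicsByMac spells out what the helper must do -- keep only the MAC-to-device entries whose device the distro's networking layer reports as physical. A minimal sketch under the same assumption the test mocks (a distro object exposing networking.is_physical(devname)); the real helper looks the mapping up itself via net.get_interfaces_by_mac, whereas here it is passed in to keep the sketch self-contained.

    def sketch_get_physical_nics_by_mac(distro, interfaces_by_mac):
        """Return only the MAC -> device entries that are physical NICs."""
        return {
            mac: devname
            for mac, devname in interfaces_by_mac.items()
            if distro.networking.is_physical(devname)
        }
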
diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py
index f754556f..3cfba74d 100644
--- a/tests/unittests/test_datasource/test_openstack.py
+++ b/tests/unittests/test_datasource/test_openstack.py
@@ -279,7 +279,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase):
self.assertEqual(2, len(ds_os_local.files))
self.assertEqual(VENDOR_DATA, ds_os_local.vendordata_pure)
self.assertIsNone(ds_os_local.vendordata_raw)
- m_dhcp.assert_called_with('eth9')
+ m_dhcp.assert_called_with('eth9', None)
def test_bad_datasource_meta(self):
os_files = copy.deepcopy(OS_FILES)
@@ -510,6 +510,24 @@ class TestDetectOpenStack(test_helpers.CiTestCase):
'Expected detect_openstack == True on OpenTelekomCloud')
@test_helpers.mock.patch(MOCK_PATH + 'util.read_dmi_data')
+ def test_detect_openstack_sapccloud_chassis_asset_tag(self, m_dmi,
+ m_is_x86):
+ """Return True on OpenStack reporting SAP CCloud VM asset-tag."""
+ m_is_x86.return_value = True
+
+ def fake_dmi_read(dmi_key):
+ if dmi_key == 'system-product-name':
+ return 'VMware Virtual Platform' # SAP CCloud uses VMware
+ if dmi_key == 'chassis-asset-tag':
+ return 'SAP CCloud VM'
+ assert False, 'Unexpected dmi read of %s' % dmi_key
+
+ m_dmi.side_effect = fake_dmi_read
+ self.assertTrue(
+ ds.detect_openstack(),
+ 'Expected detect_openstack == True on SAP CCloud VM')
+
+ @test_helpers.mock.patch(MOCK_PATH + 'util.read_dmi_data')
def test_detect_openstack_oraclecloud_chassis_asset_tag(self, m_dmi,
m_is_x86):
"""Return True on OpenStack reporting Oracle cloud asset-tag."""
diff --git a/tests/unittests/test_datasource/test_ovf.py b/tests/unittests/test_datasource/test_ovf.py
index a19c35c8..1d088577 100644
--- a/tests/unittests/test_datasource/test_ovf.py
+++ b/tests/unittests/test_datasource/test_ovf.py
@@ -10,6 +10,7 @@ import os
from collections import OrderedDict
from textwrap import dedent
+from cloudinit import subp
from cloudinit import util
from cloudinit.tests.helpers import CiTestCase, mock, wrap_and_call
from cloudinit.helpers import Paths
@@ -48,7 +49,7 @@ def fill_properties(props, template=OVF_ENV_CONTENT):
for key, val in props.items():
lines.append(prop_tmpl.format(key=key, val=val))
indent = " "
- properties = ''.join([indent + l + "\n" for l in lines])
+ properties = ''.join([indent + line + "\n" for line in lines])
return template.format(properties=properties)
@@ -219,6 +220,88 @@ class TestDatasourceOVF(CiTestCase):
self.assertIn('Custom script is disabled by VM Administrator',
str(context.exception))
+ def test_get_data_cust_script_enabled(self):
+ """If custom script is enabled by VMware tools configuration,
+ execute the script.
+ """
+ paths = Paths({'cloud_dir': self.tdir})
+ ds = self.datasource(
+ sys_cfg={'disable_vmware_customization': False}, distro={},
+ paths=paths)
+ # Prepare the conf file
+ conf_file = self.tmp_path('test-cust', self.tdir)
+ conf_content = dedent("""\
+ [CUSTOM-SCRIPT]
+ SCRIPT-NAME = test-script
+ [MISC]
+ MARKER-ID = 12345346
+ """)
+ util.write_file(conf_file, conf_content)
+
+ # Mock custom script is enabled by return true when calling
+ # get_tools_config
+ with mock.patch(MPATH + 'get_tools_config', return_value="true"):
+ with mock.patch(MPATH + 'set_customization_status',
+ return_value=('msg', b'')):
+ with self.assertRaises(CustomScriptNotFound) as context:
+ wrap_and_call(
+ 'cloudinit.sources.DataSourceOVF',
+ {'util.read_dmi_data': 'vmware',
+ 'util.del_dir': True,
+ 'search_file': self.tdir,
+ 'wait_for_imc_cfg_file': conf_file,
+ 'get_nics_to_enable': ''},
+ ds.get_data)
+ # Verify custom script is trying to be executed
+ customscript = self.tmp_path('test-script', self.tdir)
+ self.assertIn('Script %s not found!!' % customscript,
+ str(context.exception))
+
+ def test_get_data_force_run_post_script_is_yes(self):
+        """If DEFAULT-RUN-POST-CUST-SCRIPT is yes, the custom script can run
+        even if enable-custom-scripts is not set in VM Tools configuration
+ """
+ paths = Paths({'cloud_dir': self.tdir})
+ ds = self.datasource(
+ sys_cfg={'disable_vmware_customization': False}, distro={},
+ paths=paths)
+ # Prepare the conf file
+ conf_file = self.tmp_path('test-cust', self.tdir)
+ # set DEFAULT-RUN-POST-CUST-SCRIPT = yes so that enable-custom-scripts
+ # default value is TRUE
+ conf_content = dedent("""\
+ [CUSTOM-SCRIPT]
+ SCRIPT-NAME = test-script
+ [MISC]
+ MARKER-ID = 12345346
+ DEFAULT-RUN-POST-CUST-SCRIPT = yes
+ """)
+ util.write_file(conf_file, conf_content)
+
+ # Mock get_tools_config(section, key, defaultVal) to return
+ # defaultVal
+ def my_get_tools_config(*args, **kwargs):
+ return args[2]
+
+ with mock.patch(MPATH + 'get_tools_config',
+ side_effect=my_get_tools_config):
+ with mock.patch(MPATH + 'set_customization_status',
+ return_value=('msg', b'')):
+ with self.assertRaises(CustomScriptNotFound) as context:
+ wrap_and_call(
+ 'cloudinit.sources.DataSourceOVF',
+ {'util.read_dmi_data': 'vmware',
+ 'util.del_dir': True,
+ 'search_file': self.tdir,
+ 'wait_for_imc_cfg_file': conf_file,
+ 'get_nics_to_enable': ''},
+ ds.get_data)
+            # Verify the custom script still runs even though
+            # enable-custom-scripts is not set in the VMware Tools config
+ customscript = self.tmp_path('test-script', self.tdir)
+ self.assertIn('Script %s not found!!' % customscript,
+ str(context.exception))
+
def test_get_data_non_vmware_seed_platform_info(self):
"""Platform info properly reports when on non-vmware platforms."""
paths = Paths({'cloud_dir': self.tdir, 'run_dir': self.tdir})
@@ -401,8 +484,8 @@ class TestTransportIso9660(CiTestCase):
self.assertTrue(dsovf.maybe_cdrom_device('xvdza1'))
-@mock.patch(MPATH + "util.which")
-@mock.patch(MPATH + "util.subp")
+@mock.patch(MPATH + "subp.which")
+@mock.patch(MPATH + "subp.subp")
class TestTransportVmwareGuestinfo(CiTestCase):
"""Test the com.vmware.guestInfo transport implemented in
transport_vmware_guestinfo."""
@@ -420,7 +503,7 @@ class TestTransportVmwareGuestinfo(CiTestCase):
def test_notfound_on_exit_code_1(self, m_subp, m_which):
"""If vmware-rpctool exits 1, then must return not found."""
m_which.return_value = self.rpctool_path
- m_subp.side_effect = util.ProcessExecutionError(
+ m_subp.side_effect = subp.ProcessExecutionError(
stdout="", stderr="No value found", exit_code=1, cmd=["unused"])
self.assertEqual(NOT_FOUND, dsovf.transport_vmware_guestinfo())
self.assertEqual(1, m_subp.call_count)
@@ -442,7 +525,7 @@ class TestTransportVmwareGuestinfo(CiTestCase):
def test_notfound_and_warns_on_unexpected_exit_code(self, m_subp, m_which):
"""If vmware-rpctool exits non zero or 1, warnings should be logged."""
m_which.return_value = self.rpctool_path
- m_subp.side_effect = util.ProcessExecutionError(
+ m_subp.side_effect = subp.ProcessExecutionError(
stdout=None, stderr="No value found", exit_code=2, cmd=["unused"])
self.assertEqual(NOT_FOUND, dsovf.transport_vmware_guestinfo())
self.assertEqual(1, m_subp.call_count)
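
Aside, not part of the patch: the TestTransportVmwareGuestinfo changes only swap util.subp/util.which for the new cloudinit.subp module; the behaviour the tests exercise stays the same -- exit code 1 from vmware-rpctool means "no value", any other failure is worth a warning. A rough sketch of that control flow; the exact rpctool argument string and the NOT_FOUND sentinel are assumptions of this sketch.

    import logging

    from cloudinit import subp

    LOG = logging.getLogger(__name__)
    NOT_FOUND = None  # sentinel assumed by this sketch


    def sketch_transport_vmware_guestinfo():
        """Fetch the OVF environment via vmware-rpctool, or NOT_FOUND."""
        rpctool = subp.which('vmware-rpctool')
        if not rpctool:
            return NOT_FOUND
        try:
            out, _err = subp.subp([rpctool, 'info-get guestinfo.ovfEnv'])
            return out or NOT_FOUND
        except subp.ProcessExecutionError as e:
            # exit code 1 simply means the guestinfo key is unset
            if e.exit_code != 1:
                LOG.warning('%s exited with code %s', rpctool, e.exit_code)
            return NOT_FOUND
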
diff --git a/tests/unittests/test_datasource/test_rbx.py b/tests/unittests/test_datasource/test_rbx.py
index aabf1f18..d017510e 100644
--- a/tests/unittests/test_datasource/test_rbx.py
+++ b/tests/unittests/test_datasource/test_rbx.py
@@ -4,6 +4,7 @@ from cloudinit import helpers
from cloudinit import distros
from cloudinit.sources import DataSourceRbxCloud as ds
from cloudinit.tests.helpers import mock, CiTestCase, populate_dir
+from cloudinit import subp
DS_PATH = "cloudinit.sources.DataSourceRbxCloud"
@@ -156,7 +157,7 @@ class TestRbxDataSource(CiTestCase):
expected
)
- @mock.patch(DS_PATH + '.util.subp')
+ @mock.patch(DS_PATH + '.subp.subp')
def test_gratuitous_arp_run_standard_arping(self, m_subp):
"""Test handle run arping & parameters."""
items = [
@@ -182,7 +183,7 @@ class TestRbxDataSource(CiTestCase):
], m_subp.call_args_list
)
- @mock.patch(DS_PATH + '.util.subp')
+ @mock.patch(DS_PATH + '.subp.subp')
def test_handle_rhel_like_arping(self, m_subp):
"""Test handle on RHEL-like distros."""
items = [
@@ -199,6 +200,35 @@ class TestRbxDataSource(CiTestCase):
m_subp.call_args_list
)
+ @mock.patch(
+ DS_PATH + '.subp.subp',
+ side_effect=subp.ProcessExecutionError()
+ )
+ def test_continue_on_arping_error(self, m_subp):
+        """Continue even when the arping command fails."""
+ items = [
+ {
+ 'destination': '172.17.0.2',
+ 'source': '172.16.6.104'
+ },
+ {
+ 'destination': '172.17.0.2',
+ 'source': '172.16.6.104',
+ },
+ ]
+ ds.gratuitous_arp(items, self._fetch_distro('ubuntu'))
+ self.assertEqual([
+ mock.call([
+ 'arping', '-c', '2', '-S',
+ '172.16.6.104', '172.17.0.2'
+ ]),
+ mock.call([
+ 'arping', '-c', '2', '-S',
+ '172.16.6.104', '172.17.0.2'
+ ])
+ ], m_subp.call_args_list
+ )
+
def populate_cloud_metadata(path, data):
populate_dir(path, {'cloud.json': json.dumps(data)})
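
Aside, not part of the patch: the new test_continue_on_arping_error asserts that every arping invocation is still attempted when subp.subp raises each time, i.e. failures are swallowed per address pair rather than aborting the loop. A sketch of that control flow; the arping arguments match the Ubuntu expectation in the test, and the RHEL-style variant from test_handle_rhel_like_arping is omitted.

    import logging

    from cloudinit import subp

    LOG = logging.getLogger(__name__)


    def sketch_gratuitous_arp(items):
        """Send a gratuitous ARP per item, ignoring per-command failures."""
        for item in items:
            cmd = ['arping', '-c', '2', '-S',
                   item['source'], item['destination']]
            try:
                subp.subp(cmd)
            except subp.ProcessExecutionError as error:
                LOG.warning('Failed gratuitous ARP for %s: %s',
                            item['destination'], error)
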
diff --git a/tests/unittests/test_datasource/test_scaleway.py b/tests/unittests/test_datasource/test_scaleway.py
index 1b4dd0ad..9d82bda9 100644
--- a/tests/unittests/test_datasource/test_scaleway.py
+++ b/tests/unittests/test_datasource/test_scaleway.py
@@ -353,12 +353,16 @@ class TestDataSourceScaleway(HttprettyTestCase):
self.datasource.metadata['ipv6'] = None
netcfg = self.datasource.network_config
- resp = {'version': 1,
- 'config': [{
- 'type': 'physical',
- 'name': 'ens2',
- 'subnets': [{'type': 'dhcp4'}]}]
+ resp = {
+ 'version': 1,
+ 'config': [
+ {
+ 'type': 'physical',
+ 'name': 'ens2',
+ 'subnets': [{'type': 'dhcp4'}]
}
+ ]
+ }
self.assertEqual(netcfg, resp)
@mock.patch('cloudinit.sources.DataSourceScaleway.net.find_fallback_nic')
@@ -371,25 +375,32 @@ class TestDataSourceScaleway(HttprettyTestCase):
m_get_cmdline.return_value = 'scaleway'
fallback_nic.return_value = 'ens2'
self.datasource.metadata['ipv6'] = {
- 'address': '2000:abc:4444:9876::42:999',
- 'gateway': '2000:abc:4444:9876::42:000',
- 'netmask': '127',
- }
+ 'address': '2000:abc:4444:9876::42:999',
+ 'gateway': '2000:abc:4444:9876::42:000',
+ 'netmask': '127',
+ }
netcfg = self.datasource.network_config
- resp = {'version': 1,
- 'config': [{
- 'type': 'physical',
- 'name': 'ens2',
- 'subnets': [{'type': 'dhcp4'},
- {'type': 'static',
- 'address': '2000:abc:4444:9876::42:999',
- 'gateway': '2000:abc:4444:9876::42:000',
- 'netmask': '127', }
- ]
-
- }]
+ resp = {
+ 'version': 1,
+ 'config': [
+ {
+ 'type': 'physical',
+ 'name': 'ens2',
+ 'subnets': [
+ {
+ 'type': 'dhcp4'
+ },
+ {
+ 'type': 'static',
+ 'address': '2000:abc:4444:9876::42:999',
+ 'gateway': '2000:abc:4444:9876::42:000',
+ 'netmask': '127',
+ }
+ ]
}
+ ]
+ }
self.assertEqual(netcfg, resp)
@mock.patch('cloudinit.sources.DataSourceScaleway.net.find_fallback_nic')
@@ -417,12 +428,16 @@ class TestDataSourceScaleway(HttprettyTestCase):
self.datasource.metadata['ipv6'] = None
self.datasource._network_config = sources.UNSET
- resp = {'version': 1,
- 'config': [{
- 'type': 'physical',
- 'name': 'ens2',
- 'subnets': [{'type': 'dhcp4'}]}]
+ resp = {
+ 'version': 1,
+ 'config': [
+ {
+ 'type': 'physical',
+ 'name': 'ens2',
+ 'subnets': [{'type': 'dhcp4'}]
}
+ ]
+ }
netcfg = self.datasource.network_config
self.assertEqual(netcfg, resp)
@@ -441,12 +456,16 @@ class TestDataSourceScaleway(HttprettyTestCase):
self.datasource.metadata['ipv6'] = None
self.datasource._network_config = None
- resp = {'version': 1,
- 'config': [{
- 'type': 'physical',
- 'name': 'ens2',
- 'subnets': [{'type': 'dhcp4'}]}]
+ resp = {
+ 'version': 1,
+ 'config': [
+ {
+ 'type': 'physical',
+ 'name': 'ens2',
+ 'subnets': [{'type': 'dhcp4'}]
}
+ ]
+ }
netcfg = self.datasource.network_config
self.assertEqual(netcfg, resp)
diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py
index 62084de5..5847a384 100644
--- a/tests/unittests/test_datasource/test_smartos.py
+++ b/tests/unittests/test_datasource/test_smartos.py
@@ -12,8 +12,6 @@ order to validate return responses.
'''
-from __future__ import print_function
-
from binascii import crc32
import json
import multiprocessing
@@ -22,7 +20,7 @@ import os.path
import re
import signal
import stat
-import unittest2
+import unittest
import uuid
from cloudinit import serial
@@ -34,8 +32,8 @@ from cloudinit.sources.DataSourceSmartOS import (
from cloudinit.event import EventType
from cloudinit import helpers as c_helpers
-from cloudinit.util import (
- b64e, subp, ProcessExecutionError, which, write_file)
+from cloudinit.util import (b64e, write_file)
+from cloudinit.subp import (subp, ProcessExecutionError, which)
from cloudinit.tests.helpers import (
CiTestCase, mock, FilesystemMockingTestCase, skipIf)
@@ -669,7 +667,7 @@ class TestIdentifyFile(CiTestCase):
with self.allow_subp(["file"]):
self.assertEqual("text/plain", identify_file(fname))
- @mock.patch(DSMOS + ".util.subp")
+ @mock.patch(DSMOS + ".subp.subp")
def test_returns_none_on_error(self, m_subp):
"""On 'file' execution error, None should be returned."""
m_subp.side_effect = ProcessExecutionError("FILE_FAILED", exit_code=99)
@@ -1095,11 +1093,11 @@ class TestNetworkConversion(CiTestCase):
self.assertEqual(expected, found)
-@unittest2.skipUnless(get_smartos_environ() == SMARTOS_ENV_KVM,
- "Only supported on KVM and bhyve guests under SmartOS")
-@unittest2.skipUnless(os.access(SERIAL_DEVICE, os.W_OK),
- "Requires write access to " + SERIAL_DEVICE)
-@unittest2.skipUnless(HAS_PYSERIAL is True, "pyserial not available")
+@unittest.skipUnless(get_smartos_environ() == SMARTOS_ENV_KVM,
+ "Only supported on KVM and bhyve guests under SmartOS")
+@unittest.skipUnless(os.access(SERIAL_DEVICE, os.W_OK),
+ "Requires write access to " + SERIAL_DEVICE)
+@unittest.skipUnless(HAS_PYSERIAL is True, "pyserial not available")
class TestSerialConcurrency(CiTestCase):
"""
This class tests locking on an actual serial port, and as such can only
diff --git a/tests/unittests/test_distros/test_bsd_utils.py b/tests/unittests/test_distros/test_bsd_utils.py
new file mode 100644
index 00000000..3a68f2a9
--- /dev/null
+++ b/tests/unittests/test_distros/test_bsd_utils.py
@@ -0,0 +1,67 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import cloudinit.distros.bsd_utils as bsd_utils
+
+from cloudinit.tests.helpers import (CiTestCase, ExitStack, mock)
+
+RC_FILE = """
+if something; then
+ do something here
+fi
+hostname={hostname}
+"""
+
+
+class TestBsdUtils(CiTestCase):
+
+ def setUp(self):
+ super().setUp()
+ patches = ExitStack()
+ self.addCleanup(patches.close)
+
+ self.load_file = patches.enter_context(
+ mock.patch.object(bsd_utils.util, 'load_file'))
+
+ self.write_file = patches.enter_context(
+ mock.patch.object(bsd_utils.util, 'write_file'))
+
+ def test_get_rc_config_value(self):
+ self.load_file.return_value = 'hostname=foo\n'
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), 'foo')
+ self.load_file.assert_called_with('/etc/rc.conf')
+
+ self.load_file.return_value = 'hostname=foo'
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), 'foo')
+
+ self.load_file.return_value = 'hostname="foo"'
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), 'foo')
+
+ self.load_file.return_value = "hostname='foo'"
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), 'foo')
+
+ self.load_file.return_value = 'hostname=\'foo"'
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), "'foo\"")
+
+ self.load_file.return_value = ''
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), None)
+
+ self.load_file.return_value = RC_FILE.format(hostname='foo')
+ self.assertEqual(bsd_utils.get_rc_config_value('hostname'), "foo")
+
+ def test_set_rc_config_value_unchanged(self):
+ # bsd_utils.set_rc_config_value('hostname', 'foo')
+ # self.write_file.assert_called_with('/etc/rc.conf', 'hostname=foo\n')
+
+ self.load_file.return_value = RC_FILE.format(hostname='foo')
+ self.write_file.assert_not_called()
+
+ def test_set_rc_config_value(self):
+ bsd_utils.set_rc_config_value('hostname', 'foo')
+ self.write_file.assert_called_with('/etc/rc.conf', 'hostname=foo\n')
+
+ self.load_file.return_value = RC_FILE.format(hostname='foo')
+ bsd_utils.set_rc_config_value('hostname', 'bar')
+ self.write_file.assert_called_with(
+ '/etc/rc.conf',
+ RC_FILE.format(hostname='bar')
+ )
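
Aside, not part of the patch: the new bsd_utils tests document the value-parsing rules for /etc/rc.conf -- scan for a 'key=value' line, strip one matching pair of surrounding single or double quotes, leave mismatched quotes untouched, and return None when the key is absent. A small sketch that satisfies those cases; the file path and util.load_file usage follow the tests, while set_rc_config_value and any edge cases the real module handles differently are omitted.

    from cloudinit import util


    def sketch_get_rc_config_value(key, fn='/etc/rc.conf'):
        """Return the value assigned to key in an rc.conf-style file."""
        prefix = '{}='.format(key)
        for line in util.load_file(fn).splitlines():
            if not line.startswith(prefix):
                continue
            value = line[len(prefix):]
            # strip one matching pair of surrounding quotes, if present
            if len(value) >= 2 and value[0] == value[-1] and value[0] in '\'"':
                value = value[1:-1]
            return value
        return None
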
diff --git a/tests/unittests/test_distros/test_create_users.py b/tests/unittests/test_distros/test_create_users.py
index ef11784d..94ab052d 100644
--- a/tests/unittests/test_distros/test_create_users.py
+++ b/tests/unittests/test_distros/test_create_users.py
@@ -46,7 +46,7 @@ class MyBaseDistro(distros.Distro):
@mock.patch("cloudinit.distros.util.system_is_snappy", return_value=False)
-@mock.patch("cloudinit.distros.util.subp")
+@mock.patch("cloudinit.distros.subp.subp")
class TestCreateUser(CiTestCase):
with_logs = True
@@ -240,7 +240,7 @@ class TestCreateUser(CiTestCase):
[mock.call(set(['auth1']), user), # not disabled
mock.call(set(['key1']), 'foouser', options=disable_prefix)])
- @mock.patch("cloudinit.distros.util.which")
+ @mock.patch("cloudinit.distros.subp.which")
def test_lock_with_usermod_if_no_passwd(self, m_which, m_subp,
m_is_snappy):
"""Lock uses usermod --lock if no 'passwd' cmd available."""
@@ -250,7 +250,7 @@ class TestCreateUser(CiTestCase):
[mock.call(['usermod', '--lock', 'bob'])],
m_subp.call_args_list)
- @mock.patch("cloudinit.distros.util.which")
+ @mock.patch("cloudinit.distros.subp.which")
def test_lock_with_passwd_if_available(self, m_which, m_subp,
m_is_snappy):
"""Lock with only passwd will use passwd."""
@@ -260,7 +260,7 @@ class TestCreateUser(CiTestCase):
[mock.call(['passwd', '-l', 'bob'])],
m_subp.call_args_list)
- @mock.patch("cloudinit.distros.util.which")
+ @mock.patch("cloudinit.distros.subp.which")
def test_lock_raises_runtime_if_no_commands(self, m_which, m_subp,
m_is_snappy):
"""Lock with no commands available raises RuntimeError."""
diff --git a/tests/unittests/test_distros/test_debian.py b/tests/unittests/test_distros/test_debian.py
index da16a797..7ff8240b 100644
--- a/tests/unittests/test_distros/test_debian.py
+++ b/tests/unittests/test_distros/test_debian.py
@@ -5,7 +5,7 @@ from cloudinit import util
from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock)
-@mock.patch("cloudinit.distros.debian.util.subp")
+@mock.patch("cloudinit.distros.debian.subp.subp")
class TestDebianApplyLocale(FilesystemMockingTestCase):
def setUp(self):
diff --git a/tests/unittests/test_distros/test_freebsd.py b/tests/unittests/test_distros/test_freebsd.py
index 8af253a2..be565b04 100644
--- a/tests/unittests/test_distros/test_freebsd.py
+++ b/tests/unittests/test_distros/test_freebsd.py
@@ -8,7 +8,7 @@ import os
class TestDeviceLookUp(CiTestCase):
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_find_freebsd_part_label(self, mock_subp):
glabel_out = '''
gptid/fa52d426-c337-11e6-8911-00155d4c5e47 N/A da0p1
@@ -19,7 +19,7 @@ gptid/fa52d426-c337-11e6-8911-00155d4c5e47 N/A da0p1
res = find_freebsd_part("/dev/label/rootfs")
self.assertEqual("da0p2", res)
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_find_freebsd_part_gpt(self, mock_subp):
glabel_out = '''
gpt/bootfs N/A vtbd0p1
diff --git a/tests/unittests/test_distros/test_generic.py b/tests/unittests/test_distros/test_generic.py
index 02b334e3..44607489 100644
--- a/tests/unittests/test_distros/test_generic.py
+++ b/tests/unittests/test_distros/test_generic.py
@@ -6,6 +6,7 @@ from cloudinit import util
from cloudinit.tests import helpers
import os
+import pytest
import shutil
import tempfile
from unittest import mock
@@ -37,24 +38,6 @@ gapmi = distros._get_arch_package_mirror_info
class TestGenericDistro(helpers.FilesystemMockingTestCase):
- def return_first(self, mlist):
- if not mlist:
- return None
- return mlist[0]
-
- def return_second(self, mlist):
- if not mlist:
- return None
- return mlist[1]
-
- def return_none(self, _mlist):
- return None
-
- def return_last(self, mlist):
- if not mlist:
- return None
- return(mlist[-1])
-
def setUp(self):
super(TestGenericDistro, self).setUp()
# Make a temp directoy for tests to use.
@@ -145,61 +128,6 @@ class TestGenericDistro(helpers.FilesystemMockingTestCase):
arch_mirrors = gapmi(package_mirrors, arch="amd64")
self.assertEqual(package_mirrors[0], arch_mirrors)
- def test_get_package_mirror_info_az_ec2(self):
- arch_mirrors = gapmi(package_mirrors, arch="amd64")
- data_source_mock = mock.Mock(availability_zone="us-east-1a")
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_first)
- self.assertEqual(results,
- {'primary': 'http://us-east-1.ec2/',
- 'security': 'http://security-mirror1-intel'})
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_second)
- self.assertEqual(results,
- {'primary': 'http://us-east-1a.clouds/',
- 'security': 'http://security-mirror2-intel'})
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_none)
- self.assertEqual(results, package_mirrors[0]['failsafe'])
-
- def test_get_package_mirror_info_az_non_ec2(self):
- arch_mirrors = gapmi(package_mirrors, arch="amd64")
- data_source_mock = mock.Mock(availability_zone="nova.cloudvendor")
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_first)
- self.assertEqual(results,
- {'primary': 'http://nova.cloudvendor.clouds/',
- 'security': 'http://security-mirror1-intel'})
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_last)
- self.assertEqual(results,
- {'primary': 'http://nova.cloudvendor.clouds/',
- 'security': 'http://security-mirror2-intel'})
-
- def test_get_package_mirror_info_none(self):
- arch_mirrors = gapmi(package_mirrors, arch="amd64")
- data_source_mock = mock.Mock(availability_zone=None)
-
- # because both search entries here replacement based on
- # availability-zone, the filter will be called with an empty list and
- # failsafe should be taken.
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_first)
- self.assertEqual(results,
- {'primary': 'http://fs-primary-intel',
- 'security': 'http://security-mirror1-intel'})
-
- results = gpmi(arch_mirrors, data_source=data_source_mock,
- mirror_filter=self.return_last)
- self.assertEqual(results,
- {'primary': 'http://fs-primary-intel',
- 'security': 'http://security-mirror2-intel'})
-
def test_systemd_in_use(self):
cls = distros.fetch("ubuntu")
d = cls("ubuntu", {}, None)
@@ -245,7 +173,7 @@ class TestGenericDistro(helpers.FilesystemMockingTestCase):
for d_name in ("ubuntu", "rhel"):
cls = distros.fetch(d_name)
d = cls(d_name, {}, None)
- with mock.patch("cloudinit.util.subp") as m_subp:
+ with mock.patch("cloudinit.subp.subp") as m_subp:
d.expire_passwd("myuser")
m_subp.assert_called_once_with(["passwd", "--expire", "myuser"])
@@ -253,10 +181,122 @@ class TestGenericDistro(helpers.FilesystemMockingTestCase):
"""Test FreeBSD.expire_passwd uses the pw command."""
cls = distros.fetch("freebsd")
d = cls("freebsd", {}, None)
- with mock.patch("cloudinit.util.subp") as m_subp:
+ with mock.patch("cloudinit.subp.subp") as m_subp:
d.expire_passwd("myuser")
m_subp.assert_called_once_with(
["pw", "usermod", "myuser", "-p", "01-Jan-1970"])
+class TestGetPackageMirrors:
+
+ def return_first(self, mlist):
+ if not mlist:
+ return None
+ return mlist[0]
+
+ def return_second(self, mlist):
+ if not mlist:
+ return None
+
+ return mlist[1] if len(mlist) > 1 else None
+
+ def return_none(self, _mlist):
+ return None
+
+ def return_last(self, mlist):
+ if not mlist:
+ return None
+        return mlist[-1]
+
+ @pytest.mark.parametrize(
+ "allow_ec2_mirror, platform_type, mirrors",
+ [
+ (True, "ec2", [
+ {'primary': 'http://us-east-1.ec2/',
+ 'security': 'http://security-mirror1-intel'},
+ {'primary': 'http://us-east-1a.clouds/',
+ 'security': 'http://security-mirror2-intel'}
+ ]),
+ (True, "other", [
+ {'primary': 'http://us-east-1.ec2/',
+ 'security': 'http://security-mirror1-intel'},
+ {'primary': 'http://us-east-1a.clouds/',
+ 'security': 'http://security-mirror2-intel'}
+ ]),
+ (False, "ec2", [
+ {'primary': 'http://us-east-1.ec2/',
+ 'security': 'http://security-mirror1-intel'},
+ {'primary': 'http://us-east-1a.clouds/',
+ 'security': 'http://security-mirror2-intel'}
+ ]),
+ (False, "other", [
+ {'primary': 'http://us-east-1a.clouds/',
+ 'security': 'http://security-mirror1-intel'},
+ {'primary': 'http://fs-primary-intel',
+ 'security': 'http://security-mirror2-intel'}
+ ])
+ ])
+ def test_get_package_mirror_info_az_ec2(self,
+ allow_ec2_mirror,
+ platform_type,
+ mirrors):
+ flag_path = "cloudinit.distros." \
+ "ALLOW_EC2_MIRRORS_ON_NON_AWS_INSTANCE_TYPES"
+ with mock.patch(flag_path, allow_ec2_mirror):
+ arch_mirrors = gapmi(package_mirrors, arch="amd64")
+ data_source_mock = mock.Mock(
+ availability_zone="us-east-1a",
+ platform_type=platform_type)
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_first)
+ assert(results == mirrors[0])
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_second)
+ assert(results == mirrors[1])
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_none)
+ assert(results == package_mirrors[0]['failsafe'])
+
+ def test_get_package_mirror_info_az_non_ec2(self):
+ arch_mirrors = gapmi(package_mirrors, arch="amd64")
+ data_source_mock = mock.Mock(availability_zone="nova.cloudvendor")
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_first)
+ assert(results == {
+ 'primary': 'http://nova.cloudvendor.clouds/',
+ 'security': 'http://security-mirror1-intel'}
+ )
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_last)
+ assert(results == {
+ 'primary': 'http://nova.cloudvendor.clouds/',
+ 'security': 'http://security-mirror2-intel'}
+ )
+
+ def test_get_package_mirror_info_none(self):
+ arch_mirrors = gapmi(package_mirrors, arch="amd64")
+ data_source_mock = mock.Mock(availability_zone=None)
+
+        # Because both search entries here are based on availability-zone
+        # replacement, the filter will be called with an empty list and the
+        # failsafe should be taken.
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_first)
+ assert(results == {
+ 'primary': 'http://fs-primary-intel',
+ 'security': 'http://security-mirror1-intel'}
+ )
+
+ results = gpmi(arch_mirrors, data_source=data_source_mock,
+ mirror_filter=self.return_last)
+ assert(results == {
+ 'primary': 'http://fs-primary-intel',
+ 'security': 'http://security-mirror2-intel'}
+ )
+
# vi: ts=4 expandtab
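
The new TestGetPackageMirrors class above leans on two pytest idioms: pytest.mark.parametrize to run one test body over several flag/platform combinations, and mock.patch on a module-level flag so each case sees the value it was parametrized with. A minimal, self-contained sketch of that pattern follows; the module object mymod and its FEATURE_FLAG are illustrative stand-ins, not cloud-init names.

import types
from unittest import mock

import pytest

# Stand-in for a module that exposes a module-level feature flag,
# similar in spirit to the ALLOW_EC2_MIRRORS_ON_NON_AWS_INSTANCE_TYPES
# flag patched in the tests above.
mymod = types.ModuleType("mymod")
mymod.FEATURE_FLAG = False
mymod.describe = lambda: "on" if mymod.FEATURE_FLAG else "off"


@pytest.mark.parametrize("flag_value, expected", [
    (True, "on"),
    (False, "off"),
])
def test_describe(flag_value, expected):
    # patch.object swaps the attribute for this test only and restores
    # the original value afterwards, so cases cannot leak into each other.
    with mock.patch.object(mymod, "FEATURE_FLAG", flag_value):
        assert mymod.describe() == expected
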
diff --git a/tests/unittests/test_distros/test_netbsd.py b/tests/unittests/test_distros/test_netbsd.py
new file mode 100644
index 00000000..11a68d2a
--- /dev/null
+++ b/tests/unittests/test_distros/test_netbsd.py
@@ -0,0 +1,17 @@
+import cloudinit.distros.netbsd
+
+import pytest
+import unittest.mock as mock
+
+
+@pytest.mark.parametrize('with_pkgin', (True, False))
+@mock.patch("cloudinit.distros.netbsd.os")
+def test_init(m_os, with_pkgin):
+ print(with_pkgin)
+ m_os.path.exists.return_value = with_pkgin
+ cfg = {}
+
+ distro = cloudinit.distros.netbsd.NetBSD("netbsd", cfg, None)
+ expectation = ['pkgin', '-y', 'full-upgrade'] if with_pkgin else None
+ assert distro.pkg_cmd_upgrade_prefix == expectation
+ assert [mock.call('/usr/pkg/bin/pkgin')] == m_os.path.exists.call_args_list
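
When pytest.mark.parametrize is stacked on top of mock.patch, as in test_init above, the decorators are applied bottom-up: the patch decorator injects its mock as the first test argument, and the parametrized value is matched by name after it. A short sketch of that ordering, assuming nothing beyond the standard library and pytest:

import os
import unittest.mock as mock

import pytest


@pytest.mark.parametrize('exists', (True, False))  # outer decorator
@mock.patch('os.path.exists')                      # inner decorator -> first argument
def test_exists(m_exists, exists):
    m_exists.return_value = exists
    # os.path.exists is the mock for the duration of this test.
    assert os.path.exists('/no/such/path') is exists
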
diff --git a/tests/unittests/test_distros/test_netconfig.py b/tests/unittests/test_distros/test_netconfig.py
index ccf66161..8d7b09c8 100644
--- a/tests/unittests/test_distros/test_netconfig.py
+++ b/tests/unittests/test_distros/test_netconfig.py
@@ -12,6 +12,7 @@ from cloudinit import helpers
from cloudinit import settings
from cloudinit.tests.helpers import (
FilesystemMockingTestCase, dir2dict)
+from cloudinit import subp
from cloudinit import util
@@ -532,7 +533,7 @@ class TestNetCfgDistroRedhat(TestNetCfgDistroBase):
NETWORKING_IPV6=yes
IPV6_AUTOCONF=no
"""),
- }
+ }
# rh_distro.apply_network_config(V1_NET_CFG_IPV6, False)
self._apply_and_verify(self.distro.apply_network_config,
V1_NET_CFG_IPV6,
@@ -656,7 +657,7 @@ class TestNetCfgDistroArch(TestNetCfgDistroBase):
IP=dhcp
Interface=eth1
"""),
- }
+ }
# ub_distro.apply_network_config(V1_NET_CFG, False)
self._apply_and_verify(self.distro.apply_network_config,
@@ -688,6 +689,6 @@ class TestNetCfgDistroArch(TestNetCfgDistroBase):
def get_mode(path, target=None):
- return os.stat(util.target_path(target, path)).st_mode & 0o777
+ return os.stat(subp.target_path(target, path)).st_mode & 0o777
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_distros/test_user_data_normalize.py b/tests/unittests/test_distros/test_user_data_normalize.py
index a6faf0ef..fa48410a 100644
--- a/tests/unittests/test_distros/test_user_data_normalize.py
+++ b/tests/unittests/test_distros/test_user_data_normalize.py
@@ -307,7 +307,7 @@ class TestUGNormalize(TestCase):
self.assertEqual({'default': False}, users['joe'])
self.assertEqual({'default': False}, users['bob'])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_create_snap_user(self, mock_subp):
mock_subp.side_effect = [('{"username": "joe", "ssh-key-count": 1}\n',
'')]
@@ -326,7 +326,7 @@ class TestUGNormalize(TestCase):
mock_subp.assert_called_with(snapcmd, capture=True, logstring=snapcmd)
self.assertEqual(username, 'joe')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_create_snap_user_known(self, mock_subp):
mock_subp.side_effect = [('{"username": "joe", "ssh-key-count": 1}\n',
'')]
@@ -348,7 +348,7 @@ class TestUGNormalize(TestCase):
@mock.patch('cloudinit.util.system_is_snappy')
@mock.patch('cloudinit.util.is_group')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_add_user_on_snappy_system(self, mock_subp, mock_isgrp,
mock_snappy):
mock_isgrp.return_value = False
diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py
index 36d7fbbf..9314b244 100644
--- a/tests/unittests/test_ds_identify.py
+++ b/tests/unittests/test_ds_identify.py
@@ -6,6 +6,7 @@ import os
from uuid import uuid4
from cloudinit import safeyaml
+from cloudinit import subp
from cloudinit import util
from cloudinit.tests.helpers import (
CiTestCase, dir2dict, populate_dir, populate_dir_with_ts)
@@ -160,8 +161,8 @@ class DsIdentifyBase(CiTestCase):
rc = 0
try:
- out, err = util.subp(['sh', '-c', '. %s' % wrap], capture=True)
- except util.ProcessExecutionError as e:
+ out, err = subp.subp(['sh', '-c', '. %s' % wrap], capture=True)
+ except subp.ProcessExecutionError as e:
rc = e.exit_code
out = e.stdout
err = e.stderr
@@ -272,6 +273,10 @@ class TestDsIdentify(DsIdentifyBase):
"""Rbx datasource has a disk with LABEL=CLOUDMD."""
self._test_ds_found('RbxCloud')
+ def test_rbx_cloud_lower(self):
+ """Rbx datasource has a disk with LABEL=cloudmd."""
+ self._test_ds_found('RbxCloudLower')
+
def test_config_drive_upper(self):
"""ConfigDrive datasource has a disk with LABEL=CONFIG-2."""
self._test_ds_found('ConfigDriveUpper')
@@ -447,6 +452,10 @@ class TestDsIdentify(DsIdentifyBase):
"""Open Telecom identification."""
self._test_ds_found('OpenStack-OpenTelekom')
+ def test_openstack_sap_ccloud(self):
+ """SAP Converged Cloud identification"""
+ self._test_ds_found('OpenStack-SAPCCloud')
+
def test_openstack_asset_tag_nova(self):
"""OpenStack identification via asset tag OpenStack Nova."""
self._test_ds_found('OpenStack-AssetTag-Nova')
@@ -568,6 +577,10 @@ class TestDsIdentify(DsIdentifyBase):
"""NoCloud is found with uppercase filesystem label."""
self._test_ds_found('NoCloudUpper')
+ def test_nocloud_fatboot(self):
+ """NoCloud fatboot label - LP: #184166."""
+ self._test_ds_found('NoCloud-fatboot')
+
def test_nocloud_seed(self):
"""Nocloud seed directory."""
self._test_ds_found('NoCloud-seed')
@@ -607,8 +620,10 @@ class TestDsIdentify(DsIdentifyBase):
ret = self._check_via_dict(
cust, RC_FOUND,
func=".", args=[os.path.join(rootd, mpp)], rootd=rootd)
- line = [l for l in ret.stdout.splitlines() if l.startswith(pre)][0]
- toks = line.replace(pre, "").split(":")
+ match = [
+ line for line in ret.stdout.splitlines() if line.startswith(pre)
+ ][0]
+ toks = match.replace(pre, "").split(":")
expected = ["/sbin", "/bin", "/usr/sbin", "/usr/bin", "/mycust/path"]
self.assertEqual(expected, [p for p in expected if p in toks],
"path did not have expected tokens")
@@ -805,6 +820,20 @@ VALID_CFG = {
'dev/vdb': 'pretend iso content for cidata\n',
}
},
+ 'NoCloud-fatboot': {
+ 'ds': 'NoCloud',
+ 'mocks': [
+ MOCK_VIRT_IS_XEN,
+ {'name': 'blkid', 'ret': 0,
+ 'out': blkid_out(
+ BLKID_UEFI_UBUNTU +
+ [{'DEVNAME': 'xvdb', 'TYPE': 'vfat', 'SEC_TYPE': 'msdos',
+ 'UUID': '355a-4FC2', 'LABEL_FATBOOT': 'cidata'}])},
+ ],
+ 'files': {
+ 'dev/vdb': 'pretend iso content for cidata\n',
+ }
+ },
'NoCloud-seed': {
'ds': 'NoCloud',
'files': {
@@ -834,6 +863,12 @@ VALID_CFG = {
'files': {P_CHASSIS_ASSET_TAG: 'OpenTelekomCloud\n'},
'mocks': [MOCK_VIRT_IS_XEN],
},
+ 'OpenStack-SAPCCloud': {
+ # SAP CCloud hosts use OpenStack on VMware
+ 'ds': 'OpenStack',
+ 'files': {P_CHASSIS_ASSET_TAG: 'SAP CCloud VM\n'},
+ 'mocks': [MOCK_VIRT_IS_VMWARE],
+ },
'OpenStack-AssetTag-Nova': {
# VMware vSphere can't modify product-name, LP: #1669875
'ds': 'OpenStack',
@@ -935,6 +970,18 @@ VALID_CFG = {
)},
],
},
+ 'RbxCloudLower': {
+ 'ds': 'RbxCloud',
+ 'mocks': [
+ {'name': 'blkid', 'ret': 0,
+ 'out': blkid_out(
+ [{'DEVNAME': 'vda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()},
+ {'DEVNAME': 'vda2', 'TYPE': 'ext4',
+ 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()},
+ {'DEVNAME': 'vdb', 'TYPE': 'vfat', 'LABEL': 'cloudmd'}]
+ )},
+ ],
+ },
'Hetzner': {
'ds': 'Hetzner',
'files': {P_SYS_VENDOR: 'Hetzner\n'},
@@ -1028,11 +1075,11 @@ VALID_CFG = {
'Ec2-E24Cloud': {
'ds': 'Ec2',
'files': {P_SYS_VENDOR: 'e24cloud\n'},
- },
+ },
'Ec2-E24Cloud-negative': {
'ds': 'Ec2',
'files': {P_SYS_VENDOR: 'e24cloudyday\n'},
- }
+ }
}
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apk_configure.py b/tests/unittests/test_handler/test_handler_apk_configure.py
new file mode 100644
index 00000000..8acc0b33
--- /dev/null
+++ b/tests/unittests/test_handler/test_handler_apk_configure.py
@@ -0,0 +1,299 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+""" test_apk_configure
+Test creation of repositories file
+"""
+
+import logging
+import os
+import textwrap
+
+from cloudinit import (cloud, helpers, util)
+
+from cloudinit.config import cc_apk_configure
+from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock)
+
+REPO_FILE = "/etc/apk/repositories"
+DEFAULT_MIRROR_URL = "https://alpine.global.ssl.fastly.net/alpine"
+CC_APK = 'cloudinit.config.cc_apk_configure'
+
+
+class TestNoConfig(FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestNoConfig, self).setUp()
+ self.add_patch(CC_APK + '._write_repositories_file', 'm_write_repos')
+ self.name = "apk-configure"
+ self.cloud_init = None
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ def test_no_config(self):
+ """
+ Test that nothing is done if no apk-configure
+ configuration is provided.
+ """
+ config = util.get_builtin_cfg()
+
+ cc_apk_configure.handle(self.name, config, self.cloud_init,
+ self.log, self.args)
+
+ self.assertEqual(0, self.m_write_repos.call_count)
+
+
+class TestConfig(FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestConfig, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.new_root = self.reRoot(root=self.new_root)
+ for dirname in ['tmp', 'etc/apk']:
+ util.ensure_dir(os.path.join(self.new_root, dirname))
+ self.paths = helpers.Paths({'templates_dir': self.new_root})
+ self.name = "apk-configure"
+ self.cloud = cloud.Cloud(None, self.paths, None, None, None)
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ @mock.patch(CC_APK + '._write_repositories_file')
+ def test_no_repo_settings(self, m_write_repos):
+ """
+        Test that nothing is written if the 'alpine_repo' key
+ is not present.
+ """
+ config = {"apk_repos": {}}
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ self.assertEqual(0, m_write_repos.call_count)
+
+ @mock.patch(CC_APK + '._write_repositories_file')
+ def test_empty_repo_settings(self, m_write_repos):
+ """
+ Test that nothing is written if 'alpine_repo' list is empty.
+ """
+ config = {"apk_repos": {"alpine_repo": []}}
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ self.assertEqual(0, m_write_repos.call_count)
+
+ def test_only_main_repo(self):
+ """
+        Test that only the details of the main repo are written to the file.
+ """
+ alpine_version = 'v3.12'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_and_community_repos(self):
+ """
+ Test when only details of main and community repos are
+ written to file.
+ """
+ alpine_version = 'edge'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_community_testing_repos(self):
+ """
+ Test when details of main, community and testing repos
+ are written to file.
+ """
+ alpine_version = 'v3.12'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ #
+ # Testing - using with non-Edge installation may cause problems!
+ #
+ {0}/edge/testing
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_edge_main_community_testing_repos(self):
+ """
+ Test when details of main, community and testing repos
+ for Edge version of Alpine are written to file.
+ """
+ alpine_version = 'edge'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ {0}/{1}/testing
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_community_testing_local_repos(self):
+ """
+ Test when details of main, community, testing and
+ local repos are written to file.
+ """
+ alpine_version = 'v3.12'
+ local_repo_url = 'http://some.mirror/whereever'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ },
+ "local_repo_base_url": local_repo_url
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ #
+ # Testing - using with non-Edge installation may cause problems!
+ #
+ {0}/edge/testing
+
+ #
+ # Local repo
+ #
+ {2}/{1}
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_edge_main_community_testing_local_repos(self):
+ """
+ Test when details of main, community, testing and local repos
+ for Edge version of Alpine are written to file.
+ """
+ alpine_version = 'edge'
+ local_repo_url = 'http://some.mirror/whereever'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ },
+ "local_repo_base_url": local_repo_url
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ {0}/edge/testing
+
+ #
+ # Local repo
+ #
+ {2}/{1}
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py
index 69009a44..369480be 100644
--- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py
+++ b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py
@@ -13,6 +13,7 @@ from cloudinit import cloud
from cloudinit import distros
from cloudinit import helpers
from cloudinit import templater
+from cloudinit import subp
from cloudinit import util
from cloudinit.config import cc_apt_configure
@@ -66,7 +67,7 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
"""
def setUp(self):
super(TestAptSourceConfigSourceList, self).setUp()
- self.subp = util.subp
+ self.subp = subp.subp
self.new_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.new_root)
@@ -100,6 +101,7 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
cfg = {'apt_mirror_search': mirror}
else:
cfg = {'apt_mirror': mirror}
+
mycloud = self._get_cloud(distro)
with mock.patch.object(util, 'write_file') as mockwf:
@@ -107,8 +109,9 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
return_value="faketmpl") as mocklf:
with mock.patch.object(os.path, 'isfile',
return_value=True) as mockisfile:
- with mock.patch.object(templater, 'render_string',
- return_value="fake") as mockrnd:
+ with mock.patch.object(
+ templater, 'render_string',
+ return_value='fake') as mockrnd:
with mock.patch.object(util, 'rename'):
cc_apt_configure.handle("test", cfg, mycloud,
LOG, None)
@@ -176,7 +179,7 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
# the second mock restores the original subp
with mock.patch.object(util, 'write_file') as mockwrite:
- with mock.patch.object(util, 'subp', self.subp):
+ with mock.patch.object(subp, 'subp', self.subp):
with mock.patch.object(Distro, 'get_primary_arch',
return_value='amd64'):
cc_apt_configure.handle("notimportant", cfg, mycloud,
diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py
index 0aa3d51a..b96fd4d4 100644
--- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py
+++ b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py
@@ -13,6 +13,7 @@ from unittest.mock import call
from cloudinit import cloud
from cloudinit import distros
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.config import cc_apt_configure
@@ -94,7 +95,7 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
"""TestAptSourceConfigSourceList - Class to test sources list rendering"""
def setUp(self):
super(TestAptSourceConfigSourceList, self).setUp()
- self.subp = util.subp
+ self.subp = subp.subp
self.new_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.new_root)
@@ -222,7 +223,7 @@ class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
# the second mock restores the original subp
with mock.patch.object(util, 'write_file') as mockwrite:
- with mock.patch.object(util, 'subp', self.subp):
+ with mock.patch.object(subp, 'subp', self.subp):
with mock.patch.object(Distro, 'get_primary_arch',
return_value='amd64'):
cc_apt_configure.handle("notimportant", cfg, mycloud,
diff --git a/tests/unittests/test_handler/test_handler_apt_source_v1.py b/tests/unittests/test_handler/test_handler_apt_source_v1.py
index 866752ef..367971cb 100644
--- a/tests/unittests/test_handler/test_handler_apt_source_v1.py
+++ b/tests/unittests/test_handler/test_handler_apt_source_v1.py
@@ -14,6 +14,7 @@ from unittest.mock import call
from cloudinit.config import cc_apt_configure
from cloudinit import gpg
+from cloudinit import subp
from cloudinit import util
from cloudinit.tests.helpers import TestCase
@@ -42,10 +43,17 @@ class FakeDistro(object):
return
+class FakeDatasource:
+ """Fake Datasource helper object"""
+ def __init__(self):
+ self.region = 'region'
+
+
class FakeCloud(object):
"""Fake Cloud helper object"""
def __init__(self):
self.distro = FakeDistro()
+ self.datasource = FakeDatasource()
class TestAptSourceConfig(TestCase):
@@ -271,7 +279,7 @@ class TestAptSourceConfig(TestCase):
"""
cfg = self.wrapv1conf(cfg)
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('fakekey 1234', '')) as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
@@ -356,7 +364,7 @@ class TestAptSourceConfig(TestCase):
"""
cfg = self.wrapv1conf([cfg])
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
mockobj.assert_called_with(['apt-key', 'add', '-'],
@@ -398,7 +406,7 @@ class TestAptSourceConfig(TestCase):
'filename': self.aptlistfile}
cfg = self.wrapv1conf([cfg])
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
mockobj.assert_called_once_with(['apt-key', 'add', '-'],
@@ -413,7 +421,7 @@ class TestAptSourceConfig(TestCase):
'filename': self.aptlistfile}
cfg = self.wrapv1conf([cfg])
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('fakekey 1212', '')) as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
@@ -476,7 +484,7 @@ class TestAptSourceConfig(TestCase):
'filename': self.aptlistfile}
cfg = self.wrapv1conf([cfg])
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
mockobj.assert_called_once_with(['add-apt-repository',
'ppa:smoser/cloud-init-test'],
@@ -495,7 +503,7 @@ class TestAptSourceConfig(TestCase):
'filename': self.aptlistfile3}
cfg = self.wrapv1conf([cfg1, cfg2, cfg3])
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
cc_apt_configure.handle("test", cfg, self.fakecloud,
None, None)
calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
diff --git a/tests/unittests/test_handler/test_handler_apt_source_v3.py b/tests/unittests/test_handler/test_handler_apt_source_v3.py
index 90949b6d..ac847238 100644
--- a/tests/unittests/test_handler/test_handler_apt_source_v3.py
+++ b/tests/unittests/test_handler/test_handler_apt_source_v3.py
@@ -18,6 +18,7 @@ from cloudinit import cloud
from cloudinit import distros
from cloudinit import gpg
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.config import cc_apt_configure
@@ -48,6 +49,18 @@ MOCK_LSB_RELEASE_DATA = {
'release': '18.04', 'codename': 'bionic'}
+class FakeDatasource:
+ """Fake Datasource helper object"""
+ def __init__(self):
+ self.region = 'region'
+
+
+class FakeCloud:
+ """Fake Cloud helper object"""
+ def __init__(self):
+ self.datasource = FakeDatasource()
+
+
class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"""TestAptSourceConfig
Main Class to test apt configs
@@ -221,7 +234,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"""
params = self._get_default_params()
- with mock.patch("cloudinit.util.subp",
+ with mock.patch("cloudinit.subp.subp",
return_value=('fakekey 1234', '')) as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
@@ -296,7 +309,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
' xenial main'),
'key': "fakekey 4321"}}
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
@@ -318,7 +331,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
params = self._get_default_params()
cfg = {self.aptlistfile: {'key': "fakekey 4242"}}
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
@@ -333,7 +346,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
params = self._get_default_params()
cfg = {self.aptlistfile: {'keyid': "03683F77"}}
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('fakekey 1212', '')) as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
@@ -416,7 +429,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
params = self._get_default_params()
cfg = {self.aptlistfile: {'source': 'ppa:smoser/cloud-init-test'}}
- with mock.patch("cloudinit.util.subp") as mockobj:
+ with mock.patch("cloudinit.subp.subp") as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
mockobj.assert_any_call(['add-apt-repository',
@@ -432,7 +445,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
self.aptlistfile2: {'source': 'ppa:smoser/cloud-init-test2'},
self.aptlistfile3: {'source': 'ppa:smoser/cloud-init-test3'}}
- with mock.patch("cloudinit.util.subp") as mockobj:
+ with mock.patch("cloudinit.subp.subp") as mockobj:
self._add_apt_sources(cfg, TARGET, template_params=params,
aa_repo_match=self.matcher)
calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
@@ -470,7 +483,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
fromfn = ("%s/%s_%s" % (pre, archive, post))
tofn = ("%s/test.ubuntu.com_%s" % (pre, post))
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None, arch)
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
self.assertEqual(mirrors['MIRROR'],
"http://test.ubuntu.com/%s/" % component)
@@ -558,7 +571,8 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"security": [{'arches': ["default"],
"uri": smir}]}
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None, 'amd64')
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), 'amd64')
self.assertEqual(mirrors['MIRROR'],
pmir)
@@ -593,7 +607,7 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"security": [{'arches': ["default"], "uri": "nothis-security"},
{'arches': [arch], "uri": smir}]}
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None, arch)
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
self.assertEqual(mirrors['PRIMARY'], pmir)
self.assertEqual(mirrors['MIRROR'], pmir)
@@ -612,7 +626,8 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
{'arches': ["default"],
"uri": smir}]}
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None, 'amd64')
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), 'amd64')
self.assertEqual(mirrors['MIRROR'],
pmir)
@@ -670,9 +685,9 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"security": [{'arches': ["default"],
"search": ["sfailme", smir]}]}
- with mock.patch.object(cc_apt_configure, 'search_for_mirror',
+ with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
side_effect=[pmir, smir]) as mocksearch:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None,
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(),
'amd64')
calls = [call(["pfailme", pmir]),
@@ -709,9 +724,10 @@ class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
mockgm.assert_has_calls(calls)
# should not be called, since primary is specified
- with mock.patch.object(cc_apt_configure,
+ with mock.patch.object(cc_apt_configure.util,
'search_for_mirror') as mockse:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, None, arch)
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), arch)
mockse.assert_not_called()
self.assertEqual(mirrors['MIRROR'],
@@ -974,7 +990,7 @@ deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
mocksdns.assert_has_calls(calls)
# first return is for the non-dns call before
- with mock.patch.object(cc_apt_configure, 'search_for_mirror',
+ with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
side_effect=[None, pmir, None, smir]) as mockse:
mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
@@ -996,7 +1012,7 @@ deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
class TestDebconfSelections(TestCase):
- @mock.patch("cloudinit.config.cc_apt_configure.util.subp")
+ @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
def test_set_sel_appends_newline_if_absent(self, m_subp):
"""Automatically append a newline to debconf-set-selections config."""
selections = b'some/setting boolean true'
@@ -1033,7 +1049,9 @@ class TestDebconfSelections(TestCase):
# assumes called with *args value.
selections = m_set_sel.call_args_list[0][0][0].decode()
- missing = [l for l in lines if l not in selections.splitlines()]
+ missing = [
+ line for line in lines if line not in selections.splitlines()
+ ]
self.assertEqual([], missing)
@mock.patch("cloudinit.config.cc_apt_configure.dpkg_reconfigure")
@@ -1079,7 +1097,7 @@ class TestDebconfSelections(TestCase):
self.assertTrue(m_get_inst.called)
self.assertEqual(m_dpkg_r.call_count, 0)
- @mock.patch("cloudinit.config.cc_apt_configure.util.subp")
+ @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
def test_dpkg_reconfigure_does_reconfigure(self, m_subp):
target = "/foo-target"
@@ -1102,12 +1120,12 @@ class TestDebconfSelections(TestCase):
'cloud-init']
self.assertEqual(expected, found)
- @mock.patch("cloudinit.config.cc_apt_configure.util.subp")
+ @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
def test_dpkg_reconfigure_not_done_on_no_data(self, m_subp):
cc_apt_configure.dpkg_reconfigure([])
m_subp.assert_not_called()
- @mock.patch("cloudinit.config.cc_apt_configure.util.subp")
+ @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
def test_dpkg_reconfigure_not_done_if_no_cleaners(self, m_subp):
cc_apt_configure.dpkg_reconfigure(['pkgfoo', 'pkgbar'])
m_subp.assert_not_called()
diff --git a/tests/unittests/test_handler/test_handler_bootcmd.py b/tests/unittests/test_handler/test_handler_bootcmd.py
index a76760fa..b53d60d4 100644
--- a/tests/unittests/test_handler/test_handler_bootcmd.py
+++ b/tests/unittests/test_handler/test_handler_bootcmd.py
@@ -2,7 +2,7 @@
from cloudinit.config.cc_bootcmd import handle, schema
from cloudinit.sources import DataSourceNone
-from cloudinit import (distros, helpers, cloud, util)
+from cloudinit import (distros, helpers, cloud, subp, util)
from cloudinit.tests.helpers import (
CiTestCase, mock, SchemaTestCaseMixin, skipUnlessJsonSchema)
@@ -36,7 +36,7 @@ class TestBootcmd(CiTestCase):
def setUp(self):
super(TestBootcmd, self).setUp()
- self.subp = util.subp
+ self.subp = subp.subp
self.new_root = self.tmp_dir()
def _get_cloud(self, distro):
@@ -130,7 +130,7 @@ class TestBootcmd(CiTestCase):
with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
with self.allow_subp(['/bin/sh']):
- with self.assertRaises(util.ProcessExecutionError) as ctxt:
+ with self.assertRaises(subp.ProcessExecutionError) as ctxt:
handle('does-not-matter', valid_config, cc, LOG, [])
self.assertIn(
'Unexpected error while running command.\n'
diff --git a/tests/unittests/test_handler/test_handler_ca_certs.py b/tests/unittests/test_handler/test_handler_ca_certs.py
index 5b4105dd..e74a0a08 100644
--- a/tests/unittests/test_handler/test_handler_ca_certs.py
+++ b/tests/unittests/test_handler/test_handler_ca_certs.py
@@ -1,8 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
from cloudinit import cloud
+from cloudinit import distros
from cloudinit.config import cc_ca_certs
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.tests.helpers import TestCase
@@ -11,13 +13,9 @@ import logging
import shutil
import tempfile
import unittest
+from contextlib import ExitStack
from unittest import mock
-try:
- from contextlib import ExitStack
-except ImportError:
- from contextlib2 import ExitStack
-
class TestNoConfig(unittest.TestCase):
def setUp(self):
@@ -49,8 +47,9 @@ class TestConfig(TestCase):
def setUp(self):
super(TestConfig, self).setUp()
self.name = "ca-certs"
+ distro = self._fetch_distro('ubuntu')
self.paths = None
- self.cloud = cloud.Cloud(None, self.paths, None, None, None)
+ self.cloud = cloud.Cloud(None, self.paths, None, distro, None)
self.log = logging.getLogger("TestNoConfig")
self.args = []
@@ -65,6 +64,11 @@ class TestConfig(TestCase):
self.mock_remove = self.mocks.enter_context(
mock.patch.object(cc_ca_certs, 'remove_default_ca_certs'))
+ def _fetch_distro(self, kind):
+ cls = distros.fetch(kind)
+ paths = helpers.Paths({})
+ return cls(kind, {}, paths)
+
def test_no_trusted_list(self):
"""
Test that no certificates are written if the 'trusted' key is not
@@ -204,6 +208,28 @@ class TestAddCaCerts(TestCase):
mock_load.assert_called_once_with("/etc/ca-certificates.conf")
+ def test_single_cert_to_empty_existing_ca_file(self):
+ """Test adding a single certificate to the trusted CAs
+ when existing ca-certificates.conf is empty"""
+ cert = "CERT1\nLINE2\nLINE3"
+
+ expected = "cloud-init-ca-certs.crt\n"
+
+ with ExitStack() as mocks:
+ mock_write = mocks.enter_context(
+ mock.patch.object(util, 'write_file', autospec=True))
+ mock_stat = mocks.enter_context(
+ mock.patch("cloudinit.config.cc_ca_certs.os.stat")
+ )
+ mock_stat.return_value.st_size = 0
+
+ cc_ca_certs.add_ca_certs([cert])
+
+ mock_write.assert_has_calls([
+ mock.call("/usr/share/ca-certificates/cloud-init-ca-certs.crt",
+ cert, mode=0o644),
+ mock.call("/etc/ca-certificates.conf", expected, omode="wb")])
+
def test_multiple_certs(self):
"""Test adding multiple certificates to the trusted CAs."""
certs = ["CERT1\nLINE2\nLINE3", "CERT2\nLINE2\nLINE3"]
@@ -232,7 +258,7 @@ class TestAddCaCerts(TestCase):
class TestUpdateCaCerts(unittest.TestCase):
def test_commands(self):
- with mock.patch.object(util, 'subp') as mockobj:
+ with mock.patch.object(subp, 'subp') as mockobj:
cc_ca_certs.update_ca_certs()
mockobj.assert_called_once_with(
["update-ca-certificates"], capture=False)
@@ -254,9 +280,9 @@ class TestRemoveDefaultCaCerts(TestCase):
mock.patch.object(util, 'delete_dir_contents'))
mock_write = mocks.enter_context(
mock.patch.object(util, 'write_file'))
- mock_subp = mocks.enter_context(mock.patch.object(util, 'subp'))
+ mock_subp = mocks.enter_context(mock.patch.object(subp, 'subp'))
- cc_ca_certs.remove_default_ca_certs()
+ cc_ca_certs.remove_default_ca_certs('ubuntu')
mock_delete.assert_has_calls([
mock.call("/usr/share/ca-certificates/"),
diff --git a/tests/unittests/test_handler/test_handler_chef.py b/tests/unittests/test_handler/test_handler_chef.py
index 2dab3a54..7918c609 100644
--- a/tests/unittests/test_handler/test_handler_chef.py
+++ b/tests/unittests/test_handler/test_handler_chef.py
@@ -41,7 +41,7 @@ class TestInstallChefOmnibus(HttprettyTestCase):
httpretty.GET, cc_chef.OMNIBUS_URL, body=response, status=200)
ret = (None, None) # stdout, stderr but capture=False
- with mock.patch("cloudinit.config.cc_chef.util.subp_blob_in_tempfile",
+ with mock.patch("cloudinit.config.cc_chef.subp_blob_in_tempfile",
return_value=ret) as m_subp_blob:
cc_chef.install_chef_from_omnibus()
# admittedly whitebox, but assuming subp_blob_in_tempfile works
@@ -52,7 +52,7 @@ class TestInstallChefOmnibus(HttprettyTestCase):
m_subp_blob.call_args_list)
@mock.patch('cloudinit.config.cc_chef.url_helper.readurl')
- @mock.patch('cloudinit.config.cc_chef.util.subp_blob_in_tempfile')
+ @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
def test_install_chef_from_omnibus_retries_url(self, m_subp_blob, m_rdurl):
"""install_chef_from_omnibus retries OMNIBUS_URL upon failure."""
@@ -65,23 +65,23 @@ class TestInstallChefOmnibus(HttprettyTestCase):
cc_chef.install_chef_from_omnibus()
expected_kwargs = {'retries': cc_chef.OMNIBUS_URL_RETRIES,
'url': cc_chef.OMNIBUS_URL}
- self.assertItemsEqual(expected_kwargs, m_rdurl.call_args_list[0][1])
+ self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[0][1])
cc_chef.install_chef_from_omnibus(retries=10)
expected_kwargs = {'retries': 10,
'url': cc_chef.OMNIBUS_URL}
- self.assertItemsEqual(expected_kwargs, m_rdurl.call_args_list[1][1])
+ self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[1][1])
expected_subp_kwargs = {
'args': ['-v', '2.0'],
'basename': 'chef-omnibus-install',
'blob': m_rdurl.return_value.contents,
'capture': False
}
- self.assertItemsEqual(
+ self.assertCountEqual(
expected_subp_kwargs,
m_subp_blob.call_args_list[0][1])
@mock.patch("cloudinit.config.cc_chef.OMNIBUS_URL", OMNIBUS_URL_HTTP)
- @mock.patch('cloudinit.config.cc_chef.util.subp_blob_in_tempfile')
+ @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
def test_install_chef_from_omnibus_has_omnibus_version(self, m_subp_blob):
"""install_chef_from_omnibus provides version arg to OMNIBUS_URL."""
chef_outfile = self.tmp_path('chef.out', self.new_root)
@@ -97,7 +97,7 @@ class TestInstallChefOmnibus(HttprettyTestCase):
'blob': response,
'capture': False
}
- self.assertItemsEqual(expected_kwargs, called_kwargs)
+ self.assertCountEqual(expected_kwargs, called_kwargs)
class TestChef(FilesystemMockingTestCase):
@@ -130,6 +130,7 @@ class TestChef(FilesystemMockingTestCase):
# This should create a file of the format...
# Created by cloud-init v. 0.7.6 on Sat, 11 Oct 2014 23:57:21 +0000
+ chef_license "accept"
log_level :info
ssl_verify_mode :verify_none
log_location "/var/log/chef/client.log"
@@ -153,6 +154,7 @@ class TestChef(FilesystemMockingTestCase):
util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
cfg = {
'chef': {
+ 'chef_license': "accept",
'server_url': 'localhost',
'validation_name': 'bob',
'validation_key': "/etc/chef/vkey.pem",
diff --git a/tests/unittests/test_handler/test_handler_disk_setup.py b/tests/unittests/test_handler/test_handler_disk_setup.py
index 0e51f17a..4f4a57fa 100644
--- a/tests/unittests/test_handler/test_handler_disk_setup.py
+++ b/tests/unittests/test_handler/test_handler_disk_setup.py
@@ -44,7 +44,7 @@ class TestGetMbrHddSize(TestCase):
super(TestGetMbrHddSize, self).setUp()
self.patches = ExitStack()
self.subp = self.patches.enter_context(
- mock.patch.object(cc_disk_setup.util, 'subp'))
+ mock.patch.object(cc_disk_setup.subp, 'subp'))
def tearDown(self):
super(TestGetMbrHddSize, self).tearDown()
@@ -173,7 +173,7 @@ class TestUpdateFsSetupDevices(TestCase):
@mock.patch('cloudinit.config.cc_disk_setup.find_device_node',
return_value=('/dev/xdb1', False))
@mock.patch('cloudinit.config.cc_disk_setup.device_type', return_value=None)
-@mock.patch('cloudinit.config.cc_disk_setup.util.subp', return_value=('', ''))
+@mock.patch('cloudinit.config.cc_disk_setup.subp.subp', return_value=('', ''))
class TestMkfsCommandHandling(CiTestCase):
with_logs = True
@@ -204,7 +204,7 @@ class TestMkfsCommandHandling(CiTestCase):
subp.assert_called_once_with(
'mkfs -t ext4 -L with_cmd /dev/xdb1', shell=True)
- @mock.patch('cloudinit.config.cc_disk_setup.util.which')
+ @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
def test_overwrite_and_extra_opts_without_cmd(self, m_which, subp, *args):
"""mkfs observes extra_opts and overwrite settings when cmd is not
present."""
@@ -222,7 +222,7 @@ class TestMkfsCommandHandling(CiTestCase):
'-L', 'without_cmd', '-F', 'are', 'added'],
shell=False)
- @mock.patch('cloudinit.config.cc_disk_setup.util.which')
+ @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
def test_mkswap(self, m_which, subp, *args):
"""mkfs observes extra_opts and overwrite settings when cmd is not
present."""
diff --git a/tests/unittests/test_handler/test_handler_etc_hosts.py b/tests/unittests/test_handler/test_handler_etc_hosts.py
index d854afcb..e3778b11 100644
--- a/tests/unittests/test_handler/test_handler_etc_hosts.py
+++ b/tests/unittests/test_handler/test_handler_etc_hosts.py
@@ -44,8 +44,8 @@ class TestHostsFile(t_help.FilesystemMockingTestCase):
self.patchUtils(self.tmp)
cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
contents = util.load_file('%s/etc/hosts' % self.tmp)
- if '127.0.0.1\tcloud-init.test.us\tcloud-init' not in contents:
- self.assertIsNone('No entry for 127.0.0.1 in etc/hosts')
+ if '127.0.1.1\tcloud-init.test.us\tcloud-init' not in contents:
+ self.assertIsNone('No entry for 127.0.1.1 in etc/hosts')
if '192.168.1.1\tblah.blah.us\tblah' not in contents:
self.assertIsNone('Default etc/hosts content modified')
@@ -64,7 +64,7 @@ class TestHostsFile(t_help.FilesystemMockingTestCase):
self.patchUtils(self.tmp)
cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
contents = util.load_file('%s/etc/hosts' % self.tmp)
- if '127.0.0.1 cloud-init.test.us cloud-init' not in contents:
- self.assertIsNone('No entry for 127.0.0.1 in etc/hosts')
+ if '127.0.1.1 cloud-init.test.us cloud-init' not in contents:
+ self.assertIsNone('No entry for 127.0.1.1 in etc/hosts')
if '::1 cloud-init.test.us cloud-init' not in contents:
self.assertIsNone('No entry for ::1 in etc/hosts')
diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py
index 43b53745..7f039b79 100644
--- a/tests/unittests/test_handler/test_handler_growpart.py
+++ b/tests/unittests/test_handler/test_handler_growpart.py
@@ -2,7 +2,7 @@
from cloudinit import cloud
from cloudinit.config import cc_growpart
-from cloudinit import util
+from cloudinit import subp
from cloudinit.tests.helpers import TestCase
@@ -11,13 +11,9 @@ import logging
import os
import re
import unittest
+from contextlib import ExitStack
from unittest import mock
-try:
- from contextlib import ExitStack
-except ImportError:
- from contextlib2 import ExitStack
-
# growpart:
# mode: auto # off, on, auto, 'growpart'
# devices: ['root']
@@ -99,7 +95,7 @@ class TestConfig(TestCase):
@mock.patch.dict("os.environ", clear=True)
def test_no_resizers_auto_is_fine(self):
with mock.patch.object(
- util, 'subp',
+ subp, 'subp',
return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:
config = {'growpart': {'mode': 'auto'}}
@@ -113,7 +109,7 @@ class TestConfig(TestCase):
@mock.patch.dict("os.environ", clear=True)
def test_no_resizers_mode_growpart_is_exception(self):
with mock.patch.object(
- util, 'subp',
+ subp, 'subp',
return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:
config = {'growpart': {'mode': "growpart"}}
self.assertRaises(
@@ -126,7 +122,7 @@ class TestConfig(TestCase):
@mock.patch.dict("os.environ", clear=True)
def test_mode_auto_prefers_growpart(self):
with mock.patch.object(
- util, 'subp',
+ subp, 'subp',
return_value=(HELP_GROWPART_RESIZE, "")) as mockobj:
ret = cc_growpart.resizer_factory(mode="auto")
self.assertIsInstance(ret, cc_growpart.ResizeGrowPart)
@@ -137,7 +133,7 @@ class TestConfig(TestCase):
@mock.patch.dict("os.environ", clear=True)
def test_mode_auto_falls_back_to_gpart(self):
with mock.patch.object(
- util, 'subp',
+ subp, 'subp',
return_value=("", HELP_GPART)) as mockobj:
ret = cc_growpart.resizer_factory(mode="auto")
self.assertIsInstance(ret, cc_growpart.ResizeGpart)
diff --git a/tests/unittests/test_handler/test_handler_landscape.py b/tests/unittests/test_handler/test_handler_landscape.py
index db92a7e2..7d165687 100644
--- a/tests/unittests/test_handler/test_handler_landscape.py
+++ b/tests/unittests/test_handler/test_handler_landscape.py
@@ -49,8 +49,8 @@ class TestLandscape(FilesystemMockingTestCase):
"'landscape' key existed in config, but not a dict",
str(context_manager.exception))
- @mock.patch('cloudinit.config.cc_landscape.util')
- def test_handler_restarts_landscape_client(self, m_util):
+ @mock.patch('cloudinit.config.cc_landscape.subp')
+ def test_handler_restarts_landscape_client(self, m_subp):
"""handler restarts lansdscape-client after install."""
mycloud = self._get_cloud('ubuntu')
cfg = {'landscape': {'client': {}}}
@@ -60,7 +60,7 @@ class TestLandscape(FilesystemMockingTestCase):
cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
self.assertEqual(
[mock.call(['service', 'landscape-client', 'restart'])],
- m_util.subp.call_args_list)
+ m_subp.subp.call_args_list)
def test_handler_installs_client_and_creates_config_file(self):
"""Write landscape client.conf and install landscape-client."""
diff --git a/tests/unittests/test_handler/test_handler_locale.py b/tests/unittests/test_handler/test_handler_locale.py
index 2b22559f..47e7d804 100644
--- a/tests/unittests/test_handler/test_handler_locale.py
+++ b/tests/unittests/test_handler/test_handler_locale.py
@@ -29,8 +29,6 @@ LOG = logging.getLogger(__name__)
class TestLocale(t_help.FilesystemMockingTestCase):
- with_logs = True
-
def setUp(self):
super(TestLocale, self).setUp()
self.new_root = tempfile.mkdtemp()
@@ -86,7 +84,7 @@ class TestLocale(t_help.FilesystemMockingTestCase):
util.write_file(locale_conf, 'LANG="en_US.UTF-8"\n')
cfg = {'locale': 'C.UTF-8'}
cc = self._get_cloud('ubuntu')
- with mock.patch('cloudinit.distros.debian.util.subp') as m_subp:
+ with mock.patch('cloudinit.distros.debian.subp.subp') as m_subp:
with mock.patch('cloudinit.distros.debian.LOCALE_CONF_FN',
locale_conf):
cc_locale.handle('cc_locale', cfg, cc, LOG, [])
diff --git a/tests/unittests/test_handler/test_handler_lxd.py b/tests/unittests/test_handler/test_handler_lxd.py
index 40b521e5..21011204 100644
--- a/tests/unittests/test_handler/test_handler_lxd.py
+++ b/tests/unittests/test_handler/test_handler_lxd.py
@@ -31,13 +31,13 @@ class TestLxd(t_help.CiTestCase):
return cc
@mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.util")
- def test_lxd_init(self, mock_util, m_maybe_clean):
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_lxd_init(self, mock_subp, m_maybe_clean):
cc = self._get_cloud('ubuntu')
- mock_util.which.return_value = True
+ mock_subp.which.return_value = True
m_maybe_clean.return_value = None
cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
- self.assertTrue(mock_util.which.called)
+ self.assertTrue(mock_subp.which.called)
# no bridge config, so maybe_cleanup should not be called.
self.assertFalse(m_maybe_clean.called)
self.assertEqual(
@@ -45,14 +45,14 @@ class TestLxd(t_help.CiTestCase):
mock.call(
['lxd', 'init', '--auto', '--network-address=0.0.0.0',
'--storage-backend=zfs', '--storage-pool=poolname'])],
- mock_util.subp.call_args_list)
+ mock_subp.subp.call_args_list)
@mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.util")
- def test_lxd_install(self, mock_util, m_maybe_clean):
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_lxd_install(self, mock_subp, m_maybe_clean):
cc = self._get_cloud('ubuntu')
cc.distro = mock.MagicMock()
- mock_util.which.return_value = None
+ mock_subp.which.return_value = None
cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
self.assertNotIn('WARN', self.logs.getvalue())
self.assertTrue(cc.distro.install_packages.called)
@@ -62,23 +62,23 @@ class TestLxd(t_help.CiTestCase):
self.assertEqual(sorted(install_pkg), ['lxd', 'zfsutils-linux'])
@mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.util")
- def test_no_init_does_nothing(self, mock_util, m_maybe_clean):
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_no_init_does_nothing(self, mock_subp, m_maybe_clean):
cc = self._get_cloud('ubuntu')
cc.distro = mock.MagicMock()
cc_lxd.handle('cc_lxd', {'lxd': {}}, cc, self.logger, [])
self.assertFalse(cc.distro.install_packages.called)
- self.assertFalse(mock_util.subp.called)
+ self.assertFalse(mock_subp.subp.called)
self.assertFalse(m_maybe_clean.called)
@mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.util")
- def test_no_lxd_does_nothing(self, mock_util, m_maybe_clean):
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_no_lxd_does_nothing(self, mock_subp, m_maybe_clean):
cc = self._get_cloud('ubuntu')
cc.distro = mock.MagicMock()
cc_lxd.handle('cc_lxd', {'package_update': True}, cc, self.logger, [])
self.assertFalse(cc.distro.install_packages.called)
- self.assertFalse(mock_util.subp.called)
+ self.assertFalse(mock_subp.subp.called)
self.assertFalse(m_maybe_clean.called)
def test_lxd_debconf_new_full(self):
diff --git a/tests/unittests/test_handler/test_handler_mcollective.py b/tests/unittests/test_handler/test_handler_mcollective.py
index c013a538..6891e15f 100644
--- a/tests/unittests/test_handler/test_handler_mcollective.py
+++ b/tests/unittests/test_handler/test_handler_mcollective.py
@@ -136,8 +136,9 @@ class TestHandler(t_help.TestCase):
cc = cloud.Cloud(ds, paths, {}, d, None)
return cc
+ @t_help.mock.patch("cloudinit.config.cc_mcollective.subp")
@t_help.mock.patch("cloudinit.config.cc_mcollective.util")
- def test_mcollective_install(self, mock_util):
+ def test_mcollective_install(self, mock_util, mock_subp):
cc = self._get_cloud('ubuntu')
cc.distro = t_help.mock.MagicMock()
mock_util.load_file.return_value = b""
@@ -147,8 +148,8 @@ class TestHandler(t_help.TestCase):
install_pkg = cc.distro.install_packages.call_args_list[0][0][0]
self.assertEqual(install_pkg, ('mcollective',))
- self.assertTrue(mock_util.subp.called)
- self.assertEqual(mock_util.subp.call_args_list[0][0][0],
+ self.assertTrue(mock_subp.subp.called)
+ self.assertEqual(mock_subp.subp.call_args_list[0][0][0],
['service', 'mcollective', 'restart'])
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_mounts.py b/tests/unittests/test_handler/test_handler_mounts.py
index 05ac183e..e87069f6 100644
--- a/tests/unittests/test_handler/test_handler_mounts.py
+++ b/tests/unittests/test_handler/test_handler_mounts.py
@@ -127,6 +127,119 @@ class TestSanitizeDevname(test_helpers.FilesystemMockingTestCase):
cc_mounts.sanitize_devname(
'ephemeral0.1', lambda x: disk_path, mock.Mock()))
+ def test_network_device_returns_network_device(self):
+ disk_path = 'netdevice:/path'
+ self.assertEqual(
+ disk_path,
+ cc_mounts.sanitize_devname(disk_path, None, mock.Mock()))
+
+
+class TestSwapFileCreation(test_helpers.FilesystemMockingTestCase):
+
+ def setUp(self):
+ super(TestSwapFileCreation, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.patchOS(self.new_root)
+
+ self.fstab_path = os.path.join(self.new_root, 'etc/fstab')
+ self.swap_path = os.path.join(self.new_root, 'swap.img')
+ self._makedirs('/etc')
+
+ self.add_patch('cloudinit.config.cc_mounts.FSTAB_PATH',
+ 'mock_fstab_path',
+ self.fstab_path,
+ autospec=False)
+
+ self.add_patch('cloudinit.config.cc_mounts.subp.subp',
+ 'm_subp_subp')
+
+ self.add_patch('cloudinit.config.cc_mounts.util.mounts',
+ 'mock_util_mounts',
+ return_value={
+ '/dev/sda1': {'fstype': 'ext4',
+ 'mountpoint': '/',
+ 'opts': 'rw,relatime,discard'
+ }})
+
+ self.mock_cloud = mock.Mock()
+ self.mock_log = mock.Mock()
+ self.mock_cloud.device_name_to_device = self.device_name_to_device
+
+ self.cc = {
+ 'swap': {
+ 'filename': self.swap_path,
+ 'size': '512',
+ 'maxsize': '512'}}
+
+ def _makedirs(self, directory):
+ directory = os.path.join(self.new_root, directory.lstrip('/'))
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+ def device_name_to_device(self, path):
+ if path == 'swap':
+ return self.swap_path
+ else:
+ dev = None
+
+ return dev
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_fallocate_on_xfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (4, 20)
+ m_get_mount_info.return_value = ["", "xfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_xfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (3, 18)
+ m_get_mount_info.return_value = ["", "xfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['dd', 'if=/dev/zero',
+ 'of=' + self.swap_path,
+ 'bs=1M', 'count=0'], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_btrfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (4, 20)
+ m_get_mount_info.return_value = ["", "btrfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['dd', 'if=/dev/zero',
+ 'of=' + self.swap_path,
+ 'bs=1M', 'count=0'], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_ext4(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (5, 14)
+ m_get_mount_info.return_value = ["", "ext4"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
@@ -149,8 +262,8 @@ class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
'mock_is_block_device',
return_value=True)
- self.add_patch('cloudinit.config.cc_mounts.util.subp',
- 'm_util_subp')
+ self.add_patch('cloudinit.config.cc_mounts.subp.subp',
+ 'm_subp_subp')
self.add_patch('cloudinit.config.cc_mounts.util.mounts',
'mock_util_mounts',
@@ -177,6 +290,18 @@ class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
return dev
+ def test_no_fstab(self):
+ """ Handle images which do not include an fstab. """
+ self.assertFalse(os.path.exists(cc_mounts.FSTAB_PATH))
+ fstab_expected_content = (
+ '%s\tnone\tswap\tsw,comment=cloudconfig\t'
+ '0\t0\n' % (self.swap_path,)
+ )
+ cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
+ with open(cc_mounts.FSTAB_PATH, 'r') as fd:
+ fstab_new_content = fd.read()
+ self.assertEqual(fstab_expected_content, fstab_new_content)
+
def test_swap_integrity(self):
'''Ensure that the swap file is correctly created and can
swapon successfully. Fixing the corner case of:
@@ -254,15 +379,18 @@ class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
'/dev/vdb /mnt auto defaults,noexec,comment=cloudconfig 0 2\n'
)
fstab_expected_content = fstab_original_content
- cc = {'mounts': [
- ['/dev/vdb', '/mnt', 'auto', 'defaults,noexec']]}
+ cc = {
+ 'mounts': [
+ ['/dev/vdb', '/mnt', 'auto', 'defaults,noexec']
+ ]
+ }
with open(cc_mounts.FSTAB_PATH, 'w') as fd:
fd.write(fstab_original_content)
with open(cc_mounts.FSTAB_PATH, 'r') as fd:
fstab_new_content = fd.read()
self.assertEqual(fstab_expected_content, fstab_new_content)
cc_mounts.handle(None, cc, self.mock_cloud, self.mock_log, [])
- self.m_util_subp.assert_has_calls([
+ self.m_subp_subp.assert_has_calls([
mock.call(['mount', '-a']),
mock.call(['systemctl', 'daemon-reload'])])
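Editor's note: the three swap tests above pin down which command cc_mounts uses to create the swap file on each filesystem. A minimal sketch of that decision follows, assuming the 4.18 kernel threshold implied by the test values (xfs on 3.18 falls back to dd, xfs on 4.20 uses fallocate) and a size already computed in megabytes; it is illustrative only, not the module's actual code.

def pick_swap_create_cmd(fstype, kernel_version, swap_path, size_mb):
    # btrfs, and xfs on older kernels, cannot hold fallocate'd swap files,
    # so those cases are written out with dd instead.
    if fstype == 'btrfs' or (fstype == 'xfs' and kernel_version < (4, 18)):
        return ['dd', 'if=/dev/zero', 'of=' + swap_path,
                'bs=1M', 'count=%d' % size_mb]
    return ['fallocate', '-l', '%dM' % size_mb, swap_path]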
diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py
index 463d892a..6b9c8377 100644
--- a/tests/unittests/test_handler/test_handler_ntp.py
+++ b/tests/unittests/test_handler/test_handler_ntp.py
@@ -83,50 +83,50 @@ class TestNtp(FilesystemMockingTestCase):
ntpconfig['template_name'] = os.path.basename(confpath)
return ntpconfig
- @mock.patch("cloudinit.config.cc_ntp.util")
- def test_ntp_install(self, mock_util):
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install(self, mock_subp):
"""ntp_install_client runs install_func when check_exe is absent."""
- mock_util.which.return_value = None # check_exe not found.
+ mock_subp.which.return_value = None # check_exe not found.
install_func = mock.MagicMock()
cc_ntp.install_ntp_client(install_func,
packages=['ntpx'], check_exe='ntpdx')
- mock_util.which.assert_called_with('ntpdx')
+ mock_subp.which.assert_called_with('ntpdx')
install_func.assert_called_once_with(['ntpx'])
- @mock.patch("cloudinit.config.cc_ntp.util")
- def test_ntp_install_not_needed(self, mock_util):
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install_not_needed(self, mock_subp):
"""ntp_install_client doesn't install when check_exe is found."""
client = 'chrony'
- mock_util.which.return_value = [client] # check_exe found.
+ mock_subp.which.return_value = [client] # check_exe found.
install_func = mock.MagicMock()
cc_ntp.install_ntp_client(install_func, packages=[client],
check_exe=client)
install_func.assert_not_called()
- @mock.patch("cloudinit.config.cc_ntp.util")
- def test_ntp_install_no_op_with_empty_pkg_list(self, mock_util):
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install_no_op_with_empty_pkg_list(self, mock_subp):
"""ntp_install_client runs install_func with empty list"""
- mock_util.which.return_value = None # check_exe not found
+ mock_subp.which.return_value = None # check_exe not found
install_func = mock.MagicMock()
cc_ntp.install_ntp_client(install_func, packages=[],
check_exe='timesyncd')
install_func.assert_called_once_with([])
- @mock.patch("cloudinit.config.cc_ntp.util")
- def test_reload_ntp_defaults(self, mock_util):
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_reload_ntp_defaults(self, mock_subp):
"""Test service is restarted/reloaded (defaults)"""
service = 'ntp_service_name'
cmd = ['service', service, 'restart']
cc_ntp.reload_ntp(service)
- mock_util.subp.assert_called_with(cmd, capture=True)
+ mock_subp.subp.assert_called_with(cmd, capture=True)
- @mock.patch("cloudinit.config.cc_ntp.util")
- def test_reload_ntp_systemd(self, mock_util):
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_reload_ntp_systemd(self, mock_subp):
"""Test service is restarted/reloaded (systemd)"""
service = 'ntp_service_name'
cc_ntp.reload_ntp(service, systemd=True)
cmd = ['systemctl', 'reload-or-restart', service]
- mock_util.subp.assert_called_with(cmd, capture=True)
+ mock_subp.subp.assert_called_with(cmd, capture=True)
def test_ntp_rename_ntp_conf(self):
"""When NTP_CONF exists, rename_ntp moves it."""
@@ -239,6 +239,35 @@ class TestNtp(FilesystemMockingTestCase):
self.assertEqual(delta[distro][client][key],
result[client][key])
+ def _get_expected_pools(self, pools, distro, client):
+ if client in ['ntp', 'chrony']:
+ if client == 'ntp' and distro == 'alpine':
+ # NTP for Alpine Linux is Busybox's ntp which does not
+ # support 'pool' lines in its configuration file.
+ expected_pools = []
+ else:
+ expected_pools = [
+ 'pool {0} iburst'.format(pool) for pool in pools]
+ elif client == 'systemd-timesyncd':
+ expected_pools = " ".join(pools)
+
+ return expected_pools
+
+ def _get_expected_servers(self, servers, distro, client):
+ if client in ['ntp', 'chrony']:
+ if client == 'ntp' and distro == 'alpine':
+ # NTP for Alpine Linux is Busybox's ntp which only supports
+ # 'server' lines without iburst option.
+ expected_servers = [
+ 'server {0}'.format(srv) for srv in servers]
+ else:
+ expected_servers = [
+ 'server {0} iburst'.format(srv) for srv in servers]
+ elif client == 'systemd-timesyncd':
+ expected_servers = " ".join(servers)
+
+ return expected_servers
+
def test_ntp_handler_real_distro_ntp_templates(self):
"""Test ntp handler renders the shipped distro ntp client templates."""
pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org']
@@ -269,27 +298,35 @@ class TestNtp(FilesystemMockingTestCase):
content = util.load_file(confpath)
if client in ['ntp', 'chrony']:
content_lines = content.splitlines()
- expected_servers = [
- 'server {0} iburst'.format(srv) for srv in servers]
+ expected_servers = self._get_expected_servers(servers,
+ distro,
+ client)
print('distro=%s client=%s' % (distro, client))
for sline in expected_servers:
self.assertIn(sline, content_lines,
('failed to render {0} conf'
' for distro:{1}'.format(client,
distro)))
- expected_pools = [
- 'pool {0} iburst'.format(pool) for pool in pools]
- for pline in expected_pools:
- self.assertIn(pline, content_lines,
- ('failed to render {0} conf'
- ' for distro:{1}'.format(client,
- distro)))
+ expected_pools = self._get_expected_pools(pools, distro,
+ client)
+ if expected_pools != []:
+ for pline in expected_pools:
+ self.assertIn(pline, content_lines,
+ ('failed to render {0} conf'
+ ' for distro:{1}'.format(client,
+ distro)))
elif client == 'systemd-timesyncd':
+ expected_servers = self._get_expected_servers(servers,
+ distro,
+ client)
+ expected_pools = self._get_expected_pools(pools,
+ distro,
+ client)
expected_content = (
"# cloud-init generated file\n" +
"# See timesyncd.conf(5) for details.\n\n" +
- "[Time]\nNTP=%s %s \n" % (" ".join(servers),
- " ".join(pools)))
+ "[Time]\nNTP=%s %s \n" % (expected_servers,
+ expected_pools))
self.assertEqual(expected_content, content)
def test_no_ntpcfg_does_nothing(self):
@@ -312,10 +349,20 @@ class TestNtp(FilesystemMockingTestCase):
confpath = ntpconfig['confpath']
m_select.return_value = ntpconfig
cc_ntp.handle('cc_ntp', valid_empty_config, mycloud, None, [])
- pools = cc_ntp.generate_server_names(mycloud.distro.name)
- self.assertEqual(
- "servers []\npools {0}\n".format(pools),
- util.load_file(confpath))
+ if distro == 'alpine':
+ # _mock_ntp_client_config call above did not specify a
+ # client value and so it defaults to "ntp" which on
+ # Alpine Linux only supports servers and not pools.
+
+ servers = cc_ntp.generate_server_names(mycloud.distro.name)
+ self.assertEqual(
+ "servers {0}\npools []\n".format(servers),
+ util.load_file(confpath))
+ else:
+ pools = cc_ntp.generate_server_names(mycloud.distro.name)
+ self.assertEqual(
+ "servers []\npools {0}\n".format(pools),
+ util.load_file(confpath))
self.assertNotIn('Invalid config:', self.logs.getvalue())
@skipUnlessJsonSchema()
@@ -374,18 +421,19 @@ class TestNtp(FilesystemMockingTestCase):
invalid_config = {
'ntp': {'invalidkey': 1, 'pools': ['0.mycompany.pool.ntp.org']}}
for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
- self.assertIn(
- "Invalid config:\nntp: Additional properties are not allowed "
- "('invalidkey' was unexpected)",
- self.logs.getvalue())
- self.assertEqual(
- "servers []\npools ['0.mycompany.pool.ntp.org']\n",
- util.load_file(confpath))
+ if distro != 'alpine':
+ mycloud = self._get_cloud(distro)
+ ntpconfig = self._mock_ntp_client_config(distro=distro)
+ confpath = ntpconfig['confpath']
+ m_select.return_value = ntpconfig
+ cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
+ self.assertIn(
+ "Invalid config:\nntp: Additional properties are not "
+ "allowed ('invalidkey' was unexpected)",
+ self.logs.getvalue())
+ self.assertEqual(
+ "servers []\npools ['0.mycompany.pool.ntp.org']\n",
+ util.load_file(confpath))
@skipUnlessJsonSchema()
@mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
@@ -440,9 +488,10 @@ class TestNtp(FilesystemMockingTestCase):
cc_ntp.handle('notimportant', cfg, mycloud, None, None)
self.assertEqual(0, m_select.call_count)
+ @mock.patch("cloudinit.config.cc_ntp.subp")
@mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
@mock.patch("cloudinit.distros.Distro.uses_systemd")
- def test_ntp_the_whole_package(self, m_sysd, m_select):
+ def test_ntp_the_whole_package(self, m_sysd, m_select, m_subp):
"""Test enabled config renders template, and restarts service """
cfg = {'ntp': {'enabled': True}}
for distro in cc_ntp.distros:
@@ -451,24 +500,35 @@ class TestNtp(FilesystemMockingTestCase):
confpath = ntpconfig['confpath']
service_name = ntpconfig['service_name']
m_select.return_value = ntpconfig
- pools = cc_ntp.generate_server_names(mycloud.distro.name)
- # force uses systemd path
- m_sysd.return_value = True
+
+ hosts = cc_ntp.generate_server_names(mycloud.distro.name)
+ uses_systemd = True
+ expected_service_call = ['systemctl', 'reload-or-restart',
+ service_name]
+ expected_content = "servers []\npools {0}\n".format(hosts)
+
+ if distro == 'alpine':
+ uses_systemd = False
+ expected_service_call = ['service', service_name, 'restart']
+ # _mock_ntp_client_config call above did not specify a client
+ # value and so it defaults to "ntp" which on Alpine Linux only
+ # supports servers and not pools.
+ expected_content = "servers {0}\npools []\n".format(hosts)
+
+ m_sysd.return_value = uses_systemd
with mock.patch('cloudinit.config.cc_ntp.util') as m_util:
# allow use of util.mergemanydict
m_util.mergemanydict.side_effect = util.mergemanydict
# default client is present
- m_util.which.return_value = True
+ m_subp.which.return_value = True
# use the config 'enabled' value
m_util.is_false.return_value = util.is_false(
cfg['ntp']['enabled'])
cc_ntp.handle('notimportant', cfg, mycloud, None, None)
- m_util.subp.assert_called_with(
- ['systemctl', 'reload-or-restart',
- service_name], capture=True)
- self.assertEqual(
- "servers []\npools {0}\n".format(pools),
- util.load_file(confpath))
+ m_subp.subp.assert_called_with(
+ expected_service_call, capture=True)
+
+ self.assertEqual(expected_content, util.load_file(confpath))
def test_opensuse_picks_chrony(self):
"""Test opensuse picks chrony or ntp on certain distro versions"""
@@ -503,7 +563,7 @@ class TestNtp(FilesystemMockingTestCase):
expected_client = mycloud.distro.preferred_ntp_clients[0]
self.assertEqual('ntp', expected_client)
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_snappy_system_picks_timesyncd(self, m_which):
"""Test snappy systems prefer installed clients"""
@@ -528,7 +588,7 @@ class TestNtp(FilesystemMockingTestCase):
self.assertEqual(sorted(expected_cfg), sorted(cfg))
self.assertEqual(sorted(expected_cfg), sorted(result))
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_ntp_distro_searches_all_preferred_clients(self, m_which):
"""Test select_ntp_client search all distro perferred clients """
# nothing is installed
@@ -546,7 +606,7 @@ class TestNtp(FilesystemMockingTestCase):
m_which.assert_has_calls(expected_calls)
self.assertEqual(sorted(expected_cfg), sorted(cfg))
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_user_cfg_ntp_client_auto_uses_distro_clients(self, m_which):
"""Test user_cfg.ntp_client='auto' defaults to distro search"""
# nothing is installed
@@ -566,7 +626,7 @@ class TestNtp(FilesystemMockingTestCase):
@mock.patch('cloudinit.config.cc_ntp.write_ntp_config_template')
@mock.patch('cloudinit.cloud.Cloud.get_template_filename')
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_ntp_custom_client_overrides_installed_clients(self, m_which,
m_tmpfn, m_write):
"""Test user client is installed despite other clients present """
@@ -582,7 +642,7 @@ class TestNtp(FilesystemMockingTestCase):
m_install.assert_called_with([client])
m_which.assert_called_with(client)
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_ntp_system_config_overrides_distro_builtin_clients(self, m_which):
"""Test distro system_config overrides builtin preferred ntp clients"""
system_client = 'chrony'
@@ -597,7 +657,7 @@ class TestNtp(FilesystemMockingTestCase):
self.assertEqual(sorted(expected_cfg), sorted(result))
m_which.assert_has_calls([])
- @mock.patch('cloudinit.config.cc_ntp.util.which')
+ @mock.patch('cloudinit.config.cc_ntp.subp.which')
def test_ntp_user_config_overrides_system_cfg(self, m_which):
"""Test user-data overrides system_config ntp_client"""
system_client = 'chrony'
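Editor's note: the Alpine branches above exercise the non-systemd restart path. A small sketch of the two service commands those assertions expect, assuming reload_ntp keeps the (service, systemd=False) signature used in these tests:

def expected_reload_cmd(service, systemd=False):
    # Mirrors the calls asserted against m_subp.subp above.
    if systemd:
        return ['systemctl', 'reload-or-restart', service]
    return ['service', service, 'restart']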
diff --git a/tests/unittests/test_handler/test_handler_power_state.py b/tests/unittests/test_handler/test_handler_power_state.py
index 0d8d17b9..93b24fdc 100644
--- a/tests/unittests/test_handler/test_handler_power_state.py
+++ b/tests/unittests/test_handler/test_handler_power_state.py
@@ -11,62 +11,63 @@ from cloudinit.tests.helpers import mock
class TestLoadPowerState(t_help.TestCase):
def test_no_config(self):
# completely empty config should mean do nothing
- (cmd, _timeout, _condition) = psc.load_power_state({})
+ (cmd, _timeout, _condition) = psc.load_power_state({}, 'ubuntu')
self.assertIsNone(cmd)
def test_irrelevant_config(self):
# no power_state field in config should return None for cmd
- (cmd, _timeout, _condition) = psc.load_power_state({'foo': 'bar'})
+ (cmd, _timeout, _condition) = psc.load_power_state({'foo': 'bar'},
+ 'ubuntu')
self.assertIsNone(cmd)
def test_invalid_mode(self):
cfg = {'power_state': {'mode': 'gibberish'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg)
+ self.assertRaises(TypeError, psc.load_power_state, cfg, 'ubuntu')
cfg = {'power_state': {'mode': ''}}
- self.assertRaises(TypeError, psc.load_power_state, cfg)
+ self.assertRaises(TypeError, psc.load_power_state, cfg, 'ubuntu')
def test_empty_mode(self):
cfg = {'power_state': {'message': 'goodbye'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg)
+ self.assertRaises(TypeError, psc.load_power_state, cfg, 'ubuntu')
def test_valid_modes(self):
cfg = {'power_state': {}}
for mode in ('halt', 'poweroff', 'reboot'):
cfg['power_state']['mode'] = mode
- check_lps_ret(psc.load_power_state(cfg), mode=mode)
+ check_lps_ret(psc.load_power_state(cfg, 'ubuntu'), mode=mode)
def test_invalid_delay(self):
cfg = {'power_state': {'mode': 'poweroff', 'delay': 'goodbye'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg)
+ self.assertRaises(TypeError, psc.load_power_state, cfg, 'ubuntu')
def test_valid_delay(self):
cfg = {'power_state': {'mode': 'poweroff', 'delay': ''}}
for delay in ("now", "+1", "+30"):
cfg['power_state']['delay'] = delay
- check_lps_ret(psc.load_power_state(cfg))
+ check_lps_ret(psc.load_power_state(cfg, 'ubuntu'))
def test_message_present(self):
cfg = {'power_state': {'mode': 'poweroff', 'message': 'GOODBYE'}}
- ret = psc.load_power_state(cfg)
- check_lps_ret(psc.load_power_state(cfg))
+ ret = psc.load_power_state(cfg, 'ubuntu')
+ check_lps_ret(psc.load_power_state(cfg, 'ubuntu'))
self.assertIn(cfg['power_state']['message'], ret[0])
def test_no_message(self):
# if message is not present, then no argument should be passed for it
cfg = {'power_state': {'mode': 'poweroff'}}
- (cmd, _timeout, _condition) = psc.load_power_state(cfg)
+ (cmd, _timeout, _condition) = psc.load_power_state(cfg, 'ubuntu')
self.assertNotIn("", cmd)
- check_lps_ret(psc.load_power_state(cfg))
+ check_lps_ret(psc.load_power_state(cfg, 'ubuntu'))
self.assertTrue(len(cmd) == 3)
def test_condition_null_raises(self):
cfg = {'power_state': {'mode': 'poweroff', 'condition': None}}
- self.assertRaises(TypeError, psc.load_power_state, cfg)
+ self.assertRaises(TypeError, psc.load_power_state, cfg, 'ubuntu')
def test_condition_default_is_true(self):
cfg = {'power_state': {'mode': 'poweroff'}}
- _cmd, _timeout, cond = psc.load_power_state(cfg)
+ _cmd, _timeout, cond = psc.load_power_state(cfg, 'ubuntu')
self.assertEqual(cond, True)
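Editor's note: every load_power_state call above now takes a distro name as its second argument. A minimal usage sketch, assuming psc is cloudinit.config.cc_power_state_change as imported by this test module:

from cloudinit.config import cc_power_state_change as psc

cfg = {'power_state': {'mode': 'poweroff', 'delay': '+30'}}
cmd, timeout, condition = psc.load_power_state(cfg, 'ubuntu')
assert cmd is not None      # a shutdown command was built for this distro
assert condition is True    # condition defaults to True, as tested above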
diff --git a/tests/unittests/test_handler/test_handler_puppet.py b/tests/unittests/test_handler/test_handler_puppet.py
index 1494177d..62388ac6 100644
--- a/tests/unittests/test_handler/test_handler_puppet.py
+++ b/tests/unittests/test_handler/test_handler_puppet.py
@@ -12,13 +12,11 @@ import textwrap
LOG = logging.getLogger(__name__)
-@mock.patch('cloudinit.config.cc_puppet.util')
+@mock.patch('cloudinit.config.cc_puppet.subp.subp')
@mock.patch('cloudinit.config.cc_puppet.os')
class TestAutostartPuppet(CiTestCase):
- with_logs = True
-
- def test_wb_autostart_puppet_updates_puppet_default(self, m_os, m_util):
+ def test_wb_autostart_puppet_updates_puppet_default(self, m_os, m_subp):
"""Update /etc/default/puppet to autostart if it exists."""
def _fake_exists(path):
@@ -29,9 +27,9 @@ class TestAutostartPuppet(CiTestCase):
self.assertEqual(
[mock.call(['sed', '-i', '-e', 's/^START=.*/START=yes/',
'/etc/default/puppet'], capture=False)],
- m_util.subp.call_args_list)
+ m_subp.call_args_list)
- def test_wb_autostart_pupppet_enables_puppet_systemctl(self, m_os, m_util):
+ def test_wb_autostart_pupppet_enables_puppet_systemctl(self, m_os, m_subp):
"""If systemctl is present, enable puppet via systemctl."""
def _fake_exists(path):
@@ -41,9 +39,9 @@ class TestAutostartPuppet(CiTestCase):
cc_puppet._autostart_puppet(LOG)
expected_calls = [mock.call(
['/bin/systemctl', 'enable', 'puppet.service'], capture=False)]
- self.assertEqual(expected_calls, m_util.subp.call_args_list)
+ self.assertEqual(expected_calls, m_subp.call_args_list)
- def test_wb_autostart_pupppet_enables_puppet_chkconfig(self, m_os, m_util):
+ def test_wb_autostart_pupppet_enables_puppet_chkconfig(self, m_os, m_subp):
"""If chkconfig is present, enable puppet via checkcfg."""
def _fake_exists(path):
@@ -53,7 +51,7 @@ class TestAutostartPuppet(CiTestCase):
cc_puppet._autostart_puppet(LOG)
expected_calls = [mock.call(
['/sbin/chkconfig', 'puppet', 'on'], capture=False)]
- self.assertEqual(expected_calls, m_util.subp.call_args_list)
+ self.assertEqual(expected_calls, m_subp.call_args_list)
@mock.patch('cloudinit.config.cc_puppet._autostart_puppet')
@@ -83,7 +81,7 @@ class TestPuppetHandle(CiTestCase):
"no 'puppet' configuration found", self.logs.getvalue())
self.assertEqual(0, m_auto.call_count)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_puppet_config_starts_puppet_service(self, m_subp, m_auto):
"""Cloud-config 'puppet' configuration starts puppet."""
mycloud = self._get_cloud('ubuntu')
@@ -94,7 +92,7 @@ class TestPuppetHandle(CiTestCase):
[mock.call(['service', 'puppet', 'start'], capture=False)],
m_subp.call_args_list)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_empty_puppet_config_installs_puppet(self, m_subp, m_auto):
"""Cloud-config empty 'puppet' configuration installs latest puppet."""
mycloud = self._get_cloud('ubuntu')
@@ -105,7 +103,7 @@ class TestPuppetHandle(CiTestCase):
[mock.call(('puppet', None))],
mycloud.distro.install_packages.call_args_list)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_puppet_config_installs_puppet_on_true(self, m_subp, _):
"""Cloud-config with 'puppet' key installs when 'install' is True."""
mycloud = self._get_cloud('ubuntu')
@@ -116,7 +114,7 @@ class TestPuppetHandle(CiTestCase):
[mock.call(('puppet', None))],
mycloud.distro.install_packages.call_args_list)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_puppet_config_installs_puppet_version(self, m_subp, _):
"""Cloud-config 'puppet' configuration can specify a version."""
mycloud = self._get_cloud('ubuntu')
@@ -127,7 +125,7 @@ class TestPuppetHandle(CiTestCase):
[mock.call(('puppet', '3.8'))],
mycloud.distro.install_packages.call_args_list)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_puppet_config_updates_puppet_conf(self, m_subp, m_auto):
"""When 'conf' is provided update values in PUPPET_CONF_PATH."""
mycloud = self._get_cloud('ubuntu')
@@ -143,7 +141,7 @@ class TestPuppetHandle(CiTestCase):
expected = '[agent]\nserver = puppetmaster.example.org\nother = 3\n\n'
self.assertEqual(expected, content)
- @mock.patch('cloudinit.config.cc_puppet.util.subp')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
def test_handler_puppet_writes_csr_attributes_file(self, m_subp, m_auto):
"""When csr_attributes is provided
creates file in PUPPET_CSR_ATTRIBUTES_PATH."""
@@ -151,15 +149,20 @@ class TestPuppetHandle(CiTestCase):
mycloud.distro = mock.MagicMock()
cfg = {
'puppet': {
- 'csr_attributes': {
- 'custom_attributes': {
- '1.2.840.113549.1.9.7': '342thbjkt82094y0ut'
- 'hhor289jnqthpc2290'},
- 'extension_requests': {
- 'pp_uuid': 'ED803750-E3C7-44F5-BB08-41A04433FE2E',
- 'pp_image_name': 'my_ami_image',
- 'pp_preshared_key': '342thbjkt82094y0uthhor289jnqthpc2290'}
- }}}
+ 'csr_attributes': {
+ 'custom_attributes': {
+ '1.2.840.113549.1.9.7':
+ '342thbjkt82094y0uthhor289jnqthpc2290'
+ },
+ 'extension_requests': {
+ 'pp_uuid': 'ED803750-E3C7-44F5-BB08-41A04433FE2E',
+ 'pp_image_name': 'my_ami_image',
+ 'pp_preshared_key':
+ '342thbjkt82094y0uthhor289jnqthpc2290'
+ }
+ }
+ }
+ }
csr_attributes = 'cloudinit.config.cc_puppet.' \
'PUPPET_CSR_ATTRIBUTES_PATH'
with mock.patch(csr_attributes, self.csr_attributes_path):
diff --git a/tests/unittests/test_handler/test_handler_runcmd.py b/tests/unittests/test_handler/test_handler_runcmd.py
index 9ce334ac..73237d68 100644
--- a/tests/unittests/test_handler/test_handler_runcmd.py
+++ b/tests/unittests/test_handler/test_handler_runcmd.py
@@ -2,7 +2,7 @@
from cloudinit.config.cc_runcmd import handle, schema
from cloudinit.sources import DataSourceNone
-from cloudinit import (distros, helpers, cloud, util)
+from cloudinit import (distros, helpers, cloud, subp, util)
from cloudinit.tests.helpers import (
CiTestCase, FilesystemMockingTestCase, SchemaTestCaseMixin,
skipUnlessJsonSchema)
@@ -20,7 +20,7 @@ class TestRuncmd(FilesystemMockingTestCase):
def setUp(self):
super(TestRuncmd, self).setUp()
- self.subp = util.subp
+ self.subp = subp.subp
self.new_root = self.tmp_dir()
def _get_cloud(self, distro):
diff --git a/tests/unittests/test_handler/test_handler_seed_random.py b/tests/unittests/test_handler/test_handler_seed_random.py
index abecc53b..85167f19 100644
--- a/tests/unittests/test_handler/test_handler_seed_random.py
+++ b/tests/unittests/test_handler/test_handler_seed_random.py
@@ -17,6 +17,7 @@ from io import BytesIO
from cloudinit import cloud
from cloudinit import distros
from cloudinit import helpers
+from cloudinit import subp
from cloudinit import util
from cloudinit.sources import DataSourceNone
@@ -35,8 +36,8 @@ class TestRandomSeed(t_help.TestCase):
self.unapply = []
# by default 'which' has nothing in its path
- self.apply_patches([(util, 'which', self._which)])
- self.apply_patches([(util, 'subp', self._subp)])
+ self.apply_patches([(subp, 'which', self._which)])
+ self.apply_patches([(subp, 'subp', self._subp)])
self.subp_called = []
self.whichdata = {}
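Editor's note: this file patches the new module's functions directly. A minimal sketch of the mock pattern used throughout this patch now that util.subp/util.which have moved to cloudinit.subp; the (stdout, stderr) return shape and the mkswap call are illustrative assumptions:

from unittest import mock

from cloudinit import subp


def create_swap(path):
    # stand-in for any handler code that shells out via subp.subp
    subp.subp(['mkswap', path], capture=True)


with mock.patch('cloudinit.subp.subp') as m_subp:
    m_subp.return_value = ('', '')   # (stdout, stderr)
    create_swap('/swap.img')
    m_subp.assert_called_with(['mkswap', '/swap.img'], capture=True)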
diff --git a/tests/unittests/test_handler/test_handler_spacewalk.py b/tests/unittests/test_handler/test_handler_spacewalk.py
index 410e6f77..26f7648f 100644
--- a/tests/unittests/test_handler/test_handler_spacewalk.py
+++ b/tests/unittests/test_handler/test_handler_spacewalk.py
@@ -1,7 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
from cloudinit.config import cc_spacewalk
-from cloudinit import util
+from cloudinit import subp
from cloudinit.tests import helpers
@@ -19,20 +19,20 @@ class TestSpacewalk(helpers.TestCase):
}
}
- @mock.patch("cloudinit.config.cc_spacewalk.util.subp")
- def test_not_is_registered(self, mock_util_subp):
- mock_util_subp.side_effect = util.ProcessExecutionError(exit_code=1)
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_not_is_registered(self, mock_subp):
+ mock_subp.side_effect = subp.ProcessExecutionError(exit_code=1)
self.assertFalse(cc_spacewalk.is_registered())
- @mock.patch("cloudinit.config.cc_spacewalk.util.subp")
- def test_is_registered(self, mock_util_subp):
- mock_util_subp.side_effect = None
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_is_registered(self, mock_subp):
+ mock_subp.side_effect = None
self.assertTrue(cc_spacewalk.is_registered())
- @mock.patch("cloudinit.config.cc_spacewalk.util.subp")
- def test_do_register(self, mock_util_subp):
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_do_register(self, mock_subp):
cc_spacewalk.do_register(**self.space_cfg['spacewalk'])
- mock_util_subp.assert_called_with([
+ mock_subp.assert_called_with([
'rhnreg_ks',
'--serverUrl', 'https://localhost/XMLRPC',
'--profilename', 'test',
diff --git a/tests/unittests/test_handler/test_handler_write_files.py b/tests/unittests/test_handler/test_handler_write_files.py
index ed0a4da2..727681d3 100644
--- a/tests/unittests/test_handler/test_handler_write_files.py
+++ b/tests/unittests/test_handler/test_handler_write_files.py
@@ -1,15 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
import base64
+import copy
import gzip
import io
import shutil
import tempfile
+from cloudinit.config.cc_write_files import (
+ handle, decode_perms, write_files)
from cloudinit import log as logging
from cloudinit import util
-from cloudinit.config.cc_write_files import write_files, decode_perms
-from cloudinit.tests.helpers import CiTestCase, FilesystemMockingTestCase
+
+from cloudinit.tests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
LOG = logging.getLogger(__name__)
@@ -36,13 +40,90 @@ YAML_CONTENT_EXPECTED = {
'/tmp/message': "hi mom line 1\nhi mom line 2\n",
}
+VALID_SCHEMA = {
+ 'write_files': [
+ {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
+ 'path': '/some', 'permissions': '0777'}
+ ]
+}
+
+INVALID_SCHEMA = { # Dropped required path key
+ 'write_files': [
+ {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
+ 'permissions': '0777'}
+ ]
+}
+
+
+@skipUnlessJsonSchema()
+@mock.patch('cloudinit.config.cc_write_files.write_files')
+class TestWriteFilesSchema(CiTestCase):
+
+ with_logs = True
+
+ def test_schema_validation_warns_missing_path(self, m_write_files):
+ """The only required file item property is 'path'."""
+ cc = self.tmp_cloud('ubuntu')
+ valid_config = {'write_files': [{'path': '/some/path'}]}
+ handle('cc_write_file', valid_config, cc, LOG, [])
+ self.assertNotIn('Invalid config:', self.logs.getvalue())
+ handle('cc_write_file', INVALID_SCHEMA, cc, LOG, [])
+ self.assertIn('Invalid config:', self.logs.getvalue())
+ self.assertIn("'path' is a required property", self.logs.getvalue())
+
+ def test_schema_validation_warns_non_string_type_for_files(
+ self, m_write_files):
+ """Schema validation warns of non-string values for each file item."""
+ cc = self.tmp_cloud('ubuntu')
+ for key in VALID_SCHEMA['write_files'][0].keys():
+ if key == 'append':
+ key_type = 'boolean'
+ else:
+ key_type = 'string'
+ invalid_config = copy.deepcopy(VALID_SCHEMA)
+ invalid_config['write_files'][0][key] = 1
+ handle('cc_write_file', invalid_config, cc, LOG, [])
+ self.assertIn(
+ mock.call('cc_write_file', invalid_config['write_files']),
+ m_write_files.call_args_list)
+ self.assertIn(
+ 'write_files.0.%s: 1 is not of type \'%s\'' % (key, key_type),
+ self.logs.getvalue())
+ self.assertIn('Invalid config:', self.logs.getvalue())
+
+ def test_schema_validation_warns_on_additional_undefined_propertes(
+ self, m_write_files):
+ """Schema validation warns on additional undefined file properties."""
+ cc = self.tmp_cloud('ubuntu')
+ invalid_config = copy.deepcopy(VALID_SCHEMA)
+ invalid_config['write_files'][0]['bogus'] = 'value'
+ handle('cc_write_file', invalid_config, cc, LOG, [])
+ self.assertIn(
+ "Invalid config:\nwrite_files.0: Additional properties"
+ " are not allowed ('bogus' was unexpected)",
+ self.logs.getvalue())
+
class TestWriteFiles(FilesystemMockingTestCase):
+
+ with_logs = True
+
def setUp(self):
super(TestWriteFiles, self).setUp()
self.tmp = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.tmp)
+ @skipUnlessJsonSchema()
+ def test_handler_schema_validation_warns_non_array_type(self):
+ """Schema validation warns of non-array value."""
+ invalid_config = {'write_files': 1}
+ cc = self.tmp_cloud('ubuntu')
+ with self.assertRaises(TypeError):
+ handle('cc_write_file', invalid_config, cc, LOG, [])
+ self.assertIn(
+ 'Invalid config:\nwrite_files: 1 is not of type \'array\'',
+ self.logs.getvalue())
+
def test_simple(self):
self.patchUtils(self.tmp)
expected = "hello world\n"
diff --git a/tests/unittests/test_handler/test_handler_yum_add_repo.py b/tests/unittests/test_handler/test_handler_yum_add_repo.py
index 0675bd8f..7c61bbf9 100644
--- a/tests/unittests/test_handler/test_handler_yum_add_repo.py
+++ b/tests/unittests/test_handler/test_handler_yum_add_repo.py
@@ -1,14 +1,13 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from cloudinit.config import cc_yum_add_repo
-from cloudinit import util
-
-from cloudinit.tests import helpers
-
+import configparser
import logging
import shutil
import tempfile
-from io import StringIO
+
+from cloudinit import util
+from cloudinit.config import cc_yum_add_repo
+from cloudinit.tests import helpers
LOG = logging.getLogger(__name__)
@@ -54,7 +53,8 @@ class TestConfig(helpers.FilesystemMockingTestCase):
self.patchUtils(self.tmp)
cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
contents = util.load_file("/etc/yum.repos.d/epel_testing.repo")
- parser = self.parse_and_read(StringIO(contents))
+ parser = configparser.ConfigParser()
+ parser.read_string(contents)
expected = {
'epel_testing': {
'name': 'Extra Packages for Enterprise Linux 5 - Testing',
@@ -90,7 +90,8 @@ class TestConfig(helpers.FilesystemMockingTestCase):
self.patchUtils(self.tmp)
cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
contents = util.load_file("/etc/yum.repos.d/puppetlabs_products.repo")
- parser = self.parse_and_read(StringIO(contents))
+ parser = configparser.ConfigParser()
+ parser.read_string(contents)
expected = {
'puppetlabs_products': {
'name': 'Puppet Labs Products El 6 - $basearch',
diff --git a/tests/unittests/test_handler/test_handler_zypper_add_repo.py b/tests/unittests/test_handler/test_handler_zypper_add_repo.py
index 9685ff28..0fb1de1a 100644
--- a/tests/unittests/test_handler/test_handler_zypper_add_repo.py
+++ b/tests/unittests/test_handler/test_handler_zypper_add_repo.py
@@ -1,17 +1,15 @@
# This file is part of cloud-init. See LICENSE file for license information.
+import configparser
import glob
+import logging
import os
-from io import StringIO
-from cloudinit.config import cc_zypper_add_repo
from cloudinit import util
-
+from cloudinit.config import cc_zypper_add_repo
from cloudinit.tests import helpers
from cloudinit.tests.helpers import mock
-import logging
-
LOG = logging.getLogger(__name__)
@@ -66,7 +64,8 @@ class TestConfig(helpers.FilesystemMockingTestCase):
root_d = self.tmp_dir()
cc_zypper_add_repo._write_repos(cfg['repos'], root_d)
contents = util.load_file("%s/testing-foo.repo" % root_d)
- parser = self.parse_and_read(StringIO(contents))
+ parser = configparser.ConfigParser()
+ parser.read_string(contents)
expected = {
'testing-foo': {
'name': 'test-foo',
diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py
index 987a89c9..44292571 100644
--- a/tests/unittests/test_handler/test_schema.py
+++ b/tests/unittests/test_handler/test_schema.py
@@ -1,5 +1,5 @@
# This file is part of cloud-init. See LICENSE file for license information.
-
+import cloudinit
from cloudinit.config.schema import (
CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file,
get_schema_doc, get_schema, validate_cloudconfig_file,
@@ -10,7 +10,9 @@ from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema
from copy import copy
import os
+import pytest
from io import StringIO
+from pathlib import Path
from textwrap import dedent
from yaml import safe_load
@@ -20,16 +22,21 @@ class GetSchemaTest(CiTestCase):
def test_get_schema_coalesces_known_schema(self):
"""Every cloudconfig module with schema is listed in allOf keyword."""
schema = get_schema()
- self.assertItemsEqual(
+ self.assertCountEqual(
[
+ 'cc_apk_configure',
+ 'cc_apt_configure',
'cc_bootcmd',
+ 'cc_locale',
'cc_ntp',
'cc_resizefs',
'cc_runcmd',
'cc_snap',
'cc_ubuntu_advantage',
'cc_ubuntu_drivers',
- 'cc_zypper_add_repo'
+ 'cc_write_files',
+ 'cc_zypper_add_repo',
+ 'cc_chef'
],
[subschema['id'] for subschema in schema['allOf']])
self.assertEqual('cloud-config-schema', schema['id'])
@@ -38,7 +45,7 @@ class GetSchemaTest(CiTestCase):
schema['$schema'])
# FULL_SCHEMA is updated by the get_schema call
from cloudinit.config.schema import FULL_SCHEMA
- self.assertItemsEqual(['id', '$schema', 'allOf'], FULL_SCHEMA.keys())
+ self.assertCountEqual(['id', '$schema', 'allOf'], FULL_SCHEMA.keys())
def test_get_schema_returns_global_when_set(self):
"""When FULL_SCHEMA global is already set, get_schema returns it."""
@@ -110,6 +117,23 @@ class ValidateCloudConfigSchemaTest(CiTestCase):
str(context_mgr.exception))
+class TestCloudConfigExamples:
+ schema = get_schema()
+ params = [
+ (schema["id"], example)
+ for schema in schema["allOf"] for example in schema["examples"]]
+
+ @pytest.mark.parametrize("schema_id,example", params)
+ @skipUnlessJsonSchema()
+ def test_validateconfig_schema_of_example(self, schema_id, example):
+ """ For a given example in a config module we test if it is valid
+ according to the unified schema of all config modules
+ """
+ config_load = safe_load(example)
+ validate_cloudconfig_schema(
+ config_load, self.schema, strict=True)
+
+
class ValidateCloudConfigFileTest(CiTestCase):
"""Tests for validate_cloudconfig_file."""
@@ -268,6 +292,41 @@ class GetSchemaDocTest(CiTestCase):
"""),
get_schema_doc(full_schema))
+ def test_get_schema_doc_properly_parse_description(self):
+ """get_schema_doc description properly formatted"""
+ full_schema = copy(self.required_schema)
+ full_schema.update(
+ {'properties': {
+ 'p1': {
+ 'type': 'string',
+ 'description': dedent("""\
+ This item
+ has the
+ following options:
+
+ - option1
+ - option2
+ - option3
+
+ The default value is
+ option1""")
+ }
+ }}
+ )
+
+ self.assertIn(
+ dedent("""
+ **Config schema**:
+ **p1:** (string) This item has the following options:
+
+ - option1
+ - option2
+ - option3
+
+ The default value is option1
+ """),
+ get_schema_doc(full_schema))
+
def test_get_schema_doc_raises_key_errors(self):
"""get_schema_doc raises KeyErrors on missing keys."""
for key in self.required_schema:
@@ -345,34 +404,30 @@ class MainTest(CiTestCase):
def test_main_missing_args(self):
"""Main exits non-zero and reports an error on missing parameters."""
- with mock.patch('sys.exit', side_effect=self.sys_exit):
- with mock.patch('sys.argv', ['mycmd']):
- with mock.patch('sys.stderr', new_callable=StringIO) as \
- m_stderr:
- with self.assertRaises(SystemExit) as context_manager:
- main()
+ with mock.patch('sys.argv', ['mycmd']):
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
+ with self.assertRaises(SystemExit) as context_manager:
+ main()
self.assertEqual(1, context_manager.exception.code)
self.assertEqual(
- 'Expected either --config-file argument or --doc\n',
+ 'Expected either --config-file argument or --docs\n',
m_stderr.getvalue())
def test_main_absent_config_file(self):
"""Main exits non-zero when config file is absent."""
myargs = ['mycmd', '--annotate', '--config-file', 'NOT_A_FILE']
- with mock.patch('sys.exit', side_effect=self.sys_exit):
- with mock.patch('sys.argv', myargs):
- with mock.patch('sys.stderr', new_callable=StringIO) as \
- m_stderr:
- with self.assertRaises(SystemExit) as context_manager:
- main()
+ with mock.patch('sys.argv', myargs):
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
+ with self.assertRaises(SystemExit) as context_manager:
+ main()
self.assertEqual(1, context_manager.exception.code)
self.assertEqual(
'Configfile NOT_A_FILE does not exist\n',
m_stderr.getvalue())
def test_main_prints_docs(self):
- """When --doc parameter is provided, main generates documentation."""
- myargs = ['mycmd', '--doc']
+ """When --docs parameter is provided, main generates documentation."""
+ myargs = ['mycmd', '--docs', 'all']
with mock.patch('sys.argv', myargs):
with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
self.assertEqual(0, main(), 'Expected 0 exit code')
@@ -430,4 +485,23 @@ class CloudTestsIntegrationTest(CiTestCase):
if errors:
raise AssertionError(', '.join(errors))
+
+def _get_schema_doc_examples():
+ examples_dir = Path(
+ cloudinit.__file__).parent.parent / 'doc' / 'examples'
+ assert examples_dir.is_dir()
+
+ all_text_files = (f for f in examples_dir.glob('cloud-config*.txt')
+ if not f.name.startswith('cloud-config-archive'))
+ return all_text_files
+
+
+class TestSchemaDocExamples:
+ schema = get_schema()
+
+ @pytest.mark.parametrize("example_path", _get_schema_doc_examples())
+ @skipUnlessJsonSchema()
+ def test_schema_doc_examples(self, example_path):
+ validate_cloudconfig_file(str(example_path), self.schema)
+
# vi: ts=4 expandtab syntax=python
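Editor's note: TestCloudConfigExamples above runs every documented example through the unified schema. The same check can be made by hand with the helpers imported at the top of this test module; a minimal sketch:

from yaml import safe_load
from cloudinit.config.schema import get_schema, validate_cloudconfig_schema

schema = get_schema()   # aggregates the per-module schemas under 'allOf'
config = safe_load('write_files:\n  - path: /etc/example.conf\n    content: hi\n')
# strict=True raises SchemaValidationError instead of merely logging a warning
validate_cloudconfig_schema(config, schema, strict=True)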
diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py
index bedd05fe..54cc8469 100644
--- a/tests/unittests/test_net.py
+++ b/tests/unittests/test_net.py
@@ -8,6 +8,7 @@ from cloudinit.net import (
renderers, sysconfig)
from cloudinit.sources.helpers import openstack
from cloudinit import temp_utils
+from cloudinit import subp
from cloudinit import util
from cloudinit import safeyaml as yaml
@@ -24,6 +25,7 @@ import re
import textwrap
from yaml.serializer import Serializer
+import pytest
DHCP_CONTENT_1 = """
DEVICE='eth0'
@@ -424,6 +426,11 @@ network:
mtu: 9000
parameters:
gratuitous-arp: 2
+ bond2:
+ interfaces:
+ - ens5
+ macaddress: 68:05:ca:64:d3:6e
+ mtu: 9000
ethernets:
ens3:
dhcp4: false
@@ -435,6 +442,11 @@ network:
dhcp6: false
match:
macaddress: 52:54:00:11:22:ff
+ ens5:
+ dhcp4: false
+ dhcp6: false
+ match:
+ macaddress: 52:54:00:99:11:99
version: 2
"""
@@ -943,7 +955,7 @@ NETWORK_CONFIGS = {
dhcp6: true
""").rstrip(' '),
'expected_sysconfig_opensuse': {
- 'ifcfg-iface0': textwrap.dedent("""\
+ 'ifcfg-iface0': textwrap.dedent("""\
BOOTPROTO=dhcp
DHCLIENT6_MODE=managed
STARTMODE=auto""")
@@ -1027,7 +1039,7 @@ NETWORK_CONFIGS = {
},
'v6_and_v4': {
'expected_sysconfig_opensuse': {
- 'ifcfg-iface0': textwrap.dedent("""\
+ 'ifcfg-iface0': textwrap.dedent("""\
BOOTPROTO=dhcp
DHCLIENT6_MODE=managed
STARTMODE=auto""")
@@ -3191,7 +3203,7 @@ USERCTL=no
def test_check_ifcfg_rh(self):
"""ifcfg-rh plugin is added NetworkManager.conf if conf present."""
render_dir = self.tmp_dir()
- nm_cfg = util.target_path(render_dir, path=self.nm_cfg_file)
+ nm_cfg = subp.target_path(render_dir, path=self.nm_cfg_file)
util.ensure_dir(os.path.dirname(nm_cfg))
# write a template nm.conf, note plugins is a list here
@@ -3214,7 +3226,7 @@ USERCTL=no
"""ifcfg-rh plugin is append when plugins is a string."""
render_dir = self.tmp_path("render")
os.makedirs(render_dir)
- nm_cfg = util.target_path(render_dir, path=self.nm_cfg_file)
+ nm_cfg = subp.target_path(render_dir, path=self.nm_cfg_file)
util.ensure_dir(os.path.dirname(nm_cfg))
# write a template nm.conf, note plugins is a value here
@@ -3239,7 +3251,7 @@ USERCTL=no
"""enable_ifcfg_plugin creates plugins value if missing."""
render_dir = self.tmp_path("render")
os.makedirs(render_dir)
- nm_cfg = util.target_path(render_dir, path=self.nm_cfg_file)
+ nm_cfg = subp.target_path(render_dir, path=self.nm_cfg_file)
util.ensure_dir(os.path.dirname(nm_cfg))
# write a template nm.conf, note plugins is missing
@@ -3331,7 +3343,7 @@ USERCTL=no
USERCTL=no
VLAN=yes
""")
- }
+ }
self._compare_files_to_expected(
expected, self._render_and_read(network_config=v2data))
@@ -3405,7 +3417,7 @@ USERCTL=no
TYPE=Ethernet
USERCTL=no
"""),
- }
+ }
for dhcp_ver in ('dhcp4', 'dhcp6'):
v2data = copy.deepcopy(v2base)
if dhcp_ver == 'dhcp6':
@@ -3919,7 +3931,7 @@ class TestNetplanCleanDefault(CiTestCase):
files = sorted(populate_dir(tmpd, content))
netplan._clean_default(target=tmpd)
found = [t for t in files if os.path.exists(t)]
- expected = [util.target_path(tmpd, f) for f in (astamp, anet, ayaml)]
+ expected = [subp.target_path(tmpd, f) for f in (astamp, anet, ayaml)]
self.assertEqual(sorted(expected), found)
@@ -3932,7 +3944,7 @@ class TestNetplanPostcommands(CiTestCase):
@mock.patch.object(netplan.Renderer, '_netplan_generate')
@mock.patch.object(netplan.Renderer, '_net_setup_link')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_netplan_render_calls_postcmds(self, mock_subp,
mock_netplan_generate,
mock_net_setup_link):
@@ -3946,7 +3958,7 @@ class TestNetplanPostcommands(CiTestCase):
render_target = 'netplan.yaml'
renderer = netplan.Renderer(
{'netplan_path': render_target, 'postcmds': True})
- mock_subp.side_effect = iter([util.ProcessExecutionError])
+ mock_subp.side_effect = iter([subp.ProcessExecutionError])
renderer.render_network_state(ns, target=render_dir)
mock_netplan_generate.assert_called_with(run=True)
@@ -3954,7 +3966,7 @@ class TestNetplanPostcommands(CiTestCase):
@mock.patch('cloudinit.util.SeLinuxGuard')
@mock.patch.object(netplan, "get_devicelist")
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_netplan_postcmds(self, mock_subp, mock_devlist, mock_sel):
mock_sel.__enter__ = mock.Mock(return_value=False)
mock_sel.__exit__ = mock.Mock()
@@ -3970,7 +3982,7 @@ class TestNetplanPostcommands(CiTestCase):
renderer = netplan.Renderer(
{'netplan_path': render_target, 'postcmds': True})
mock_subp.side_effect = iter([
- util.ProcessExecutionError,
+ subp.ProcessExecutionError,
('', ''),
('', ''),
])
@@ -4017,6 +4029,8 @@ class TestEniNetworkStateToEni(CiTestCase):
class TestCmdlineConfigParsing(CiTestCase):
+ with_logs = True
+
simple_cfg = {
'config': [{"type": "physical", "name": "eth0",
"mac_address": "c0:d6:9f:2c:e8:80",
@@ -4066,6 +4080,21 @@ class TestCmdlineConfigParsing(CiTestCase):
found = cmdline.read_kernel_cmdline_config(cmdline=raw_cmdline)
self.assertEqual(found, self.simple_cfg)
+ def test_cmdline_with_net_config_disabled(self):
+ raw_cmdline = 'ro network-config=disabled root=foo'
+ found = cmdline.read_kernel_cmdline_config(cmdline=raw_cmdline)
+ self.assertEqual(found, {'config': 'disabled'})
+
+ def test_cmdline_with_net_config_unencoded_logs_error(self):
+ """network-config cannot be unencoded besides 'disabled'."""
+ raw_cmdline = 'ro network-config={config:disabled} root=foo'
+ found = cmdline.read_kernel_cmdline_config(cmdline=raw_cmdline)
+ self.assertIsNone(found)
+ expected_log = (
+ 'ERROR: Expected base64 encoded kernel commandline parameter'
+ ' network-config. Ignoring network-config={config:disabled}.')
+ self.assertIn(expected_log, self.logs.getvalue())
+
def test_cmdline_with_b64_gz(self):
data = _gzip_data(json.dumps(self.simple_cfg).encode())
encoded_text = base64.b64encode(data).decode()
@@ -4242,7 +4271,7 @@ class TestNetplanRoundTrip(CiTestCase):
def setUp(self):
super(TestNetplanRoundTrip, self).setUp()
- self.add_patch('cloudinit.net.netplan.util.subp', 'm_subp')
+ self.add_patch('cloudinit.net.netplan.subp.subp', 'm_subp')
self.m_subp.return_value = (self.NETPLAN_INFO_OUT, '')
def _render_and_read(self, network_config=None, state=None,
@@ -4654,6 +4683,51 @@ class TestEniRoundTrip(CiTestCase):
files['/etc/network/interfaces'].splitlines())
+class TestRenderersSelect:
+
+ @pytest.mark.parametrize(
+ 'renderer_selected,netplan,eni,nm,scfg,sys', (
+ # -netplan -ifupdown -nm -scfg -sys raises error
+ (net.RendererNotFoundError, False, False, False, False, False),
+ # -netplan +ifupdown -nm -scfg -sys selects eni
+ ('eni', False, True, False, False, False),
+ # +netplan +ifupdown -nm -scfg -sys selects eni
+ ('eni', True, True, False, False, False),
+ # +netplan -ifupdown -nm -scfg -sys selects netplan
+ ('netplan', True, False, False, False, False),
+ # Ubuntu with Network-Manager installed
+ # +netplan -ifupdown +nm -scfg -sys selects netplan
+ ('netplan', True, False, True, False, False),
+ # Centos/OpenSuse with Network-Manager installed selects sysconfig
+ # -netplan -ifupdown +nm -scfg +sys selects sysconfig
+ ('sysconfig', False, False, True, False, True),
+ ),
+ )
+ @mock.patch("cloudinit.net.renderers.netplan.available")
+ @mock.patch("cloudinit.net.renderers.sysconfig.available")
+ @mock.patch("cloudinit.net.renderers.sysconfig.available_sysconfig")
+ @mock.patch("cloudinit.net.renderers.sysconfig.available_nm")
+ @mock.patch("cloudinit.net.renderers.eni.available")
+ def test_valid_renderer_from_defaults_depending_on_availability(
+ self, m_eni_avail, m_nm_avail, m_scfg_avail, m_sys_avail,
+ m_netplan_avail, renderer_selected, netplan, eni, nm, scfg, sys
+ ):
+ """Assert proper renderer per DEFAULT_PRIORITY given availability."""
+ m_eni_avail.return_value = eni # ifupdown pkg presence
+ m_nm_avail.return_value = nm # network-manager presence
+ m_scfg_avail.return_value = scfg # sysconfig presence
+ m_sys_avail.return_value = sys # sysconfig/ifup/down presence
+ m_netplan_avail.return_value = netplan # netplan presence
+ if isinstance(renderer_selected, str):
+ (renderer_name, _rnd_class) = renderers.select(
+ priority=renderers.DEFAULT_PRIORITY
+ )
+ assert renderer_selected == renderer_name
+ else:
+ with pytest.raises(renderer_selected):
+ renderers.select(priority=renderers.DEFAULT_PRIORITY)
+
+
class TestNetRenderers(CiTestCase):
@mock.patch("cloudinit.net.renderers.sysconfig.available")
@mock.patch("cloudinit.net.renderers.eni.available")
@@ -4697,58 +4771,18 @@ class TestNetRenderers(CiTestCase):
self.assertRaises(net.RendererNotFoundError, renderers.select,
priority=['sysconfig', 'eni'])
- @mock.patch("cloudinit.net.renderers.netplan.available")
- @mock.patch("cloudinit.net.renderers.sysconfig.available")
- @mock.patch("cloudinit.net.renderers.sysconfig.available_sysconfig")
- @mock.patch("cloudinit.net.renderers.sysconfig.available_nm")
- @mock.patch("cloudinit.net.renderers.eni.available")
- @mock.patch("cloudinit.net.renderers.sysconfig.util.get_linux_distro")
- def test_sysconfig_selected_on_sysconfig_enabled_distros(self, m_distro,
- m_eni, m_sys_nm,
- m_sys_scfg,
- m_sys_avail,
- m_netplan):
- """sysconfig only selected on specific distros (rhel/sles)."""
-
- # Ubuntu with Network-Manager installed
- m_eni.return_value = False # no ifupdown (ifquery)
- m_sys_scfg.return_value = False # no sysconfig/ifup/ifdown
- m_sys_nm.return_value = True # network-manager is installed
- m_netplan.return_value = True # netplan is installed
- m_sys_avail.return_value = False # no sysconfig on Ubuntu
- m_distro.return_value = ('ubuntu', None, None)
- self.assertEqual('netplan', renderers.select(priority=None)[0])
-
- # Centos with Network-Manager installed
- m_eni.return_value = False # no ifupdown (ifquery)
- m_sys_scfg.return_value = False # no sysconfig/ifup/ifdown
- m_sys_nm.return_value = True # network-manager is installed
- m_netplan.return_value = False # netplan is not installed
- m_sys_avail.return_value = True # sysconfig is available on centos
- m_distro.return_value = ('centos', None, None)
- self.assertEqual('sysconfig', renderers.select(priority=None)[0])
-
- # OpenSuse with Network-Manager installed
- m_eni.return_value = False # no ifupdown (ifquery)
- m_sys_scfg.return_value = False # no sysconfig/ifup/ifdown
- m_sys_nm.return_value = True # network-manager is installed
- m_netplan.return_value = False # netplan is not installed
- m_sys_avail.return_value = True # sysconfig is available on opensuse
- m_distro.return_value = ('opensuse', None, None)
- self.assertEqual('sysconfig', renderers.select(priority=None)[0])
-
@mock.patch("cloudinit.net.sysconfig.available_sysconfig")
@mock.patch("cloudinit.util.get_linux_distro")
def test_sysconfig_available_uses_variant_mapping(self, m_distro, m_avail):
m_avail.return_value = True
distro_values = [
- ('opensuse', '', ''),
- ('opensuse-leap', '', ''),
- ('opensuse-tumbleweed', '', ''),
- ('sles', '', ''),
- ('centos', '', ''),
- ('fedora', '', ''),
- ('redhat', '', ''),
+ ('opensuse', '', ''),
+ ('opensuse-leap', '', ''),
+ ('opensuse-tumbleweed', '', ''),
+ ('sles', '', ''),
+ ('centos', '', ''),
+ ('fedora', '', ''),
+ ('redhat', '', ''),
]
for (distro_name, distro_version, flavor) in distro_values:
m_distro.return_value = (distro_name, distro_version, flavor)
@@ -5134,7 +5168,7 @@ def _gzip_data(data):
class TestRenameInterfaces(CiTestCase):
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_all(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'interface0', 'virtio_net', '0x3'),
@@ -5165,7 +5199,7 @@ class TestRenameInterfaces(CiTestCase):
capture=True),
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_no_driver_no_device_id(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'interface0', None, None),
@@ -5196,7 +5230,7 @@ class TestRenameInterfaces(CiTestCase):
capture=True),
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_all_bounce(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'interface0', 'virtio_net', '0x3'),
@@ -5231,7 +5265,7 @@ class TestRenameInterfaces(CiTestCase):
mock.call(['ip', 'link', 'set', 'interface2', 'up'], capture=True)
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_duplicate_macs(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'eth0', 'hv_netsvc', '0x3'),
@@ -5260,7 +5294,7 @@ class TestRenameInterfaces(CiTestCase):
capture=True),
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_duplicate_macs_driver_no_devid(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'eth0', 'hv_netsvc', None),
@@ -5289,7 +5323,7 @@ class TestRenameInterfaces(CiTestCase):
capture=True),
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_multi_mac_dups(self, mock_subp):
renames = [
('00:11:22:33:44:55', 'eth0', 'hv_netsvc', '0x3'),
@@ -5328,7 +5362,7 @@ class TestRenameInterfaces(CiTestCase):
capture=True),
])
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_rename_macs_case_insensitive(self, mock_subp):
"""_rename_interfaces must support upper or lower case macs."""
renames = [
diff --git a/tests/unittests/test_net_freebsd.py b/tests/unittests/test_net_freebsd.py
index 48296c30..414b4830 100644
--- a/tests/unittests/test_net_freebsd.py
+++ b/tests/unittests/test_net_freebsd.py
@@ -7,7 +7,7 @@ SAMPLE_FREEBSD_IFCONFIG_OUT = readResource("netinfo/freebsd-ifconfig-output")
class TestInterfacesByMac(CiTestCase):
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
@mock.patch('cloudinit.util.is_FreeBSD')
def test_get_interfaces_by_mac(self, mock_is_FreeBSD, mock_subp):
mock_is_FreeBSD.return_value = True
diff --git a/tests/unittests/test_render_cloudcfg.py b/tests/unittests/test_render_cloudcfg.py
new file mode 100644
index 00000000..495e2669
--- /dev/null
+++ b/tests/unittests/test_render_cloudcfg.py
@@ -0,0 +1,59 @@
+"""Tests for tools/render-cloudcfg"""
+
+import os
+import sys
+
+import pytest
+
+from cloudinit import subp
+from cloudinit import util
+
+# TODO(Look to align with tools.render-cloudcfg or cloudinit.distros.OSFAMILIES)
+DISTRO_VARIANTS = ["amazon", "arch", "centos", "debian", "fedora", "freebsd",
+ "netbsd", "openbsd", "rhel", "suse", "ubuntu", "unknown"]
+
+
+@pytest.mark.allow_subp_for(sys.executable)
+class TestRenderCloudCfg:
+
+ cmd = [sys.executable, os.path.realpath('tools/render-cloudcfg')]
+ tmpl_path = os.path.realpath('config/cloud.cfg.tmpl')
+
+ @pytest.mark.parametrize('variant', (DISTRO_VARIANTS))
+ def test_variant_sets_distro_in_cloud_cfg(self, variant, tmpdir):
+ outfile = tmpdir.join('outcfg').strpath
+ subp.subp(
+ self.cmd + ['--variant', variant, self.tmpl_path, outfile])
+ with open(outfile) as stream:
+ system_cfg = util.load_yaml(stream.read())
+ if variant == 'unknown':
+ variant = 'ubuntu' # Unknown is defaulted to ubuntu
+ assert system_cfg['system_info']['distro'] == variant
+
+ @pytest.mark.parametrize('variant', (DISTRO_VARIANTS))
+ def test_variant_sets_default_user_in_cloud_cfg(self, variant, tmpdir):
+ outfile = tmpdir.join('outcfg').strpath
+ subp.subp(
+ self.cmd + ['--variant', variant, self.tmpl_path, outfile])
+ with open(outfile) as stream:
+ system_cfg = util.load_yaml(stream.read())
+
+ default_user_exceptions = {
+ 'amazon': 'ec2-user', 'debian': 'ubuntu', 'unknown': 'ubuntu'}
+ default_user = system_cfg['system_info']['default_user']['name']
+ assert default_user == default_user_exceptions.get(variant, variant)
+
+ @pytest.mark.parametrize('variant,renderers', (
+ ('freebsd', ['freebsd']), ('netbsd', ['netbsd']),
+ ('openbsd', ['openbsd']), ('ubuntu', ['netplan', 'eni', 'sysconfig']))
+ )
+ def test_variant_sets_network_renderer_priority_in_cloud_cfg(
+ self, variant, renderers, tmpdir
+ ):
+ outfile = tmpdir.join('outcfg').strpath
+ subp.subp(
+ self.cmd + ['--variant', variant, self.tmpl_path, outfile])
+ with open(outfile) as stream:
+ system_cfg = util.load_yaml(stream.read())
+
+ assert renderers == system_cfg['system_info']['network']['renderers']
diff --git a/tests/unittests/test_reporting.py b/tests/unittests/test_reporting.py
index 6814030e..9f11fd5c 100644
--- a/tests/unittests/test_reporting.py
+++ b/tests/unittests/test_reporting.py
@@ -349,7 +349,6 @@ class TestReportingEventStack(TestCase):
with parent:
with child:
pass
- pass
self.assertEqual(report_start.call_count, 0)
self.assertEqual(report_finish.call_count, 0)
diff --git a/tests/unittests/test_reporting_hyperv.py b/tests/unittests/test_reporting_hyperv.py
index b3e083c6..47ede670 100644
--- a/tests/unittests/test_reporting_hyperv.py
+++ b/tests/unittests/test_reporting_hyperv.py
@@ -1,7 +1,9 @@
# This file is part of cloud-init. See LICENSE file for license information.
+import base64
+import zlib
-from cloudinit.reporting import events
-from cloudinit.reporting.handlers import HyperVKvpReportingHandler
+from cloudinit.reporting import events, instantiated_handler_registry
+from cloudinit.reporting.handlers import HyperVKvpReportingHandler, LogHandler
import json
import os
@@ -72,7 +74,7 @@ class TextKvpReporter(CiTestCase):
def test_event_very_long(self):
reporter = HyperVKvpReportingHandler(
kvp_file_path=self.tmp_file_path)
- description = 'ab' * reporter.HV_KVP_EXCHANGE_MAX_VALUE_SIZE
+ description = 'ab' * reporter.HV_KVP_AZURE_MAX_VALUE_SIZE
long_event = events.FinishReportingEvent(
'event_name',
description,
@@ -93,10 +95,15 @@ class TextKvpReporter(CiTestCase):
def test_not_truncate_kvp_file_modified_after_boot(self):
with open(self.tmp_file_path, "wb+") as f:
kvp = {'key': 'key1', 'value': 'value1'}
- data = (struct.pack("%ds%ds" % (
+ data = struct.pack(
+ "%ds%ds"
+ % (
HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_KEY_SIZE,
- HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_VALUE_SIZE),
- kvp['key'].encode('utf-8'), kvp['value'].encode('utf-8')))
+ HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_VALUE_SIZE,
+ ),
+ kvp["key"].encode("utf-8"),
+ kvp["value"].encode("utf-8"),
+ )
f.write(data)
cur_time = time.time()
os.utime(self.tmp_file_path, (cur_time, cur_time))
@@ -131,11 +138,13 @@ class TextKvpReporter(CiTestCase):
self.assertEqual(0, len(kvps))
@mock.patch('cloudinit.distros.uses_systemd')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_get_boot_telemetry(self, m_subp, m_sysd):
reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
- datetime_pattern = r"\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]"
- r"\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z)"
+ datetime_pattern = (
+ r"\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]"
+ r"\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z)"
+ )
# get_boot_telemetry makes two subp calls to systemctl. We provide
# a list of values that the subp calls should return
@@ -192,6 +201,72 @@ class TextKvpReporter(CiTestCase):
if "test_diagnostic" not in evt_msg:
raise AssertionError("missing expected diagnostic message")
+ def test_report_compressed_event(self):
+ reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
+ try:
+ instantiated_handler_registry.register_item("telemetry", reporter)
+ event_desc = b'test_compressed'
+ azure.report_compressed_event(
+ "compressed event", event_desc)
+
+ self.validate_compressed_kvps(reporter, 1, [event_desc])
+ finally:
+ instantiated_handler_registry.unregister_item("telemetry",
+ force=False)
+
+ @mock.patch.object(LogHandler, 'publish_event')
+ def test_push_log_to_kvp(self, publish_event):
+ reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
+ try:
+ instantiated_handler_registry.register_item("telemetry", reporter)
+ log_file = self.tmp_path("cloud-init.log")
+ azure.MAX_LOG_TO_KVP_LENGTH = 100
+ azure.LOG_PUSHED_TO_KVP_MARKER_FILE = self.tmp_path(
+ 'log_pushed_to_kvp')
+ with open(log_file, "w") as f:
+ log_content = "A" * 50 + "B" * 100
+ f.write(log_content)
+ azure.push_log_to_kvp(log_file)
+
+ with open(log_file, "a") as f:
+ extra_content = "C" * 10
+ f.write(extra_content)
+ azure.push_log_to_kvp(log_file)
+
+ for call_arg in publish_event.call_args_list:
+ event = call_arg[0][0]
+ self.assertNotEqual(
+ event.event_type, azure.COMPRESSED_EVENT_TYPE)
+ self.validate_compressed_kvps(
+ reporter, 1,
+ [log_content[-azure.MAX_LOG_TO_KVP_LENGTH:].encode()])
+ finally:
+ instantiated_handler_registry.unregister_item("telemetry",
+ force=False)
+
+ def validate_compressed_kvps(self, reporter, count, values):
+ reporter.q.join()
+ kvps = list(reporter._iterate_kvps(0))
+ compressed_count = 0
+ for i in range(len(kvps)):
+ kvp = kvps[i]
+ kvp_value = kvp['value']
+ kvp_value_json = json.loads(kvp_value)
+ evt_msg = kvp_value_json["msg"]
+ evt_type = kvp_value_json["type"]
+ if evt_type != azure.COMPRESSED_EVENT_TYPE:
+ continue
+ evt_msg_json = json.loads(evt_msg)
+ evt_encoding = evt_msg_json["encoding"]
+ evt_data = zlib.decompress(
+ base64.decodebytes(evt_msg_json["data"].encode("ascii")))
+
+ self.assertLess(compressed_count, len(values))
+ self.assertEqual(evt_data, values[compressed_count])
+ self.assertEqual(evt_encoding, "gz+b64")
+ compressed_count += 1
+ self.assertEqual(compressed_count, count)
+
def test_unique_kvp_key(self):
reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
evt1 = events.ReportingEvent(
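
validate_compressed_kvps() above reverses a base64-over-zlib ("gz+b64") wrapper around the event payload. A small round-trip sketch of that encoding, with the JSON field names taken from the validator; the producing side in azure.report_compressed_event is assumed to match:

import base64
import json
import zlib

event_desc = b'test_compressed'

# Encode the way the handler is expected to: compress, base64, wrap in JSON.
msg = json.dumps({
    'encoding': 'gz+b64',
    'data': base64.b64encode(zlib.compress(event_desc)).decode('ascii'),
})

# Decode exactly as validate_compressed_kvps() does.
decoded = json.loads(msg)
data = zlib.decompress(
    base64.decodebytes(decoded['data'].encode('ascii')))
assert decoded['encoding'] == 'gz+b64'
assert data == event_desc
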
diff --git a/tests/unittests/test_rh_subscription.py b/tests/unittests/test_rh_subscription.py
index 4cd27eed..53d3cd5a 100644
--- a/tests/unittests/test_rh_subscription.py
+++ b/tests/unittests/test_rh_subscription.py
@@ -6,7 +6,7 @@ import copy
import logging
from cloudinit.config import cc_rh_subscription
-from cloudinit import util
+from cloudinit import subp
from cloudinit.tests.helpers import CiTestCase, mock
@@ -56,7 +56,7 @@ class GoodTests(CiTestCase):
'''
reg = "The system has been registered with ID:" \
" 12345678-abde-abcde-1234-1234567890abc"
- m_sman_cli.side_effect = [util.ProcessExecutionError, (reg, 'bar')]
+ m_sman_cli.side_effect = [subp.ProcessExecutionError, (reg, 'bar')]
self.handle(self.name, self.config, self.cloud_init,
self.log, self.args)
self.assertIn(mock.call(['identity']), m_sman_cli.call_args_list)
@@ -93,7 +93,7 @@ class GoodTests(CiTestCase):
reg = "The system has been registered with ID:" \
" 12345678-abde-abcde-1234-1234567890abc"
m_sman_cli.side_effect = [
- util.ProcessExecutionError,
+ subp.ProcessExecutionError,
(reg, 'bar'),
('Service level set to: self-support', ''),
('pool1\npool3\n', ''), ('pool2\n', ''), ('', ''),
@@ -161,7 +161,7 @@ class TestBadInput(CiTestCase):
def test_no_password(self, m_sman_cli):
'''Attempt to register without the password key/value.'''
- m_sman_cli.side_effect = [util.ProcessExecutionError,
+ m_sman_cli.side_effect = [subp.ProcessExecutionError,
(self.reg, 'bar')]
self.handle(self.name, self.config_no_password, self.cloud_init,
self.log, self.args)
@@ -169,7 +169,7 @@ class TestBadInput(CiTestCase):
def test_no_org(self, m_sman_cli):
'''Attempt to register without the org key/value.'''
- m_sman_cli.side_effect = [util.ProcessExecutionError]
+ m_sman_cli.side_effect = [subp.ProcessExecutionError]
self.handle(self.name, self.config_no_key, self.cloud_init,
self.log, self.args)
m_sman_cli.assert_called_with(['identity'])
@@ -182,7 +182,7 @@ class TestBadInput(CiTestCase):
def test_service_level_without_auto(self, m_sman_cli):
'''Attempt to register using service-level without auto-attach key.'''
- m_sman_cli.side_effect = [util.ProcessExecutionError,
+ m_sman_cli.side_effect = [subp.ProcessExecutionError,
(self.reg, 'bar')]
self.handle(self.name, self.config_service, self.cloud_init,
self.log, self.args)
@@ -195,7 +195,7 @@ class TestBadInput(CiTestCase):
'''
Register with pools that are not in the format of a list
'''
- m_sman_cli.side_effect = [util.ProcessExecutionError,
+ m_sman_cli.side_effect = [subp.ProcessExecutionError,
(self.reg, 'bar')]
self.handle(self.name, self.config_badpool, self.cloud_init,
self.log, self.args)
@@ -208,7 +208,7 @@ class TestBadInput(CiTestCase):
'''
Register with repos that are not in the format of a list
'''
- m_sman_cli.side_effect = [util.ProcessExecutionError,
+ m_sman_cli.side_effect = [subp.ProcessExecutionError,
(self.reg, 'bar')]
self.handle(self.name, self.config_badrepo, self.cloud_init,
self.log, self.args)
@@ -222,7 +222,7 @@ class TestBadInput(CiTestCase):
'''
Attempt to register with a key that we don't know
'''
- m_sman_cli.side_effect = [util.ProcessExecutionError,
+ m_sman_cli.side_effect = [subp.ProcessExecutionError,
(self.reg, 'bar')]
self.handle(self.name, self.config_badkey, self.cloud_init,
self.log, self.args)
diff --git a/tests/unittests/test_sshutil.py b/tests/unittests/test_sshutil.py
index 0be41924..fd1d1bac 100644
--- a/tests/unittests/test_sshutil.py
+++ b/tests/unittests/test_sshutil.py
@@ -15,6 +15,9 @@ FakePwEnt.__new__.__defaults__ = tuple(
"UNSET_%s" % n for n in FakePwEnt._fields)
+# Do not use these public keys, most of them are fetched from
+# the testdata for OpenSSH, and their private keys are available
+# https://github.com/openssh/openssh-portable/tree/master/regress/unittests/sshkey/testdata
VALID_CONTENT = {
'dsa': (
"AAAAB3NzaC1kc3MAAACBAIrjOQSlSea19bExXBMBKBvcLhBoVvNBjCppNzllipF"
@@ -41,24 +44,238 @@ VALID_CONTENT = {
"YWpMfYdPUnE7u536WqzFmsaqJctz3gBxH9Ex7dFtrxR4qiqEr9Qtlu3xGn7Bw07"
"/+i1D+ey3ONkZLN+LQ714cgj8fRS4Hj29SCmXp5Kt5/82cD/VN3NtHw=="
),
+ 'ed25519': (
+ "AAAAC3NzaC1lZDI1NTE5AAAAIA1J77+CrJ8p6/vWCEzuylqJNMHUP/XmeYyGVWb"
+ "8lnDd"
+ ),
+ 'ecdsa-sha2-nistp256-cert-v01@openssh.com': (
+ "AAAAKGVjZHNhLXNoYTItbmlzdHAyNTYtY2VydC12MDFAb3BlbnNzaC5jb20AAAA"
+ "gQIfwT/+UX68/hlKsdKuaOuAVB6ftTg03SlP/uH4OBEwAAAAIbmlzdHAyNTYAAA"
+ "BBBEjA0gjJmPM6La3sXyfNlnjilvvGY6I2M8SvJj4o3X/46wcUbPWTaj4RF3EXw"
+ "HvNxplYBwdPlk2zEecvf9Cs2BMAAAAAAAAAAAAAAAEAAAAYa2V5cy9lY2RzYS1z"
+ "aGEyLW5pc3RwMjU2AAAAAAAAAAAAAAAA//////////8AAAAAAAAAggAAABVwZXJ"
+ "taXQtWDExLWZvcndhcmRpbmcAAAAAAAAAF3Blcm1pdC1hZ2VudC1mb3J3YXJkaW"
+ "5nAAAAAAAAABZwZXJtaXQtcG9ydC1mb3J3YXJkaW5nAAAAAAAAAApwZXJtaXQtc"
+ "HR5AAAAAAAAAA5wZXJtaXQtdXNlci1yYwAAAAAAAAAAAAAAaAAAABNlY2RzYS1z"
+ "aGEyLW5pc3RwMjU2AAAACG5pc3RwMjU2AAAAQQRH6Y9Q1+ocQ8ETKW3LjQqtxg7"
+ "OuSSDacxmmQatQVaIawwjCbmntyEAqmVj3v9ElDSXnO5m7TyYMBQu4+vsh76RAA"
+ "AAZQAAABNlY2RzYS1zaGEyLW5pc3RwMjU2AAAASgAAACEA47Cl2MMhr+glPGuxx"
+ "2tM3QXkDcwdP0SxSEW5yy4XV5oAAAAhANNMm1cdVlAt3hmycQgdD82zPlg5YvVO"
+ "iN0SQTbgVD8i"
+ ),
'ecdsa-sha2-nistp256': (
- "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMy/WuXq5MF"
- "r5hVQ9EEKKUTF7vUaOkgxUh6bNsCs9SFMVslIm1zM/WJYwUv52LdEePjtDYiV4A"
- "l2XthJ9/bs7Pc="
+ "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEjA0gjJmPM"
+ "6La3sXyfNlnjilvvGY6I2M8SvJj4o3X/46wcUbPWTaj4RF3EXwHvNxplYBwdPlk"
+ "2zEecvf9Cs2BM="
),
- 'ecdsa-sha2-nistp521': (
- "AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBABOdNTkh9F"
- "McK4hZRLs5LTXBEXwNr0+Yg9uvJYRFcz2ZlnjYX9tM4Z3QQFjqogU4pU+zpKLqZ"
- "5VE4Jcnb1T608UywBIdXkSFZT8trGJqBv9nFWGgmTX3KP8kiBbihpuv1cGwglPl"
- "Hxs50A42iP0JiT7auGtEAGsu/uMql323GTGb4171Q=="
+ 'ecdsa-sha2-nistp384-cert-v01@openssh.com': (
+ "AAAAKGVjZHNhLXNoYTItbmlzdHAzODQtY2VydC12MDFAb3BlbnNzaC5jb20AAAA"
+ "grnSvDsK1EnCZndO1IyGWcGkVgVSkPWi/XO2ybPFyLVUAAAAIbmlzdHAzODQAAA"
+ "BhBAaYSQs+8TT0Tzciy0dorwhur6yzOGUrYQ6ueUQYWbE7eNdHmhsVrlpGPgSaY"
+ "ByhXtAJiPOMqLU5h0eb3sCtM3ek4NvjXFTGTqPrrxJI6q0OsgrtkGE7UM9ZsfMm"
+ "7q6BOAAAAAAAAAAAAAAAAQAAABhrZXlzL2VjZHNhLXNoYTItbmlzdHAzODQAAAA"
+ "AAAAAAAAAAAD//////////wAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZG"
+ "luZwAAAAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pd"
+ "C1wb3J0LWZvcndhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1p"
+ "dC11c2VyLXJjAAAAAAAAAAAAAACIAAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAA"
+ "IbmlzdHAzODQAAABhBLWbubcMzcWc7lMTCMGVXZlaVvUOHLjpr6SOOScFFrd8K9"
+ "Gl8nYELST5HZ1gym65m+MG6/tbrUWIY/flLWNIe+WtqxrdPPGdIhFruCwNw2peZ"
+ "SbQOa/o3AGnJ/vO6EKEGAAAAIQAAAATZWNkc2Etc2hhMi1uaXN0cDM4NAAAAGkA"
+ "AAAxAL10JHd5bvnbpD+fet/k1YE1BEIrqGXaoIIJ9ReE5H4nTK1uQJzMD7+wwGK"
+ "RVYqYQgAAADAiit0UCMDAUbjD+R2x4LvU3x/t8G3sdqDLRNfMRpjZpvcS8AwC+Y"
+ "VFVSQNn0AyzW0="
),
'ecdsa-sha2-nistp384': (
- "AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBAnoqFU9Gnl"
- "LcsEuCJnobs/c6whzvjCgouaOO61kgXNtIxyF4Wkutg6xaGYgBBt/phb7a2TurI"
- "bcIBuzJ/mP22UyUAbNnBfStAEBmYbrTf1EfiMCYUAr1XnL0UdYmZ8HFg=="
+ "AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBAaYSQs+8TT"
+ "0Tzciy0dorwhur6yzOGUrYQ6ueUQYWbE7eNdHmhsVrlpGPgSaYByhXtAJiPOMqL"
+ "U5h0eb3sCtM3ek4NvjXFTGTqPrrxJI6q0OsgrtkGE7UM9ZsfMm7q6BOA=="
+ ),
+ 'ecdsa-sha2-nistp521-cert-v01@openssh.com': (
+ "AAAAKGVjZHNhLXNoYTItbmlzdHA1MjEtY2VydC12MDFAb3BlbnNzaC5jb20AAAA"
+ "gGmRzkkMvRFk1V5U3m3mQ2nfW20SJVXk1NKnT5iZGDcEAAAAIbmlzdHA1MjEAAA"
+ "CFBAHosAOHAI1ZkerbKYQ72S6uit1u77PCj/OalZtXgsxv0TTAZB273puG2X94C"
+ "Q8yyNHcby87zFZHdv5BSKyZ/cyREAAeiAcSakop9VS3+bUfZpEIqwBZXarwUjnR"
+ "nxprkcQ0rfCCdagkGZr/OA7DemK2D8tKLTHsKoEEWNImo6/pXDkFxAAAAAAAAAA"
+ "AAAAAAQAAABhrZXlzL2VjZHNhLXNoYTItbmlzdHA1MjEAAAAAAAAAAAAAAAD///"
+ "///////wAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAAAAAAAAAXc"
+ "GVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcndh"
+ "cmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAA"
+ "AAAAAAAAAAACsAAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAA"
+ "CFBAC6hFVXM1XEg/7qKkp5sLZuANGQVW88b5pPn2ZcK0td9IQstLH6BwWuZ6MPE"
+ "ogiDlvx9HD1BaKGBBfkxgOY8NGFzQHbjU9eTWH3gt0RATDbZsij1pSkFPnAXdU9"
+ "SjfogYloI2xdHaTCgWp3zgsUV+BBQ0QGGv2MqqcOmrF0f5YEJeOffAAAAKcAAAA"
+ "TZWNkc2Etc2hhMi1uaXN0cDUyMQAAAIwAAABCAT+vSOYPuYVTDopDW08576d5Sb"
+ "edXQMOu1op4CQIm98VKtAXvu5dfioi5VYAqpte8M+UxEMOMiQWJp+U9exYf6LuA"
+ "AAAQgEzkIpX3yKXPaPcK17mNx40ujEDitm4ARmbhAge0sFhZtf7YIgI55b6vkI8"
+ "JvMJkzQCBF1cpNOaIpVh1nFZNBphMQ=="
+ ),
+ 'ecdsa-sha2-nistp521': (
+ "AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAHosAOHAI1"
+ "ZkerbKYQ72S6uit1u77PCj/OalZtXgsxv0TTAZB273puG2X94CQ8yyNHcby87zF"
+ "ZHdv5BSKyZ/cyREAAeiAcSakop9VS3+bUfZpEIqwBZXarwUjnRnxprkcQ0rfCCd"
+ "agkGZr/OA7DemK2D8tKLTHsKoEEWNImo6/pXDkFxA=="
+ ),
+ 'sk-ecdsa-sha2-nistp256-cert-v01@openssh.com': (
+ "AAAAIHNzaC1lZDI1NTE5LWNlcnQtdjAxQG9wZW5zc2guY29tAAAAIIxzuxl4z3u"
+ "wAIslne8Huft+1n1IhHAlNbWZkQyyECCGAAAAIFOG6kY7Rf4UtCFvPwKgo/BztX"
+ "ck2xC4a2WyA34XtIwZAAAAAAAAAAgAAAACAAAABmp1bGl1cwAAABIAAAAFaG9zd"
+ "DEAAAAFaG9zdDIAAAAANowB8AAAAABNHmBwAAAAAAAAAAAAAAAAAAAAMwAAAAtz"
+ "c2gtZWQyNTUxOQAAACBThupGO0X+FLQhbz8CoKPwc7V3JNsQuGtlsgN+F7SMGQA"
+ "AAFMAAAALc3NoLWVkMjU1MTkAAABABGTn+Bmz86Ajk+iqKCSdP5NClsYzn4alJd"
+ "0V5bizhP0Kumc/HbqQfSt684J1WdSzih+EjvnTgBhK9jTBKb90AQ=="
+ ),
+ 'sk-ecdsa-sha2-nistp256@openssh.com': (
+ "AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHA"
+ "yNTYAAABBBIELQJ2DgvaX1yQlKFokfWM2suuaCFI2qp0eJodHyg6O4ifxc3XpRK"
+ "d1OS8dNYQtE/YjdXSrA+AOnMF5ns2Nkx4AAAAEc3NoOg=="
+ ),
+ 'sk-ssh-ed25519-cert-v01@openssh.com': (
+ "AAAAIHNzaC1lZDI1NTE5LWNlcnQtdjAxQG9wZW5zc2guY29tAAAAIIxzuxl4z3u"
+ "wAIslne8Huft+1n1IhHAlNbWZkQyyECCGAAAAIFOG6kY7Rf4UtCFvPwKgo/BztX"
+ "ck2xC4a2WyA34XtIwZAAAAAAAAAAgAAAACAAAABmp1bGl1cwAAABIAAAAFaG9zd"
+ "DEAAAAFaG9zdDIAAAAANowB8AAAAABNHmBwAAAAAAAAAAAAAAAAAAAAMwAAAAtz"
+ "c2gtZWQyNTUxOQAAACBThupGO0X+FLQhbz8CoKPwc7V3JNsQuGtlsgN+F7SMGQA"
+ "AAFMAAAALc3NoLWVkMjU1MTkAAABABGTn+Bmz86Ajk+iqKCSdP5NClsYzn4alJd"
+ "0V5bizhP0Kumc/HbqQfSt684J1WdSzih+EjvnTgBhK9jTBKb90AQ=="
+ ),
+ 'sk-ssh-ed25519@openssh.com': (
+ "AAAAGnNrLXNzaC1lZDI1NTE5QG9wZW5zc2guY29tAAAAICFo/k5LU8863u66YC9"
+ "eUO2170QduohPURkQnbLa/dczAAAABHNzaDo="
+ ),
+ 'ssh-dss-cert-v01@openssh.com': (
+ "AAAAHHNzaC1kc3MtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgdTlbNU9Hn9Qng3F"
+ "HxwH971bxCIoq1ern/QWFFDWXgmYAAACBAPqS600VGwdPAQC/p3f0uGyrLVql0c"
+ "Fn1zYd/JGvtabKnIYjLaYprje/NcjwI3CZFJiz4Dp3S8kLs+X5/1DMn/Tg1Y4D4"
+ "yLB+6vCtHcJF7rVBFhvw/KZwc7G54ez3khyOtsg82fzpyOc8/mq+/+C5TMKO7DD"
+ "jMF0k5emWKCsa3ZfAAAAFQCjA/+dKkMu4/CWjJPtfl7YNaStNQAAAIEA7uX1BVV"
+ "tJKjLmWrpw62+l/xSXA5rr7MHBuWjiCYV3VHBfXJaQDyRDtGuEJKDwdzqYgacpG"
+ "ApGWL/cuBtJ9nShsUl6GRG0Ra03g+Hx9VR5LviJBsjAVB4qVgciU1NGga0Bt2Le"
+ "cd1X4EGQRBzVXeuOpiqGM6jP/I2yDMs0Pboet0AAACBAOdXpyfmobEBaOqZAuvg"
+ "j1P0uhjG2P31Ufurv22FWPBU3A9qrkxbOXwE0LwvjCvrsQV/lrYhJz/tiys40Ve"
+ "ahulWZE5SAHMXGIf95LiLSgaXMjko7joot+LK84ltLymwZ4QMnYjnZSSclf1Uuy"
+ "QMcUtb34+I0u9Ycnyhp2mSFsQtAAAAAAAAAAYAAAACAAAABmp1bGl1cwAAABIAA"
+ "AAFaG9zdDEAAAAFaG9zdDIAAAAANowB8AAAAABNHmBwAAAAAAAAAAAAAAAAAAAA"
+ "MwAAAAtzc2gtZWQyNTUxOQAAACBThupGO0X+FLQhbz8CoKPwc7V3JNsQuGtlsgN"
+ "+F7SMGQAAAFMAAAALc3NoLWVkMjU1MTkAAABAh/z1LIdNL1b66tQ8t9DY9BTB3B"
+ "QKpTKmc7ezyFKLwl96yaIniZwD9Ticdbe/8i/Li3uCFE3EAt8NAIv9zff8Bg=="
+ ),
+ 'ssh-dss': (
+ "AAAAB3NzaC1kc3MAAACBAPqS600VGwdPAQC/p3f0uGyrLVql0cFn1zYd/JGvtab"
+ "KnIYjLaYprje/NcjwI3CZFJiz4Dp3S8kLs+X5/1DMn/Tg1Y4D4yLB+6vCtHcJF7"
+ "rVBFhvw/KZwc7G54ez3khyOtsg82fzpyOc8/mq+/+C5TMKO7DDjMF0k5emWKCsa"
+ "3ZfAAAAFQCjA/+dKkMu4/CWjJPtfl7YNaStNQAAAIEA7uX1BVVtJKjLmWrpw62+"
+ "l/xSXA5rr7MHBuWjiCYV3VHBfXJaQDyRDtGuEJKDwdzqYgacpGApGWL/cuBtJ9n"
+ "ShsUl6GRG0Ra03g+Hx9VR5LviJBsjAVB4qVgciU1NGga0Bt2Lecd1X4EGQRBzVX"
+ "euOpiqGM6jP/I2yDMs0Pboet0AAACBAOdXpyfmobEBaOqZAuvgj1P0uhjG2P31U"
+ "furv22FWPBU3A9qrkxbOXwE0LwvjCvrsQV/lrYhJz/tiys40VeahulWZE5SAHMX"
+ "GIf95LiLSgaXMjko7joot+LK84ltLymwZ4QMnYjnZSSclf1UuyQMcUtb34+I0u9"
+ "Ycnyhp2mSFsQt"
+ ),
+ 'ssh-ed25519-cert-v01@openssh.com': (
+ "AAAAIHNzaC1lZDI1NTE5LWNlcnQtdjAxQG9wZW5zc2guY29tAAAAIIxzuxl4z3u"
+ "wAIslne8Huft+1n1IhHAlNbWZkQyyECCGAAAAIFOG6kY7Rf4UtCFvPwKgo/BztX"
+ "ck2xC4a2WyA34XtIwZAAAAAAAAAAgAAAACAAAABmp1bGl1cwAAABIAAAAFaG9zd"
+ "DEAAAAFaG9zdDIAAAAANowB8AAAAABNHmBwAAAAAAAAAAAAAAAAAAAAMwAAAAtz"
+ "c2gtZWQyNTUxOQAAACBThupGO0X+FLQhbz8CoKPwc7V3JNsQuGtlsgN+F7SMGQA"
+ "AAFMAAAALc3NoLWVkMjU1MTkAAABABGTn+Bmz86Ajk+iqKCSdP5NClsYzn4alJd"
+ "0V5bizhP0Kumc/HbqQfSt684J1WdSzih+EjvnTgBhK9jTBKb90AQ=="
+ ),
+ 'ssh-ed25519': (
+ "AAAAC3NzaC1lZDI1NTE5AAAAIFOG6kY7Rf4UtCFvPwKgo/BztXck2xC4a2WyA34"
+ "XtIwZ"
+ ),
+ 'ssh-rsa-cert-v01@openssh.com': (
+ "AAAAHHNzaC1yc2EtY2VydC12MDFAb3BlbnNzaC5jb20AAAAg98LhS2EHxLOWCLo"
+ "pZPwHdg/RJXusnkOqQXSc9R7aITkAAAADAQABAAAAgQDLV5lUTt7FrADseB/CGh"
+ "EZzpoojjEW5y8+ePvLppmK3MmMI18ud6vxzpK3bwZLYkVSyfJYI0HmIuGhdu7yM"
+ "rW6wb84gbq8C31Xoe9EORcIUuGSvDKdNSM1SjlhDquRblDFB8kToqXyx1lqrXec"
+ "XylxIUOL0jE+u0rU1967pDJx+wAAAAAAAAAFAAAAAgAAAAZqdWxpdXMAAAASAAA"
+ "ABWhvc3QxAAAABWhvc3QyAAAAADaMAfAAAAAATR5gcAAAAAAAAAAAAAAAAAAAAD"
+ "MAAAALc3NoLWVkMjU1MTkAAAAgU4bqRjtF/hS0IW8/AqCj8HO1dyTbELhrZbIDf"
+ "he0jBkAAABTAAAAC3NzaC1lZDI1NTE5AAAAQI3QGlUCzC07KorupxpDkkGy6tni"
+ "aZ8EvBflzvv+itXWNchGvfUeHmVT6aX0sRqehdz/lR+GmXRoZBhofwh0qAM="
+ ),
+ 'ssh-rsa': (
+ "AAAAB3NzaC1yc2EAAAADAQABAAAAgQDLV5lUTt7FrADseB/CGhEZzpoojjEW5y8"
+ "+ePvLppmK3MmMI18ud6vxzpK3bwZLYkVSyfJYI0HmIuGhdu7yMrW6wb84gbq8C3"
+ "1Xoe9EORcIUuGSvDKdNSM1SjlhDquRblDFB8kToqXyx1lqrXecXylxIUOL0jE+u"
+ "0rU1967pDJx+w=="
+ ),
+ 'ssh-xmss-cert-v01@openssh.com': (
+ "AAAAHXNzaC14bXNzLWNlcnQtdjAxQG9wZW5zc2guY29tAAAAIM2UD0IH+Igsekq"
+ "xjTO5f36exX4WGRMCtDGPjwfbXblxAAAAFVhNU1NfU0hBMi0yNTZfVzE2X0gxMA"
+ "AAAEDI83/K5JMOy0BMJgQypRdz35ApAnoQinMJ8ZMoZPaEJF8Z4rANQlfzaAXum"
+ "N3RDU5CGIUGGw+WJ904G/wwEq9CAAAAAAAAAAAAAAABAAAACWtleXMveG1zcwAA"
+ "AAAAAAAAAAAAAP//////////AAAAAAAAAIIAAAAVcGVybWl0LVgxMS1mb3J3YXJ"
+ "kaW5nAAAAAAAAABdwZXJtaXQtYWdlbnQtZm9yd2FyZGluZwAAAAAAAAAWcGVybW"
+ "l0LXBvcnQtZm9yd2FyZGluZwAAAAAAAAAKcGVybWl0LXB0eQAAAAAAAAAOcGVyb"
+ "Wl0LXVzZXItcmMAAAAAAAAAAAAAAHUAAAAUc3NoLXhtc3NAb3BlbnNzaC5jb20A"
+ "AAAVWE1TU19TSEEyLTI1Nl9XMTZfSDEwAAAAQA+irIyT2kaOd07YWZT/QItzNBZ"
+ "kUYwnqZJihQ7BxuyiDP4HEFbnfYnnIZXx9Asyi7vDyZRvi+AMSOzmMSq4JnkAAA"
+ "ngAAAAFHNzaC14bXNzQG9wZW5zc2guY29tAAAJxAAAAAAFjaKTDc+7Hu2uFGIab"
+ "3NAku8HbbGtrq/uGXOxmqxu4RaLqmwofl5iXk3nMwWEhQAb99vAc9D9ZFtfxJO4"
+ "STYUTjbj4BxToov/uvbYfE5VeO6sMvkGglgh9YHkCTAItsG8EmGT1SIPfKYzLlN"
+ "jvUlbcv0PaPFMJ0wzS9mNfuRf+KUhf3dxQ6zaMrBH3KEJ8Me2kNjhnh6rNPROeI"
+ "N+IcStSKsydYuiySGKS/orsH38XysuK5QqLizbHJY3cqLbkW9LsIijb+pfEJh4Y"
+ "bOoAbraWAv9ySnWCyRhvw2x8uJ0ZM+p5WSRiZfB3JxCpOhHgiKa9TdmdjnAtnED"
+ "zqKOj/gM7y9mesn5ydQI0bENOGymlw0ThUGKbXMxn87Hc9dDPURUBmoO3NGjPDf"
+ "7meS39A1ZEGtCe/pbZU9iwxqGx4wJYvB4lutRP2tYC1pA6hjQCcHibvxl5iqj+1"
+ "jRjwPr8dbTm4PdETW/7JDSVQXKjxOT0kRLHLelJNeviGx5zSHR5PtnUP3nOBMme"
+ "hk9DwcQW9vfKeWSnu9CMnF8xvYJxoPKQwmz0TKo+YVOUnc9/Ma+Ykseof9/W+rk"
+ "USQGELc4x7XE5XBKYZZP2PmtxirQ3qTWFw+CeTX2Oa+jPYkzOa7jgmHJ3Fi9Xqw"
+ "3L844vRl97e28GmwS0M1SXH+ohES0mO4EcrGh5OLyXBaRTV5QMo+4Bg6FH/HwEn"
+ "gG1mdEOAqvctK2QC70c4lHGzfexqwQ2U6WUADPcd/BLOE8Noj1EiXYwZrSA1okZ"
+ "FYnS/b89Uo51D2FE4A33V4gcxAglGzVNtrPulkguNT9B4jjNgdIwkTBL9k3ujkG"
+ "og6pyYjZ0J5Jp5XPBn+y0LqrpOdZijzrc1OJbX59tTeIbDkM7Fw8As4a03hQPDU"
+ "FTOdyMHgLnuLhLXOcqIjvW5axZL/Kx3UET8wrSHizPoa6NErCG4v5mC2M4kBSOW"
+ "In1QV27QMaHkL/ZAa3mPsW5iFZtOVEGzw2BW4MZs0qOrcloCENZzOHiMBroKEkH"
+ "AbzX6D1FLwml2JpXq4JXlCrdIiFm4+co5ygnWPqb4QGzMlcbjW/x/A16TthNuok"
+ "wwlmK5ndKZ76LahyGKEwx2Nv0D+0xilEC1EldtiYRdBNlcGbU/A5EhH5bQ9KVIH"
+ "wjWm35pRPLl5224//nqvQKhwFCn9otsR35XHXev3IQ0or3HmQxIvSDOwir1l66z"
+ "FFrkyHMWexoucbTBxw1MN3hLb247lcVYJ5+hspJgyoYbfR5RkQVDzhpzskogP7l"
+ "K5t0bphu+f+hpvrca7DAiiIZkcR4R1UUQoRnJPRXyXOxlxwS10b51cP9p9jzvZj"
+ "d2LUs8yx1KXWSxNHo6WmtYONNaUfdX2OB5+QCvPULfLfFeBrqpX6Yp5wQMM5Cup"
+ "k8FEfV07eEgQkVE9nDGKHglWo3kUdOF+XCqWAnXn0b/2bNS9/SSAz6gB1GTFcN/"
+ "QsFGlC0QgbCJbQ7LQM6hilRWupWvN5zZ/+HJyyRHuSs5VnQnKiGbIa6AIhx7mP7"
+ "8T82gKjU3mHLJWMGKcT3cY8R958Gs+w4OT71VJRMw3kK6qk02WCbD5OtbFeC6ib"
+ "KRJKdLK3BzjVs/Fzu3mHVucVby3jpvG1Z8HKspKFhvV7gjFEPu8qHKi4MdAlif/"
+ "KakyPk8yZB/dMfaxh7Kv/WpJuSwWNs7RNh29e+ZG+POxqRPWiHqiVw7P17a4dN7"
+ "nkVOawdBEyxI4NAY+4zW+0r0bAy6zNBitBvkq3IXfr3De6Upex52sPHvK04PXoV"
+ "RI6gjnpPSbLLjpSpcHPKgB7DWefLfhd63BUQbc57D8zm8Jd6qtmzcSKn+wz5/zT"
+ "0I6v9I4a+DOjjyqpPpzzNU76pt+Y8SuBgHzMm1vcAdNWlbQrqtScvm0T9AkYni6"
+ "47vSh77uwRZKDtMCMSU151tVUavXhtLYLZ6/ll5NhMXkkx8//i7pk1OBjN5LHVQ"
+ "0QeimRmavlXU1dJ2rwsFAV+9dDdJXUNOq3VLTo9FrbOzZiWtzzjkJpVJAFREnBn"
+ "yIDBK5AXtXE1RzfzaBHzbI2e2kO3t+CSNLWYMFYHBDqaeICYQ9+I9aO/8hnzVSo"
+ "fp+8IfWO8iJhppqynUniicW2oCzrn4oczzYNEjImt8CGY7g90GxWfX+ZgXMJfy/"
+ "bQiFQL3dZvVypDHEbFoIGz+sxkL83xrP4MZV1V9Wwa64lDXYv01Kp4kQXmmnAZY"
+ "KlxBoWqYDXLeLLguSOZxDSCIDpd+YPm39wQ3wOysHW2fmsWtp6FPPlQRUYjsGIP"
+ "lfrkJzpoeaPKDtF1m+mOULfEh9kvTKCmKRi385T9ON39D97eWqaM4CCfUGImvdR"
+ "DlZLXvjmaAh5BVJ8VJxk75OkP14vWFFlTMv0/k4BYLDKsrNqCREC/G9nQBGcD2D"
+ "CLwC2zPNaX2Y9dnyDs2csjN1ibsYttUMnXMgBcnCOkIkVS496Bpc0jQMf35GUgb"
+ "PSyliwqCoXjEBP/2eyq0VLFKQ0fXGsHWvElT+Y/7RYNTiYVWttFMxN5H/2EGcgn"
+ "lfNHLpQvXH9u/3YminS9GX30hQ7jFhpHXxkK8gZ1mpHL9K3pfKS3lG6EF9wQ23O"
+ "qS8m995SG3dp3MzmywxXen/ukXx6bDiEl5VaOvdRUcbhr5Eb3exVDfdWiaJdTYF"
+ "WfIfJOWx88drB3J9vFwjmuaoNEOjFsoNAMYthYOxXraXaJblvmUKz6tJ3T8/G7x"
+ "B9QGYNBsOqBolKoKHBtsWCosLdWhEZr9VFFh2AJrOW1fx24CIkHnvfTtwYORvQq"
+ "Ckuq2bZS1EOdsFkU/X5gwPl6gSUTNhV3IooXkBFL3iBEbfZ6JpQHVVyIuNWjIyN"
+ "b2liCn9Nn0VHeNMMRLl7uyw4eKlOX2ogom8SLvihYxcJoqlCwtehpLsKsU4iwME"
+ "PmDteW5GBGf4GbnqPFkpIT5ed1jGhdZt/dpsp+v6QhYH1uX4pPxdkdnuc84/yb9"
+ "k4SQdKBJ+l3KZkfIxApNWOZqicJfz/eWwS/15hiamRKRuiiUV2zS1V+l8bV7g9O"
+ "gy5scPBMONxtfFlGEKikZKurFmzboCOGQKRBEUCpsY44IAp443h59pQdVIb0YAS"
+ "kfp2xKHwYij6ELRNdH5MrlFa3bNTskGO4k5XDR4cl/Sma2SXgBKb5XjTtlNmCQG"
+ "Gv6lOW7pGXNhs5wfd8K9Ukm6KeLTIlYn1iiKM37YQpa+4JQYljCYhumbqNCkPTZ"
+ "rNYClh8fQEQ8XuOCDpomMWu58YOTfbZNMDWs/Ou7RfCjX+VNwjPShDK9joMwWKc"
+ "Jy3QalZbaoWtcyyvXxR2sqhVR9F7Cmasq4="
+ ),
+ 'ssh-xmss@openssh.com': (
+ "AAAAFHNzaC14bXNzQG9wZW5zc2guY29tAAAAFVhNU1NfU0hBMi0yNTZfVzE2X0g"
+ "xMAAAAECqptWnK94d+Sj2xcdTu8gz+75lawZoLSZFqC5IhbYuT/Z3oBZCim6yt+"
+ "HAmk6MKldl3Fg+74v4sR/SII0I0Jv/"
),
}
+KEY_TYPES = list(VALID_CONTENT.keys())
+
TEST_OPTIONS = (
"no-port-forwarding,no-agent-forwarding,no-X11-forwarding,"
'command="echo \'Please login as the user \"ubuntu\" rather than the'
@@ -70,13 +287,7 @@ class TestAuthKeyLineParser(test_helpers.CiTestCase):
def test_simple_parse(self):
# test key line with common 3 fields (keytype, base64, comment)
parser = ssh_util.AuthKeyLineParser()
- ecdsa_types = [
- 'ecdsa-sha2-nistp256',
- 'ecdsa-sha2-nistp384',
- 'ecdsa-sha2-nistp521',
- ]
-
- for ktype in ['rsa', 'ecdsa', 'dsa'] + ecdsa_types:
+ for ktype in KEY_TYPES:
content = VALID_CONTENT[ktype]
comment = 'user-%s@host' % ktype
line = ' '.join((ktype, content, comment,))
@@ -90,7 +301,7 @@ class TestAuthKeyLineParser(test_helpers.CiTestCase):
def test_parse_no_comment(self):
# test key line with key type and base64 only
parser = ssh_util.AuthKeyLineParser()
- for ktype in ['rsa', 'ecdsa', 'dsa']:
+ for ktype in KEY_TYPES:
content = VALID_CONTENT[ktype]
line = ' '.join((ktype, content,))
key = parser.parse(line)
@@ -104,7 +315,7 @@ class TestAuthKeyLineParser(test_helpers.CiTestCase):
# test key line with options in it
parser = ssh_util.AuthKeyLineParser()
options = TEST_OPTIONS
- for ktype in ['rsa', 'ecdsa', 'dsa']:
+ for ktype in KEY_TYPES:
content = VALID_CONTENT[ktype]
comment = 'user-%s@host' % ktype
line = ' '.join((options, ktype, content, comment,))
@@ -299,7 +510,7 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
result = ssh_util.update_ssh_config_lines(lines, updates)
self.assertEqual([], result)
- self.assertEqual(self.exlines, [str(l) for l in lines])
+ self.assertEqual(self.exlines, [str(line) for line in lines])
def test_keycase_not_modified(self):
"""Original case of key should not be changed on update.
@@ -374,13 +585,13 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
sshd_config = self.tmp_path('sshd_config')
util.write_file(
- sshd_config,
- "AuthorizedKeysFile %s %s" % (authorized_keys, user_keys))
+ sshd_config,
+ "AuthorizedKeysFile %s %s" % (authorized_keys, user_keys)
+ )
(auth_key_fn, auth_key_entries) = ssh_util.extract_authorized_keys(
- fpw.pw_name, sshd_config)
- content = ssh_util.update_authorized_keys(
- auth_key_entries, [])
+ fpw.pw_name, sshd_config)
+ content = ssh_util.update_authorized_keys(auth_key_entries, [])
self.assertEqual("%s/.ssh/authorized_keys" % fpw.pw_dir, auth_key_fn)
self.assertTrue(VALID_CONTENT['rsa'] in content)
@@ -398,11 +609,13 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
sshd_config = self.tmp_path('sshd_config')
util.write_file(
- sshd_config,
- "AuthorizedKeysFile %s %s" % (authorized_keys, user_keys))
+ sshd_config,
+ "AuthorizedKeysFile %s %s" % (authorized_keys, user_keys)
+ )
(auth_key_fn, auth_key_entries) = ssh_util.extract_authorized_keys(
- fpw.pw_name, sshd_config)
+ fpw.pw_name, sshd_config
+ )
content = ssh_util.update_authorized_keys(auth_key_entries, [])
self.assertEqual("%s/.ssh/authorized_keys" % fpw.pw_dir, auth_key_fn)
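
With the expanded VALID_CONTENT table, the parser tests now iterate every entry in KEY_TYPES. A single-key sketch of that loop body, using the ssh-ed25519 value from the table above; the attribute names follow the assertions made in test_simple_parse:

from cloudinit import ssh_util

ktype = 'ssh-ed25519'
content = (
    "AAAAC3NzaC1lZDI1NTE5AAAAIFOG6kY7Rf4UtCFvPwKgo/BztXck2xC4a2WyA34"
    "XtIwZ")
comment = 'user-%s@host' % ktype

parser = ssh_util.AuthKeyLineParser()
key = parser.parse(' '.join((ktype, content, comment)))

# The parsed AuthKeyLine should round-trip the three whitespace-separated
# fields of the authorized_keys line.
assert key.keytype == ktype
assert key.base64 == content
assert key.comment == comment
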
diff --git a/tests/unittests/test_templating.py b/tests/unittests/test_templating.py
index c36e6eb0..cba09830 100644
--- a/tests/unittests/test_templating.py
+++ b/tests/unittests/test_templating.py
@@ -4,8 +4,6 @@
#
# This file is part of cloud-init. See LICENSE file for license information.
-from __future__ import print_function
-
from cloudinit.tests import helpers as test_helpers
import textwrap
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 9ff17f52..fc557469 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -1,27 +1,21 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from __future__ import print_function
-
import io
-import json
import logging
import os
import re
import shutil
import stat
-import sys
import tempfile
+import pytest
import yaml
from unittest import mock
+from cloudinit import subp
from cloudinit import importer, util
from cloudinit.tests import helpers
-BASH = util.which('bash')
-BOGUS_COMMAND = 'this-is-not-expected-to-be-a-program-name'
-
-
class FakeSelinux(object):
def __init__(self, match_what):
@@ -105,6 +99,17 @@ class TestWriteFile(helpers.TestCase):
self.assertTrue(os.path.isdir(dirname))
self.assertTrue(os.path.isfile(path))
+ def test_dir_is_not_created_if_ensure_dir_false(self):
+ """Verify directories are not created if ensure_dir_exists is False."""
+ dirname = os.path.join(self.tmp, "subdir")
+ path = os.path.join(dirname, "NewFile.txt")
+ contents = "Hey there"
+
+ with self.assertRaises(FileNotFoundError):
+ util.write_file(path, contents, ensure_dir_exists=False)
+
+ self.assertFalse(os.path.isdir(dirname))
+
def test_explicit_mode(self):
"""Verify explicit file mode works properly."""
path = os.path.join(self.tmp, "NewFile.txt")
@@ -117,29 +122,29 @@ class TestWriteFile(helpers.TestCase):
file_stat = os.stat(path)
self.assertEqual(0o666, stat.S_IMODE(file_stat.st_mode))
- def test_copy_mode_no_existing(self):
- """Verify that file is created with mode 0o644 if copy_mode
+ def test_preserve_mode_no_existing(self):
+ """Verify that file is created with mode 0o644 if preserve_mode
is true and there is no prior existing file."""
path = os.path.join(self.tmp, "NewFile.txt")
contents = "Hey there"
- util.write_file(path, contents, copy_mode=True)
+ util.write_file(path, contents, preserve_mode=True)
self.assertTrue(os.path.exists(path))
self.assertTrue(os.path.isfile(path))
file_stat = os.stat(path)
self.assertEqual(0o644, stat.S_IMODE(file_stat.st_mode))
- def test_copy_mode_with_existing(self):
+ def test_preserve_mode_with_existing(self):
"""Verify that file is created using mode of existing file
- if copy_mode is true."""
+ if preserve_mode is true."""
path = os.path.join(self.tmp, "NewFile.txt")
contents = "Hey there"
open(path, 'w').close()
os.chmod(path, 0o666)
- util.write_file(path, contents, copy_mode=True)
+ util.write_file(path, contents, preserve_mode=True)
self.assertTrue(os.path.exists(path))
self.assertTrue(os.path.isfile(path))
@@ -387,7 +392,7 @@ class TestMountinfoParsing(helpers.ResourceUsingTestCase):
self.assertEqual(expected, util.parse_mount_info('/run/lock', lines))
@mock.patch('cloudinit.util.os')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_get_device_info_from_zpool(self, zpool_output, m_os):
# mock /dev/zfs exists
m_os.path.exists.return_value = True
@@ -410,17 +415,17 @@ class TestMountinfoParsing(helpers.ResourceUsingTestCase):
self.assertIsNone(ret)
@mock.patch('cloudinit.util.os')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_get_device_info_from_zpool_handles_no_zpool(self, m_sub, m_os):
"""Handle case where there is no zpool command"""
# mock /dev/zfs exists
m_os.path.exists.return_value = True
- m_sub.side_effect = util.ProcessExecutionError("No zpool cmd")
+ m_sub.side_effect = subp.ProcessExecutionError("No zpool cmd")
ret = util.get_device_info_from_zpool('vmzroot')
self.assertIsNone(ret)
@mock.patch('cloudinit.util.os')
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_get_device_info_from_zpool_on_error(self, zpool_output, m_os):
# mock /dev/zfs exists
m_os.path.exists.return_value = True
@@ -432,7 +437,7 @@ class TestMountinfoParsing(helpers.ResourceUsingTestCase):
ret = util.get_device_info_from_zpool('vmzroot')
self.assertIsNone(ret)
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_parse_mount_with_ext(self, mount_out):
mount_out.return_value = (
helpers.readResource('mount_parse_ext.txt'), '')
@@ -449,7 +454,7 @@ class TestMountinfoParsing(helpers.ResourceUsingTestCase):
ret = util.parse_mount('/not/existing/mount')
self.assertIsNone(ret)
- @mock.patch('cloudinit.util.subp')
+ @mock.patch('cloudinit.subp.subp')
def test_parse_mount_with_zfs(self, mount_out):
mount_out.return_value = (
helpers.readResource('mount_parse_zfs.txt'), '')
@@ -515,13 +520,13 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
"""
def _dmidecode_subp(cmd):
if cmd[-1] != key:
- raise util.ProcessExecutionError()
+ raise subp.ProcessExecutionError()
return (content, error)
self.patched_funcs.enter_context(
- mock.patch.object(util, 'which', lambda _: True))
+ mock.patch("cloudinit.subp.which", side_effect=lambda _: True))
self.patched_funcs.enter_context(
- mock.patch.object(util, 'subp', _dmidecode_subp))
+ mock.patch("cloudinit.subp.subp", side_effect=_dmidecode_subp))
def patch_mapping(self, new_mapping):
self.patched_funcs.enter_context(
@@ -548,10 +553,12 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
def test_dmidecode_not_used_on_arm(self):
self.patch_mapping({})
+ print("current =%s", subp)
self._create_sysfs_parent_directory()
dmi_val = 'from-dmidecode'
dmi_name = 'use-dmidecode'
self._configure_dmidecode_return(dmi_name, dmi_val)
+ print("now =%s", subp)
expected = {'armel': None, 'aarch64': dmi_val, 'x86_64': dmi_val}
found = {}
@@ -562,6 +569,7 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
for arch in expected:
m_uname.return_value = ('x-sysname', 'x-nodename',
'x-release', 'x-version', arch)
+ print("now2 =%s", subp)
found[arch] = util.read_dmi_data(dmi_name)
self.assertEqual(expected, found)
@@ -572,7 +580,7 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
def test_none_returned_if_dmidecode_not_in_path(self):
self.patched_funcs.enter_context(
- mock.patch.object(util, 'which', lambda _: False))
+ mock.patch.object(subp, 'which', lambda _: False))
self.patch_mapping({})
self.assertIsNone(util.read_dmi_data('expect-fail'))
@@ -736,219 +744,6 @@ class TestReadSeeded(helpers.TestCase):
self.assertEqual(found_ud, ud)
-class TestSubp(helpers.CiTestCase):
- with_logs = True
- allowed_subp = [BASH, 'cat', helpers.CiTestCase.SUBP_SHELL_TRUE,
- BOGUS_COMMAND, sys.executable]
-
- stdin2err = [BASH, '-c', 'cat >&2']
- stdin2out = ['cat']
- utf8_invalid = b'ab\xaadef'
- utf8_valid = b'start \xc3\xa9 end'
- utf8_valid_2 = b'd\xc3\xa9j\xc8\xa7'
- printenv = [BASH, '-c', 'for n in "$@"; do echo "$n=${!n}"; done', '--']
-
- def printf_cmd(self, *args):
- # bash's printf supports \xaa. So does /usr/bin/printf
- # but by using bash, we remove dependency on another program.
- return([BASH, '-c', 'printf "$@"', 'printf'] + list(args))
-
- def test_subp_handles_bytestrings(self):
- """subp can run a bytestring command if shell is True."""
- tmp_file = self.tmp_path('test.out')
- cmd = 'echo HI MOM >> {tmp_file}'.format(tmp_file=tmp_file)
- (out, _err) = util.subp(cmd.encode('utf-8'), shell=True)
- self.assertEqual(u'', out)
- self.assertEqual(u'', _err)
- self.assertEqual('HI MOM\n', util.load_file(tmp_file))
-
- def test_subp_handles_strings(self):
- """subp can run a string command if shell is True."""
- tmp_file = self.tmp_path('test.out')
- cmd = 'echo HI MOM >> {tmp_file}'.format(tmp_file=tmp_file)
- (out, _err) = util.subp(cmd, shell=True)
- self.assertEqual(u'', out)
- self.assertEqual(u'', _err)
- self.assertEqual('HI MOM\n', util.load_file(tmp_file))
-
- def test_subp_handles_utf8(self):
- # The given bytes contain utf-8 accented characters as seen in e.g.
- # the "deja dup" package in Ubuntu.
- cmd = self.printf_cmd(self.utf8_valid_2)
- (out, _err) = util.subp(cmd, capture=True)
- self.assertEqual(out, self.utf8_valid_2.decode('utf-8'))
-
- def test_subp_respects_decode_false(self):
- (out, err) = util.subp(self.stdin2out, capture=True, decode=False,
- data=self.utf8_valid)
- self.assertTrue(isinstance(out, bytes))
- self.assertTrue(isinstance(err, bytes))
- self.assertEqual(out, self.utf8_valid)
-
- def test_subp_decode_ignore(self):
- # this executes a string that writes invalid utf-8 to stdout
- (out, _err) = util.subp(self.printf_cmd('abc\\xaadef'),
- capture=True, decode='ignore')
- self.assertEqual(out, 'abcdef')
-
- def test_subp_decode_strict_valid_utf8(self):
- (out, _err) = util.subp(self.stdin2out, capture=True,
- decode='strict', data=self.utf8_valid)
- self.assertEqual(out, self.utf8_valid.decode('utf-8'))
-
- def test_subp_decode_invalid_utf8_replaces(self):
- (out, _err) = util.subp(self.stdin2out, capture=True,
- data=self.utf8_invalid)
- expected = self.utf8_invalid.decode('utf-8', 'replace')
- self.assertEqual(out, expected)
-
- def test_subp_decode_strict_raises(self):
- args = []
- kwargs = {'args': self.stdin2out, 'capture': True,
- 'decode': 'strict', 'data': self.utf8_invalid}
- self.assertRaises(UnicodeDecodeError, util.subp, *args, **kwargs)
-
- def test_subp_capture_stderr(self):
- data = b'hello world'
- (out, err) = util.subp(self.stdin2err, capture=True,
- decode=False, data=data,
- update_env={'LC_ALL': 'C'})
- self.assertEqual(err, data)
- self.assertEqual(out, b'')
-
- def test_subp_reads_env(self):
- with mock.patch.dict("os.environ", values={'FOO': 'BAR'}):
- out, _err = util.subp(self.printenv + ['FOO'], capture=True)
- self.assertEqual('FOO=BAR', out.splitlines()[0])
-
- def test_subp_env_and_update_env(self):
- out, _err = util.subp(
- self.printenv + ['FOO', 'HOME', 'K1', 'K2'], capture=True,
- env={'FOO': 'BAR'},
- update_env={'HOME': '/myhome', 'K2': 'V2'})
- self.assertEqual(
- ['FOO=BAR', 'HOME=/myhome', 'K1=', 'K2=V2'], out.splitlines())
-
- def test_subp_update_env(self):
- extra = {'FOO': 'BAR', 'HOME': '/root', 'K1': 'V1'}
- with mock.patch.dict("os.environ", values=extra):
- out, _err = util.subp(
- self.printenv + ['FOO', 'HOME', 'K1', 'K2'], capture=True,
- update_env={'HOME': '/myhome', 'K2': 'V2'})
-
- self.assertEqual(
- ['FOO=BAR', 'HOME=/myhome', 'K1=V1', 'K2=V2'], out.splitlines())
-
- def test_subp_warn_missing_shebang(self):
- """Warn on no #! in script"""
- noshebang = self.tmp_path('noshebang')
- util.write_file(noshebang, 'true\n')
-
- os.chmod(noshebang, os.stat(noshebang).st_mode | stat.S_IEXEC)
- with self.allow_subp([noshebang]):
- self.assertRaisesRegex(util.ProcessExecutionError,
- r'Missing #! in script\?',
- util.subp, (noshebang,))
-
- def test_subp_combined_stderr_stdout(self):
- """Providing combine_capture as True redirects stderr to stdout."""
- data = b'hello world'
- (out, err) = util.subp(self.stdin2err, capture=True,
- combine_capture=True, decode=False, data=data)
- self.assertEqual(b'', err)
- self.assertEqual(data, out)
-
- def test_returns_none_if_no_capture(self):
- (out, err) = util.subp(self.stdin2out, data=b'', capture=False)
- self.assertIsNone(err)
- self.assertIsNone(out)
-
- def test_exception_has_out_err_are_bytes_if_decode_false(self):
- """Raised exc should have stderr, stdout as bytes if no decode."""
- with self.assertRaises(util.ProcessExecutionError) as cm:
- util.subp([BOGUS_COMMAND], decode=False)
- self.assertTrue(isinstance(cm.exception.stdout, bytes))
- self.assertTrue(isinstance(cm.exception.stderr, bytes))
-
- def test_exception_has_out_err_are_bytes_if_decode_true(self):
- """Raised exc should have stderr, stdout as string if no decode."""
- with self.assertRaises(util.ProcessExecutionError) as cm:
- util.subp([BOGUS_COMMAND], decode=True)
- self.assertTrue(isinstance(cm.exception.stdout, str))
- self.assertTrue(isinstance(cm.exception.stderr, str))
-
- def test_bunch_of_slashes_in_path(self):
- self.assertEqual("/target/my/path/",
- util.target_path("/target/", "//my/path/"))
- self.assertEqual("/target/my/path/",
- util.target_path("/target/", "///my/path/"))
-
- def test_c_lang_can_take_utf8_args(self):
- """Independent of system LC_CTYPE, args can contain utf-8 strings.
-
- When python starts up, its default encoding gets set based on
- the value of LC_CTYPE. If no system locale is set, the default
- encoding for both python2 and python3 in some paths will end up
- being ascii.
-
- Attempts to use setlocale or patching (or changing) os.environ
- in the current environment seem to not be effective.
-
- This test starts up a python with LC_CTYPE set to C so that
- the default encoding will be set to ascii. In such an environment
- Popen(['command', 'non-ascii-arg']) would cause a UnicodeDecodeError.
- """
- python_prog = '\n'.join([
- 'import json, sys',
- 'from cloudinit.util import subp',
- 'data = sys.stdin.read()',
- 'cmd = json.loads(data)',
- 'subp(cmd, capture=False)',
- ''])
- cmd = [BASH, '-c', 'echo -n "$@"', '--',
- self.utf8_valid.decode("utf-8")]
- python_subp = [sys.executable, '-c', python_prog]
-
- out, _err = util.subp(
- python_subp, update_env={'LC_CTYPE': 'C'},
- data=json.dumps(cmd).encode("utf-8"),
- decode=False)
- self.assertEqual(self.utf8_valid, out)
-
- def test_bogus_command_logs_status_messages(self):
- """status_cb gets status messages logs on bogus commands provided."""
- logs = []
-
- def status_cb(log):
- logs.append(log)
-
- with self.assertRaises(util.ProcessExecutionError):
- util.subp([BOGUS_COMMAND], status_cb=status_cb)
-
- expected = [
- 'Begin run command: {cmd}\n'.format(cmd=BOGUS_COMMAND),
- 'ERROR: End run command: invalid command provided\n']
- self.assertEqual(expected, logs)
-
- def test_command_logs_exit_codes_to_status_cb(self):
- """status_cb gets status messages containing command exit code."""
- logs = []
-
- def status_cb(log):
- logs.append(log)
-
- with self.assertRaises(util.ProcessExecutionError):
- util.subp([BASH, '-c', 'exit 2'], status_cb=status_cb)
- util.subp([BASH, '-c', 'exit 0'], status_cb=status_cb)
-
- expected = [
- 'Begin run command: %s -c exit 2\n' % BASH,
- 'ERROR: End run command: exit(2)\n',
- 'Begin run command: %s -c exit 0\n' % BASH,
- 'End run command: exit(0)\n']
- self.assertEqual(expected, logs)
-
-
class TestEncode(helpers.TestCase):
"""Test the encoding functions"""
def test_decode_binary_plain_text_with_hex(self):
@@ -969,7 +764,7 @@ class TestProcessExecutionError(helpers.TestCase):
empty_description = 'Unexpected error while running command.'
def test_pexec_error_indent_text(self):
- error = util.ProcessExecutionError()
+ error = subp.ProcessExecutionError()
msg = 'abc\ndef'
formatted = 'abc\n{0}def'.format(' ' * 4)
self.assertEqual(error._indent_text(msg, indent_level=4), formatted)
@@ -979,10 +774,10 @@ class TestProcessExecutionError(helpers.TestCase):
error._indent_text(msg.encode()), type(msg.encode()))
def test_pexec_error_type(self):
- self.assertIsInstance(util.ProcessExecutionError(), IOError)
+ self.assertIsInstance(subp.ProcessExecutionError(), IOError)
def test_pexec_error_empty_msgs(self):
- error = util.ProcessExecutionError()
+ error = subp.ProcessExecutionError()
self.assertTrue(all(attr == self.empty_attr for attr in
(error.stderr, error.stdout, error.reason)))
self.assertEqual(error.description, self.empty_description)
@@ -996,7 +791,7 @@ class TestProcessExecutionError(helpers.TestCase):
stderr_msg = 'error error'
cmd = 'test command'
exit_code = 3
- error = util.ProcessExecutionError(
+ error = subp.ProcessExecutionError(
stdout=stdout_msg, stderr=stderr_msg, exit_code=3, cmd=cmd)
self.assertEqual(str(error), self.template.format(
description=self.empty_description, stdout=stdout_msg,
@@ -1007,7 +802,7 @@ class TestProcessExecutionError(helpers.TestCase):
# make sure bytes is converted handled properly when formatting
stdout_msg = 'multi\nline\noutput message'.encode()
stderr_msg = 'multi\nline\nerror message\n\n\n'
- error = util.ProcessExecutionError(
+ error = subp.ProcessExecutionError(
stdout=stdout_msg, stderr=stderr_msg)
self.assertEqual(
str(error),
@@ -1172,4 +967,133 @@ class TestGetProcEnv(helpers.TestCase):
my_ppid = os.getppid()
self.assertEqual(my_ppid, util.get_proc_ppid(my_pid))
+
+class TestKernelVersion():
+ """test kernel version function"""
+
+ params = [
+ ('5.6.19-300.fc32.x86_64', (5, 6)),
+ ('4.15.0-101-generic', (4, 15)),
+ ('3.10.0-1062.12.1.vz7.131.10', (3, 10)),
+ ('4.18.0-144.el8.x86_64', (4, 18))]
+
+ @mock.patch('os.uname')
+ @pytest.mark.parametrize("uname_release,expected", params)
+ def test_kernel_version(self, m_uname, uname_release, expected):
+ m_uname.return_value.release = uname_release
+ assert expected == util.kernel_version()
+
+
+class TestFindDevs:
+ @mock.patch('cloudinit.subp.subp')
+ def test_find_devs_with(self, m_subp):
+ m_subp.return_value = (
+ '/dev/sda1: UUID="some-uuid" TYPE="ext4" PARTUUID="some-partid"',
+ ''
+ )
+ devlist = util.find_devs_with()
+ assert devlist == [
+ '/dev/sda1: UUID="some-uuid" TYPE="ext4" PARTUUID="some-partid"']
+
+ devlist = util.find_devs_with("LABEL_FATBOOT=A_LABEL")
+ assert devlist == [
+ '/dev/sda1: UUID="some-uuid" TYPE="ext4" PARTUUID="some-partid"']
+
+ @mock.patch('cloudinit.subp.subp')
+ def test_find_devs_with_openbsd(self, m_subp):
+ m_subp.return_value = (
+ 'cd0:,sd0:630d98d32b5d3759,sd1:,fd0:', ''
+ )
+ devlist = util.find_devs_with_openbsd()
+ assert devlist == ['/dev/cd0a', '/dev/sd1i']
+
+ @mock.patch('cloudinit.subp.subp')
+ def test_find_devs_with_openbsd_with_criteria(self, m_subp):
+ m_subp.return_value = (
+ 'cd0:,sd0:630d98d32b5d3759,sd1:,fd0:', ''
+ )
+ devlist = util.find_devs_with_openbsd(criteria="TYPE=iso9660")
+ assert devlist == ['/dev/cd0a']
+
+ # lp: #1841466
+ devlist = util.find_devs_with_openbsd(criteria="LABEL_FATBOOT=A_LABEL")
+ assert devlist == ['/dev/cd0a', '/dev/sd1i']
+
+ @pytest.mark.parametrize(
+ 'criteria,expected_devlist', (
+ (None, ['/dev/msdosfs/EFISYS', '/dev/iso9660/config-2']),
+ ('TYPE=iso9660', ['/dev/iso9660/config-2']),
+ ('TYPE=vfat', ['/dev/msdosfs/EFISYS']),
+ ('LABEL_FATBOOT=A_LABEL', []), # lp: #1841466
+ ),
+ )
+ @mock.patch('glob.glob')
+ def test_find_devs_with_freebsd(self, m_glob, criteria, expected_devlist):
+ def fake_glob(pattern):
+ msdos = ["/dev/msdosfs/EFISYS"]
+ iso9660 = ["/dev/iso9660/config-2"]
+ if pattern == "/dev/msdosfs/*":
+ return msdos
+ elif pattern == "/dev/iso9660/*":
+ return iso9660
+ raise Exception
+ m_glob.side_effect = fake_glob
+
+ devlist = util.find_devs_with_freebsd(criteria=criteria)
+ assert devlist == expected_devlist
+
+ @pytest.mark.parametrize(
+ 'criteria,expected_devlist', (
+ (None, ['/dev/ld0', '/dev/dk0', '/dev/dk1', '/dev/cd0']),
+ ('TYPE=iso9660', ['/dev/cd0']),
+ ('TYPE=vfat', ["/dev/ld0", "/dev/dk0", "/dev/dk1"]),
+ ('LABEL_FATBOOT=A_LABEL', # lp: #1841466
+ ['/dev/ld0', '/dev/dk0', '/dev/dk1', '/dev/cd0']),
+ )
+ )
+ @mock.patch("cloudinit.subp.subp")
+ def test_find_devs_with_netbsd(self, m_subp, criteria, expected_devlist):
+ side_effect_values = [
+ ("ld0 dk0 dk1 cd0", ""),
+ (
+ (
+ "mscdlabel: CDIOREADTOCHEADER: "
+ "Inappropriate ioctl for device\n"
+ "track (ctl=4) at sector 0\n"
+ "disklabel not written\n"
+ ),
+ "",
+ ),
+ (
+ (
+ "mscdlabel: CDIOREADTOCHEADER: "
+ "Inappropriate ioctl for device\n"
+ "track (ctl=4) at sector 0\n"
+ "disklabel not written\n"
+ ),
+ "",
+ ),
+ (
+ (
+ "mscdlabel: CDIOREADTOCHEADER: "
+ "Inappropriate ioctl for device\n"
+ "track (ctl=4) at sector 0\n"
+ "disklabel not written\n"
+ ),
+ "",
+ ),
+ (
+ (
+ "track (ctl=4) at sector 0\n"
+ 'ISO filesystem, label "config-2", '
+ "creation time: 2020/03/31 17:29\n"
+ "adding as 'a'\n"
+ ),
+ "",
+ ),
+ ]
+ m_subp.side_effect = side_effect_values
+ devlist = util.find_devs_with_netbsd(criteria=criteria)
+ assert devlist == expected_devlist
+
# vi: ts=4 expandtab
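
The new TestKernelVersion cases pin down what util.kernel_version() is expected to return: the first two numeric components of os.uname().release as a tuple. A tiny sketch of that parsing rule, checked against one of the parametrized release strings above:

release = '5.6.19-300.fc32.x86_64'

# Keep only the major and minor components of the uname release string.
major, minor = (int(part) for part in release.split('.')[:2])
assert (major, minor) == (5, 6)
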
diff --git a/tests/unittests/test_vmware/test_guestcust_util.py b/tests/unittests/test_vmware/test_guestcust_util.py
index b175a998..c8b59d83 100644
--- a/tests/unittests/test_vmware/test_guestcust_util.py
+++ b/tests/unittests/test_vmware/test_guestcust_util.py
@@ -5,9 +5,12 @@
#
# This file is part of cloud-init. See LICENSE file for license information.
-from cloudinit import util
+from cloudinit import subp
+from cloudinit.sources.helpers.vmware.imc.config import Config
+from cloudinit.sources.helpers.vmware.imc.config_file import ConfigFile
from cloudinit.sources.helpers.vmware.imc.guestcust_util import (
get_tools_config,
+ set_gc_status,
)
from cloudinit.tests.helpers import CiTestCase, mock
@@ -18,7 +21,7 @@ class TestGuestCustUtil(CiTestCase):
This test is designed to verify the behavior if vmware-toolbox-cmd
is not installed.
"""
- with mock.patch.object(util, 'which', return_value=None):
+ with mock.patch.object(subp, 'which', return_value=None):
self.assertEqual(
get_tools_config('section', 'key', 'defaultVal'), 'defaultVal')
@@ -27,10 +30,10 @@ class TestGuestCustUtil(CiTestCase):
This test is designed to verify the behavior if internal exception
is raised.
"""
- with mock.patch.object(util, 'which', return_value='/dummy/path'):
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'which', return_value='/dummy/path'):
+ with mock.patch.object(subp, 'subp',
return_value=('key=value', b''),
- side_effect=util.ProcessExecutionError(
+ side_effect=subp.ProcessExecutionError(
"subp failed", exit_code=99)):
# verify return value is 'defaultVal', not 'value'.
self.assertEqual(
@@ -42,31 +45,54 @@ class TestGuestCustUtil(CiTestCase):
This test is designed to verify the value could be parsed from
key = value of the given [section]
"""
- with mock.patch.object(util, 'which', return_value='/dummy/path'):
+ with mock.patch.object(subp, 'which', return_value='/dummy/path'):
# value is not blank
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('key = value ', b'')):
self.assertEqual(
get_tools_config('section', 'key', 'defaultVal'),
'value')
# value is blank
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('key = ', b'')):
self.assertEqual(
get_tools_config('section', 'key', 'defaultVal'),
'')
# value contains =
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('key=Bar=Wark', b'')):
self.assertEqual(
get_tools_config('section', 'key', 'defaultVal'),
'Bar=Wark')
# value contains specific characters
- with mock.patch.object(util, 'subp',
+ with mock.patch.object(subp, 'subp',
return_value=('[a] b.c_d=e-f', b'')):
self.assertEqual(
get_tools_config('section', 'key', 'defaultVal'),
'e-f')
+ def test_set_gc_status(self):
+ """
+ This test is designed to verify the behavior of set_gc_status
+ """
+ # config is None, return None
+ self.assertEqual(set_gc_status(None, 'Successful'), None)
+
+ # post gc status is NO, return None
+ cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg")
+ conf = Config(cf)
+ self.assertEqual(set_gc_status(conf, 'Successful'), None)
+
+ # post gc status is YES, subp is called to execute command
+ cf._insertKey("MISC|POST-GC-STATUS", "YES")
+ conf = Config(cf)
+ with mock.patch.object(subp, 'subp',
+ return_value=('ok', b'')) as mockobj:
+ self.assertEqual(
+ set_gc_status(conf, 'Successful'), ('ok', b''))
+ mockobj.assert_called_once_with(
+ ['vmware-rpctool', 'info-set guestinfo.gc.status Successful'],
+ rcs=[0])
+
# vi: ts=4 expandtab
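
The recurring pattern in this patch is patching cloudinit.subp.subp (and subp.which) where cloudinit.util used to be patched. A bare-bones sketch of that mock setup outside any test class, with the vmware-rpctool call shape copied from the set_gc_status assertion above:

from unittest import mock

from cloudinit import subp

with mock.patch.object(subp, 'subp', return_value=('ok', b'')) as m_subp:
    # Whatever runs inside the patch sees the mocked subp.subp.
    out, err = subp.subp(
        ['vmware-rpctool', 'info-set guestinfo.gc.status Successful'],
        rcs=[0])

assert (out, err) == ('ok', b'')
m_subp.assert_called_once_with(
    ['vmware-rpctool', 'info-set guestinfo.gc.status Successful'],
    rcs=[0])
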
diff --git a/tests/unittests/test_vmware_config_file.py b/tests/unittests/test_vmware_config_file.py
index 16343ed2..9c7d25fa 100644
--- a/tests/unittests/test_vmware_config_file.py
+++ b/tests/unittests/test_vmware_config_file.py
@@ -348,6 +348,28 @@ class TestVmwareConfigFile(CiTestCase):
conf = Config(cf)
self.assertEqual("test-script", conf.custom_script_name)
+ def test_post_gc_status(self):
+ cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg")
+ conf = Config(cf)
+ self.assertFalse(conf.post_gc_status)
+ cf._insertKey("MISC|POST-GC-STATUS", "YES")
+ conf = Config(cf)
+ self.assertTrue(conf.post_gc_status)
+
+ def test_no_default_run_post_script(self):
+ cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg")
+ conf = Config(cf)
+ self.assertFalse(conf.default_run_post_script)
+ cf._insertKey("MISC|DEFAULT-RUN-POST-CUST-SCRIPT", "NO")
+ conf = Config(cf)
+ self.assertFalse(conf.default_run_post_script)
+
+ def test_yes_default_run_post_script(self):
+ cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg")
+ cf._insertKey("MISC|DEFAULT-RUN-POST-CUST-SCRIPT", "yes")
+ conf = Config(cf)
+ self.assertTrue(conf.default_run_post_script)
+
class TestVmwareNetConfig(CiTestCase):
"""Test conversion of vmware config to cloud-init config."""