summaryrefslogtreecommitdiff
path: root/tests/unittests/config
diff options
context:
space:
mode:
Diffstat (limited to 'tests/unittests/config')
-rw-r--r--tests/unittests/config/__init__.py0
-rw-r--r--tests/unittests/config/test_apt_conf_v1.py129
-rw-r--r--tests/unittests/config/test_apt_configure_sources_list_v1.py181
-rw-r--r--tests/unittests/config/test_apt_configure_sources_list_v3.py226
-rw-r--r--tests/unittests/config/test_apt_key.py137
-rw-r--r--tests/unittests/config/test_apt_source_v1.py651
-rw-r--r--tests/unittests/config/test_apt_source_v3.py1170
-rw-r--r--tests/unittests/config/test_cc_apk_configure.py299
-rw-r--r--tests/unittests/config/test_cc_apt_pipelining.py28
-rw-r--r--tests/unittests/config/test_cc_bootcmd.py152
-rw-r--r--tests/unittests/config/test_cc_ca_certs.py361
-rw-r--r--tests/unittests/config/test_cc_chef.py271
-rw-r--r--tests/unittests/config/test_cc_debug.py59
-rw-r--r--tests/unittests/config/test_cc_disable_ec2_metadata.py48
-rw-r--r--tests/unittests/config/test_cc_disk_setup.py243
-rw-r--r--tests/unittests/config/test_cc_final_message.py46
-rw-r--r--tests/unittests/config/test_cc_growpart.py309
-rw-r--r--tests/unittests/config/test_cc_grub_dpkg.py176
-rw-r--r--tests/unittests/config/test_cc_install_hotplug.py113
-rw-r--r--tests/unittests/config/test_cc_keys_to_console.py34
-rw-r--r--tests/unittests/config/test_cc_landscape.py126
-rw-r--r--tests/unittests/config/test_cc_locale.py116
-rw-r--r--tests/unittests/config/test_cc_lxd.py222
-rw-r--r--tests/unittests/config/test_cc_mcollective.py146
-rw-r--r--tests/unittests/config/test_cc_mounts.py461
-rw-r--r--tests/unittests/config/test_cc_ntp.py765
-rw-r--r--tests/unittests/config/test_cc_power_state_change.py159
-rw-r--r--tests/unittests/config/test_cc_puppet.py380
-rw-r--r--tests/unittests/config/test_cc_refresh_rmc_and_interface.py109
-rw-r--r--tests/unittests/config/test_cc_resizefs.py398
-rw-r--r--tests/unittests/config/test_cc_resolv_conf.py193
-rw-r--r--tests/unittests/config/test_cc_rh_subscription.py234
-rw-r--r--tests/unittests/config/test_cc_rsyslog.py178
-rw-r--r--tests/unittests/config/test_cc_runcmd.py129
-rw-r--r--tests/unittests/config/test_cc_seed_random.py205
-rw-r--r--tests/unittests/config/test_cc_set_hostname.py207
-rw-r--r--tests/unittests/config/test_cc_set_passwords.py162
-rw-r--r--tests/unittests/config/test_cc_snap.py564
-rw-r--r--tests/unittests/config/test_cc_spacewalk.py42
-rw-r--r--tests/unittests/config/test_cc_ssh.py405
-rw-r--r--tests/unittests/config/test_cc_timezone.py54
-rw-r--r--tests/unittests/config/test_cc_ubuntu_advantage.py333
-rw-r--r--tests/unittests/config/test_cc_ubuntu_drivers.py244
-rw-r--r--tests/unittests/config/test_cc_update_etc_hosts.py70
-rw-r--r--tests/unittests/config/test_cc_users_groups.py172
-rw-r--r--tests/unittests/config/test_cc_write_files.py246
-rw-r--r--tests/unittests/config/test_cc_write_files_deferred.py77
-rw-r--r--tests/unittests/config/test_cc_yum_add_repo.py111
-rw-r--r--tests/unittests/config/test_cc_zypper_add_repo.py231
-rw-r--r--tests/unittests/config/test_schema.py515
50 files changed, 11887 insertions, 0 deletions
diff --git a/tests/unittests/config/__init__.py b/tests/unittests/config/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unittests/config/__init__.py
diff --git a/tests/unittests/config/test_apt_conf_v1.py b/tests/unittests/config/test_apt_conf_v1.py
new file mode 100644
index 00000000..98d99945
--- /dev/null
+++ b/tests/unittests/config/test_apt_conf_v1.py
@@ -0,0 +1,129 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config import cc_apt_configure
+from cloudinit import util
+
+from tests.unittests.helpers import TestCase
+
+import copy
+import os
+import re
+import shutil
+import tempfile
+
+
+class TestAptProxyConfig(TestCase):
+ def setUp(self):
+ super(TestAptProxyConfig, self).setUp()
+ self.tmp = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tmp)
+ self.pfile = os.path.join(self.tmp, "proxy.cfg")
+ self.cfile = os.path.join(self.tmp, "config.cfg")
+
+ def _search_apt_config(self, contents, ptype, value):
+ return re.search(
+ r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+ contents, flags=re.IGNORECASE)
+
+ def test_apt_proxy_written(self):
+ cfg = {'proxy': 'myproxy'}
+ cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+ self.assertTrue(os.path.isfile(self.pfile))
+ self.assertFalse(os.path.isfile(self.cfile))
+
+ contents = util.load_file(self.pfile)
+ self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
+
+ def test_apt_http_proxy_written(self):
+ cfg = {'http_proxy': 'myproxy'}
+ cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+ self.assertTrue(os.path.isfile(self.pfile))
+ self.assertFalse(os.path.isfile(self.cfile))
+
+ contents = util.load_file(self.pfile)
+ self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
+
+ def test_apt_all_proxy_written(self):
+ cfg = {'http_proxy': 'myproxy_http_proxy',
+ 'https_proxy': 'myproxy_https_proxy',
+ 'ftp_proxy': 'myproxy_ftp_proxy'}
+
+ values = {'http': cfg['http_proxy'],
+ 'https': cfg['https_proxy'],
+ 'ftp': cfg['ftp_proxy'],
+ }
+
+ cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+ self.assertTrue(os.path.isfile(self.pfile))
+ self.assertFalse(os.path.isfile(self.cfile))
+
+ contents = util.load_file(self.pfile)
+
+ for ptype, pval in values.items():
+ self.assertTrue(self._search_apt_config(contents, ptype, pval))
+
+ def test_proxy_deleted(self):
+ util.write_file(self.cfile, "content doesnt matter")
+ cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
+ self.assertFalse(os.path.isfile(self.pfile))
+ self.assertFalse(os.path.isfile(self.cfile))
+
+ def test_proxy_replaced(self):
+ util.write_file(self.cfile, "content doesnt matter")
+ cc_apt_configure.apply_apt_config({'proxy': "foo"},
+ self.pfile, self.cfile)
+ self.assertTrue(os.path.isfile(self.pfile))
+ contents = util.load_file(self.pfile)
+ self.assertTrue(self._search_apt_config(contents, "http", "foo"))
+
+ def test_config_written(self):
+ payload = 'this is my apt config'
+ cfg = {'conf': payload}
+
+ cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+ self.assertTrue(os.path.isfile(self.cfile))
+ self.assertFalse(os.path.isfile(self.pfile))
+
+ self.assertEqual(util.load_file(self.cfile), payload)
+
+ def test_config_replaced(self):
+ util.write_file(self.pfile, "content doesnt matter")
+ cc_apt_configure.apply_apt_config({'conf': "foo"},
+ self.pfile, self.cfile)
+ self.assertTrue(os.path.isfile(self.cfile))
+ self.assertEqual(util.load_file(self.cfile), "foo")
+
+ def test_config_deleted(self):
+ # if no 'conf' is provided, delete any previously written file
+ util.write_file(self.pfile, "content doesnt matter")
+ cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
+ self.assertFalse(os.path.isfile(self.pfile))
+ self.assertFalse(os.path.isfile(self.cfile))
+
+
+class TestConversion(TestCase):
+ def test_convert_with_apt_mirror_as_empty_string(self):
+ # an empty apt_mirror is the same as no apt_mirror
+ empty_m_found = cc_apt_configure.convert_to_v3_apt_format(
+ {'apt_mirror': ''})
+ default_found = cc_apt_configure.convert_to_v3_apt_format({})
+ self.assertEqual(default_found, empty_m_found)
+
+ def test_convert_with_apt_mirror(self):
+ mirror = 'http://my.mirror/ubuntu'
+ f = cc_apt_configure.convert_to_v3_apt_format({'apt_mirror': mirror})
+ self.assertIn(mirror, set(m['uri'] for m in f['apt']['primary']))
+
+ def test_no_old_content(self):
+ mirror = 'http://my.mirror/ubuntu'
+ mydata = {'apt': {'primary': {'arches': ['default'], 'uri': mirror}}}
+ expected = copy.deepcopy(mydata)
+ self.assertEqual(expected,
+ cc_apt_configure.convert_to_v3_apt_format(mydata))
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_apt_configure_sources_list_v1.py b/tests/unittests/config/test_apt_configure_sources_list_v1.py
new file mode 100644
index 00000000..4aeaea24
--- /dev/null
+++ b/tests/unittests/config/test_apt_configure_sources_list_v1.py
@@ -0,0 +1,181 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+""" test_handler_apt_configure_sources_list
+Test templating of sources list
+"""
+import logging
+import os
+import shutil
+import tempfile
+from unittest import mock
+
+from cloudinit import templater
+from cloudinit import subp
+from cloudinit import util
+
+from cloudinit.config import cc_apt_configure
+
+from cloudinit.distros.debian import Distro
+
+from tests.unittests import helpers as t_help
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+YAML_TEXT_CUSTOM_SL = """
+apt_mirror: http://archive.ubuntu.com/ubuntu/
+apt_custom_sources_list: |
+ ## template:jinja
+ ## Note, this file is written by cloud-init on first boot of an instance
+ ## modifications made here will not survive a re-bundle.
+ ## if you wish to make changes you can:
+ ## a.) add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg
+ ## or do the same in user-data
+ ## b.) add sources in /etc/apt/sources.list.d
+ ## c.) make changes to template file /etc/cloud/templates/sources.list.tmpl
+
+ # See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
+ # newer versions of the distribution.
+ deb {{mirror}} {{codename}} main restricted
+ deb-src {{mirror}} {{codename}} main restricted
+ # FIND_SOMETHING_SPECIAL
+"""
+
+EXPECTED_CONVERTED_CONTENT = (
+ """## Note, this file is written by cloud-init on first boot of an instance
+## modifications made here will not survive a re-bundle.
+## if you wish to make changes you can:
+## a.) add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg
+## or do the same in user-data
+## b.) add sources in /etc/apt/sources.list.d
+## c.) make changes to template file /etc/cloud/templates/sources.list.tmpl
+
+# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
+# newer versions of the distribution.
+deb http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
+deb-src http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
+# FIND_SOMETHING_SPECIAL
+""")
+
+
+class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
+ """TestAptSourceConfigSourceList
+ Main Class to test sources list rendering
+ """
+ def setUp(self):
+ super(TestAptSourceConfigSourceList, self).setUp()
+ self.subp = subp.subp
+ self.new_root = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.new_root)
+
+ rpatcher = mock.patch("cloudinit.util.lsb_release")
+ get_rel = rpatcher.start()
+ get_rel.return_value = {'codename': "fakerelease"}
+ self.addCleanup(rpatcher.stop)
+ apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
+ get_arch = apatcher.start()
+ get_arch.return_value = 'amd64'
+ self.addCleanup(apatcher.stop)
+
+ def apt_source_list(self, distro, mirror, mirrorcheck=None):
+ """apt_source_list
+ Test rendering of a source.list from template for a given distro
+ """
+ if mirrorcheck is None:
+ mirrorcheck = mirror
+
+ if isinstance(mirror, list):
+ cfg = {'apt_mirror_search': mirror}
+ else:
+ cfg = {'apt_mirror': mirror}
+
+ mycloud = get_cloud(distro)
+
+ with mock.patch.object(util, 'write_file') as mockwf:
+ with mock.patch.object(util, 'load_file',
+ return_value="faketmpl") as mocklf:
+ with mock.patch.object(os.path, 'isfile',
+ return_value=True) as mockisfile:
+ with mock.patch.object(
+ templater, 'render_string',
+ return_value='fake') as mockrnd:
+ with mock.patch.object(util, 'rename'):
+ cc_apt_configure.handle("test", cfg, mycloud,
+ LOG, None)
+
+ mockisfile.assert_any_call(
+ ('/etc/cloud/templates/sources.list.%s.tmpl' % distro))
+ mocklf.assert_any_call(
+ ('/etc/cloud/templates/sources.list.%s.tmpl' % distro))
+ mockrnd.assert_called_once_with('faketmpl',
+ {'RELEASE': 'fakerelease',
+ 'PRIMARY': mirrorcheck,
+ 'MIRROR': mirrorcheck,
+ 'SECURITY': mirrorcheck,
+ 'codename': 'fakerelease',
+ 'primary': mirrorcheck,
+ 'mirror': mirrorcheck,
+ 'security': mirrorcheck})
+ mockwf.assert_called_once_with('/etc/apt/sources.list', 'fake',
+ mode=0o644)
+
+ def test_apt_v1_source_list_debian(self):
+ """Test rendering of a source.list from template for debian"""
+ self.apt_source_list('debian', 'http://httpredir.debian.org/debian')
+
+ def test_apt_v1_source_list_ubuntu(self):
+ """Test rendering of a source.list from template for ubuntu"""
+ self.apt_source_list('ubuntu', 'http://archive.ubuntu.com/ubuntu/')
+
+ @staticmethod
+ def myresolve(name):
+ """Fake util.is_resolvable for mirrorfail tests"""
+ if name == "does.not.exist":
+ print("Faking FAIL for '%s'" % name)
+ return False
+ else:
+ print("Faking SUCCESS for '%s'" % name)
+ return True
+
+ def test_apt_v1_srcl_debian_mirrorfail(self):
+ """Test rendering of a source.list from template for debian"""
+ with mock.patch.object(util, 'is_resolvable',
+ side_effect=self.myresolve) as mockresolve:
+ self.apt_source_list('debian',
+ ['http://does.not.exist',
+ 'http://httpredir.debian.org/debian'],
+ 'http://httpredir.debian.org/debian')
+ mockresolve.assert_any_call("does.not.exist")
+ mockresolve.assert_any_call("httpredir.debian.org")
+
+ def test_apt_v1_srcl_ubuntu_mirrorfail(self):
+ """Test rendering of a source.list from template for ubuntu"""
+ with mock.patch.object(util, 'is_resolvable',
+ side_effect=self.myresolve) as mockresolve:
+ self.apt_source_list('ubuntu',
+ ['http://does.not.exist',
+ 'http://archive.ubuntu.com/ubuntu/'],
+ 'http://archive.ubuntu.com/ubuntu/')
+ mockresolve.assert_any_call("does.not.exist")
+ mockresolve.assert_any_call("archive.ubuntu.com")
+
+ def test_apt_v1_srcl_custom(self):
+ """Test rendering from a custom source.list template"""
+ cfg = util.load_yaml(YAML_TEXT_CUSTOM_SL)
+ mycloud = get_cloud()
+
+ # the second mock restores the original subp
+ with mock.patch.object(util, 'write_file') as mockwrite:
+ with mock.patch.object(subp, 'subp', self.subp):
+ with mock.patch.object(Distro, 'get_primary_arch',
+ return_value='amd64'):
+ cc_apt_configure.handle("notimportant", cfg, mycloud,
+ LOG, None)
+
+ mockwrite.assert_called_once_with(
+ '/etc/apt/sources.list',
+ EXPECTED_CONVERTED_CONTENT,
+ mode=420)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_apt_configure_sources_list_v3.py b/tests/unittests/config/test_apt_configure_sources_list_v3.py
new file mode 100644
index 00000000..a8087bd1
--- /dev/null
+++ b/tests/unittests/config/test_apt_configure_sources_list_v3.py
@@ -0,0 +1,226 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+""" test_apt_custom_sources_list
+Test templating of custom sources list
+"""
+from contextlib import ExitStack
+import logging
+import os
+import shutil
+import tempfile
+from unittest import mock
+from unittest.mock import call
+
+from cloudinit import subp
+from cloudinit import util
+from cloudinit.config import cc_apt_configure
+from cloudinit.distros.debian import Distro
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+TARGET = "/"
+
+# Input and expected output for the custom template
+YAML_TEXT_CUSTOM_SL = """
+apt:
+ primary:
+ - arches: [default]
+ uri: http://test.ubuntu.com/ubuntu/
+ security:
+ - arches: [default]
+ uri: http://testsec.ubuntu.com/ubuntu/
+ sources_list: |
+
+ # Note, this file is written by cloud-init at install time. It should not
+ # end up on the installed system itself.
+ # See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
+ # newer versions of the distribution.
+ deb $MIRROR $RELEASE main restricted
+ deb-src $MIRROR $RELEASE main restricted
+ deb $PRIMARY $RELEASE universe restricted
+ deb $SECURITY $RELEASE-security multiverse
+ # FIND_SOMETHING_SPECIAL
+"""
+
+EXPECTED_CONVERTED_CONTENT = """
+# Note, this file is written by cloud-init at install time. It should not
+# end up on the installed system itself.
+# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
+# newer versions of the distribution.
+deb http://test.ubuntu.com/ubuntu/ fakerel main restricted
+deb-src http://test.ubuntu.com/ubuntu/ fakerel main restricted
+deb http://test.ubuntu.com/ubuntu/ fakerel universe restricted
+deb http://testsec.ubuntu.com/ubuntu/ fakerel-security multiverse
+# FIND_SOMETHING_SPECIAL
+"""
+
+# mocked to be independent to the unittest system
+MOCKED_APT_SRC_LIST = """
+deb http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
+deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
+"""
+
+EXPECTED_BASE_CONTENT = ("""
+deb http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
+deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
+""")
+
+EXPECTED_MIRROR_CONTENT = ("""
+deb http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
+deb http://test.ubuntu.com/ubuntu/ notouched-security main restricted
+""")
+
+EXPECTED_PRIMSEC_CONTENT = ("""
+deb http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
+deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
+deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
+""")
+
+
+class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
+ """TestAptSourceConfigSourceList - Class to test sources list rendering"""
+ def setUp(self):
+ super(TestAptSourceConfigSourceList, self).setUp()
+ self.subp = subp.subp
+ self.new_root = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.new_root)
+
+ rpatcher = mock.patch("cloudinit.util.lsb_release")
+ get_rel = rpatcher.start()
+ get_rel.return_value = {'codename': "fakerel"}
+ self.addCleanup(rpatcher.stop)
+ apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
+ get_arch = apatcher.start()
+ get_arch.return_value = 'amd64'
+ self.addCleanup(apatcher.stop)
+
+ def _apt_source_list(self, distro, cfg, cfg_on_empty=False):
+ """_apt_source_list - Test rendering from template (generic)"""
+ # entry at top level now, wrap in 'apt' key
+ cfg = {'apt': cfg}
+ mycloud = get_cloud(distro)
+
+ with ExitStack() as stack:
+ mock_writefile = stack.enter_context(mock.patch.object(
+ util, 'write_file'))
+ mock_loadfile = stack.enter_context(mock.patch.object(
+ util, 'load_file', return_value=MOCKED_APT_SRC_LIST))
+ mock_isfile = stack.enter_context(mock.patch.object(
+ os.path, 'isfile', return_value=True))
+ stack.enter_context(mock.patch.object(
+ util, 'del_file'))
+ cfg_func = ('cloudinit.config.cc_apt_configure.'
+ '_should_configure_on_empty_apt')
+ mock_shouldcfg = stack.enter_context(mock.patch(
+ cfg_func, return_value=(cfg_on_empty, 'test')
+ ))
+ cc_apt_configure.handle("test", cfg, mycloud, LOG, None)
+
+ return mock_writefile, mock_loadfile, mock_isfile, mock_shouldcfg
+
+ def test_apt_v3_source_list_debian(self):
+ """test_apt_v3_source_list_debian - without custom sources or parms"""
+ cfg = {}
+ distro = 'debian'
+ expected = EXPECTED_BASE_CONTENT
+
+ mock_writefile, mock_load_file, mock_isfile, mock_shouldcfg = (
+ self._apt_source_list(distro, cfg, cfg_on_empty=True))
+
+ template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
+ mock_writefile.assert_called_once_with('/etc/apt/sources.list',
+ expected, mode=0o644)
+ mock_load_file.assert_called_with(template)
+ mock_isfile.assert_any_call(template)
+ self.assertEqual(1, mock_shouldcfg.call_count)
+
+ def test_apt_v3_source_list_ubuntu(self):
+ """test_apt_v3_source_list_ubuntu - without custom sources or parms"""
+ cfg = {}
+ distro = 'ubuntu'
+ expected = EXPECTED_BASE_CONTENT
+
+ mock_writefile, mock_load_file, mock_isfile, mock_shouldcfg = (
+ self._apt_source_list(distro, cfg, cfg_on_empty=True))
+
+ template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
+ mock_writefile.assert_called_once_with('/etc/apt/sources.list',
+ expected, mode=0o644)
+ mock_load_file.assert_called_with(template)
+ mock_isfile.assert_any_call(template)
+ self.assertEqual(1, mock_shouldcfg.call_count)
+
+ def test_apt_v3_source_list_ubuntu_snappy(self):
+ """test_apt_v3_source_list_ubuntu_snappy - without custom sources or
+ parms"""
+ cfg = {'apt': {}}
+ mycloud = get_cloud()
+
+ with mock.patch.object(util, 'write_file') as mock_writefile:
+ with mock.patch.object(util, 'system_is_snappy',
+ return_value=True) as mock_issnappy:
+ cc_apt_configure.handle("test", cfg, mycloud, LOG, None)
+
+ self.assertEqual(0, mock_writefile.call_count)
+ self.assertEqual(1, mock_issnappy.call_count)
+
+ def test_apt_v3_source_list_centos(self):
+ """test_apt_v3_source_list_centos - without custom sources or parms"""
+ cfg = {}
+ distro = 'rhel'
+
+ mock_writefile, _, _, _ = self._apt_source_list(distro, cfg)
+
+ self.assertEqual(0, mock_writefile.call_count)
+
+ def test_apt_v3_source_list_psm(self):
+ """test_apt_v3_source_list_psm - Test specifying prim+sec mirrors"""
+ pm = 'http://test.ubuntu.com/ubuntu/'
+ sm = 'http://testsec.ubuntu.com/ubuntu/'
+ cfg = {'preserve_sources_list': False,
+ 'primary': [{'arches': ["default"],
+ 'uri': pm}],
+ 'security': [{'arches': ["default"],
+ 'uri': sm}]}
+ distro = 'ubuntu'
+ expected = EXPECTED_PRIMSEC_CONTENT
+
+ mock_writefile, mock_load_file, mock_isfile, _ = (
+ self._apt_source_list(distro, cfg, cfg_on_empty=True))
+
+ template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
+ mock_writefile.assert_called_once_with('/etc/apt/sources.list',
+ expected, mode=0o644)
+ mock_load_file.assert_called_with(template)
+ mock_isfile.assert_any_call(template)
+
+ def test_apt_v3_srcl_custom(self):
+ """test_apt_v3_srcl_custom - Test rendering a custom source template"""
+ cfg = util.load_yaml(YAML_TEXT_CUSTOM_SL)
+ mycloud = get_cloud()
+
+ # the second mock restores the original subp
+ with mock.patch.object(util, 'write_file') as mockwrite:
+ with mock.patch.object(subp, 'subp', self.subp):
+ with mock.patch.object(Distro, 'get_primary_arch',
+ return_value='amd64'):
+ cc_apt_configure.handle("notimportant", cfg, mycloud,
+ LOG, None)
+
+ calls = [call('/etc/apt/sources.list',
+ EXPECTED_CONVERTED_CONTENT,
+ mode=0o644)]
+ mockwrite.assert_has_calls(calls)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_apt_key.py b/tests/unittests/config/test_apt_key.py
new file mode 100644
index 00000000..00e5a38d
--- /dev/null
+++ b/tests/unittests/config/test_apt_key.py
@@ -0,0 +1,137 @@
+import os
+from unittest import mock
+
+from cloudinit.config import cc_apt_configure
+from cloudinit import subp
+from cloudinit import util
+
+TEST_KEY_HUMAN = '''
+/etc/apt/cloud-init.gpg.d/my_key.gpg
+--------------------------------------------
+pub rsa4096 2021-10-22 [SC]
+ 3A3E F34D FDED B3B7 F3FD F603 F83F 7712 9A5E BD85
+uid [ unknown] Brett Holman <brett.holman@canonical.com>
+sub rsa4096 2021-10-22 [A]
+sub rsa4096 2021-10-22 [E]
+'''
+
+TEST_KEY_MACHINE = '''
+tru::1:1635129362:0:3:1:5
+pub:-:4096:1:F83F77129A5EBD85:1634912922:::-:::scESCA::::::23::0:
+fpr:::::::::3A3EF34DFDEDB3B7F3FDF603F83F77129A5EBD85:
+uid:-::::1634912922::64F1F1D6FA96316752D635D7C6406C52C40713C7::Brett Holman \
+<brett.holman@canonical.com>::::::::::0:
+sub:-:4096:1:544B39C9A9141F04:1634912922::::::a::::::23:
+fpr:::::::::8BD901490D6EC986D03D6F0D544B39C9A9141F04:
+sub:-:4096:1:F45D9443F0A87092:1634912922::::::e::::::23:
+fpr:::::::::8CCCB332317324F030A45B19F45D9443F0A87092:
+'''
+
+TEST_KEY_FINGERPRINT_HUMAN = \
+ '3A3E F34D FDED B3B7 F3FD F603 F83F 7712 9A5E BD85'
+
+TEST_KEY_FINGERPRINT_MACHINE = \
+ '3A3EF34DFDEDB3B7F3FDF603F83F77129A5EBD85'
+
+
+class TestAptKey:
+ """TestAptKey
+ Class to test apt-key commands
+ """
+ @mock.patch.object(subp, 'subp', return_value=('fakekey', ''))
+ @mock.patch.object(util, 'write_file')
+ def _apt_key_add_success_helper(self, directory, *args, hardened=False):
+ file = cc_apt_configure.apt_key(
+ 'add',
+ output_file='my-key',
+ data='fakekey',
+ hardened=hardened)
+ assert file == directory + '/my-key.gpg'
+
+ def test_apt_key_add_success(self):
+ """Verify the correct directory path gets returned for unhardened case
+ """
+ self._apt_key_add_success_helper('/etc/apt/trusted.gpg.d')
+
+ def test_apt_key_add_success_hardened(self):
+ """Verify the correct directory path gets returned for hardened case
+ """
+ self._apt_key_add_success_helper(
+ '/etc/apt/cloud-init.gpg.d',
+ hardened=True)
+
+ def test_apt_key_add_fail_no_file_name(self):
+ """Verify that null filename gets handled correctly
+ """
+ file = cc_apt_configure.apt_key(
+ 'add',
+ output_file=None,
+ data='')
+ assert '/dev/null' == file
+
+ def _apt_key_fail_helper(self):
+ file = cc_apt_configure.apt_key(
+ 'add',
+ output_file='my-key',
+ data='fakekey')
+ assert file == '/dev/null'
+
+ @mock.patch.object(subp, 'subp', side_effect=subp.ProcessExecutionError)
+ def test_apt_key_add_fail_no_file_name_subproc(self, *args):
+ """Verify that bad key value gets handled correctly
+ """
+ self._apt_key_fail_helper()
+
+ @mock.patch.object(
+ subp, 'subp', side_effect=UnicodeDecodeError('test', b'', 1, 1, ''))
+ def test_apt_key_add_fail_no_file_name_unicode(self, *args):
+ """Verify that bad key encoding gets handled correctly
+ """
+ self._apt_key_fail_helper()
+
+ def _apt_key_list_success_helper(self, finger, key, human_output=True):
+ @mock.patch.object(os, 'listdir', return_value=('/fake/dir/key.gpg',))
+ @mock.patch.object(subp, 'subp', return_value=(key, ''))
+ def mocked_list(*a):
+
+ keys = cc_apt_configure.apt_key('list', human_output)
+ assert finger in keys
+ mocked_list()
+
+ def test_apt_key_list_success_human(self):
+ """Verify expected key output, human
+ """
+ self._apt_key_list_success_helper(
+ TEST_KEY_FINGERPRINT_HUMAN,
+ TEST_KEY_HUMAN)
+
+ def test_apt_key_list_success_machine(self):
+ """Verify expected key output, machine
+ """
+ self._apt_key_list_success_helper(
+ TEST_KEY_FINGERPRINT_MACHINE,
+ TEST_KEY_MACHINE, human_output=False)
+
+ @mock.patch.object(os, 'listdir', return_value=())
+ @mock.patch.object(subp, 'subp', return_value=('', ''))
+ def test_apt_key_list_fail_no_keys(self, *args):
+ """Ensure falsy output for no keys
+ """
+ keys = cc_apt_configure.apt_key('list')
+ assert not keys
+
+ @mock.patch.object(os, 'listdir', return_value=('file_not_gpg_key.txt'))
+ @mock.patch.object(subp, 'subp', return_value=('', ''))
+ def test_apt_key_list_fail_no_keys_file(self, *args):
+ """Ensure non-gpg file is not returned.
+
+ apt-key used file extensions for this, so we do too
+ """
+ assert not cc_apt_configure.apt_key('list')
+
+ @mock.patch.object(subp, 'subp', side_effect=subp.ProcessExecutionError)
+ @mock.patch.object(os, 'listdir', return_value=('bad_gpg_key.gpg'))
+ def test_apt_key_list_fail_bad_key_file(self, *args):
+        """Ensure bad gpg key doesn't throw exception.
+ """
+ assert not cc_apt_configure.apt_key('list')
diff --git a/tests/unittests/config/test_apt_source_v1.py b/tests/unittests/config/test_apt_source_v1.py
new file mode 100644
index 00000000..684c2495
--- /dev/null
+++ b/tests/unittests/config/test_apt_source_v1.py
@@ -0,0 +1,651 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+""" test_handler_apt_source_v1
+Testing various config variations of the apt_source config
+This calls all things with v1 format to stress the conversion code on top of
+the actually tested code.
+"""
+import os
+import re
+import shutil
+import tempfile
+import pathlib
+from unittest import mock
+from unittest.mock import call
+
+from cloudinit.config import cc_apt_configure
+from cloudinit import gpg
+from cloudinit import subp
+from cloudinit import util
+
+from tests.unittests.helpers import TestCase
+
+EXPECTEDKEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mI0ESuZLUgEEAKkqq3idtFP7g9hzOu1a8+v8ImawQN4TrvlygfScMU1TIS1eC7UQ
+NUA8Qqgr9iUaGnejb0VciqftLrU9D6WYHSKz+EITefgdyJ6SoQxjoJdsCpJ7o9Jy
+8PQnpRttiFm4qHu6BVnKnBNxw/z3ST9YMqW5kbMQpfxbGe+obRox59NpABEBAAG0
+HUxhdW5jaHBhZCBQUEEgZm9yIFNjb3R0IE1vc2VyiLYEEwECACAFAkrmS1ICGwMG
+CwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRAGILvPA2g/d3aEA/9tVjc10HOZwV29
+OatVuTeERjjrIbxflO586GLA8cp0C9RQCwgod/R+cKYdQcHjbqVcP0HqxveLg0RZ
+FJpWLmWKamwkABErwQLGlM/Hwhjfade8VvEQutH5/0JgKHmzRsoqfR+LMO6OS+Sm
+S0ORP6HXET3+jC8BMG4tBWCTK/XEZw==
+=ACB2
+-----END PGP PUBLIC KEY BLOCK-----"""
+
+ADD_APT_REPO_MATCH = r"^[\w-]+:\w"
+
+
+class FakeDistro(object):
+ """Fake Distro helper object"""
+ def update_package_sources(self):
+ """Fake update_package_sources helper method"""
+ return
+
+
+class FakeDatasource:
+ """Fake Datasource helper object"""
+ def __init__(self):
+ self.region = 'region'
+
+
+class FakeCloud(object):
+ """Fake Cloud helper object"""
+ def __init__(self):
+ self.distro = FakeDistro()
+ self.datasource = FakeDatasource()
+
+
+class TestAptSourceConfig(TestCase):
+ """TestAptSourceConfig
+ Main Class to test apt_source configs
+ """
+ release = "fantastic"
+
+ def setUp(self):
+ super(TestAptSourceConfig, self).setUp()
+ self.tmp = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tmp)
+ self.aptlistfile = os.path.join(self.tmp, "single-deb.list")
+ self.aptlistfile2 = os.path.join(self.tmp, "single-deb2.list")
+ self.aptlistfile3 = os.path.join(self.tmp, "single-deb3.list")
+ self.join = os.path.join
+ self.matcher = re.compile(ADD_APT_REPO_MATCH).search
+ # mock fallback filename into writable tmp dir
+ self.fallbackfn = os.path.join(self.tmp, "etc/apt/sources.list.d/",
+ "cloud_config_sources.list")
+
+ self.fakecloud = FakeCloud()
+
+ rpatcher = mock.patch("cloudinit.util.lsb_release")
+ get_rel = rpatcher.start()
+ get_rel.return_value = {'codename': self.release}
+ self.addCleanup(rpatcher.stop)
+ apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
+ get_arch = apatcher.start()
+ get_arch.return_value = 'amd64'
+ self.addCleanup(apatcher.stop)
+
+ def _get_default_params(self):
+ """get_default_params
+        Get the most basic default mirror and release info to be used in tests
+ """
+ params = {}
+ params['RELEASE'] = self.release
+ params['MIRROR'] = "http://archive.ubuntu.com/ubuntu"
+ return params
+
+ def wrapv1conf(self, cfg):
+ params = self._get_default_params()
+        # old v1 list format under old keys, but callable to main handler
+ # disable source.list rendering and set mirror to avoid other code
+ return {'apt_preserve_sources_list': True,
+ 'apt_mirror': params['MIRROR'],
+ 'apt_sources': cfg}
+
+ def myjoin(self, *args, **kwargs):
+ """myjoin - redir into writable tmpdir"""
+ if (args[0] == "/etc/apt/sources.list.d/" and
+ args[1] == "cloud_config_sources.list" and
+ len(args) == 2):
+ return self.join(self.tmp, args[0].lstrip("/"), args[1])
+ else:
+ return self.join(*args, **kwargs)
+
+ def apt_src_basic(self, filename, cfg):
+ """apt_src_basic
+ Test Fix deb source string, has to overwrite mirror conf in params
+ """
+ cfg = self.wrapv1conf(cfg)
+
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://archive.ubuntu.com/ubuntu",
+ "karmic-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_basic(self):
+ """Test deb source string, overwrite mirror and filename"""
+ cfg = {'source': ('deb http://archive.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted'),
+ 'filename': self.aptlistfile}
+ self.apt_src_basic(self.aptlistfile, [cfg])
+
+ def test_apt_src_basic_dict(self):
+ """Test deb source string, overwrite mirror and filename (dict)"""
+ cfg = {self.aptlistfile: {'source':
+ ('deb http://archive.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted')}}
+ self.apt_src_basic(self.aptlistfile, cfg)
+
+ def apt_src_basic_tri(self, cfg):
+ """apt_src_basic_tri
+ Test Fix three deb source string, has to overwrite mirror conf in
+ params. Test with filenames provided in config.
+ generic part to check three files with different content
+ """
+ self.apt_src_basic(self.aptlistfile, cfg)
+
+ # extra verify on two extra files of this test
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://archive.ubuntu.com/ubuntu",
+ "precise-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://archive.ubuntu.com/ubuntu",
+ "lucid-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_basic_tri(self):
+ """Test Fix three deb source string with filenames"""
+ cfg1 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted'),
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
+ ' precise-backports'
+ ' main universe multiverse restricted'),
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
+ ' lucid-backports'
+ ' main universe multiverse restricted'),
+ 'filename': self.aptlistfile3}
+ self.apt_src_basic_tri([cfg1, cfg2, cfg3])
+
+ def test_apt_src_basic_dict_tri(self):
+ """Test Fix three deb source string with filenames (dict)"""
+ cfg = {self.aptlistfile: {'source':
+ ('deb http://archive.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted')},
+ self.aptlistfile2: {'source':
+ ('deb http://archive.ubuntu.com/ubuntu'
+ ' precise-backports'
+ ' main universe multiverse restricted')},
+ self.aptlistfile3: {'source':
+ ('deb http://archive.ubuntu.com/ubuntu'
+ ' lucid-backports'
+ ' main universe multiverse restricted')}}
+ self.apt_src_basic_tri(cfg)
+
+ def test_apt_src_basic_nofn(self):
+ """Test Fix three deb source string without filenames (dict)"""
+ cfg = {'source': ('deb http://archive.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted')}
+ with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
+ self.apt_src_basic(self.fallbackfn, [cfg])
+
+ def apt_src_replacement(self, filename, cfg):
+ """apt_src_replace
+ Test Autoreplacement of MIRROR and RELEASE in source specs
+ """
+ cfg = self.wrapv1conf(cfg)
+ params = self._get_default_params()
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "multiverse"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_replace(self):
+ """Test Autoreplacement of MIRROR and RELEASE in source specs"""
+ cfg = {'source': 'deb $MIRROR $RELEASE multiverse',
+ 'filename': self.aptlistfile}
+ self.apt_src_replacement(self.aptlistfile, [cfg])
+
+ def apt_src_replace_tri(self, cfg):
+ """apt_src_replace_tri
+ Test three autoreplacements of MIRROR and RELEASE in source specs with
+ generic part
+ """
+ self.apt_src_replacement(self.aptlistfile, cfg)
+
+ # extra verify on two extra files of this test
+ params = self._get_default_params()
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "main"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "universe"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_replace_tri(self):
+ """Test triple Autoreplacement of MIRROR and RELEASE in source specs"""
+ cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': 'deb $MIRROR $RELEASE main',
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
+ 'filename': self.aptlistfile3}
+ self.apt_src_replace_tri([cfg1, cfg2, cfg3])
+
+ def test_apt_src_replace_dict_tri(self):
+ """Test triple Autoreplacement in source specs (dict)"""
+ cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'},
+ 'notused': {'source': 'deb $MIRROR $RELEASE main',
+ 'filename': self.aptlistfile2},
+ self.aptlistfile3: {'source': 'deb $MIRROR $RELEASE universe'}}
+ self.apt_src_replace_tri(cfg)
+
+ def test_apt_src_replace_nofn(self):
+ """Test Autoreplacement of MIRROR and RELEASE in source specs nofile"""
+ cfg = {'source': 'deb $MIRROR $RELEASE multiverse'}
+ with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
+ self.apt_src_replacement(self.fallbackfn, [cfg])
+
+ def apt_src_keyid(self, filename, cfg, keynum):
+ """apt_src_keyid
+ Test specification of a source + keyid
+ """
+ cfg = self.wrapv1conf(cfg)
+
+ with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+
+ # check if it added the right number of keys
+ calls = []
+ sources = cfg['apt']['sources']
+ for src in sources:
+ print(sources[src])
+ calls.append(call(sources[src], None))
+
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "main"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_keyid(self):
+ """Test specification of a source + keyid with filename being set"""
+ cfg = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'keyid': "03683F77",
+ 'filename': self.aptlistfile}
+ self.apt_src_keyid(self.aptlistfile, [cfg], 1)
+
+ def test_apt_src_keyid_tri(self):
+ """Test 3x specification of a source + keyid with filename being set"""
+ cfg1 = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'keyid': "03683F77",
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial universe'),
+ 'keyid': "03683F77",
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial multiverse'),
+ 'keyid': "03683F77",
+ 'filename': self.aptlistfile3}
+
+ self.apt_src_keyid(self.aptlistfile, [cfg1, cfg2, cfg3], 3)
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "universe"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "multiverse"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_keyid_nofn(self):
+ """Test specification of a source + keyid without filename being set"""
+ cfg = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'keyid': "03683F77"}
+ with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
+ self.apt_src_keyid(self.fallbackfn, [cfg], 1)
+
+ def apt_src_key(self, filename, cfg):
+ """apt_src_key
+ Test specification of a source + key
+ """
+ cfg = self.wrapv1conf([cfg])
+
+ with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+
+ # check if it added the right amount of keys
+ sources = cfg['apt']['sources']
+ calls = []
+ for src in sources:
+ print(sources[src])
+ calls.append(call(sources[src], None))
+
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "main"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_src_key(self):
+ """Test specification of a source + key with filename being set"""
+ cfg = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'key': "fakekey 4321",
+ 'filename': self.aptlistfile}
+ self.apt_src_key(self.aptlistfile, cfg)
+
+ def test_apt_src_key_nofn(self):
+ """Test specification of a source + key without filename being set"""
+ cfg = {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'key': "fakekey 4321"}
+ with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
+ self.apt_src_key(self.fallbackfn, cfg)
+
+ def test_apt_src_keyonly(self):
+ """Test specifying key without source"""
+ cfg = {'key': "fakekey 4242",
+ 'filename': self.aptlistfile}
+ cfg = self.wrapv1conf([cfg])
+ with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+
+ calls = (call(
+ 'add',
+ output_file=pathlib.Path(self.aptlistfile).stem,
+ data='fakekey 4242',
+ hardened=False),)
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_src_keyidonly(self):
+ """Test specification of a keyid without source"""
+ cfg = {'keyid': "03683F77",
+ 'filename': self.aptlistfile}
+ cfg = self.wrapv1conf([cfg])
+
+ with mock.patch.object(subp, 'subp',
+ return_value=('fakekey 1212', '')):
+ with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
+ cc_apt_configure.handle(
+ "test",
+ cfg,
+ self.fakecloud,
+ None,
+ None)
+
+ calls = (call(
+ 'add',
+ output_file=pathlib.Path(self.aptlistfile).stem,
+ data='fakekey 1212',
+ hardened=False),)
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def apt_src_keyid_real(self, cfg, expectedkey, is_hardened=None):
+ """apt_src_keyid_real
+ Test specification of a keyid without source including
+ up to addition of the key (add_apt_key_raw mocked to keep the
+ environment as is)
+ """
+ key = cfg['keyid']
+ keyserver = cfg.get('keyserver', 'keyserver.ubuntu.com')
+ cfg = self.wrapv1conf([cfg])
+
+ with mock.patch.object(cc_apt_configure, 'add_apt_key_raw') as mockkey:
+ with mock.patch.object(gpg, 'getkeybyid',
+ return_value=expectedkey) as mockgetkey:
+ cc_apt_configure.handle("test", cfg, self.fakecloud,
+ None, None)
+ if is_hardened is not None:
+ mockkey.assert_called_with(
+ expectedkey,
+ self.aptlistfile,
+ hardened=is_hardened)
+ else:
+ mockkey.assert_called_with(expectedkey, self.aptlistfile)
+ mockgetkey.assert_called_with(key, keyserver)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_src_keyid_real(self):
+ """test_apt_src_keyid_real - Test keyid including key add"""
+ keyid = "03683F77"
+ cfg = {'keyid': keyid,
+ 'filename': self.aptlistfile}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
+
+ def test_apt_src_longkeyid_real(self):
+ """test_apt_src_longkeyid_real - Test long keyid including key add"""
+ keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
+ cfg = {'keyid': keyid,
+ 'filename': self.aptlistfile}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
+
+ def test_apt_src_longkeyid_ks_real(self):
+ """test_apt_src_longkeyid_ks_real - Test long keyid from other ks"""
+ keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
+ cfg = {'keyid': keyid,
+ 'keyserver': 'keys.gnupg.net',
+ 'filename': self.aptlistfile}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
+
+ def test_apt_src_ppa(self):
+ """Test adding a ppa"""
+ cfg = {'source': 'ppa:smoser/cloud-init-test',
+ 'filename': self.aptlistfile}
+ cfg = self.wrapv1conf([cfg])
+
+ with mock.patch.object(subp, 'subp') as mockobj:
+ cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
+ mockobj.assert_called_once_with(['add-apt-repository',
+ 'ppa:smoser/cloud-init-test'],
+ target=None)
+
+ # adding ppa should ignore filename (uses add-apt-repository)
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_src_ppa_tri(self):
+ """Test adding three ppa's"""
+ cfg1 = {'source': 'ppa:smoser/cloud-init-test',
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': 'ppa:smoser/cloud-init-test2',
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': 'ppa:smoser/cloud-init-test3',
+ 'filename': self.aptlistfile3}
+ cfg = self.wrapv1conf([cfg1, cfg2, cfg3])
+
+ with mock.patch.object(subp, 'subp') as mockobj:
+ cc_apt_configure.handle("test", cfg, self.fakecloud,
+ None, None)
+ calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
+ target=None),
+ call(['add-apt-repository', 'ppa:smoser/cloud-init-test2'],
+ target=None),
+ call(['add-apt-repository', 'ppa:smoser/cloud-init-test3'],
+ target=None)]
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # adding ppa should ignore all filenames (uses add-apt-repository)
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+ self.assertFalse(os.path.isfile(self.aptlistfile2))
+ self.assertFalse(os.path.isfile(self.aptlistfile3))
+
+ def test_convert_to_new_format(self):
+ """Test the conversion of old to new format"""
+ cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': 'deb $MIRROR $RELEASE main',
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
+ 'filename': self.aptlistfile3}
+ cfg = {'apt_sources': [cfg1, cfg2, cfg3]}
+ checkcfg = {self.aptlistfile: {'filename': self.aptlistfile,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'multiverse'},
+ self.aptlistfile2: {'filename': self.aptlistfile2,
+ 'source': 'deb $MIRROR $RELEASE main'},
+ self.aptlistfile3: {'filename': self.aptlistfile3,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'universe'}}
+
+ newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg)
+ self.assertEqual(newcfg['apt']['sources'], checkcfg)
+
+ # convert again, should stay the same
+ newcfg2 = cc_apt_configure.convert_to_v3_apt_format(newcfg)
+ self.assertEqual(newcfg2['apt']['sources'], checkcfg)
+
+ # should work without raising an exception
+ cc_apt_configure.convert_to_v3_apt_format({})
+
+ with self.assertRaises(ValueError):
+ cc_apt_configure.convert_to_v3_apt_format({'apt_sources': 5})
+
+ def test_convert_to_new_format_collision(self):
+ """Test the conversion of old to new format with collisions
+ That matches e.g. the MAAS case specifying old and new config"""
+ cfg_1_and_3 = {'apt': {'proxy': 'http://192.168.122.1:8000/'},
+ 'apt_proxy': 'http://192.168.122.1:8000/'}
+ cfg_3_only = {'apt': {'proxy': 'http://192.168.122.1:8000/'}}
+ cfgconflict = {'apt': {'proxy': 'http://192.168.122.1:8000/'},
+ 'apt_proxy': 'ftp://192.168.122.1:8000/'}
+
+ # collision (equal)
+ newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3)
+ self.assertEqual(newcfg, cfg_3_only)
+ # collision (equal, so ok to remove)
+ newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_3_only)
+ self.assertEqual(newcfg, cfg_3_only)
+ # collision (unequal)
+ match = "Old and New.*unequal.*apt_proxy"
+ with self.assertRaisesRegex(ValueError, match):
+ cc_apt_configure.convert_to_v3_apt_format(cfgconflict)
+
+ def test_convert_to_new_format_dict_collision(self):
+ cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
+ 'filename': self.aptlistfile}
+ cfg2 = {'source': 'deb $MIRROR $RELEASE main',
+ 'filename': self.aptlistfile2}
+ cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
+ 'filename': self.aptlistfile3}
+ fullv3 = {self.aptlistfile: {'filename': self.aptlistfile,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'multiverse'},
+ self.aptlistfile2: {'filename': self.aptlistfile2,
+ 'source': 'deb $MIRROR $RELEASE main'},
+ self.aptlistfile3: {'filename': self.aptlistfile3,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'universe'}}
+ cfg_3_only = {'apt': {'sources': fullv3}}
+ cfg_1_and_3 = {'apt_sources': [cfg1, cfg2, cfg3]}
+ cfg_1_and_3.update(cfg_3_only)
+
+ # collision (equal, so ok to remove)
+ newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3)
+ self.assertEqual(newcfg, cfg_3_only)
+ # no old spec (same result)
+ newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_3_only)
+ self.assertEqual(newcfg, cfg_3_only)
+
+ diff = {self.aptlistfile: {'filename': self.aptlistfile,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'DIFFERENTVERSE'},
+ self.aptlistfile2: {'filename': self.aptlistfile2,
+ 'source': 'deb $MIRROR $RELEASE main'},
+ self.aptlistfile3: {'filename': self.aptlistfile3,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'universe'}}
+ cfg_3_only = {'apt': {'sources': diff}}
+ cfg_1_and_3_different = {'apt_sources': [cfg1, cfg2, cfg3]}
+ cfg_1_and_3_different.update(cfg_3_only)
+
+ # collision (unequal by dict having a different entry)
+ with self.assertRaises(ValueError):
+ cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3_different)
+
+ missing = {self.aptlistfile: {'filename': self.aptlistfile,
+ 'source': 'deb $MIRROR $RELEASE '
+ 'multiverse'}}
+ cfg_3_only = {'apt': {'sources': missing}}
+ cfg_1_and_3_missing = {'apt_sources': [cfg1, cfg2, cfg3]}
+ cfg_1_and_3_missing.update(cfg_3_only)
+ # collision (unequal by dict missing an entry)
+ with self.assertRaises(ValueError):
+ cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3_missing)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_apt_source_v3.py b/tests/unittests/config/test_apt_source_v3.py
new file mode 100644
index 00000000..0b78037e
--- /dev/null
+++ b/tests/unittests/config/test_apt_source_v3.py
@@ -0,0 +1,1170 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""test_handler_apt_source_v3
+Testing various config variations of the apt_source custom config
+This tries to call all in the new v3 format and cares about new features
+"""
+import glob
+import os
+import re
+import shutil
+import socket
+import tempfile
+import pathlib
+
+from unittest import TestCase, mock
+from unittest.mock import call
+
+from cloudinit import gpg
+from cloudinit import subp
+from cloudinit import util
+from cloudinit.config import cc_apt_configure
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+EXPECTEDKEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mI0ESuZLUgEEAKkqq3idtFP7g9hzOu1a8+v8ImawQN4TrvlygfScMU1TIS1eC7UQ
+NUA8Qqgr9iUaGnejb0VciqftLrU9D6WYHSKz+EITefgdyJ6SoQxjoJdsCpJ7o9Jy
+8PQnpRttiFm4qHu6BVnKnBNxw/z3ST9YMqW5kbMQpfxbGe+obRox59NpABEBAAG0
+HUxhdW5jaHBhZCBQUEEgZm9yIFNjb3R0IE1vc2VyiLYEEwECACAFAkrmS1ICGwMG
+CwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRAGILvPA2g/d3aEA/9tVjc10HOZwV29
+OatVuTeERjjrIbxflO586GLA8cp0C9RQCwgod/R+cKYdQcHjbqVcP0HqxveLg0RZ
+FJpWLmWKamwkABErwQLGlM/Hwhjfade8VvEQutH5/0JgKHmzRsoqfR+LMO6OS+Sm
+S0ORP6HXET3+jC8BMG4tBWCTK/XEZw==
+=ACB2
+-----END PGP PUBLIC KEY BLOCK-----"""
+
+ADD_APT_REPO_MATCH = r"^[\w-]+:\w"
+
+TARGET = None
+
+MOCK_LSB_RELEASE_DATA = {
+ 'id': 'Ubuntu', 'description': 'Ubuntu 18.04.1 LTS',
+ 'release': '18.04', 'codename': 'bionic'}
+
+
+class FakeDatasource:
+ """Fake Datasource helper object"""
+ def __init__(self):
+ self.region = 'region'
+
+
+class FakeCloud:
+ """Fake Cloud helper object"""
+ def __init__(self):
+ self.datasource = FakeDatasource()
+
+
+class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
+ """TestAptSourceConfig
+ Main Class to test apt configs
+ """
+ def setUp(self):
+ super(TestAptSourceConfig, self).setUp()
+ self.tmp = tempfile.mkdtemp()
+ self.new_root = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tmp)
+ self.addCleanup(shutil.rmtree, self.new_root)
+ self.aptlistfile = os.path.join(self.tmp, "single-deb.list")
+ self.aptlistfile2 = os.path.join(self.tmp, "single-deb2.list")
+ self.aptlistfile3 = os.path.join(self.tmp, "single-deb3.list")
+ self.join = os.path.join
+ self.matcher = re.compile(ADD_APT_REPO_MATCH).search
+ self.add_patch(
+ 'cloudinit.config.cc_apt_configure.util.lsb_release',
+ 'm_lsb_release', return_value=MOCK_LSB_RELEASE_DATA.copy())
+
+ @staticmethod
+ def _add_apt_sources(*args, **kwargs):
+ with mock.patch.object(cc_apt_configure, 'update_packages'):
+ cc_apt_configure.add_apt_sources(*args, **kwargs)
+
+ @staticmethod
+ def _get_default_params():
+ """get_default_params
+        Get the most basic default mirror and release info to be used in tests
+ """
+ params = {}
+ params['RELEASE'] = MOCK_LSB_RELEASE_DATA['release']
+ arch = 'amd64'
+ params['MIRROR'] = cc_apt_configure.\
+ get_default_mirrors(arch)["PRIMARY"]
+ return params
+
+ def _myjoin(self, *args, **kwargs):
+ """_myjoin - redir into writable tmpdir"""
+ if (args[0] == "/etc/apt/sources.list.d/" and
+ args[1] == "cloud_config_sources.list" and
+ len(args) == 2):
+ return self.join(self.tmp, args[0].lstrip("/"), args[1])
+ else:
+ return self.join(*args, **kwargs)
+
+ def _apt_src_basic(self, filename, cfg):
+ """_apt_src_basic
+ Test Fix deb source string, has to overwrite mirror conf in params
+ """
+ params = self._get_default_params()
+
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://test.ubuntu.com/ubuntu",
+ "karmic-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_basic(self):
+ """test_apt_v3_src_basic - Test fix deb source string"""
+ cfg = {self.aptlistfile: {'source':
+ ('deb http://test.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted')}}
+ self._apt_src_basic(self.aptlistfile, cfg)
+
+ def test_apt_v3_src_basic_tri(self):
+ """test_apt_v3_src_basic_tri - Test multiple fix deb source strings"""
+ cfg = {self.aptlistfile: {'source':
+ ('deb http://test.ubuntu.com/ubuntu'
+ ' karmic-backports'
+ ' main universe multiverse restricted')},
+ self.aptlistfile2: {'source':
+ ('deb http://test.ubuntu.com/ubuntu'
+ ' precise-backports'
+ ' main universe multiverse restricted')},
+ self.aptlistfile3: {'source':
+ ('deb http://test.ubuntu.com/ubuntu'
+ ' lucid-backports'
+ ' main universe multiverse restricted')}}
+ self._apt_src_basic(self.aptlistfile, cfg)
+
+ # extra verify on two extra files of this test
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://test.ubuntu.com/ubuntu",
+ "precise-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", "http://test.ubuntu.com/ubuntu",
+ "lucid-backports",
+ "main universe multiverse restricted"),
+ contents, flags=re.IGNORECASE))
+
+ def _apt_src_replacement(self, filename, cfg):
+ """apt_src_replace
+ Test Autoreplacement of MIRROR and RELEASE in source specs
+ """
+ params = self._get_default_params()
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "multiverse"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_replace(self):
+ """test_apt_v3_src_replace - Test replacement of MIRROR & RELEASE"""
+ cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'}}
+ self._apt_src_replacement(self.aptlistfile, cfg)
+
+ def test_apt_v3_src_replace_fn(self):
+ """test_apt_v3_src_replace_fn - Test filename overwritten in dict"""
+ cfg = {'ignored': {'source': 'deb $MIRROR $RELEASE multiverse',
+ 'filename': self.aptlistfile}}
+ # second file should overwrite the dict key
+ self._apt_src_replacement(self.aptlistfile, cfg)
+
+ def _apt_src_replace_tri(self, cfg):
+ """_apt_src_replace_tri
+ Test three autoreplacements of MIRROR and RELEASE in source specs with
+ generic part
+ """
+ self._apt_src_replacement(self.aptlistfile, cfg)
+
+ # extra verify on two extra files of this test
+ params = self._get_default_params()
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "main"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb", params['MIRROR'], params['RELEASE'],
+ "universe"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_replace_tri(self):
+ """test_apt_v3_src_replace_tri - Test multiple replace/overwrites"""
+ cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'},
+ 'notused': {'source': 'deb $MIRROR $RELEASE main',
+ 'filename': self.aptlistfile2},
+ self.aptlistfile3: {'source': 'deb $MIRROR $RELEASE universe'}}
+ self._apt_src_replace_tri(cfg)
+
+ def _apt_src_keyid(self, filename, cfg, keynum, is_hardened=None):
+ """_apt_src_keyid
+ Test specification of a source + keyid
+ """
+ params = self._get_default_params()
+
+ with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ # check if it added the right number of keys
+ calls = []
+ for key in cfg:
+ if is_hardened is not None:
+ calls.append(call(cfg[key], hardened=is_hardened))
+ else:
+ calls.append(call(cfg[key], TARGET))
+
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ self.assertTrue(os.path.isfile(filename))
+
+ contents = util.load_file(filename)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "main"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_keyid(self):
+ """test_apt_v3_src_keyid - Test source + keyid with filename"""
+ cfg = {self.aptlistfile: {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'filename': self.aptlistfile,
+ 'keyid': "03683F77"}}
+ self._apt_src_keyid(self.aptlistfile, cfg, 1)
+
+ def test_apt_v3_src_keyid_tri(self):
+ """test_apt_v3_src_keyid_tri - Test multiple src+key+filen writes"""
+ cfg = {self.aptlistfile: {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'keyid': "03683F77"},
+ 'ignored': {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial universe'),
+ 'keyid': "03683F77",
+ 'filename': self.aptlistfile2},
+ self.aptlistfile3: {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial multiverse'),
+ 'filename': self.aptlistfile3,
+ 'keyid': "03683F77"}}
+
+ self._apt_src_keyid(self.aptlistfile, cfg, 3)
+ contents = util.load_file(self.aptlistfile2)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "universe"),
+ contents, flags=re.IGNORECASE))
+ contents = util.load_file(self.aptlistfile3)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "multiverse"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_key(self):
+ """test_apt_v3_src_key - Test source + key"""
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'source': ('deb '
+ 'http://ppa.launchpad.net/'
+ 'smoser/cloud-init-test/ubuntu'
+ ' xenial main'),
+ 'filename': self.aptlistfile,
+ 'key': "fakekey 4321"}}
+
+ with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ calls = (call(
+ 'add',
+ output_file=pathlib.Path(self.aptlistfile).stem,
+ data='fakekey 4321',
+ hardened=False),)
+ mockobj.assert_has_calls(calls, any_order=True)
+ self.assertTrue(os.path.isfile(self.aptlistfile))
+
+ contents = util.load_file(self.aptlistfile)
+ self.assertTrue(re.search(r"%s %s %s %s\n" %
+ ("deb",
+ ('http://ppa.launchpad.net/smoser/'
+ 'cloud-init-test/ubuntu'),
+ "xenial", "main"),
+ contents, flags=re.IGNORECASE))
+
+ def test_apt_v3_src_keyonly(self):
+ """test_apt_v3_src_keyonly - Test key without source"""
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'key': "fakekey 4242"}}
+
+ with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ calls = (call(
+ 'add',
+ output_file=pathlib.Path(self.aptlistfile).stem,
+ data='fakekey 4242',
+ hardened=False),)
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_v3_src_keyidonly(self):
+ """test_apt_v3_src_keyidonly - Test keyid without source"""
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'keyid': "03683F77"}}
+ with mock.patch.object(subp, 'subp',
+ return_value=('fakekey 1212', '')):
+ with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ calls = (call(
+ 'add',
+ output_file=pathlib.Path(self.aptlistfile).stem,
+ data='fakekey 1212',
+ hardened=False),)
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def apt_src_keyid_real(self, cfg, expectedkey, is_hardened=None):
+ """apt_src_keyid_real
+ Test specification of a keyid without source including
+ up to addition of the key (add_apt_key_raw mocked to keep the
+ environment as is)
+ """
+ params = self._get_default_params()
+
+ with mock.patch.object(cc_apt_configure, 'add_apt_key_raw') as mockkey:
+ with mock.patch.object(gpg, 'getkeybyid',
+ return_value=expectedkey) as mockgetkey:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ keycfg = cfg[self.aptlistfile]
+ mockgetkey.assert_called_with(keycfg['keyid'],
+ keycfg.get('keyserver',
+ 'keyserver.ubuntu.com'))
+ if is_hardened is not None:
+ mockkey.assert_called_with(
+ expectedkey,
+ keycfg['keyfile'],
+ hardened=is_hardened)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_v3_src_keyid_real(self):
+ """test_apt_v3_src_keyid_real - Test keyid including key add"""
+ keyid = "03683F77"
+ cfg = {self.aptlistfile: {'keyid': keyid,
+ 'keyfile': self.aptlistfile}}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
+
+ def test_apt_v3_src_longkeyid_real(self):
+ """test_apt_v3_src_longkeyid_real Test long keyid including key add"""
+ keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
+ cfg = {self.aptlistfile: {'keyid': keyid,
+ 'keyfile': self.aptlistfile}}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
+
+ def test_apt_v3_src_longkeyid_ks_real(self):
+ """test_apt_v3_src_longkeyid_ks_real Test long keyid from other ks"""
+ keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
+ cfg = {self.aptlistfile: {'keyid': keyid,
+ 'keyfile': self.aptlistfile,
+ 'keyserver': 'keys.gnupg.net'}}
+
+ self.apt_src_keyid_real(cfg, EXPECTEDKEY)
+
+ def test_apt_v3_src_keyid_keyserver(self):
+ """test_apt_v3_src_keyid_keyserver - Test custom keyserver"""
+ keyid = "03683F77"
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'keyid': keyid,
+ 'keyfile': self.aptlistfile,
+ 'keyserver': 'test.random.com'}}
+
+ # in some test environments only *.ubuntu.com is reachable
+ # so mock the call and check if the config got there
+ with mock.patch.object(gpg, 'getkeybyid',
+ return_value="fakekey") as mockgetkey:
+ with mock.patch.object(cc_apt_configure,
+ 'add_apt_key_raw') as mockadd:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+
+ mockgetkey.assert_called_with('03683F77', 'test.random.com')
+ mockadd.assert_called_with('fakekey', self.aptlistfile, hardened=False)
+
+ # filename should be ignored on key only
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_v3_src_ppa(self):
+ """test_apt_v3_src_ppa - Test specification of a ppa"""
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'source': 'ppa:smoser/cloud-init-test'}}
+
+ with mock.patch("cloudinit.subp.subp") as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+ mockobj.assert_any_call(['add-apt-repository',
+ 'ppa:smoser/cloud-init-test'], target=TARGET)
+
+ # adding ppa should ignore filename (uses add-apt-repository)
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+
+ def test_apt_v3_src_ppa_tri(self):
+ """test_apt_v3_src_ppa_tri - Test specification of multiple ppa's"""
+ params = self._get_default_params()
+ cfg = {self.aptlistfile: {'source': 'ppa:smoser/cloud-init-test'},
+ self.aptlistfile2: {'source': 'ppa:smoser/cloud-init-test2'},
+ self.aptlistfile3: {'source': 'ppa:smoser/cloud-init-test3'}}
+
+ with mock.patch("cloudinit.subp.subp") as mockobj:
+ self._add_apt_sources(cfg, TARGET, template_params=params,
+ aa_repo_match=self.matcher)
+ calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
+ target=TARGET),
+ call(['add-apt-repository', 'ppa:smoser/cloud-init-test2'],
+ target=TARGET),
+ call(['add-apt-repository', 'ppa:smoser/cloud-init-test3'],
+ target=TARGET)]
+ mockobj.assert_has_calls(calls, any_order=True)
+
+ # adding ppa should ignore all filenames (uses add-apt-repository)
+ self.assertFalse(os.path.isfile(self.aptlistfile))
+ self.assertFalse(os.path.isfile(self.aptlistfile2))
+ self.assertFalse(os.path.isfile(self.aptlistfile3))
+
+ @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
+ def test_apt_v3_list_rename(self, m_get_dpkg_architecture):
+ """test_apt_v3_list_rename - Test find mirror and apt list renaming"""
+ pre = "/var/lib/apt/lists"
+ # filenames are archive dependent
+
+ arch = 's390x'
+ m_get_dpkg_architecture.return_value = arch
+ component = "ubuntu-ports"
+ archive = "ports.ubuntu.com"
+
+ cfg = {'primary': [{'arches': ["default"],
+ 'uri':
+ 'http://test.ubuntu.com/%s/' % component}],
+ 'security': [{'arches': ["default"],
+ 'uri':
+ 'http://testsec.ubuntu.com/%s/' % component}]}
+ post = ("%s_dists_%s-updates_InRelease" %
+ (component, MOCK_LSB_RELEASE_DATA['codename']))
+ fromfn = ("%s/%s_%s" % (pre, archive, post))
+ tofn = ("%s/test.ubuntu.com_%s" % (pre, post))
+
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
+
+ self.assertEqual(mirrors['MIRROR'],
+ "http://test.ubuntu.com/%s/" % component)
+ self.assertEqual(mirrors['PRIMARY'],
+ "http://test.ubuntu.com/%s/" % component)
+ self.assertEqual(mirrors['SECURITY'],
+ "http://testsec.ubuntu.com/%s/" % component)
+
+ with mock.patch.object(os, 'rename') as mockren:
+ with mock.patch.object(glob, 'glob',
+ return_value=[fromfn]):
+ cc_apt_configure.rename_apt_lists(mirrors, TARGET, arch)
+
+ mockren.assert_any_call(fromfn, tofn)
+
+ @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
+ def test_apt_v3_list_rename_non_slash(self, m_get_dpkg_architecture):
+ target = os.path.join(self.tmp, "rename_non_slash")
+ apt_lists_d = os.path.join(target, "./" + cc_apt_configure.APT_LISTS)
+
+ arch = 'amd64'
+ m_get_dpkg_architecture.return_value = arch
+
+ mirror_path = "some/random/path/"
+ primary = "http://test.ubuntu.com/" + mirror_path
+ security = "http://test-security.ubuntu.com/" + mirror_path
+ mirrors = {'PRIMARY': primary, 'SECURITY': security}
+
+ # these match default archive prefixes
+ opri_pre = "archive.ubuntu.com_ubuntu_dists_xenial"
+ osec_pre = "security.ubuntu.com_ubuntu_dists_xenial"
+ # this one won't match and should not be renamed defaults.
+ other_pre = "dl.google.com_linux_chrome_deb_dists_stable"
+ # these are our new expected prefixes
+ npri_pre = "test.ubuntu.com_some_random_path_dists_xenial"
+ nsec_pre = "test-security.ubuntu.com_some_random_path_dists_xenial"
+
+ files = [
+ # orig prefix, new prefix, suffix
+ (opri_pre, npri_pre, "_main_binary-amd64_Packages"),
+ (opri_pre, npri_pre, "_main_binary-amd64_InRelease"),
+ (opri_pre, npri_pre, "-updates_main_binary-amd64_Packages"),
+ (opri_pre, npri_pre, "-updates_main_binary-amd64_InRelease"),
+ (other_pre, other_pre, "_main_binary-amd64_Packages"),
+ (other_pre, other_pre, "_Release"),
+ (other_pre, other_pre, "_Release.gpg"),
+ (osec_pre, nsec_pre, "_InRelease"),
+ (osec_pre, nsec_pre, "_main_binary-amd64_Packages"),
+ (osec_pre, nsec_pre, "_universe_binary-amd64_Packages"),
+ ]
+
+ expected = sorted([npre + suff for opre, npre, suff in files])
+ # create files
+ for (opre, _npre, suff) in files:
+ fpath = os.path.join(apt_lists_d, opre + suff)
+ util.write_file(fpath, content=fpath)
+
+ cc_apt_configure.rename_apt_lists(mirrors, target, arch)
+ found = sorted(os.listdir(apt_lists_d))
+ self.assertEqual(expected, found)
+
+ @staticmethod
+ def test_apt_v3_proxy():
+ """test_apt_v3_proxy - Test apt_*proxy configuration"""
+ cfg = {"proxy": "foobar1",
+ "http_proxy": "foobar2",
+ "ftp_proxy": "foobar3",
+ "https_proxy": "foobar4"}
+
+ with mock.patch.object(util, 'write_file') as mockobj:
+ cc_apt_configure.apply_apt_config(cfg, "proxyfn", "notused")
+
+ mockobj.assert_called_with('proxyfn',
+ ('Acquire::http::Proxy "foobar1";\n'
+ 'Acquire::http::Proxy "foobar2";\n'
+ 'Acquire::ftp::Proxy "foobar3";\n'
+ 'Acquire::https::Proxy "foobar4";\n'))
+
+ def test_apt_v3_mirror(self):
+ """test_apt_v3_mirror - Test defining a mirror"""
+ pmir = "http://us.archive.ubuntu.com/ubuntu/"
+ smir = "http://security.ubuntu.com/ubuntu/"
+ cfg = {"primary": [{'arches': ["default"],
+ "uri": pmir}],
+ "security": [{'arches': ["default"],
+ "uri": smir}]}
+
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), 'amd64')
+
+ self.assertEqual(mirrors['MIRROR'],
+ pmir)
+ self.assertEqual(mirrors['PRIMARY'],
+ pmir)
+ self.assertEqual(mirrors['SECURITY'],
+ smir)
+
+ def test_apt_v3_mirror_default(self):
+ """test_apt_v3_mirror_default - Test without defining a mirror"""
+ arch = 'amd64'
+ default_mirrors = cc_apt_configure.get_default_mirrors(arch)
+ pmir = default_mirrors["PRIMARY"]
+ smir = default_mirrors["SECURITY"]
+ mycloud = get_cloud()
+ mirrors = cc_apt_configure.find_apt_mirror_info({}, mycloud, arch)
+
+ self.assertEqual(mirrors['MIRROR'],
+ pmir)
+ self.assertEqual(mirrors['PRIMARY'],
+ pmir)
+ self.assertEqual(mirrors['SECURITY'],
+ smir)
+
+ def test_apt_v3_mirror_arches(self):
+ """test_apt_v3_mirror_arches - Test arches selection of mirror"""
+ pmir = "http://my-primary.ubuntu.com/ubuntu/"
+ smir = "http://my-security.ubuntu.com/ubuntu/"
+ arch = 'ppc64el'
+ cfg = {"primary": [{'arches': ["default"], "uri": "notthis-primary"},
+ {'arches': [arch], "uri": pmir}],
+ "security": [{'arches': ["default"], "uri": "nothis-security"},
+ {'arches': [arch], "uri": smir}]}
+
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
+
+ self.assertEqual(mirrors['PRIMARY'], pmir)
+ self.assertEqual(mirrors['MIRROR'], pmir)
+ self.assertEqual(mirrors['SECURITY'], smir)
+
+ def test_apt_v3_mirror_arches_default(self):
+ """test_apt_v3_mirror_arches - Test falling back to default arch"""
+ pmir = "http://us.archive.ubuntu.com/ubuntu/"
+ smir = "http://security.ubuntu.com/ubuntu/"
+ cfg = {"primary": [{'arches': ["default"],
+ "uri": pmir},
+ {'arches': ["thisarchdoesntexist"],
+ "uri": "notthis"}],
+ "security": [{'arches': ["thisarchdoesntexist"],
+ "uri": "nothat"},
+ {'arches': ["default"],
+ "uri": smir}]}
+
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), 'amd64')
+
+ self.assertEqual(mirrors['MIRROR'],
+ pmir)
+ self.assertEqual(mirrors['PRIMARY'],
+ pmir)
+ self.assertEqual(mirrors['SECURITY'],
+ smir)
+
+ @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
+ def test_apt_v3_get_def_mir_non_intel_no_arch(
+ self, m_get_dpkg_architecture
+ ):
+ arch = 'ppc64el'
+ m_get_dpkg_architecture.return_value = arch
+ expected = {'PRIMARY': 'http://ports.ubuntu.com/ubuntu-ports',
+ 'SECURITY': 'http://ports.ubuntu.com/ubuntu-ports'}
+ self.assertEqual(expected, cc_apt_configure.get_default_mirrors())
+
+ def test_apt_v3_get_default_mirrors_non_intel_with_arch(self):
+ found = cc_apt_configure.get_default_mirrors('ppc64el')
+
+ expected = {'PRIMARY': 'http://ports.ubuntu.com/ubuntu-ports',
+ 'SECURITY': 'http://ports.ubuntu.com/ubuntu-ports'}
+ self.assertEqual(expected, found)
+
+ def test_apt_v3_mirror_arches_sysdefault(self):
+ """test_apt_v3_mirror_arches - Test arches fallback to sys default"""
+ arch = 'amd64'
+ default_mirrors = cc_apt_configure.get_default_mirrors(arch)
+ pmir = default_mirrors["PRIMARY"]
+ smir = default_mirrors["SECURITY"]
+ mycloud = get_cloud()
+ cfg = {"primary": [{'arches': ["thisarchdoesntexist_64"],
+ "uri": "notthis"},
+ {'arches': ["thisarchdoesntexist"],
+ "uri": "notthiseither"}],
+ "security": [{'arches': ["thisarchdoesntexist"],
+ "uri": "nothat"},
+ {'arches': ["thisarchdoesntexist_64"],
+ "uri": "nothateither"}]}
+
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
+
+ self.assertEqual(mirrors['MIRROR'], pmir)
+ self.assertEqual(mirrors['PRIMARY'], pmir)
+ self.assertEqual(mirrors['SECURITY'], smir)
+
+ def test_apt_v3_mirror_search(self):
+ """test_apt_v3_mirror_search - Test searching mirrors in a list
+ mock checks to avoid relying on network connectivity"""
+ pmir = "http://us.archive.ubuntu.com/ubuntu/"
+ smir = "http://security.ubuntu.com/ubuntu/"
+ cfg = {"primary": [{'arches': ["default"],
+ "search": ["pfailme", pmir]}],
+ "security": [{'arches': ["default"],
+ "search": ["sfailme", smir]}]}
+
+ with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
+ side_effect=[pmir, smir]) as mocksearch:
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(),
+ 'amd64')
+
+ calls = [call(["pfailme", pmir]),
+ call(["sfailme", smir])]
+ mocksearch.assert_has_calls(calls)
+
+ self.assertEqual(mirrors['MIRROR'],
+ pmir)
+ self.assertEqual(mirrors['PRIMARY'],
+ pmir)
+ self.assertEqual(mirrors['SECURITY'],
+ smir)
+
+ def test_apt_v3_mirror_search_many2(self):
+ """test_apt_v3_mirror_search_many3 - Test both mirrors specs at once"""
+ pmir = "http://us.archive.ubuntu.com/ubuntu/"
+ smir = "http://security.ubuntu.com/ubuntu/"
+ cfg = {"primary": [{'arches': ["default"],
+ "uri": pmir,
+ "search": ["pfailme", "foo"]}],
+ "security": [{'arches': ["default"],
+ "uri": smir,
+ "search": ["sfailme", "bar"]}]}
+
+ arch = 'amd64'
+
+ # should be called only once per type, despite two mirror configs
+ mycloud = None
+ with mock.patch.object(cc_apt_configure, 'get_mirror',
+ return_value="http://mocked/foo") as mockgm:
+ mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
+ calls = [call(cfg, 'primary', arch, mycloud),
+ call(cfg, 'security', arch, mycloud)]
+ mockgm.assert_has_calls(calls)
+
+ # should not be called, since primary is specified
+ with mock.patch.object(cc_apt_configure.util,
+ 'search_for_mirror') as mockse:
+ mirrors = cc_apt_configure.find_apt_mirror_info(
+ cfg, FakeCloud(), arch)
+ mockse.assert_not_called()
+
+ self.assertEqual(mirrors['MIRROR'],
+ pmir)
+ self.assertEqual(mirrors['PRIMARY'],
+ pmir)
+ self.assertEqual(mirrors['SECURITY'],
+ smir)
+
    def test_apt_v3_url_resolvable(self):
        """test_apt_v3_url_resolvable - Test resolving urls"""

        # is_resolvable_url must extract the hostname (even a raw IP)
        # before asking is_resolvable about it
        with mock.patch.object(util, 'is_resolvable') as mockresolve:
            util.is_resolvable_url("http://1.2.3.4/ubuntu")
        mockresolve.assert_called_with("1.2.3.4")

        with mock.patch.object(util, 'is_resolvable') as mockresolve:
            util.is_resolvable_url("http://us.archive.ubuntu.com/ubuntu")
        mockresolve.assert_called_with("us.archive.ubuntu.com")

        # former tests can leave this set (or not if the test is ran directly)
        # do a hard reset to ensure a stable result
        util._DNS_REDIRECT_IP = None
        # canned getaddrinfo results: (family, type, proto, canonname, addr)
        bad = [(None, None, None, "badname", ["10.3.2.1"])]
        good = [(None, None, None, "goodname", ["10.2.3.4"])]
        # side_effect order: three probe lookups of known-invalid names
        # (to detect DNS redirecting resolvers), then the two real lookups
        with mock.patch.object(socket, 'getaddrinfo',
                               side_effect=[bad, bad, bad, good,
                                            good]) as mocksock:
            ret = util.is_resolvable_url("http://us.archive.ubuntu.com/ubuntu")
            ret2 = util.is_resolvable_url("http://1.2.3.4/ubuntu")
            mocksock.assert_any_call('does-not-exist.example.com.', None,
                                     0, 0, 1, 2)
            mocksock.assert_any_call('example.invalid.', None, 0, 0, 1, 2)
            mocksock.assert_any_call('us.archive.ubuntu.com', None)
            mocksock.assert_any_call('1.2.3.4', None)

        self.assertTrue(ret)
        self.assertTrue(ret2)

        # side effect need only bad ret after initial call
        with mock.patch.object(socket, 'getaddrinfo',
                               side_effect=[bad]) as mocksock:
            ret3 = util.is_resolvable_url("http://failme.com/ubuntu")
            calls = [call('failme.com', None)]
            mocksock.assert_has_calls(calls)
        self.assertFalse(ret3)
+
    def test_apt_v3_disable_suites(self):
        """test_disable_suites - disable_suites with many configurations

        Each scenario feeds a sources.list body through disable_suites and
        checks that the matching 'deb' lines are commented out with the
        'suite disabled by cloud-init:' marker while everything else is
        left untouched.  $RELEASE expands to the given release ("xenial").
        """
        release = "xenial"
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""

        # disable nothing
        disabled = []
        expect = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable release suite
        disabled = ["$RELEASE"]
        expect = """\
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable other suite
        disabled = ["$RELEASE-updates"]
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu"""
                  """ xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # multi disable
        disabled = ["$RELEASE-updates", "$RELEASE-security"]
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-updates main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # multi line disable (same suite multiple times in input)
        # matching is case-insensitive on the uri host
        disabled = ["$RELEASE-updates", "$RELEASE-security"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://UBUNTU.com//ubuntu xenial-updates main
deb http://UBUNTU.COM//ubuntu xenial-updates main
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-updates main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
# suite disabled by cloud-init: deb http://UBUNTU.com//ubuntu """
                  """xenial-updates main
# suite disabled by cloud-init: deb http://UBUNTU.COM//ubuntu """
                  """xenial-updates main
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # comment in input
        # pre-existing comment lines must pass through unmodified
        disabled = ["$RELEASE-updates", "$RELEASE-security"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
#foo
#deb http://UBUNTU.com//ubuntu xenial-updates main
deb http://UBUNTU.COM//ubuntu xenial-updates main
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-updates main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
#foo
#deb http://UBUNTU.com//ubuntu xenial-updates main
# suite disabled by cloud-init: deb http://UBUNTU.COM//ubuntu """
                  """xenial-updates main
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable custom suite
        disabled = ["foobar"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb http://ubuntu.com/ubuntu/ foobar main"""
        expect = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
# suite disabled by cloud-init: deb http://ubuntu.com/ubuntu/ foobar main"""
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable non existing suite
        disabled = ["foobar"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb http://ubuntu.com/ubuntu/ notfoobar main"""
        expect = """deb http://ubuntu.com//ubuntu xenial main
deb http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb http://ubuntu.com/ubuntu/ notfoobar main"""
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable suite with option
        # a [key=value] option block must not hide the suite
        disabled = ["$RELEASE-updates"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb [a=b] http://ubu.com//ubu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb [a=b] http://ubu.com//ubu """
                  """xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable suite with more options and auto $RELEASE expansion
        # a bare suite name like "updates" expands to "<release>-updates"
        disabled = ["updates"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb [a=b c=d] http://ubu.com//ubu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        expect = """deb http://ubuntu.com//ubuntu xenial main
# suite disabled by cloud-init: deb [a=b c=d] \
http://ubu.com//ubu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)

        # single disable suite while options at others
        disabled = ["$RELEASE-security"]
        orig = """deb http://ubuntu.com//ubuntu xenial main
deb [arch=foo] http://ubuntu.com//ubuntu xenial-updates main
deb http://ubuntu.com//ubuntu xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
        expect = ("""deb http://ubuntu.com//ubuntu xenial main
deb [arch=foo] http://ubuntu.com//ubuntu xenial-updates main
# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
                  """xenial-security main
deb-src http://ubuntu.com//ubuntu universe multiverse
deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
        result = cc_apt_configure.disable_suites(disabled, orig, release)
        self.assertEqual(expect, result)
+
+ def test_disable_suites_blank_lines(self):
+ """test_disable_suites_blank_lines - ensure blank lines allowed"""
+ lines = ["deb %(repo)s %(rel)s main universe",
+ "",
+ "deb %(repo)s %(rel)s-updates main universe",
+ " # random comment",
+ "#comment here",
+ ""]
+ rel = "trusty"
+ repo = 'http://example.com/mirrors/ubuntu'
+ orig = "\n".join(lines) % {'repo': repo, 'rel': rel}
+ self.assertEqual(
+ orig, cc_apt_configure.disable_suites(["proposed"], orig, rel))
+
    @mock.patch("cloudinit.util.get_hostname", return_value='abc.localdomain')
    def test_apt_v3_mirror_search_dns(self, m_get_hostname):
        """test_apt_v3_mirror_search_dns - Test searching dns patterns"""
        pmir = "phit"
        smir = "shit"
        arch = 'amd64'
        mycloud = get_cloud('ubuntu')
        cfg = {"primary": [{'arches': ["default"],
                            "search_dns": True}],
               "security": [{'arches': ["default"],
                             "search_dns": True}]}

        # get_mirror must be consulted once per mirror type
        with mock.patch.object(cc_apt_configure, 'get_mirror',
                               return_value="http://mocked/foo") as mockgm:
            mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
        calls = [call(cfg, 'primary', arch, mycloud),
                 call(cfg, 'security', arch, mycloud)]
        mockgm.assert_has_calls(calls)

        # search_dns=True must route through search_for_mirror_dns
        with mock.patch.object(cc_apt_configure, 'search_for_mirror_dns',
                               return_value="http://mocked/foo") as mocksdns:
            mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
        calls = [call(True, 'primary', cfg, mycloud),
                 call(True, 'security', cfg, mycloud)]
        mocksdns.assert_has_calls(calls)

        # first return is for the non-dns call before
        with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
                               side_effect=[None, pmir, None, smir]) as mockse:
            mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)

        # the dns candidates are derived from the mocked hostname's
        # domain ('localdomain') plus a domain-less fallback
        calls = [call(None),
                 call(['http://ubuntu-mirror.localdomain/ubuntu',
                       'http://ubuntu-mirror/ubuntu']),
                 call(None),
                 call(['http://ubuntu-security-mirror.localdomain/ubuntu',
                       'http://ubuntu-security-mirror/ubuntu'])]
        mockse.assert_has_calls(calls)

        self.assertEqual(mirrors['MIRROR'],
                         pmir)
        self.assertEqual(mirrors['PRIMARY'],
                         pmir)
        self.assertEqual(mirrors['SECURITY'],
                         smir)
+
+ def test_apt_v3_add_mirror_keys(self):
+ """test_apt_v3_add_mirror_keys - Test adding key for mirrors"""
+ arch = 'amd64'
+ cfg = {
+ 'primary': [
+ {'arches': [arch],
+ 'uri': 'http://test.ubuntu.com/',
+ 'filename': 'primary',
+ 'key': 'fakekey_primary'}],
+ 'security': [
+ {'arches': [arch],
+ 'uri': 'http://testsec.ubuntu.com/',
+ 'filename': 'security',
+ 'key': 'fakekey_security'}]
+ }
+
+ with mock.patch.object(cc_apt_configure,
+ 'add_apt_key_raw') as mockadd:
+ cc_apt_configure.add_mirror_keys(cfg, TARGET)
+ calls = [
+ mock.call('fakekey_primary', 'primary', hardened=False),
+ mock.call('fakekey_security', 'security', hardened=False),
+ ]
+ mockadd.assert_has_calls(calls, any_order=True)
+
+
class TestDebconfSelections(TestCase):
    """Tests for debconf_set_selections / apply_debconf_selections /
    dpkg_reconfigure in cc_apt_configure."""

    @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
    def test_set_sel_appends_newline_if_absent(self, m_subp):
        """Automatically append a newline to debconf-set-selections config."""
        selections = b'some/setting boolean true'
        cc_apt_configure.debconf_set_selections(selections=selections)
        cc_apt_configure.debconf_set_selections(selections=selections + b'\n')
        # both invocations must reach subp with exactly one trailing newline
        m_call = mock.call(
            ['debconf-set-selections'], data=selections + b'\n', capture=True,
            target=None)
        self.assertEqual([m_call, m_call], m_subp.call_args_list)

    @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
    def test_no_set_sel_if_none_to_set(self, m_set_sel):
        # cfg without a 'debconf_selections' key is a no-op
        cc_apt_configure.apply_debconf_selections({'foo': 'bar'})
        m_set_sel.assert_not_called()

    @mock.patch("cloudinit.config.cc_apt_configure."
                "debconf_set_selections")
    @mock.patch("cloudinit.config.cc_apt_configure."
                "util.get_installed_packages")
    def test_set_sel_call_has_expected_input(self, m_get_inst, m_set_sel):
        data = {
            'set1': 'pkga pkga/q1 mybool false',
            'set2': ('pkgb\tpkgb/b1\tstr\tthis is a string\n'
                     'pkgc\tpkgc/ip\tstring\t10.0.0.1')}
        lines = '\n'.join(data.values()).split('\n')

        m_get_inst.return_value = ["adduser", "apparmor"]
        m_set_sel.return_value = None

        cc_apt_configure.apply_debconf_selections({'debconf_selections': data})
        self.assertTrue(m_get_inst.called)
        self.assertEqual(m_set_sel.call_count, 1)

        # assumes called with *args value.
        selections = m_set_sel.call_args_list[0][0][0].decode()

        # every configured selection line must appear in the one payload
        missing = [
            line for line in lines if line not in selections.splitlines()
        ]
        self.assertEqual([], missing)

    @mock.patch("cloudinit.config.cc_apt_configure.dpkg_reconfigure")
    @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
    @mock.patch("cloudinit.config.cc_apt_configure."
                "util.get_installed_packages")
    def test_reconfigure_if_intersection(self, m_get_inst, m_set_sel,
                                         m_dpkg_r):
        data = {
            'set1': 'pkga pkga/q1 mybool false',
            'set2': ('pkgb\tpkgb/b1\tstr\tthis is a string\n'
                     'pkgc\tpkgc/ip\tstring\t10.0.0.1'),
            # NOTE(review): implicit string concatenation here yields
            # 'datasourcesmultiselect' with no separator — verify intended
            'cloud-init': ('cloud-init cloud-init/datasources'
                           'multiselect MAAS')}

        m_set_sel.return_value = None
        m_get_inst.return_value = ["adduser", "apparmor", "pkgb",
                                   "cloud-init", 'zdog']

        cc_apt_configure.apply_debconf_selections({'debconf_selections': data})

        # reconfigure should be called with the intersection
        # of (packages in config, packages installed)
        self.assertEqual(m_dpkg_r.call_count, 1)
        # assumes called with *args (dpkg_reconfigure([a,b,c], target=))
        packages = m_dpkg_r.call_args_list[0][0][0]
        self.assertEqual(set(['cloud-init', 'pkgb']), set(packages))

    @mock.patch("cloudinit.config.cc_apt_configure.dpkg_reconfigure")
    @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
    @mock.patch("cloudinit.config.cc_apt_configure."
                "util.get_installed_packages")
    def test_reconfigure_if_no_intersection(self, m_get_inst, m_set_sel,
                                            m_dpkg_r):
        data = {'set1': 'pkga pkga/q1 mybool false'}

        m_get_inst.return_value = ["adduser", "apparmor", "pkgb",
                                   "cloud-init", 'zdog']
        m_set_sel.return_value = None

        cc_apt_configure.apply_debconf_selections({'debconf_selections': data})

        # no configured package is installed -> nothing to reconfigure
        self.assertTrue(m_get_inst.called)
        self.assertEqual(m_dpkg_r.call_count, 0)

    @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
    def test_dpkg_reconfigure_does_reconfigure(self, m_subp):
        target = "/foo-target"

        # due to the way the cleaners are called (via dictionary reference)
        # mocking clean_cloud_init directly does not work. So we mock
        # the CONFIG_CLEANERS dictionary and assert our cleaner is called.
        ci_cleaner = mock.MagicMock()
        with mock.patch.dict(("cloudinit.config.cc_apt_configure."
                              "CONFIG_CLEANERS"),
                             values={'cloud-init': ci_cleaner}, clear=True):
            cc_apt_configure.dpkg_reconfigure(['pkga', 'cloud-init'],
                                              target=target)
        # cloud-init is actually the only package we have a cleaner for
        # so for now, its the only one that should reconfigured
        self.assertTrue(m_subp.called)
        ci_cleaner.assert_called_with(target)
        self.assertEqual(m_subp.call_count, 1)
        found = m_subp.call_args_list[0][0][0]
        expected = ['dpkg-reconfigure', '--frontend=noninteractive',
                    'cloud-init']
        self.assertEqual(expected, found)

    @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
    def test_dpkg_reconfigure_not_done_on_no_data(self, m_subp):
        # empty package list -> no dpkg-reconfigure invocation
        cc_apt_configure.dpkg_reconfigure([])
        m_subp.assert_not_called()

    @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
    def test_dpkg_reconfigure_not_done_if_no_cleaners(self, m_subp):
        # packages without registered cleaners are skipped entirely
        cc_apt_configure.dpkg_reconfigure(['pkgfoo', 'pkgbar'])
        m_subp.assert_not_called()
+
+#
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_apk_configure.py b/tests/unittests/config/test_cc_apk_configure.py
new file mode 100644
index 00000000..70139451
--- /dev/null
+++ b/tests/unittests/config/test_cc_apk_configure.py
@@ -0,0 +1,299 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+""" test_apk_configure
+Test creation of repositories file
+"""
+
+import logging
+import os
+import textwrap
+
+from cloudinit import (cloud, helpers, util)
+
+from cloudinit.config import cc_apk_configure
+from tests.unittests.helpers import (FilesystemMockingTestCase, mock)
+
+REPO_FILE = "/etc/apk/repositories"
+DEFAULT_MIRROR_URL = "https://alpine.global.ssl.fastly.net/alpine"
+CC_APK = 'cloudinit.config.cc_apk_configure'
+
+
+class TestNoConfig(FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestNoConfig, self).setUp()
+ self.add_patch(CC_APK + '._write_repositories_file', 'm_write_repos')
+ self.name = "apk-configure"
+ self.cloud_init = None
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ def test_no_config(self):
+ """
+ Test that nothing is done if no apk-configure
+ configuration is provided.
+ """
+ config = util.get_builtin_cfg()
+
+ cc_apk_configure.handle(self.name, config, self.cloud_init,
+ self.log, self.args)
+
+ self.assertEqual(0, self.m_write_repos.call_count)
+
+
+class TestConfig(FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestConfig, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.new_root = self.reRoot(root=self.new_root)
+ for dirname in ['tmp', 'etc/apk']:
+ util.ensure_dir(os.path.join(self.new_root, dirname))
+ self.paths = helpers.Paths({'templates_dir': self.new_root})
+ self.name = "apk-configure"
+ self.cloud = cloud.Cloud(None, self.paths, None, None, None)
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ @mock.patch(CC_APK + '._write_repositories_file')
+ def test_no_repo_settings(self, m_write_repos):
+ """
+ Test that nothing is written if the 'alpine-repo' key
+ is not present.
+ """
+ config = {"apk_repos": {}}
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ self.assertEqual(0, m_write_repos.call_count)
+
+ @mock.patch(CC_APK + '._write_repositories_file')
+ def test_empty_repo_settings(self, m_write_repos):
+ """
+ Test that nothing is written if 'alpine_repo' list is empty.
+ """
+ config = {"apk_repos": {"alpine_repo": []}}
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ self.assertEqual(0, m_write_repos.call_count)
+
+ def test_only_main_repo(self):
+ """
+ Test when only details of main repo is written to file.
+ """
+ alpine_version = 'v3.12'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_and_community_repos(self):
+ """
+ Test when only details of main and community repos are
+ written to file.
+ """
+ alpine_version = 'edge'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_community_testing_repos(self):
+ """
+ Test when details of main, community and testing repos
+ are written to file.
+ """
+ alpine_version = 'v3.12'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ #
+ # Testing - using with non-Edge installation may cause problems!
+ #
+ {0}/edge/testing
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_edge_main_community_testing_repos(self):
+ """
+ Test when details of main, community and testing repos
+ for Edge version of Alpine are written to file.
+ """
+ alpine_version = 'edge'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ }
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ {0}/{1}/testing
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_main_community_testing_local_repos(self):
+ """
+ Test when details of main, community, testing and
+ local repos are written to file.
+ """
+ alpine_version = 'v3.12'
+ local_repo_url = 'http://some.mirror/whereever'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ },
+ "local_repo_base_url": local_repo_url
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ #
+ # Testing - using with non-Edge installation may cause problems!
+ #
+ {0}/edge/testing
+
+ #
+ # Local repo
+ #
+ {2}/{1}
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+ def test_edge_main_community_testing_local_repos(self):
+ """
+ Test when details of main, community, testing and local repos
+ for Edge version of Alpine are written to file.
+ """
+ alpine_version = 'edge'
+ local_repo_url = 'http://some.mirror/whereever'
+ config = {
+ "apk_repos": {
+ "alpine_repo": {
+ "version": alpine_version,
+ "community_enabled": True,
+ "testing_enabled": True
+ },
+ "local_repo_base_url": local_repo_url
+ }
+ }
+
+ cc_apk_configure.handle(self.name, config, self.cloud, self.log,
+ self.args)
+
+ expected_content = textwrap.dedent("""\
+ #
+ # Created by cloud-init
+ #
+ # This file is written on first boot of an instance
+ #
+
+ {0}/{1}/main
+ {0}/{1}/community
+ {0}/edge/testing
+
+ #
+ # Local repo
+ #
+ {2}/{1}
+
+ """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
+
+ self.assertEqual(expected_content, util.load_file(REPO_FILE))
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_apt_pipelining.py b/tests/unittests/config/test_cc_apt_pipelining.py
new file mode 100644
index 00000000..d7589d35
--- /dev/null
+++ b/tests/unittests/config/test_cc_apt_pipelining.py
@@ -0,0 +1,28 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Tests cc_apt_pipelining handler"""
+
+import cloudinit.config.cc_apt_pipelining as cc_apt_pipelining
+
+from tests.unittests.helpers import CiTestCase, mock
+
+
+class TestAptPipelining(CiTestCase):
+
+ @mock.patch('cloudinit.config.cc_apt_pipelining.util.write_file')
+ def test_not_disabled_by_default(self, m_write_file):
+ """ensure that default behaviour is to not disable pipelining"""
+ cc_apt_pipelining.handle('foo', {}, None, mock.MagicMock(), None)
+ self.assertEqual(0, m_write_file.call_count)
+
+ @mock.patch('cloudinit.config.cc_apt_pipelining.util.write_file')
+ def test_false_disables_pipelining(self, m_write_file):
+ """ensure that pipelining can be disabled with correct config"""
+ cc_apt_pipelining.handle(
+ 'foo', {'apt_pipelining': 'false'}, None, mock.MagicMock(), None)
+ self.assertEqual(1, m_write_file.call_count)
+ args, _ = m_write_file.call_args
+ self.assertEqual(cc_apt_pipelining.DEFAULT_FILE, args[0])
+ self.assertIn('Pipeline-Depth "0"', args[1])
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_bootcmd.py b/tests/unittests/config/test_cc_bootcmd.py
new file mode 100644
index 00000000..6f38f12a
--- /dev/null
+++ b/tests/unittests/config/test_cc_bootcmd.py
@@ -0,0 +1,152 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import tempfile
+
+from cloudinit.config.cc_bootcmd import handle, schema
+from cloudinit import (subp, util)
+from tests.unittests.helpers import (
+ CiTestCase, mock, SchemaTestCaseMixin, skipUnlessJsonSchema)
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+class FakeExtendedTempFile(object):
+ def __init__(self, suffix):
+ self.suffix = suffix
+ self.handle = tempfile.NamedTemporaryFile(
+ prefix="ci-%s." % self.__class__.__name__, delete=False)
+
+ def __enter__(self):
+ return self.handle
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.handle.close()
+ util.del_file(self.handle.name)
+
+
+class TestBootcmd(CiTestCase):
+
+ with_logs = True
+
+ _etmpfile_path = ('cloudinit.config.cc_bootcmd.temp_utils.'
+ 'ExtendedTemporaryFile')
+
+ def setUp(self):
+ super(TestBootcmd, self).setUp()
+ self.subp = subp.subp
+ self.new_root = self.tmp_dir()
+
+ def test_handler_skip_if_no_bootcmd(self):
+ """When the provided config doesn't contain bootcmd, skip it."""
+ cfg = {}
+ mycloud = get_cloud()
+ handle('notimportant', cfg, mycloud, LOG, None)
+ self.assertIn(
+ "Skipping module named notimportant, no 'bootcmd' key",
+ self.logs.getvalue())
+
+ def test_handler_invalid_command_set(self):
+ """Commands which can't be converted to shell will raise errors."""
+ invalid_config = {'bootcmd': 1}
+ cc = get_cloud()
+ with self.assertRaises(TypeError) as context_manager:
+ handle('cc_bootcmd', invalid_config, cc, LOG, [])
+ self.assertIn('Failed to shellify bootcmd', self.logs.getvalue())
+ self.assertEqual(
+ "Input to shellify was type 'int'. Expected list or tuple.",
+ str(context_manager.exception))
+
+ @skipUnlessJsonSchema()
+ def test_handler_schema_validation_warns_non_array_type(self):
+ """Schema validation warns of non-array type for bootcmd key.
+
+ Schema validation is not strict, so bootcmd attempts to shellify the
+ invalid content.
+ """
+ invalid_config = {'bootcmd': 1}
+ cc = get_cloud()
+ with self.assertRaises(TypeError):
+ handle('cc_bootcmd', invalid_config, cc, LOG, [])
+ self.assertIn(
+ 'Invalid config:\nbootcmd: 1 is not of type \'array\'',
+ self.logs.getvalue())
+ self.assertIn('Failed to shellify', self.logs.getvalue())
+
+ @skipUnlessJsonSchema()
+ def test_handler_schema_validation_warns_non_array_item_type(self):
+ """Schema validation warns of non-array or string bootcmd items.
+
+ Schema validation is not strict, so bootcmd attempts to shellify the
+ invalid content.
+ """
+ invalid_config = {
+ 'bootcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]}
+ cc = get_cloud()
+ with self.assertRaises(TypeError) as context_manager:
+ handle('cc_bootcmd', invalid_config, cc, LOG, [])
+ expected_warnings = [
+ 'bootcmd.1: 20 is not valid under any of the given schemas',
+ 'bootcmd.3: {\'a\': \'n\'} is not valid under any of the given'
+ ' schema'
+ ]
+ logs = self.logs.getvalue()
+ for warning in expected_warnings:
+ self.assertIn(warning, logs)
+ self.assertIn('Failed to shellify', logs)
+ self.assertEqual(
+ ("Unable to shellify type 'int'. Expected list, string, tuple. "
+ "Got: 20"),
+ str(context_manager.exception))
+
+ def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self):
+ """Valid schema runs a bootcmd script with INSTANCE_ID in the env."""
+ cc = get_cloud()
+ out_file = self.tmp_path('bootcmd.out', self.new_root)
+ my_id = "b6ea0f59-e27d-49c6-9f87-79f19765a425"
+ valid_config = {'bootcmd': [
+ 'echo {0} $INSTANCE_ID > {1}'.format(my_id, out_file)]}
+
+ with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
+ with self.allow_subp(['/bin/sh']):
+ handle('cc_bootcmd', valid_config, cc, LOG, [])
+ self.assertEqual(my_id + ' iid-datasource-none\n',
+ util.load_file(out_file))
+
+ def test_handler_runs_bootcmd_script_with_error(self):
+ """When a valid script generates an error, that error is raised."""
+ cc = get_cloud()
+ valid_config = {'bootcmd': ['exit 1']} # Script with error
+
+ with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
+ with self.allow_subp(['/bin/sh']):
+ with self.assertRaises(subp.ProcessExecutionError) as ctxt:
+ handle('does-not-matter', valid_config, cc, LOG, [])
+ self.assertIn(
+ 'Unexpected error while running command.\n'
+ "Command: ['/bin/sh',",
+ str(ctxt.exception))
+ self.assertIn(
+ 'Failed to run bootcmd module does-not-matter',
+ self.logs.getvalue())
+
+
+@skipUnlessJsonSchema()
+class TestSchema(CiTestCase, SchemaTestCaseMixin):
+ """Directly test schema rather than through handle."""
+
+ schema = schema
+
+ def test_duplicates_are_fine_array_array(self):
+ """Duplicated commands array/array entries are allowed."""
+ self.assertSchemaValid(
+ ["byebye", "byebye"], 'command entries can be duplicate')
+
+ def test_duplicates_are_fine_array_string(self):
+ """Duplicated commands array/string entries are allowed."""
+ self.assertSchemaValid(
+ ["echo bye", "echo bye"], "command entries can be duplicate.")
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ca_certs.py b/tests/unittests/config/test_cc_ca_certs.py
new file mode 100644
index 00000000..91b005d0
--- /dev/null
+++ b/tests/unittests/config/test_cc_ca_certs.py
@@ -0,0 +1,361 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import shutil
+import tempfile
+import unittest
+from contextlib import ExitStack
+from unittest import mock
+
+from cloudinit import distros
+from cloudinit.config import cc_ca_certs
+from cloudinit import helpers
+from cloudinit import subp
+from cloudinit import util
+from tests.unittests.helpers import TestCase
+
+from tests.unittests.util import get_cloud
+
+
+class TestNoConfig(unittest.TestCase):
+ def setUp(self):
+ super(TestNoConfig, self).setUp()
+ self.name = "ca-certs"
+ self.cloud_init = None
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ def test_no_config(self):
+ """
+ Test that nothing is done if no ca-certs configuration is provided.
+ """
+ config = util.get_builtin_cfg()
+ with ExitStack() as mocks:
+ util_mock = mocks.enter_context(
+ mock.patch.object(util, 'write_file'))
+ certs_mock = mocks.enter_context(
+ mock.patch.object(cc_ca_certs, 'update_ca_certs'))
+
+ cc_ca_certs.handle(self.name, config, self.cloud_init, self.log,
+ self.args)
+
+ self.assertEqual(util_mock.call_count, 0)
+ self.assertEqual(certs_mock.call_count, 0)
+
+
+class TestConfig(TestCase):
+ def setUp(self):
+ super(TestConfig, self).setUp()
+ self.name = "ca-certs"
+ self.paths = None
+ self.log = logging.getLogger("TestNoConfig")
+ self.args = []
+
+ def _fetch_distro(self, kind):
+ cls = distros.fetch(kind)
+ paths = helpers.Paths({})
+ return cls(kind, {}, paths)
+
+ def _mock_init(self):
+ self.mocks = ExitStack()
+ self.addCleanup(self.mocks.close)
+
+ # Mock out the functions that actually modify the system
+ self.mock_add = self.mocks.enter_context(
+ mock.patch.object(cc_ca_certs, 'add_ca_certs'))
+ self.mock_update = self.mocks.enter_context(
+ mock.patch.object(cc_ca_certs, 'update_ca_certs'))
+ self.mock_remove = self.mocks.enter_context(
+ mock.patch.object(cc_ca_certs, 'remove_default_ca_certs'))
+
+ def test_no_trusted_list(self):
+ """
+ Test that no certificates are written if the 'trusted' key is not
+ present.
+ """
+ config = {"ca-certs": {}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.assertEqual(self.mock_add.call_count, 0)
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 0)
+
+ def test_empty_trusted_list(self):
+ """Test that no certificates are written if 'trusted' list is empty."""
+ config = {"ca-certs": {"trusted": []}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.assertEqual(self.mock_add.call_count, 0)
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 0)
+
+ def test_single_trusted(self):
+ """Test that a single cert gets passed to add_ca_certs."""
+ config = {"ca-certs": {"trusted": ["CERT1"]}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.mock_add.assert_called_once_with(conf, ['CERT1'])
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 0)
+
+ def test_multiple_trusted(self):
+ """Test that multiple certs get passed to add_ca_certs."""
+ config = {"ca-certs": {"trusted": ["CERT1", "CERT2"]}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.mock_add.assert_called_once_with(conf, ['CERT1', 'CERT2'])
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 0)
+
+ def test_remove_default_ca_certs(self):
+ """Test remove_defaults works as expected."""
+ config = {"ca-certs": {"remove-defaults": True}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.assertEqual(self.mock_add.call_count, 0)
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 1)
+
+ def test_no_remove_defaults_if_false(self):
+ """Test remove_defaults is not called when config value is False."""
+ config = {"ca-certs": {"remove-defaults": False}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.assertEqual(self.mock_add.call_count, 0)
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 0)
+
+ def test_correct_order_for_remove_then_add(self):
+ """Test that defaults are removed before trusted certs are added."""
+ config = {"ca-certs": {"remove-defaults": True, "trusted": ["CERT1"]}}
+
+ for distro_name in cc_ca_certs.distros:
+ self._mock_init()
+ cloud = get_cloud(distro_name)
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
+
+ self.mock_add.assert_called_once_with(conf, ['CERT1'])
+ self.assertEqual(self.mock_update.call_count, 1)
+ self.assertEqual(self.mock_remove.call_count, 1)
+
+
+class TestAddCaCerts(TestCase):
+
+ def setUp(self):
+ super(TestAddCaCerts, self).setUp()
+ tmpdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmpdir)
+ self.paths = helpers.Paths({
+ 'cloud_dir': tmpdir,
+ })
+ self.add_patch("cloudinit.config.cc_ca_certs.os.stat", "m_stat")
+
+ def _fetch_distro(self, kind):
+ cls = distros.fetch(kind)
+ paths = helpers.Paths({})
+ return cls(kind, {}, paths)
+
+ def test_no_certs_in_list(self):
+ """Test that no certificates are written if none are provided."""
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ with mock.patch.object(util, 'write_file') as mockobj:
+ cc_ca_certs.add_ca_certs(conf, [])
+ self.assertEqual(mockobj.call_count, 0)
+
+ def test_single_cert_trailing_cr(self):
+ """Test adding a single certificate to the trusted CAs
+ when existing ca-certificates has trailing newline"""
+ cert = "CERT1\nLINE2\nLINE3"
+
+ ca_certs_content = "line1\nline2\ncloud-init-ca-certs.crt\nline3\n"
+ expected = "line1\nline2\nline3\ncloud-init-ca-certs.crt\n"
+
+ self.m_stat.return_value.st_size = 1
+
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+
+ with ExitStack() as mocks:
+ mock_write = mocks.enter_context(
+ mock.patch.object(util, 'write_file'))
+ mock_load = mocks.enter_context(
+ mock.patch.object(util, 'load_file',
+ return_value=ca_certs_content))
+
+ cc_ca_certs.add_ca_certs(conf, [cert])
+
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_full_path'],
+ cert, mode=0o644)])
+ if conf['ca_cert_config'] is not None:
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_config'],
+ expected, omode="wb")])
+ mock_load.assert_called_once_with(conf['ca_cert_config'])
+
+ def test_single_cert_no_trailing_cr(self):
+ """Test adding a single certificate to the trusted CAs
+ when existing ca-certificates has no trailing newline"""
+ cert = "CERT1\nLINE2\nLINE3"
+
+ ca_certs_content = "line1\nline2\nline3"
+
+ self.m_stat.return_value.st_size = 1
+
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+
+ with ExitStack() as mocks:
+ mock_write = mocks.enter_context(
+ mock.patch.object(util, 'write_file'))
+ mock_load = mocks.enter_context(
+ mock.patch.object(util, 'load_file',
+ return_value=ca_certs_content))
+
+ cc_ca_certs.add_ca_certs(conf, [cert])
+
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_full_path'],
+ cert, mode=0o644)])
+ if conf['ca_cert_config'] is not None:
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_config'],
+ "%s\n%s\n" % (ca_certs_content,
+ conf['ca_cert_filename']),
+ omode="wb")])
+
+ mock_load.assert_called_once_with(conf['ca_cert_config'])
+
+ def test_single_cert_to_empty_existing_ca_file(self):
+ """Test adding a single certificate to the trusted CAs
+ when existing ca-certificates.conf is empty"""
+ cert = "CERT1\nLINE2\nLINE3"
+
+ expected = "cloud-init-ca-certs.crt\n"
+
+ self.m_stat.return_value.st_size = 0
+
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ with mock.patch.object(util, 'write_file',
+ autospec=True) as m_write:
+
+ cc_ca_certs.add_ca_certs(conf, [cert])
+
+ m_write.assert_has_calls([
+ mock.call(conf['ca_cert_full_path'],
+ cert, mode=0o644)])
+ if conf['ca_cert_config'] is not None:
+ m_write.assert_has_calls([
+ mock.call(conf['ca_cert_config'],
+ expected, omode="wb")])
+
+ def test_multiple_certs(self):
+ """Test adding multiple certificates to the trusted CAs."""
+ certs = ["CERT1\nLINE2\nLINE3", "CERT2\nLINE2\nLINE3"]
+ expected_cert_file = "\n".join(certs)
+ ca_certs_content = "line1\nline2\nline3"
+
+ self.m_stat.return_value.st_size = 1
+
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+
+ with ExitStack() as mocks:
+ mock_write = mocks.enter_context(
+ mock.patch.object(util, 'write_file'))
+ mock_load = mocks.enter_context(
+ mock.patch.object(util, 'load_file',
+ return_value=ca_certs_content))
+
+ cc_ca_certs.add_ca_certs(conf, certs)
+
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_full_path'],
+ expected_cert_file, mode=0o644)])
+ if conf['ca_cert_config'] is not None:
+ mock_write.assert_has_calls([
+ mock.call(conf['ca_cert_config'],
+ "%s\n%s\n" % (ca_certs_content,
+ conf['ca_cert_filename']),
+ omode='wb')])
+
+ mock_load.assert_called_once_with(conf['ca_cert_config'])
+
+
+class TestUpdateCaCerts(unittest.TestCase):
+ def test_commands(self):
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+ with mock.patch.object(subp, 'subp') as mockobj:
+ cc_ca_certs.update_ca_certs(conf)
+ mockobj.assert_called_once_with(
+ conf['ca_cert_update_cmd'], capture=False)
+
+
+class TestRemoveDefaultCaCerts(TestCase):
+
+ def setUp(self):
+ super(TestRemoveDefaultCaCerts, self).setUp()
+ tmpdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmpdir)
+ self.paths = helpers.Paths({
+ 'cloud_dir': tmpdir,
+ })
+
+ def test_commands(self):
+ for distro_name in cc_ca_certs.distros:
+ conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
+
+ with ExitStack() as mocks:
+ mock_delete = mocks.enter_context(
+ mock.patch.object(util, 'delete_dir_contents'))
+ mock_write = mocks.enter_context(
+ mock.patch.object(util, 'write_file'))
+ mock_subp = mocks.enter_context(
+ mock.patch.object(subp, 'subp'))
+
+ cc_ca_certs.remove_default_ca_certs(distro_name, conf)
+
+ mock_delete.assert_has_calls([
+ mock.call(conf['ca_cert_path']),
+ mock.call(conf['ca_cert_system_path'])])
+
+ if conf['ca_cert_config'] is not None:
+ mock_write.assert_called_once_with(
+ conf['ca_cert_config'], "", mode=0o644)
+
+ if distro_name in ['debian', 'ubuntu']:
+ mock_subp.assert_called_once_with(
+ ('debconf-set-selections', '-'),
+ "ca-certificates \
+ca-certificates/trust_new_crts select no")
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_chef.py b/tests/unittests/config/test_cc_chef.py
new file mode 100644
index 00000000..060293c8
--- /dev/null
+++ b/tests/unittests/config/test_cc_chef.py
@@ -0,0 +1,271 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import httpretty
+import json
+import logging
+import os
+
+from cloudinit.config import cc_chef
+from cloudinit import util
+
+from tests.unittests.helpers import (
+ HttprettyTestCase, FilesystemMockingTestCase, mock, skipIf)
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+CLIENT_TEMPL = os.path.sep.join(["templates", "chef_client.rb.tmpl"])
+
+# This is adjusted to use http because using with https causes issue
+# in some openssl/httpretty combinations.
+# https://github.com/gabrielfalcao/HTTPretty/issues/242
+# We saw issue in opensuse 42.3 with
+# httpretty=0.8.8-7.1 ndg-httpsclient=0.4.0-3.2 pyOpenSSL=16.0.0-4.1
+OMNIBUS_URL_HTTP = cc_chef.OMNIBUS_URL.replace("https:", "http:")
+
+
+class TestInstallChefOmnibus(HttprettyTestCase):
+
+ def setUp(self):
+ super(TestInstallChefOmnibus, self).setUp()
+ self.new_root = self.tmp_dir()
+
+ @mock.patch("cloudinit.config.cc_chef.OMNIBUS_URL", OMNIBUS_URL_HTTP)
+ def test_install_chef_from_omnibus_runs_chef_url_content(self):
+ """install_chef_from_omnibus calls subp_blob_in_tempfile."""
+ response = b'#!/bin/bash\necho "Hi Mom"'
+ httpretty.register_uri(
+ httpretty.GET, cc_chef.OMNIBUS_URL, body=response, status=200)
+ ret = (None, None) # stdout, stderr but capture=False
+
+ with mock.patch("cloudinit.config.cc_chef.subp_blob_in_tempfile",
+ return_value=ret) as m_subp_blob:
+ cc_chef.install_chef_from_omnibus()
+ # admittedly whitebox, but assuming subp_blob_in_tempfile works
+ # this should be fine.
+ self.assertEqual(
+ [mock.call(blob=response, args=[], basename='chef-omnibus-install',
+ capture=False)],
+ m_subp_blob.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_chef.url_helper.readurl')
+ @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
+ def test_install_chef_from_omnibus_retries_url(self, m_subp_blob, m_rdurl):
+ """install_chef_from_omnibus retries OMNIBUS_URL upon failure."""
+
+ class FakeURLResponse(object):
+ contents = '#!/bin/bash\necho "Hi Mom" > {0}/chef.out'.format(
+ self.new_root)
+
+ m_rdurl.return_value = FakeURLResponse()
+
+ cc_chef.install_chef_from_omnibus()
+ expected_kwargs = {'retries': cc_chef.OMNIBUS_URL_RETRIES,
+ 'url': cc_chef.OMNIBUS_URL}
+ self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[0][1])
+ cc_chef.install_chef_from_omnibus(retries=10)
+ expected_kwargs = {'retries': 10,
+ 'url': cc_chef.OMNIBUS_URL}
+ self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[1][1])
+ expected_subp_kwargs = {
+ 'args': ['-v', '2.0'],
+ 'basename': 'chef-omnibus-install',
+ 'blob': m_rdurl.return_value.contents,
+ 'capture': False
+ }
+ self.assertCountEqual(
+ expected_subp_kwargs,
+ m_subp_blob.call_args_list[0][1])
+
+ @mock.patch("cloudinit.config.cc_chef.OMNIBUS_URL", OMNIBUS_URL_HTTP)
+ @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
+ def test_install_chef_from_omnibus_has_omnibus_version(self, m_subp_blob):
+ """install_chef_from_omnibus provides version arg to OMNIBUS_URL."""
+ chef_outfile = self.tmp_path('chef.out', self.new_root)
+ response = '#!/bin/bash\necho "Hi Mom" > {0}'.format(chef_outfile)
+ httpretty.register_uri(
+ httpretty.GET, cc_chef.OMNIBUS_URL, body=response)
+ cc_chef.install_chef_from_omnibus(omnibus_version='2.0')
+
+ called_kwargs = m_subp_blob.call_args_list[0][1]
+ expected_kwargs = {
+ 'args': ['-v', '2.0'],
+ 'basename': 'chef-omnibus-install',
+ 'blob': response,
+ 'capture': False
+ }
+ self.assertCountEqual(expected_kwargs, called_kwargs)
+
+
+class TestChef(FilesystemMockingTestCase):
+
+ def setUp(self):
+ super(TestChef, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ def test_no_config(self):
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ cfg = {}
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ for d in cc_chef.CHEF_DIRS:
+ self.assertFalse(os.path.isdir(d))
+
+ @skipIf(not os.path.isfile(CLIENT_TEMPL),
+ CLIENT_TEMPL + " is not available")
+ def test_basic_config(self):
+ """
+ test basic config looks sane
+
+ # This should create a file of the format...
+ # Created by cloud-init v. 0.7.6 on Sat, 11 Oct 2014 23:57:21 +0000
+ chef_license "accept"
+ log_level :info
+ ssl_verify_mode :verify_none
+ log_location "/var/log/chef/client.log"
+ validation_client_name "bob"
+ validation_key "/etc/chef/validation.pem"
+ client_key "/etc/chef/client.pem"
+ chef_server_url "localhost"
+ environment "_default"
+ node_name "iid-datasource-none"
+ json_attribs "/etc/chef/firstboot.json"
+ file_cache_path "/var/cache/chef"
+ file_backup_path "/var/backups/chef"
+ pid_file "/var/run/chef/client.pid"
+ Chef::Log::Formatter.show_time = true
+ encrypted_data_bag_secret "/etc/chef/encrypted_data_bag_secret"
+ """
+ tpl_file = util.load_file('templates/chef_client.rb.tmpl')
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
+ cfg = {
+ 'chef': {
+ 'chef_license': "accept",
+ 'server_url': 'localhost',
+ 'validation_name': 'bob',
+ 'validation_key': "/etc/chef/vkey.pem",
+ 'validation_cert': "this is my cert",
+ 'encrypted_data_bag_secret':
+ '/etc/chef/encrypted_data_bag_secret'
+ },
+ }
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ for d in cc_chef.CHEF_DIRS:
+ self.assertTrue(os.path.isdir(d))
+ c = util.load_file(cc_chef.CHEF_RB_PATH)
+
+ # the content of these keys is not expected to be rendered to tmpl
+ unrendered_keys = ('validation_cert',)
+ for k, v in cfg['chef'].items():
+ if k in unrendered_keys:
+ continue
+ self.assertIn(v, c)
+ for k, v in cc_chef.CHEF_RB_TPL_DEFAULTS.items():
+ if k in unrendered_keys:
+ continue
+ # the value from the cfg overrides that in the default
+ val = cfg['chef'].get(k, v)
+ if isinstance(val, str):
+ self.assertIn(val, c)
+ c = util.load_file(cc_chef.CHEF_FB_PATH)
+ self.assertEqual({}, json.loads(c))
+
+ def test_firstboot_json(self):
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ cfg = {
+ 'chef': {
+ 'server_url': 'localhost',
+ 'validation_name': 'bob',
+ 'run_list': ['a', 'b', 'c'],
+ 'initial_attributes': {
+ 'c': 'd',
+ }
+ },
+ }
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ c = util.load_file(cc_chef.CHEF_FB_PATH)
+ self.assertEqual(
+ {
+ 'run_list': ['a', 'b', 'c'],
+ 'c': 'd',
+ }, json.loads(c))
+
+ @skipIf(not os.path.isfile(CLIENT_TEMPL),
+ CLIENT_TEMPL + " is not available")
+ def test_template_deletes(self):
+ tpl_file = util.load_file('templates/chef_client.rb.tmpl')
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
+ cfg = {
+ 'chef': {
+ 'server_url': 'localhost',
+ 'validation_name': 'bob',
+ 'json_attribs': None,
+ 'show_time': None,
+ },
+ }
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ c = util.load_file(cc_chef.CHEF_RB_PATH)
+ self.assertNotIn('json_attribs', c)
+ self.assertNotIn('Formatter.show_time', c)
+
+ @skipIf(not os.path.isfile(CLIENT_TEMPL),
+ CLIENT_TEMPL + " is not available")
+ def test_validation_cert_and_validation_key(self):
+ # test validation_cert content is written to validation_key path
+ tpl_file = util.load_file('templates/chef_client.rb.tmpl')
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
+ v_path = '/etc/chef/vkey.pem'
+ v_cert = 'this is my cert'
+ cfg = {
+ 'chef': {
+ 'server_url': 'localhost',
+ 'validation_name': 'bob',
+ 'validation_key': v_path,
+ 'validation_cert': v_cert
+ },
+ }
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ content = util.load_file(cc_chef.CHEF_RB_PATH)
+ self.assertIn(v_path, content)
+ util.load_file(v_path)
+ self.assertEqual(v_cert, util.load_file(v_path))
+
+ def test_validation_cert_with_system(self):
+ # test validation_cert content is not written over system file
+ tpl_file = util.load_file('templates/chef_client.rb.tmpl')
+ self.patchUtils(self.tmp)
+ self.patchOS(self.tmp)
+
+ v_path = '/etc/chef/vkey.pem'
+ v_cert = "system"
+ expected_cert = "this is the system file certificate"
+ cfg = {
+ 'chef': {
+ 'server_url': 'localhost',
+ 'validation_name': 'bob',
+ 'validation_key': v_path,
+ 'validation_cert': v_cert
+ },
+ }
+ util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
+ util.write_file(v_path, expected_cert)
+ cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
+ content = util.load_file(cc_chef.CHEF_RB_PATH)
+ self.assertIn(v_path, content)
+ util.load_file(v_path)
+ self.assertEqual(expected_cert, util.load_file(v_path))
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_debug.py b/tests/unittests/config/test_cc_debug.py
new file mode 100644
index 00000000..174f772f
--- /dev/null
+++ b/tests/unittests/config/test_cc_debug.py
@@ -0,0 +1,59 @@
+# Copyright (C) 2014 Yahoo! Inc.
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import shutil
+import tempfile
+
+from cloudinit import util
+from cloudinit.config import cc_debug
+from tests.unittests.helpers import (FilesystemMockingTestCase, mock)
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
@mock.patch('cloudinit.distros.debian.read_system_locale')
class TestDebug(FilesystemMockingTestCase):
    """Tests for cc_debug's handling of the debug output file."""

    def setUp(self):
        super(TestDebug, self).setUp()
        self.new_root = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.new_root)
        self.patchUtils(self.new_root)

    def test_debug_write(self, m_locale):
        """verbose=True writes a log that mentions every top-level key."""
        m_locale.return_value = 'en_US.UTF-8'
        cfg = {
            'abc': '123',
            'c': '\u20a0',
            'debug': {
                'verbose': True,
                # Does not actually write here due to mocking...
                'output': '/var/log/cloud-init-debug.log',
            },
        }
        cc_debug.handle('cc_debug', cfg, get_cloud(), LOG, [])
        contents = util.load_file('/var/log/cloud-init-debug.log')
        # Some basic sanity tests...
        self.assertTrue(len(contents) > 0)
        for key in cfg:
            self.assertIn(key, contents)

    def test_debug_no_write(self, m_locale):
        """verbose=False must leave the output file unwritten."""
        m_locale.return_value = 'en_US.UTF-8'
        cfg = {
            'abc': '123',
            'debug': {
                'verbose': False,
                # Does not actually write here due to mocking...
                'output': '/var/log/cloud-init-debug.log',
            },
        }
        cc_debug.handle('cc_debug', cfg, get_cloud(), LOG, [])
        self.assertRaises(
            IOError, util.load_file, '/var/log/cloud-init-debug.log')
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_disable_ec2_metadata.py b/tests/unittests/config/test_cc_disable_ec2_metadata.py
new file mode 100644
index 00000000..7a794845
--- /dev/null
+++ b/tests/unittests/config/test_cc_disable_ec2_metadata.py
@@ -0,0 +1,48 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Tests cc_disable_ec2_metadata handler"""
+
+import cloudinit.config.cc_disable_ec2_metadata as ec2_meta
+
+from tests.unittests.helpers import CiTestCase, mock
+
+import logging
+
+LOG = logging.getLogger(__name__)
+
+DISABLE_CFG = {'disable_ec2_metadata': 'true'}
+
+
class TestEC2MetadataRoute(CiTestCase):
    """Verify which route command cc_disable_ec2_metadata invokes."""

    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.which')
    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.subp')
    def test_disable_ifconfig(self, m_subp, m_which):
        """Set the route if ifconfig command is available"""
        m_which.side_effect = lambda name: name if name == 'ifconfig' else None
        ec2_meta.handle('foo', DISABLE_CFG, None, LOG, None)
        expected_cmd = ['route', 'add', '-host', '169.254.169.254', 'reject']
        m_subp.assert_called_with(expected_cmd, capture=False)

    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.which')
    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.subp')
    def test_disable_ip(self, m_subp, m_which):
        """Set the route if ip command is available"""
        m_which.side_effect = lambda name: name if name == 'ip' else None
        ec2_meta.handle('foo', DISABLE_CFG, None, LOG, None)
        expected_cmd = ['ip', 'route', 'add', 'prohibit', '169.254.169.254']
        m_subp.assert_called_with(expected_cmd, capture=False)

    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.which')
    @mock.patch('cloudinit.config.cc_disable_ec2_metadata.subp.subp')
    def test_disable_no_tool(self, m_subp, m_which):
        """Log error when neither route nor ip commands are available"""
        m_which.return_value = None  # Find neither ifconfig nor ip
        ec2_meta.handle('foo', DISABLE_CFG, None, LOG, None)
        self.assertEqual(
            [mock.call('ip'), mock.call('ifconfig')], m_which.call_args_list)
        m_subp.assert_not_called()
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_disk_setup.py b/tests/unittests/config/test_cc_disk_setup.py
new file mode 100644
index 00000000..fa565559
--- /dev/null
+++ b/tests/unittests/config/test_cc_disk_setup.py
@@ -0,0 +1,243 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import random
+
+from cloudinit.config import cc_disk_setup
+from tests.unittests.helpers import CiTestCase, ExitStack, mock, TestCase
+
+
class TestIsDiskUsed(TestCase):
    """Tests for cc_disk_setup.is_disk_used."""

    def setUp(self):
        super(TestIsDiskUsed, self).setUp()
        self.patches = ExitStack()
        base = 'cloudinit.config.cc_disk_setup'
        self.enumerate_disk = self.patches.enter_context(
            mock.patch('%s.enumerate_disk' % base))
        self.check_fs = self.patches.enter_context(
            mock.patch('%s.check_fs' % base))

    def tearDown(self):
        super(TestIsDiskUsed, self).tearDown()
        self.patches.close()

    def _fake_disk(self, child_count, fs_type):
        """Make the mocks report child_count nodes and the given fs type."""
        self.enumerate_disk.return_value = (
            mock.MagicMock() for _ in range(child_count))
        self.check_fs.return_value = (
            mock.MagicMock(), fs_type, mock.MagicMock())

    def test_multiple_child_nodes_returns_true(self):
        self._fake_disk(2, None)
        self.assertTrue(cc_disk_setup.is_disk_used(mock.MagicMock()))

    def test_valid_filesystem_returns_true(self):
        self._fake_disk(1, 'ext4')
        self.assertTrue(cc_disk_setup.is_disk_used(mock.MagicMock()))

    def test_one_child_nodes_and_no_fs_returns_false(self):
        self._fake_disk(1, None)
        self.assertFalse(cc_disk_setup.is_disk_used(mock.MagicMock()))
+
+
class TestGetMbrHddSize(TestCase):
    """Tests for cc_disk_setup.get_hdd_size sector arithmetic."""

    def setUp(self):
        super(TestGetMbrHddSize, self).setUp()
        self.patches = ExitStack()
        self.subp = self.patches.enter_context(
            mock.patch.object(cc_disk_setup.subp, 'subp'))

    def tearDown(self):
        super(TestGetMbrHddSize, self).tearDown()
        self.patches.close()

    def _configure_subp_mock(self, hdd_size_in_bytes, sector_size_in_bytes):
        """Answer the blockdev --getsize64/--getss calls with canned data."""
        def fake_subp(cmd, *args, **kwargs):
            self.assertEqual(3, len(cmd))
            if '--getsize64' in cmd:
                return hdd_size_in_bytes, None
            if '--getss' in cmd:
                return sector_size_in_bytes, None
            raise Exception('Unexpected blockdev command called')

        self.subp.side_effect = fake_subp

    def _test_for_sector_size(self, sector_size):
        size_in_bytes = random.randint(10000, 10000000) * 512
        size_in_sectors = size_in_bytes / sector_size
        self._configure_subp_mock(size_in_bytes, sector_size)
        self.assertEqual(
            size_in_sectors, cc_disk_setup.get_hdd_size('/dev/sda1'))

    def test_size_for_512_byte_sectors(self):
        self._test_for_sector_size(512)

    def test_size_for_1024_byte_sectors(self):
        self._test_for_sector_size(1024)

    def test_size_for_2048_byte_sectors(self):
        self._test_for_sector_size(2048)

    def test_size_for_4096_byte_sectors(self):
        self._test_for_sector_size(4096)
+
+
class TestGetPartitionMbrLayout(TestCase):
    """Tests for cc_disk_setup.get_partition_mbr_layout."""

    def test_single_partition_using_boolean(self):
        layout = cc_disk_setup.get_partition_mbr_layout(1000, True)
        self.assertEqual('0,', layout)

    def test_single_partition_using_list(self):
        disk_size = random.randint(1000000, 1000000000000)
        layout = cc_disk_setup.get_partition_mbr_layout(disk_size, [100])
        self.assertEqual(',,83', layout)

    def test_half_and_half(self):
        disk_size = random.randint(1000000, 1000000000000)
        expected_partition_size = int(float(disk_size) / 2)
        layout = cc_disk_setup.get_partition_mbr_layout(disk_size, [50, 50])
        self.assertEqual(
            ',{0},83\n,,83'.format(expected_partition_size), layout)

    def test_thirds_with_different_partition_type(self):
        disk_size = random.randint(1000000, 1000000000000)
        expected_partition_size = int(float(disk_size) * 0.33)
        layout = cc_disk_setup.get_partition_mbr_layout(
            disk_size, [33, [66, 82]])
        self.assertEqual(
            ',{0},83\n,,82'.format(expected_partition_size), layout)
+
+
class TestUpdateFsSetupDevices(TestCase):
    """Tests for cc_disk_setup.update_fs_setup_devices."""

    def test_regression_1634678(self):
        # Cf. https://bugs.launchpad.net/cloud-init/+bug/1634678
        fs_setup = {
            'partition': 'auto',
            'device': '/dev/xvdb1',
            'overwrite': False,
            'label': 'test',
            'filesystem': 'ext4'
        }
        # Everything must be preserved; only _origname gets added.
        expected = dict(fs_setup, _origname='/dev/xvdb1')

        cc_disk_setup.update_fs_setup_devices(
            [fs_setup], lambda device: device)

        self.assertEqual(expected, fs_setup)

    def test_dotted_devname(self):
        fs_setup = {
            'partition': 'auto',
            'device': 'ephemeral0.0',
            'label': 'test2',
            'filesystem': 'xfs'
        }

        cc_disk_setup.update_fs_setup_devices(
            [fs_setup], lambda device: device)

        self.assertEqual(
            {
                '_origname': 'ephemeral0.0',
                '_partition': 'auto',
                'partition': '0',
                'device': 'ephemeral0',
                'label': 'test2',
                'filesystem': 'xfs'
            },
            fs_setup)

    def test_dotted_devname_populates_partition(self):
        fs_setup = {
            'device': 'ephemeral0.1',
            'label': 'test2',
            'filesystem': 'xfs'
        }

        cc_disk_setup.update_fs_setup_devices(
            [fs_setup], lambda device: device)

        self.assertEqual(
            {
                '_origname': 'ephemeral0.1',
                'device': 'ephemeral0',
                'partition': '1',
                'label': 'test2',
                'filesystem': 'xfs'
            },
            fs_setup)
+
+
@mock.patch('cloudinit.config.cc_disk_setup.assert_and_settle_device',
            return_value=None)
@mock.patch('cloudinit.config.cc_disk_setup.find_device_node',
            return_value=('/dev/xdb1', False))
@mock.patch('cloudinit.config.cc_disk_setup.device_type', return_value=None)
@mock.patch('cloudinit.config.cc_disk_setup.subp.subp', return_value=('', ''))
class TestMkfsCommandHandling(CiTestCase):
    """Tests for the commands cc_disk_setup.mkfs constructs and runs."""

    with_logs = True

    def test_with_cmd(self, subp, *args):
        """mkfs honors cmd and logs warnings when extra_opts or overwrite are
        provided."""
        cc_disk_setup.mkfs({
            'cmd': 'mkfs -t %(filesystem)s -L %(label)s %(device)s',
            'filesystem': 'ext4',
            'device': '/dev/xdb1',
            'label': 'with_cmd',
            'extra_opts': ['should', 'generate', 'warning'],
            'overwrite': 'should generate warning too'
        })

        # Both ignored keys must be called out in the log, naming the
        # fully-substituted command.
        self.assertIn(
            'extra_opts ' +
            'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
            '/dev/xdb1',
            self.logs.getvalue())
        self.assertIn(
            'overwrite ' +
            'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
            '/dev/xdb1',
            self.logs.getvalue())

        subp.assert_called_once_with(
            'mkfs -t ext4 -L with_cmd /dev/xdb1', shell=True)

    @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
    def test_overwrite_and_extra_opts_without_cmd(self, m_which, subp, *args):
        """mkfs observes extra_opts and overwrite settings when cmd is not
        present."""
        m_which.side_effect = lambda p: {'mkfs.ext4': '/sbin/mkfs.ext4'}[p]
        cc_disk_setup.mkfs({
            'filesystem': 'ext4',
            'device': '/dev/xdb1',
            'label': 'without_cmd',
            'extra_opts': ['are', 'added'],
            'overwrite': True
        })

        subp.assert_called_once_with(
            ['/sbin/mkfs.ext4', '/dev/xdb1',
             '-L', 'without_cmd', '-F', 'are', 'added'],
            shell=False)

    @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
    def test_mkswap(self, m_which, subp, *args):
        """mkfs for a swap filesystem falls back from mkfs.swap to mkswap
        and passes -f for overwrite."""
        # First lookup (mkfs.swap) fails, second (mkswap) succeeds.
        m_which.side_effect = iter([None, '/sbin/mkswap'])
        cc_disk_setup.mkfs({
            'filesystem': 'swap',
            'device': '/dev/xdb1',
            'label': 'swap',
            'overwrite': True,
        })

        self.assertEqual([mock.call('mkfs.swap'), mock.call('mkswap')],
                         m_which.call_args_list)
        subp.assert_called_once_with(
            ['/sbin/mkswap', '/dev/xdb1', '-L', 'swap', '-f'], shell=False)
+
+#
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_final_message.py b/tests/unittests/config/test_cc_final_message.py
new file mode 100644
index 00000000..46ba99b2
--- /dev/null
+++ b/tests/unittests/config/test_cc_final_message.py
@@ -0,0 +1,46 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+from unittest import mock
+
+import pytest
+
+from cloudinit.config.cc_final_message import handle
+
+
class TestHandle:
    """Tests for cc_final_message.handle."""
    # TODO: Expand these tests to cover full functionality; currently they only
    # cover the logic around how the boot-finished file is written (and not its
    # contents).

    @pytest.mark.parametrize(
        "instance_dir_exists,file_is_written,expected_log_substring",
        [
            (True, True, None),
            (False, False, "Failed to write boot finished file "),
        ],
    )
    def test_boot_finished_written(
        self,
        instance_dir_exists,
        file_is_written,
        expected_log_substring,
        caplog,
        tmpdir,
    ):
        instance_dir = tmpdir.join("var/lib/cloud/instance")
        if instance_dir_exists:
            instance_dir.ensure_dir()
        boot_finished = instance_dir.join("boot-finished")
        m_cloud = mock.Mock(
            paths=mock.Mock(boot_finished=boot_finished.strpath)
        )

        handle(None, {}, m_cloud, logging.getLogger(), [])

        # The handler must neither create nor remove the instance dir.
        assert instance_dir.exists() == instance_dir_exists
        assert boot_finished.exists() == file_is_written
        if expected_log_substring:
            assert expected_log_substring in caplog.text
diff --git a/tests/unittests/config/test_cc_growpart.py b/tests/unittests/config/test_cc_growpart.py
new file mode 100644
index 00000000..b007f24f
--- /dev/null
+++ b/tests/unittests/config/test_cc_growpart.py
@@ -0,0 +1,309 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit import cloud
+from cloudinit.config import cc_growpart
+from cloudinit import subp
+from cloudinit import temp_utils
+
+from tests.unittests.helpers import TestCase
+
+import errno
+import logging
+import os
+import shutil
+import re
+import unittest
+from contextlib import ExitStack
+from unittest import mock
+import stat
+
+# growpart:
+# mode: auto # off, on, auto, 'growpart'
+# devices: ['root']
+
+HELP_GROWPART_RESIZE = """
+growpart disk partition
+ rewrite partition table so that partition takes up all the space it can
+ options:
+ -h | --help print Usage and exit
+<SNIP>
+ -u | --update R update the the kernel partition table info after growing
+ this requires kernel support and 'partx --update'
+ R is one of:
+ - 'auto' : [default] update partition if possible
+<SNIP>
+ Example:
+ - growpart /dev/sda 1
+ Resize partition 1 on /dev/sda
+"""
+
+HELP_GROWPART_NO_RESIZE = """
+growpart disk partition
+ rewrite partition table so that partition takes up all the space it can
+ options:
+ -h | --help print Usage and exit
+<SNIP>
+ Example:
+ - growpart /dev/sda 1
+ Resize partition 1 on /dev/sda
+"""
+
+HELP_GPART = """
+usage: gpart add -t type [-a alignment] [-b start] <SNIP> geom
+ gpart backup geom
+ gpart bootcode [-b bootcode] [-p partcode -i index] [-f flags] geom
+<SNIP>
+ gpart resize -i index [-a alignment] [-s size] [-f flags] geom
+ gpart restore [-lF] [-f flags] provider [...]
+ gpart recover [-f flags] geom
+ gpart help
+<SNIP>
+"""
+
+
class Dir:
    """Stub directory entry used where os.scandir results are expected."""

    def __init__(self, name):
        self.name = name
        # The name doubles as the fake stat mode value.
        self.st_mode = name

    def is_dir(self, *args, **kwargs):
        """Always report this entry as a directory."""
        return True

    def stat(self, *args, **kwargs):
        """Return self, so st_mode is reachable like on a stat result."""
        return self
+
+
class Scanner:
    """Stub context manager standing in for an os.scandir handle."""

    def __enter__(self):
        # Produce two fake directory entries.
        return (Dir(''), Dir(''))

    def __exit__(self, *args):
        return None
+
+
class TestDisabled(unittest.TestCase):
    """growpart must be a no-op when its mode is 'off'."""

    def setUp(self):
        super(TestDisabled, self).setUp()
        self.name = "growpart"
        self.cloud_init = None
        self.log = logging.getLogger("TestDisabled")
        self.args = []
        self.handle = cc_growpart.handle

    def test_mode_off(self):
        """mode=off must never even construct a resizer."""
        config = {'growpart': {'mode': 'off'}}
        with mock.patch.object(cc_growpart, 'resizer_factory') as factory:
            self.handle(
                self.name, config, self.cloud_init, self.log, self.args)
        self.assertEqual(0, factory.call_count)
+
+
class TestConfig(TestCase):
    """Exercise resizer selection and handler wiring in cc_growpart."""

    def setUp(self):
        super(TestConfig, self).setUp()
        self.name = "growpart"
        self.paths = None
        self.cloud = cloud.Cloud(None, self.paths, None, None, None)
        self.log = logging.getLogger("TestConfig")
        self.args = []

        self.cloud_init = None
        self.handle = cc_growpart.handle
        self.tmppath = '/tmp/cloudinit-test-file'
        self.tmpdir = os.scandir('/tmp')
        self.tmpfile = open(self.tmppath, 'w')

    def tearDown(self):
        self.tmpfile.close()
        # Close the scandir iterator opened in setUp; leaving it open
        # leaks the directory file descriptor (ResourceWarning on GC).
        self.tmpdir.close()
        os.remove(self.tmppath)
        super(TestConfig, self).tearDown()

    @mock.patch.dict("os.environ", clear=True)
    def test_no_resizers_auto_is_fine(self):
        """mode=auto quietly continues when no resize tool is usable."""
        with mock.patch.object(
                subp, 'subp',
                return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:

            config = {'growpart': {'mode': 'auto'}}
            self.handle(self.name, config, self.cloud_init, self.log,
                        self.args)

            # Both probes are tried before giving up.
            mockobj.assert_has_calls([
                mock.call(['growpart', '--help'], env={'LANG': 'C'}),
                mock.call(['gpart', 'help'], env={'LANG': 'C'}, rcs=[0, 1])])

    @mock.patch.dict("os.environ", clear=True)
    def test_no_resizers_mode_growpart_is_exception(self):
        """mode=growpart raises when growpart cannot resize."""
        with mock.patch.object(
                subp, 'subp',
                return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:
            config = {'growpart': {'mode': "growpart"}}
            self.assertRaises(
                ValueError, self.handle, self.name, config,
                self.cloud_init, self.log, self.args)

            mockobj.assert_called_once_with(
                ['growpart', '--help'], env={'LANG': 'C'})

    @mock.patch.dict("os.environ", clear=True)
    def test_mode_auto_prefers_growpart(self):
        """mode=auto selects ResizeGrowPart when growpart can resize."""
        with mock.patch.object(
                subp, 'subp',
                return_value=(HELP_GROWPART_RESIZE, "")) as mockobj:
            ret = cc_growpart.resizer_factory(mode="auto")
            self.assertIsInstance(ret, cc_growpart.ResizeGrowPart)

            mockobj.assert_called_once_with(
                ['growpart', '--help'], env={'LANG': 'C'})

    @mock.patch.dict("os.environ", {'LANG': 'cs_CZ.UTF-8'}, clear=True)
    @mock.patch.object(temp_utils, 'mkdtemp', return_value='/tmp/much-random')
    @mock.patch.object(stat, 'S_ISDIR', return_value=False)
    @mock.patch.object(os.path, 'samestat', return_value=True)
    @mock.patch.object(os.path, "join", return_value='/tmp')
    @mock.patch.object(os, 'scandir', return_value=Scanner())
    @mock.patch.object(os, 'mkdir')
    @mock.patch.object(os, 'unlink')
    @mock.patch.object(os, 'rmdir')
    @mock.patch.object(os, 'open', return_value=1)
    @mock.patch.object(os, 'close')
    @mock.patch.object(shutil, 'rmtree')
    @mock.patch.object(os, 'lseek', return_value=1024)
    @mock.patch.object(os, 'lstat', return_value='interesting metadata')
    def test_force_lang_check_tempfile(self, *args, **kwargs):
        """growpart invocations force LANG=C and pass a TMPDIR, even when
        the environment carries a different locale."""
        with mock.patch.object(
                subp,
                'subp',
                return_value=(HELP_GROWPART_RESIZE, "")) as mockobj:

            ret = cc_growpart.resizer_factory(mode="auto")
            self.assertIsInstance(ret, cc_growpart.ResizeGrowPart)
            diskdev = '/dev/sdb'
            partnum = 1
            partdev = '/dev/sdb'
            ret.resize(diskdev, partnum, partdev)
        mockobj.assert_has_calls([
            mock.call(
                ["growpart", '--dry-run', diskdev, partnum],
                env={'LANG': 'C', 'TMPDIR': '/tmp'}),
            mock.call(
                ["growpart", diskdev, partnum],
                env={'LANG': 'C', 'TMPDIR': '/tmp'}),
        ])

    @mock.patch.dict("os.environ", {'LANG': 'cs_CZ.UTF-8'}, clear=True)
    def test_mode_auto_falls_back_to_gpart(self):
        """mode=auto selects ResizeGpart when growpart is unusable."""
        with mock.patch.object(
                subp, 'subp',
                return_value=("", HELP_GPART)) as mockobj:
            ret = cc_growpart.resizer_factory(mode="auto")
            self.assertIsInstance(ret, cc_growpart.ResizeGpart)

            mockobj.assert_has_calls([
                mock.call(['growpart', '--help'], env={'LANG': 'C'}),
                mock.call(['gpart', 'help'], env={'LANG': 'C'}, rcs=[0, 1])])

    def test_handle_with_no_growpart_entry(self):
        """If no 'growpart' entry is in config, mode=auto should be used."""
        myresizer = object()
        retval = (("/", cc_growpart.RESIZE.CHANGED, "my-message",),)

        with ExitStack() as mocks:
            factory = mocks.enter_context(
                mock.patch.object(cc_growpart, 'resizer_factory',
                                  return_value=myresizer))
            rsdevs = mocks.enter_context(
                mock.patch.object(cc_growpart, 'resize_devices',
                                  return_value=retval))
            mocks.enter_context(
                mock.patch.object(cc_growpart, 'RESIZERS',
                                  (('mysizer', object),)
                                  ))

            self.handle(self.name, {}, self.cloud_init, self.log, self.args)

            factory.assert_called_once_with('auto')
            rsdevs.assert_called_once_with(myresizer, ['/'])
+
+
class TestResize(unittest.TestCase):
    # NOTE: resize_devices is exercised with device_part_info and os.stat
    # stubbed out, so this mostly checks the RESIZE result bookkeeping.

    def setUp(self):
        super(TestResize, self).setUp()
        self.name = "growpart"
        self.log = logging.getLogger("TestResize")

    def test_simple_devices(self):
        # test simple device list
        # this patches out devent2dev, os.stat, and device_part_info
        # so in the end, doesn't test a lot
        devs = ["/dev/XXda1", "/dev/YYda2"]
        # Canned stat result: st_mode 25008 == 0o60660, a block device.
        devstat_ret = Bunch(st_mode=25008, st_ino=6078, st_dev=5,
                            st_nlink=1, st_uid=0, st_gid=6, st_size=0,
                            st_atime=0, st_mtime=0, st_ctime=0)
        enoent = ["/dev/NOENT"]
        real_stat = os.stat
        resize_calls = []

        class myresizer(object):
            # Report YYda2 as grown (1024 -> 2048) and XXda1 unchanged.
            def resize(self, diskdev, partnum, partdev):
                resize_calls.append((diskdev, partnum, partdev))
                if partdev == "/dev/YYda2":
                    return (1024, 2048)
                return (1024, 1024)  # old size, new size

        def mystat(path):
            # Only the two known devices stat cleanly; the ENOENT path
            # raises like a missing file; anything else falls through
            # to the real os.stat.
            if path in devs:
                return devstat_ret
            if path in enoent:
                e = OSError("%s: does not exist" % path)
                e.errno = errno.ENOENT
                raise e
            return real_stat(path)

        try:
            # Patch module/os attributes by hand; restored in finally.
            opinfo = cc_growpart.device_part_info
            cc_growpart.device_part_info = simple_device_part_info
            os.stat = mystat

            resized = cc_growpart.resize_devices(myresizer(), devs + enoent)

            def find(name, res):
                for f in res:
                    if f[0] == name:
                        return f
                return None

            self.assertEqual(cc_growpart.RESIZE.NOCHANGE,
                             find("/dev/XXda1", resized)[1])
            self.assertEqual(cc_growpart.RESIZE.CHANGED,
                             find("/dev/YYda2", resized)[1])
            self.assertEqual(cc_growpart.RESIZE.SKIPPED,
                             find(enoent[0], resized)[1])
            # self.assertEqual(resize_calls,
            #                 [("/dev/XXda", "1", "/dev/XXda1"),
            #                  ("/dev/YYda", "2", "/dev/YYda2")])
        finally:
            cc_growpart.device_part_info = opinfo
            os.stat = real_stat
+
+
def simple_device_part_info(devpath):
    """Split a device path into (disk, partition-number) strings.

    E.g. '/dev/vda1' -> ('/dev/vda', '1'). The split is purely on
    trailing digits; no real partition lookup happens.
    """
    match = re.search("([^0-9]*)([0-9]*)$", devpath)
    return (match.group(1), match.group(2))
+
+
class Bunch(object):
    """Ad-hoc attribute bag: Bunch(a=1).a == 1."""

    def __init__(self, **kwds):
        for key, value in kwds.items():
            setattr(self, key, value)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_grub_dpkg.py b/tests/unittests/config/test_cc_grub_dpkg.py
new file mode 100644
index 00000000..99c05bb5
--- /dev/null
+++ b/tests/unittests/config/test_cc_grub_dpkg.py
@@ -0,0 +1,176 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import pytest
+
+from unittest import mock
+from logging import Logger
+from cloudinit.subp import ProcessExecutionError
+from cloudinit.config.cc_grub_dpkg import fetch_idevs, handle
+
+
class TestFetchIdevs:
    """Tests cc_grub_dpkg.fetch_idevs()"""

    # Note: udevadm info returns devices in a large single line string
    @pytest.mark.parametrize(
        "grub_output,path_exists,expected_log_call,udevadm_output"
        ",expected_idevs",
        [
            # Inside a container, grub not installed
            (
                ProcessExecutionError(reason=FileNotFoundError()),
                False,
                mock.call("'grub-probe' not found in $PATH"),
                '',
                '',
            ),
            # Inside a container, grub installed
            (
                ProcessExecutionError(stderr="failed to get canonical path"),
                False,
                mock.call("grub-probe 'failed to get canonical path'"),
                '',
                '',
            ),
            # KVM Instance
            (
                ['/dev/vda'],
                True,
                None,
                (
                    '/dev/disk/by-path/pci-0000:00:00.0 ',
                    '/dev/disk/by-path/virtio-pci-0000:00:00.0 '
                ),
                '/dev/vda',
            ),
            # Xen Instance
            (
                ['/dev/xvda'],
                True,
                None,
                '',
                '/dev/xvda',
            ),
            # NVMe Hardware Instance
            (
                ['/dev/nvme1n1'],
                True,
                None,
                (
                    '/dev/disk/by-id/nvme-Company_hash000 ',
                    '/dev/disk/by-id/nvme-nvme.000-000-000-000-000 ',
                    '/dev/disk/by-path/pci-0000:00:00.0-nvme-0 '
                ),
                '/dev/disk/by-id/nvme-Company_hash000',
            ),
            # SCSI Hardware Instance
            (
                ['/dev/sda'],
                True,
                None,
                (
                    '/dev/disk/by-id/company-user-1 ',
                    '/dev/disk/by-id/scsi-0Company_user-1 ',
                    '/dev/disk/by-path/pci-0000:00:00.0-scsi-0:0:0:0 '
                ),
                '/dev/disk/by-id/company-user-1',
            ),
        ],
    )
    @mock.patch("cloudinit.config.cc_grub_dpkg.util.logexc")
    @mock.patch("cloudinit.config.cc_grub_dpkg.os.path.exists")
    @mock.patch("cloudinit.config.cc_grub_dpkg.subp.subp")
    def test_fetch_idevs(self, m_subp, m_exists, m_logexc, grub_output,
                         path_exists, expected_log_call, udevadm_output,
                         expected_idevs):
        """Tests outputs from grub-probe and udevadm info against grub-dpkg"""
        # subp is invoked twice: grub-probe first (exceptions in the
        # side_effect list are raised by mock), then udevadm info.
        m_subp.side_effect = [
            grub_output,
            ["".join(udevadm_output)]
        ]
        m_exists.return_value = path_exists
        log = mock.Mock(spec=Logger)
        idevs = fetch_idevs(log)
        assert expected_idevs == idevs
        # Only the container/error cases are expected to log anything.
        if expected_log_call is not None:
            assert expected_log_call in log.debug.call_args_list
+
+
class TestHandle:
    """Tests cc_grub_dpkg.handle()"""

    @pytest.mark.parametrize(
        "cfg_idevs,cfg_idevs_empty,fetch_idevs_output,expected_log_output",
        [
            (
                # No configuration
                None,
                None,
                '/dev/disk/by-id/nvme-Company_hash000',
                (
                    "Setting grub debconf-set-selections with ",
                    "'/dev/disk/by-id/nvme-Company_hash000','false'"
                ),
            ),
            (
                # idevs set, idevs_empty unset
                '/dev/sda',
                None,
                '/dev/sda',
                (
                    "Setting grub debconf-set-selections with ",
                    "'/dev/sda','false'"
                ),
            ),
            (
                # idevs unset, idevs_empty set
                None,
                'true',
                '/dev/xvda',
                (
                    "Setting grub debconf-set-selections with ",
                    "'/dev/xvda','true'"
                ),
            ),
            (
                # idevs set, idevs_empty set
                '/dev/vda',
                'false',
                '/dev/disk/by-id/company-user-1',
                (
                    "Setting grub debconf-set-selections with ",
                    "'/dev/vda','false'"
                ),
            ),
            (
                # idevs set, idevs_empty set, but contradicting fetch_idevs
                # Respect what the user defines, even if its logically wrong
                '/dev/nvme0n1',
                'true',
                '',
                (
                    "Setting grub debconf-set-selections with ",
                    "'/dev/nvme0n1','true'"
                ),
            )
        ],
    )
    @mock.patch("cloudinit.config.cc_grub_dpkg.fetch_idevs")
    @mock.patch("cloudinit.config.cc_grub_dpkg.util.get_cfg_option_str")
    @mock.patch("cloudinit.config.cc_grub_dpkg.util.logexc")
    @mock.patch("cloudinit.config.cc_grub_dpkg.subp.subp")
    def test_handle(self, m_subp, m_logexc, m_get_cfg_str, m_fetch_idevs,
                    cfg_idevs, cfg_idevs_empty, fetch_idevs_output,
                    expected_log_output):
        """Test setting of correct debconf database entries"""
        # handle() reads two config strings; the side_effect order feeds
        # idevs first, then idevs_empty.
        m_get_cfg_str.side_effect = [
            cfg_idevs,
            cfg_idevs_empty
        ]
        m_fetch_idevs.return_value = fetch_idevs_output
        log = mock.Mock(spec=Logger)
        handle(mock.Mock(), mock.Mock(), mock.Mock(), log, mock.Mock())
        # The last debug message is the joined two-part string above.
        log.debug.assert_called_with("".join(expected_log_output))
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_install_hotplug.py b/tests/unittests/config/test_cc_install_hotplug.py
new file mode 100644
index 00000000..5d6b1e77
--- /dev/null
+++ b/tests/unittests/config/test_cc_install_hotplug.py
@@ -0,0 +1,113 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+from collections import namedtuple
+from unittest import mock
+
+import pytest
+
+from cloudinit.config.cc_install_hotplug import (
+ handle,
+ HOTPLUG_UDEV_PATH,
+ HOTPLUG_UDEV_RULES_TEMPLATE,
+)
+from cloudinit.event import EventScope, EventType
+
+
@pytest.fixture()
def mocks():
    """Patch the cloud-init entry points cc_install_hotplug touches.

    Yields a namedtuple of the started mock objects; every patch is
    stopped again on teardown. (pytest.yield_fixture is deprecated and
    removed in modern pytest; a plain fixture supports yield directly.)
    """
    patchers = {
        'm_update_enabled': mock.patch(
            'cloudinit.stages.update_event_enabled'),
        'm_write': mock.patch('cloudinit.util.write_file', autospec=True),
        'm_del': mock.patch('cloudinit.util.del_file', autospec=True),
        'm_subp': mock.patch('cloudinit.subp.subp'),
        'm_which': mock.patch('cloudinit.subp.which', return_value=None),
        'm_path_exists': mock.patch('os.path.exists', return_value=False),
    }
    started = {name: patcher.start() for name, patcher in patchers.items()}
    try:
        # Field order follows insertion order, matching the patch order.
        yield namedtuple('Mocks', ' '.join(started))(**started)
    finally:
        # Unconditionally unpatch, even if a consumer raised.
        for patcher in patchers.values():
            patcher.stop()
+
+
class TestInstallHotplug:
    """Tests for cc_install_hotplug.handle via the `mocks` fixture."""

    @pytest.mark.parametrize('libexec_exists', [True, False])
    def test_rules_installed_when_supported_and_enabled(
        self, mocks, libexec_exists
    ):
        """Udev rules are written and reloaded when hotplug is enabled,
        using whichever libexec directory exists on the system."""
        mocks.m_which.return_value = 'udevadm'
        mocks.m_update_enabled.return_value = True
        m_cloud = mock.MagicMock()
        m_cloud.datasource.get_supported_events.return_value = {
            EventScope.NETWORK: {EventType.HOTPLUG}
        }

        if libexec_exists:
            libexecdir = "/usr/libexec/cloud-init"
        else:
            libexecdir = "/usr/lib/cloud-init"
        with mock.patch('os.path.exists', return_value=libexec_exists):
            handle(None, {}, m_cloud, mock.Mock(), None)
            mocks.m_write.assert_called_once_with(
                filename=HOTPLUG_UDEV_PATH,
                content=HOTPLUG_UDEV_RULES_TEMPLATE.format(
                    libexecdir=libexecdir),
            )
            assert mocks.m_subp.call_args_list == [mock.call([
                'udevadm', 'control', '--reload-rules',
            ])]
            assert mocks.m_del.call_args_list == []

    def test_rules_not_installed_when_unsupported(self, mocks):
        """No file activity when the datasource supports no hotplug."""
        mocks.m_update_enabled.return_value = True
        m_cloud = mock.MagicMock()
        m_cloud.datasource.get_supported_events.return_value = {}

        handle(None, {}, m_cloud, mock.Mock(), None)
        assert mocks.m_write.call_args_list == []
        assert mocks.m_del.call_args_list == []
        assert mocks.m_subp.call_args_list == []

    def test_rules_not_installed_when_disabled(self, mocks):
        """No file activity when the hotplug event is not enabled."""
        mocks.m_update_enabled.return_value = False
        m_cloud = mock.MagicMock()
        m_cloud.datasource.get_supported_events.return_value = {
            EventScope.NETWORK: {EventType.HOTPLUG}
        }

        handle(None, {}, m_cloud, mock.Mock(), None)
        assert mocks.m_write.call_args_list == []
        assert mocks.m_del.call_args_list == []
        assert mocks.m_subp.call_args_list == []

    def test_rules_uninstalled_when_disabled(self, mocks):
        """An existing rules file is removed and udev reloaded when
        hotplug is disabled."""
        mocks.m_path_exists.return_value = True
        mocks.m_update_enabled.return_value = False
        m_cloud = mock.MagicMock()
        m_cloud.datasource.get_supported_events.return_value = {}

        handle(None, {}, m_cloud, mock.Mock(), None)
        mocks.m_del.assert_called_with(HOTPLUG_UDEV_PATH)
        assert mocks.m_subp.call_args_list == [mock.call([
            'udevadm', 'control', '--reload-rules',
        ])]
        assert mocks.m_write.call_args_list == []

    def test_rules_not_installed_when_no_udevadm(self, mocks):
        """No activity at all when udevadm is not on PATH."""
        mocks.m_update_enabled.return_value = True
        m_cloud = mock.MagicMock()
        m_cloud.datasource.get_supported_events.return_value = {
            EventScope.NETWORK: {EventType.HOTPLUG}
        }

        handle(None, {}, m_cloud, mock.Mock(), None)
        assert mocks.m_del.call_args_list == []
        assert mocks.m_write.call_args_list == []
        assert mocks.m_subp.call_args_list == []
diff --git a/tests/unittests/config/test_cc_keys_to_console.py b/tests/unittests/config/test_cc_keys_to_console.py
new file mode 100644
index 00000000..4083fc54
--- /dev/null
+++ b/tests/unittests/config/test_cc_keys_to_console.py
@@ -0,0 +1,34 @@
+"""Tests for cc_keys_to_console."""
+from unittest import mock
+
+import pytest
+
+from cloudinit.config import cc_keys_to_console
+
+
class TestHandle:
    """Tests for cloudinit.config.cc_keys_to_console.handle.

    TODO: These tests only cover the emit_keys_to_console config option, they
    should be expanded to cover the full functionality.
    """

    @mock.patch("cloudinit.config.cc_keys_to_console.util.multi_log")
    @mock.patch("cloudinit.config.cc_keys_to_console.os.path.exists")
    @mock.patch("cloudinit.config.cc_keys_to_console.subp.subp")
    @pytest.mark.parametrize("cfg,subp_called", [
        ({}, True),  # Default to emitting keys
        ({"ssh": {}}, True),  # Default even if we have the parent key
        ({"ssh": {"emit_keys_to_console": True}}, True),  # Explicitly enabled
        ({"ssh": {"emit_keys_to_console": False}}, False),  # Disabled
    ])
    def test_emit_keys_to_console_config(
        self, m_subp, m_path_exists, _m_multi_log, cfg, subp_called
    ):
        # Pretend the fingerprint helper script is always present.
        m_path_exists.return_value = True
        m_subp.return_value = ("", "")

        cc_keys_to_console.handle("name", cfg, mock.Mock(), mock.Mock(), ())

        assert subp_called == (m_subp.call_count == 1)
diff --git a/tests/unittests/config/test_cc_landscape.py b/tests/unittests/config/test_cc_landscape.py
new file mode 100644
index 00000000..07b3f899
--- /dev/null
+++ b/tests/unittests/config/test_cc_landscape.py
@@ -0,0 +1,126 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+from configobj import ConfigObj
+
+from cloudinit.config import cc_landscape
+from cloudinit import util
+from tests.unittests.helpers import (FilesystemMockingTestCase, mock,
+ wrap_and_call)
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+class TestLandscape(FilesystemMockingTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestLandscape, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.conf = self.tmp_path('client.conf', self.new_root)
+ self.default_file = self.tmp_path('default_landscape', self.new_root)
+ self.patchUtils(self.new_root)
+ self.add_patch(
+ 'cloudinit.distros.ubuntu.Distro.install_packages',
+ 'm_install_packages'
+ )
+
+ def test_handler_skips_empty_landscape_cloudconfig(self):
+ """Empty landscape cloud-config section does no work."""
+ mycloud = get_cloud('ubuntu')
+ mycloud.distro = mock.MagicMock()
+ cfg = {'landscape': {}}
+ cc_landscape.handle('notimportant', cfg, mycloud, LOG, None)
+ self.assertFalse(mycloud.distro.install_packages.called)
+
+ def test_handler_error_on_invalid_landscape_type(self):
+        """Raise an error when landscape configuration option is invalid."""
+ mycloud = get_cloud('ubuntu')
+ cfg = {'landscape': 'wrongtype'}
+ with self.assertRaises(RuntimeError) as context_manager:
+ cc_landscape.handle('notimportant', cfg, mycloud, LOG, None)
+ self.assertIn(
+ "'landscape' key existed in config, but not a dict",
+ str(context_manager.exception))
+
+ @mock.patch('cloudinit.config.cc_landscape.subp')
+ def test_handler_restarts_landscape_client(self, m_subp):
+        """handler restarts landscape-client after install."""
+ mycloud = get_cloud('ubuntu')
+ cfg = {'landscape': {'client': {}}}
+ wrap_and_call(
+ 'cloudinit.config.cc_landscape',
+ {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
+ cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
+ self.assertEqual(
+ [mock.call(['service', 'landscape-client', 'restart'])],
+ m_subp.subp.call_args_list)
+
+ def test_handler_installs_client_and_creates_config_file(self):
+ """Write landscape client.conf and install landscape-client."""
+ mycloud = get_cloud('ubuntu')
+ cfg = {'landscape': {'client': {}}}
+ expected = {'client': {
+ 'log_level': 'info',
+ 'url': 'https://landscape.canonical.com/message-system',
+ 'ping_url': 'http://landscape.canonical.com/ping',
+ 'data_path': '/var/lib/landscape/client'}}
+ mycloud.distro = mock.MagicMock()
+ wrap_and_call(
+ 'cloudinit.config.cc_landscape',
+ {'LSC_CLIENT_CFG_FILE': {'new': self.conf},
+ 'LS_DEFAULT_FILE': {'new': self.default_file}},
+ cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
+ self.assertEqual(
+ [mock.call('landscape-client')],
+ mycloud.distro.install_packages.call_args)
+ self.assertEqual(expected, dict(ConfigObj(self.conf)))
+ self.assertIn(
+ 'Wrote landscape config file to {0}'.format(self.conf),
+ self.logs.getvalue())
+ default_content = util.load_file(self.default_file)
+ self.assertEqual('RUN=1\n', default_content)
+
+ def test_handler_writes_merged_client_config_file_with_defaults(self):
+ """Merge and write options from LSC_CLIENT_CFG_FILE with defaults."""
+ # Write existing sparse client.conf file
+ util.write_file(self.conf, '[client]\ncomputer_title = My PC\n')
+ mycloud = get_cloud('ubuntu')
+ cfg = {'landscape': {'client': {}}}
+ expected = {'client': {
+ 'log_level': 'info',
+ 'url': 'https://landscape.canonical.com/message-system',
+ 'ping_url': 'http://landscape.canonical.com/ping',
+ 'data_path': '/var/lib/landscape/client',
+ 'computer_title': 'My PC'}}
+ wrap_and_call(
+ 'cloudinit.config.cc_landscape',
+ {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
+ cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
+ self.assertEqual(expected, dict(ConfigObj(self.conf)))
+ self.assertIn(
+ 'Wrote landscape config file to {0}'.format(self.conf),
+ self.logs.getvalue())
+
+ def test_handler_writes_merged_provided_cloudconfig_with_defaults(self):
+ """Merge and write options from cloud-config options with defaults."""
+ # Write empty sparse client.conf file
+ util.write_file(self.conf, '')
+ mycloud = get_cloud('ubuntu')
+ cfg = {'landscape': {'client': {'computer_title': 'My PC'}}}
+ expected = {'client': {
+ 'log_level': 'info',
+ 'url': 'https://landscape.canonical.com/message-system',
+ 'ping_url': 'http://landscape.canonical.com/ping',
+ 'data_path': '/var/lib/landscape/client',
+ 'computer_title': 'My PC'}}
+ wrap_and_call(
+ 'cloudinit.config.cc_landscape',
+ {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
+ cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
+ self.assertEqual(expected, dict(ConfigObj(self.conf)))
+ self.assertIn(
+ 'Wrote landscape config file to {0}'.format(self.conf),
+ self.logs.getvalue())
diff --git a/tests/unittests/config/test_cc_locale.py b/tests/unittests/config/test_cc_locale.py
new file mode 100644
index 00000000..6cd95a29
--- /dev/null
+++ b/tests/unittests/config/test_cc_locale.py
@@ -0,0 +1,116 @@
+# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Author: Juerg Haefliger <juerg.haefliger@hp.com>
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import os
+import shutil
+import tempfile
+from io import BytesIO
+from configobj import ConfigObj
+from unittest import mock
+
+from cloudinit import util
+from cloudinit.config import cc_locale
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+
+LOG = logging.getLogger(__name__)
+
+
+class TestLocale(t_help.FilesystemMockingTestCase):
+
+ def setUp(self):
+ super(TestLocale, self).setUp()
+ self.new_root = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.new_root)
+ self.patchUtils(self.new_root)
+
+ def test_set_locale_arch(self):
+ locale = 'en_GB.UTF-8'
+ locale_configfile = '/etc/invalid-locale-path'
+ cfg = {
+ 'locale': locale,
+ 'locale_configfile': locale_configfile,
+ }
+ cc = get_cloud('arch')
+
+ with mock.patch('cloudinit.distros.arch.subp.subp') as m_subp:
+ with mock.patch('cloudinit.distros.arch.LOG.warning') as m_LOG:
+ cc_locale.handle('cc_locale', cfg, cc, LOG, [])
+ m_LOG.assert_called_with('Invalid locale_configfile %s, '
+ 'only supported value is '
+ '/etc/locale.conf',
+ locale_configfile)
+
+ contents = util.load_file(cc.distro.locale_gen_fn)
+ self.assertIn('%s UTF-8' % locale, contents)
+ m_subp.assert_called_with(['localectl',
+ 'set-locale',
+ locale], capture=False)
+
+ def test_set_locale_sles(self):
+
+ cfg = {
+ 'locale': 'My.Locale',
+ }
+ cc = get_cloud('sles')
+ cc_locale.handle('cc_locale', cfg, cc, LOG, [])
+ if cc.distro.uses_systemd():
+ locale_conf = cc.distro.systemd_locale_conf_fn
+ else:
+ locale_conf = cc.distro.locale_conf_fn
+ contents = util.load_file(locale_conf, decode=False)
+ n_cfg = ConfigObj(BytesIO(contents))
+ if cc.distro.uses_systemd():
+ self.assertEqual({'LANG': cfg['locale']}, dict(n_cfg))
+ else:
+ self.assertEqual({'RC_LANG': cfg['locale']}, dict(n_cfg))
+
+ def test_set_locale_sles_default(self):
+ cfg = {}
+ cc = get_cloud('sles')
+ cc_locale.handle('cc_locale', cfg, cc, LOG, [])
+
+ if cc.distro.uses_systemd():
+ locale_conf = cc.distro.systemd_locale_conf_fn
+ keyname = 'LANG'
+ else:
+ locale_conf = cc.distro.locale_conf_fn
+ keyname = 'RC_LANG'
+
+ contents = util.load_file(locale_conf, decode=False)
+ n_cfg = ConfigObj(BytesIO(contents))
+ self.assertEqual({keyname: 'en_US.UTF-8'}, dict(n_cfg))
+
+ def test_locale_update_config_if_different_than_default(self):
+ """Test cc_locale writes updates conf if different than default"""
+ locale_conf = os.path.join(self.new_root, "etc/default/locale")
+ util.write_file(locale_conf, 'LANG="en_US.UTF-8"\n')
+ cfg = {'locale': 'C.UTF-8'}
+ cc = get_cloud('ubuntu')
+ with mock.patch('cloudinit.distros.debian.subp.subp') as m_subp:
+ with mock.patch('cloudinit.distros.debian.LOCALE_CONF_FN',
+ locale_conf):
+ cc_locale.handle('cc_locale', cfg, cc, LOG, [])
+ m_subp.assert_called_with(['update-locale',
+ '--locale-file=%s' % locale_conf,
+ 'LANG=C.UTF-8'], capture=False)
+
+ def test_locale_rhel_defaults_en_us_utf8(self):
+ """Test cc_locale gets en_US.UTF-8 from distro get_locale fallback"""
+ cfg = {}
+ cc = get_cloud('rhel')
+ update_sysconfig = 'cloudinit.distros.rhel_util.update_sysconfig_file'
+ with mock.patch.object(cc.distro, 'uses_systemd') as m_use_sd:
+ m_use_sd.return_value = True
+ with mock.patch(update_sysconfig) as m_update_syscfg:
+ cc_locale.handle('cc_locale', cfg, cc, LOG, [])
+ m_update_syscfg.assert_called_with('/etc/locale.conf',
+ {'LANG': 'en_US.UTF-8'})
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_lxd.py b/tests/unittests/config/test_cc_lxd.py
new file mode 100644
index 00000000..887987c0
--- /dev/null
+++ b/tests/unittests/config/test_cc_lxd.py
@@ -0,0 +1,222 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+from unittest import mock
+
+from cloudinit.config import cc_lxd
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+
+class TestLxd(t_help.CiTestCase):
+
+ with_logs = True
+
+ lxd_cfg = {
+ 'lxd': {
+ 'init': {
+ 'network_address': '0.0.0.0',
+ 'storage_backend': 'zfs',
+ 'storage_pool': 'poolname',
+ }
+ }
+ }
+
+ @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_lxd_init(self, mock_subp, m_maybe_clean):
+ cc = get_cloud()
+ mock_subp.which.return_value = True
+ m_maybe_clean.return_value = None
+ cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
+ self.assertTrue(mock_subp.which.called)
+ # no bridge config, so maybe_cleanup should not be called.
+ self.assertFalse(m_maybe_clean.called)
+ self.assertEqual(
+ [mock.call(['lxd', 'waitready', '--timeout=300']),
+ mock.call(
+ ['lxd', 'init', '--auto', '--network-address=0.0.0.0',
+ '--storage-backend=zfs', '--storage-pool=poolname'])],
+ mock_subp.subp.call_args_list)
+
+ @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_lxd_install(self, mock_subp, m_maybe_clean):
+ cc = get_cloud()
+ cc.distro = mock.MagicMock()
+ mock_subp.which.return_value = None
+ cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
+ self.assertNotIn('WARN', self.logs.getvalue())
+ self.assertTrue(cc.distro.install_packages.called)
+ cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
+ self.assertFalse(m_maybe_clean.called)
+ install_pkg = cc.distro.install_packages.call_args_list[0][0][0]
+ self.assertEqual(sorted(install_pkg), ['lxd', 'zfsutils-linux'])
+
+ @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_no_init_does_nothing(self, mock_subp, m_maybe_clean):
+ cc = get_cloud()
+ cc.distro = mock.MagicMock()
+ cc_lxd.handle('cc_lxd', {'lxd': {}}, cc, self.logger, [])
+ self.assertFalse(cc.distro.install_packages.called)
+ self.assertFalse(mock_subp.subp.called)
+ self.assertFalse(m_maybe_clean.called)
+
+ @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
+ @mock.patch("cloudinit.config.cc_lxd.subp")
+ def test_no_lxd_does_nothing(self, mock_subp, m_maybe_clean):
+ cc = get_cloud()
+ cc.distro = mock.MagicMock()
+ cc_lxd.handle('cc_lxd', {'package_update': True}, cc, self.logger, [])
+ self.assertFalse(cc.distro.install_packages.called)
+ self.assertFalse(mock_subp.subp.called)
+ self.assertFalse(m_maybe_clean.called)
+
+ def test_lxd_debconf_new_full(self):
+ data = {"mode": "new",
+ "name": "testbr0",
+ "ipv4_address": "10.0.8.1",
+ "ipv4_netmask": "24",
+ "ipv4_dhcp_first": "10.0.8.2",
+ "ipv4_dhcp_last": "10.0.8.254",
+ "ipv4_dhcp_leases": "250",
+ "ipv4_nat": "true",
+ "ipv6_address": "fd98:9e0:3744::1",
+ "ipv6_netmask": "64",
+ "ipv6_nat": "true",
+ "domain": "lxd"}
+ self.assertEqual(
+ cc_lxd.bridge_to_debconf(data),
+ {"lxd/setup-bridge": "true",
+ "lxd/bridge-name": "testbr0",
+ "lxd/bridge-ipv4": "true",
+ "lxd/bridge-ipv4-address": "10.0.8.1",
+ "lxd/bridge-ipv4-netmask": "24",
+ "lxd/bridge-ipv4-dhcp-first": "10.0.8.2",
+ "lxd/bridge-ipv4-dhcp-last": "10.0.8.254",
+ "lxd/bridge-ipv4-dhcp-leases": "250",
+ "lxd/bridge-ipv4-nat": "true",
+ "lxd/bridge-ipv6": "true",
+ "lxd/bridge-ipv6-address": "fd98:9e0:3744::1",
+ "lxd/bridge-ipv6-netmask": "64",
+ "lxd/bridge-ipv6-nat": "true",
+ "lxd/bridge-domain": "lxd"})
+
+ def test_lxd_debconf_new_partial(self):
+ data = {"mode": "new",
+ "ipv6_address": "fd98:9e0:3744::1",
+ "ipv6_netmask": "64",
+ "ipv6_nat": "true"}
+ self.assertEqual(
+ cc_lxd.bridge_to_debconf(data),
+ {"lxd/setup-bridge": "true",
+ "lxd/bridge-ipv6": "true",
+ "lxd/bridge-ipv6-address": "fd98:9e0:3744::1",
+ "lxd/bridge-ipv6-netmask": "64",
+ "lxd/bridge-ipv6-nat": "true"})
+
+ def test_lxd_debconf_existing(self):
+ data = {"mode": "existing",
+ "name": "testbr0"}
+ self.assertEqual(
+ cc_lxd.bridge_to_debconf(data),
+ {"lxd/setup-bridge": "false",
+ "lxd/use-existing-bridge": "true",
+ "lxd/bridge-name": "testbr0"})
+
+ def test_lxd_debconf_none(self):
+ data = {"mode": "none"}
+ self.assertEqual(
+ cc_lxd.bridge_to_debconf(data),
+ {"lxd/setup-bridge": "false",
+ "lxd/bridge-name": ""})
+
+ def test_lxd_cmd_new_full(self):
+ data = {"mode": "new",
+ "name": "testbr0",
+ "ipv4_address": "10.0.8.1",
+ "ipv4_netmask": "24",
+ "ipv4_dhcp_first": "10.0.8.2",
+ "ipv4_dhcp_last": "10.0.8.254",
+ "ipv4_dhcp_leases": "250",
+ "ipv4_nat": "true",
+ "ipv6_address": "fd98:9e0:3744::1",
+ "ipv6_netmask": "64",
+ "ipv6_nat": "true",
+ "domain": "lxd"}
+ self.assertEqual(
+ cc_lxd.bridge_to_cmd(data),
+ (["network", "create", "testbr0",
+ "ipv4.address=10.0.8.1/24", "ipv4.nat=true",
+ "ipv4.dhcp.ranges=10.0.8.2-10.0.8.254",
+ "ipv6.address=fd98:9e0:3744::1/64",
+ "ipv6.nat=true", "dns.domain=lxd"],
+ ["network", "attach-profile",
+ "testbr0", "default", "eth0"]))
+
+ def test_lxd_cmd_new_partial(self):
+ data = {"mode": "new",
+ "ipv6_address": "fd98:9e0:3744::1",
+ "ipv6_netmask": "64",
+ "ipv6_nat": "true"}
+ self.assertEqual(
+ cc_lxd.bridge_to_cmd(data),
+ (["network", "create", "lxdbr0", "ipv4.address=none",
+ "ipv6.address=fd98:9e0:3744::1/64", "ipv6.nat=true"],
+ ["network", "attach-profile",
+ "lxdbr0", "default", "eth0"]))
+
+ def test_lxd_cmd_existing(self):
+ data = {"mode": "existing",
+ "name": "testbr0"}
+ self.assertEqual(
+ cc_lxd.bridge_to_cmd(data),
+ (None, ["network", "attach-profile",
+ "testbr0", "default", "eth0"]))
+
+ def test_lxd_cmd_none(self):
+ data = {"mode": "none"}
+ self.assertEqual(
+ cc_lxd.bridge_to_cmd(data),
+ (None, None))
+
+
+class TestLxdMaybeCleanupDefault(t_help.CiTestCase):
+ """Test the implementation of maybe_cleanup_default."""
+
+ defnet = cc_lxd._DEFAULT_NETWORK_NAME
+
+ @mock.patch("cloudinit.config.cc_lxd._lxc")
+ def test_network_other_than_default_not_deleted(self, m_lxc):
+ """deletion or removal should only occur if bridge is default."""
+ cc_lxd.maybe_cleanup_default(
+ net_name="lxdbr1", did_init=True, create=True, attach=True)
+ m_lxc.assert_not_called()
+
+ @mock.patch("cloudinit.config.cc_lxd._lxc")
+ def test_did_init_false_does_not_delete(self, m_lxc):
+ """deletion or removal should only occur if did_init is True."""
+ cc_lxd.maybe_cleanup_default(
+ net_name=self.defnet, did_init=False, create=True, attach=True)
+ m_lxc.assert_not_called()
+
+ @mock.patch("cloudinit.config.cc_lxd._lxc")
+ def test_network_deleted_if_create_true(self, m_lxc):
+ """deletion of network should occur if create is True."""
+ cc_lxd.maybe_cleanup_default(
+ net_name=self.defnet, did_init=True, create=True, attach=False)
+ m_lxc.assert_called_with(["network", "delete", self.defnet])
+
+ @mock.patch("cloudinit.config.cc_lxd._lxc")
+ def test_device_removed_if_attach_true(self, m_lxc):
+        """removal of the nic from the profile should occur if attach is True."""
+ nic_name = "my_nic"
+ profile = "my_profile"
+ cc_lxd.maybe_cleanup_default(
+ net_name=self.defnet, did_init=True, create=False, attach=True,
+ profile=profile, nic_name=nic_name)
+ m_lxc.assert_called_once_with(
+ ["profile", "device", "remove", profile, nic_name])
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_mcollective.py b/tests/unittests/config/test_cc_mcollective.py
new file mode 100644
index 00000000..fff777b6
--- /dev/null
+++ b/tests/unittests/config/test_cc_mcollective.py
@@ -0,0 +1,146 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import configobj
+import logging
+import os
+import shutil
+import tempfile
+from io import BytesIO
+
+from cloudinit import (util)
+from cloudinit.config import cc_mcollective
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+STOCK_CONFIG = """\
+main_collective = mcollective
+collectives = mcollective
+libdir = /usr/share/mcollective/plugins
+logfile = /var/log/mcollective.log
+loglevel = info
+daemonize = 1
+
+# Plugins
+securityprovider = psk
+plugin.psk = unset
+
+connector = activemq
+plugin.activemq.pool.size = 1
+plugin.activemq.pool.1.host = stomp1
+plugin.activemq.pool.1.port = 61613
+plugin.activemq.pool.1.user = mcollective
+plugin.activemq.pool.1.password = marionette
+
+# Facts
+factsource = yaml
+plugin.yaml = /etc/mcollective/facts.yaml
+"""
+
+
+class TestConfig(t_help.FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestConfig, self).setUp()
+ self.tmp = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tmp)
+ # "./": make os.path.join behave correctly with abs path as second arg
+ self.server_cfg = os.path.join(
+ self.tmp, "./" + cc_mcollective.SERVER_CFG)
+ self.pubcert_file = os.path.join(
+ self.tmp, "./" + cc_mcollective.PUBCERT_FILE)
+ self.pricert_file = os.path.join(
+ self.tmp, self.tmp, "./" + cc_mcollective.PRICERT_FILE)
+
+ def test_basic_config(self):
+ cfg = {
+ 'mcollective': {
+ 'conf': {
+ 'loglevel': 'debug',
+ 'connector': 'rabbitmq',
+ 'logfile': '/var/log/mcollective.log',
+ 'ttl': '4294957',
+ 'collectives': 'mcollective',
+ 'main_collective': 'mcollective',
+ 'securityprovider': 'psk',
+ 'daemonize': '1',
+ 'factsource': 'yaml',
+ 'direct_addressing': '1',
+ 'plugin.psk': 'unset',
+ 'libdir': '/usr/share/mcollective/plugins',
+ 'identity': '1',
+ },
+ },
+ }
+ expected = cfg['mcollective']['conf']
+
+ self.patchUtils(self.tmp)
+ cc_mcollective.configure(cfg['mcollective']['conf'])
+ contents = util.load_file(cc_mcollective.SERVER_CFG, decode=False)
+ contents = configobj.ConfigObj(BytesIO(contents))
+ self.assertEqual(expected, dict(contents))
+
+ def test_existing_config_is_saved(self):
+ cfg = {'loglevel': 'warn'}
+ util.write_file(self.server_cfg, STOCK_CONFIG)
+ cc_mcollective.configure(config=cfg, server_cfg=self.server_cfg)
+ self.assertTrue(os.path.exists(self.server_cfg))
+ self.assertTrue(os.path.exists(self.server_cfg + ".old"))
+ self.assertEqual(util.load_file(self.server_cfg + ".old"),
+ STOCK_CONFIG)
+
+ def test_existing_updated(self):
+ cfg = {'loglevel': 'warn'}
+ util.write_file(self.server_cfg, STOCK_CONFIG)
+ cc_mcollective.configure(config=cfg, server_cfg=self.server_cfg)
+ cfgobj = configobj.ConfigObj(self.server_cfg)
+ self.assertEqual(cfg['loglevel'], cfgobj['loglevel'])
+
+ def test_certificats_written(self):
+ # check public-cert and private-cert keys in config get written
+ cfg = {'loglevel': 'debug',
+ 'public-cert': "this is my public-certificate",
+ 'private-cert': "secret private certificate"}
+
+ cc_mcollective.configure(
+ config=cfg, server_cfg=self.server_cfg,
+ pricert_file=self.pricert_file, pubcert_file=self.pubcert_file)
+
+ found = configobj.ConfigObj(self.server_cfg)
+
+        # make sure these didn't get written into the config file
+ self.assertFalse('public-cert' in found)
+ self.assertFalse('private-cert' in found)
+
+ # these need updating to the specified paths
+ self.assertEqual(found['plugin.ssl_server_public'], self.pubcert_file)
+ self.assertEqual(found['plugin.ssl_server_private'], self.pricert_file)
+
+ # and the security provider should be ssl
+ self.assertEqual(found['securityprovider'], 'ssl')
+
+ self.assertEqual(
+ util.load_file(self.pricert_file), cfg['private-cert'])
+ self.assertEqual(
+ util.load_file(self.pubcert_file), cfg['public-cert'])
+
+
+class TestHandler(t_help.TestCase):
+ @t_help.mock.patch("cloudinit.config.cc_mcollective.subp")
+ @t_help.mock.patch("cloudinit.config.cc_mcollective.util")
+ def test_mcollective_install(self, mock_util, mock_subp):
+ cc = get_cloud()
+ cc.distro = t_help.mock.MagicMock()
+ mock_util.load_file.return_value = b""
+ mycfg = {'mcollective': {'conf': {'loglevel': 'debug'}}}
+ cc_mcollective.handle('cc_mcollective', mycfg, cc, LOG, [])
+ self.assertTrue(cc.distro.install_packages.called)
+ install_pkg = cc.distro.install_packages.call_args_list[0][0][0]
+ self.assertEqual(install_pkg, ('mcollective',))
+
+ self.assertTrue(mock_subp.subp.called)
+ self.assertEqual(mock_subp.subp.call_args_list[0][0][0],
+ ['service', 'mcollective', 'restart'])
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_mounts.py b/tests/unittests/config/test_cc_mounts.py
new file mode 100644
index 00000000..fc65f108
--- /dev/null
+++ b/tests/unittests/config/test_cc_mounts.py
@@ -0,0 +1,461 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import pytest
+import os.path
+from unittest import mock
+
+from tests.unittests import helpers as test_helpers
+from cloudinit.config import cc_mounts
+from cloudinit.config.cc_mounts import create_swapfile
+from cloudinit.subp import ProcessExecutionError
+
+M_PATH = 'cloudinit.config.cc_mounts.'
+
+
+class TestSanitizeDevname(test_helpers.FilesystemMockingTestCase):
+
+ def setUp(self):
+ super(TestSanitizeDevname, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.patchOS(self.new_root)
+
+ def _touch(self, path):
+ path = os.path.join(self.new_root, path.lstrip('/'))
+ basedir = os.path.dirname(path)
+ if not os.path.exists(basedir):
+ os.makedirs(basedir)
+ open(path, 'a').close()
+
+ def _makedirs(self, directory):
+ directory = os.path.join(self.new_root, directory.lstrip('/'))
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+ def mock_existence_of_disk(self, disk_path):
+ self._touch(disk_path)
+ self._makedirs(os.path.join('/sys/block', disk_path.split('/')[-1]))
+
+ def mock_existence_of_partition(self, disk_path, partition_number):
+ self.mock_existence_of_disk(disk_path)
+ self._touch(disk_path + str(partition_number))
+ disk_name = disk_path.split('/')[-1]
+ self._makedirs(os.path.join('/sys/block',
+ disk_name,
+ disk_name + str(partition_number)))
+
+ def test_existent_full_disk_path_is_returned(self):
+ disk_path = '/dev/sda'
+ self.mock_existence_of_disk(disk_path)
+ self.assertEqual(disk_path,
+ cc_mounts.sanitize_devname(disk_path,
+ lambda x: None,
+ mock.Mock()))
+
+ def test_existent_disk_name_returns_full_path(self):
+ disk_name = 'sda'
+ disk_path = '/dev/' + disk_name
+ self.mock_existence_of_disk(disk_path)
+ self.assertEqual(disk_path,
+ cc_mounts.sanitize_devname(disk_name,
+ lambda x: None,
+ mock.Mock()))
+
+ def test_existent_meta_disk_is_returned(self):
+ actual_disk_path = '/dev/sda'
+ self.mock_existence_of_disk(actual_disk_path)
+ self.assertEqual(
+ actual_disk_path,
+ cc_mounts.sanitize_devname('ephemeral0',
+ lambda x: actual_disk_path,
+ mock.Mock()))
+
+ def test_existent_meta_partition_is_returned(self):
+ disk_name, partition_part = '/dev/sda', '1'
+ actual_partition_path = disk_name + partition_part
+ self.mock_existence_of_partition(disk_name, partition_part)
+ self.assertEqual(
+ actual_partition_path,
+ cc_mounts.sanitize_devname('ephemeral0.1',
+ lambda x: disk_name,
+ mock.Mock()))
+
+ def test_existent_meta_partition_with_p_is_returned(self):
+ disk_name, partition_part = '/dev/sda', 'p1'
+ actual_partition_path = disk_name + partition_part
+ self.mock_existence_of_partition(disk_name, partition_part)
+ self.assertEqual(
+ actual_partition_path,
+ cc_mounts.sanitize_devname('ephemeral0.1',
+ lambda x: disk_name,
+ mock.Mock()))
+
+ def test_first_partition_returned_if_existent_disk_is_partitioned(self):
+ disk_name, partition_part = '/dev/sda', '1'
+ actual_partition_path = disk_name + partition_part
+ self.mock_existence_of_partition(disk_name, partition_part)
+ self.assertEqual(
+ actual_partition_path,
+ cc_mounts.sanitize_devname('ephemeral0',
+ lambda x: disk_name,
+ mock.Mock()))
+
+ def test_nth_partition_returned_if_requested(self):
+ disk_name, partition_part = '/dev/sda', '3'
+ actual_partition_path = disk_name + partition_part
+ self.mock_existence_of_partition(disk_name, partition_part)
+ self.assertEqual(
+ actual_partition_path,
+ cc_mounts.sanitize_devname('ephemeral0.3',
+ lambda x: disk_name,
+ mock.Mock()))
+
+ def test_transformer_returning_none_returns_none(self):
+ self.assertIsNone(
+ cc_mounts.sanitize_devname(
+ 'ephemeral0', lambda x: None, mock.Mock()))
+
+ def test_missing_device_returns_none(self):
+ self.assertIsNone(
+ cc_mounts.sanitize_devname('/dev/sda', None, mock.Mock()))
+
+ def test_missing_sys_returns_none(self):
+ disk_path = '/dev/sda'
+ self._makedirs(disk_path)
+ self.assertIsNone(
+ cc_mounts.sanitize_devname(disk_path, None, mock.Mock()))
+
+ def test_existent_disk_but_missing_partition_returns_none(self):
+ disk_path = '/dev/sda'
+ self.mock_existence_of_disk(disk_path)
+ self.assertIsNone(
+ cc_mounts.sanitize_devname(
+ 'ephemeral0.1', lambda x: disk_path, mock.Mock()))
+
+ def test_network_device_returns_network_device(self):
+ disk_path = 'netdevice:/path'
+ self.assertEqual(
+ disk_path,
+ cc_mounts.sanitize_devname(disk_path, None, mock.Mock()))
+
+ def test_device_aliases_remapping(self):
+ disk_path = '/dev/sda'
+ self.mock_existence_of_disk(disk_path)
+ self.assertEqual(disk_path,
+ cc_mounts.sanitize_devname('mydata',
+ lambda x: None,
+ mock.Mock(),
+ {'mydata': disk_path}))
+
+
+class TestSwapFileCreation(test_helpers.FilesystemMockingTestCase):
+
+ def setUp(self):
+ super(TestSwapFileCreation, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.patchOS(self.new_root)
+
+ self.fstab_path = os.path.join(self.new_root, 'etc/fstab')
+ self.swap_path = os.path.join(self.new_root, 'swap.img')
+ self._makedirs('/etc')
+
+ self.add_patch('cloudinit.config.cc_mounts.FSTAB_PATH',
+ 'mock_fstab_path',
+ self.fstab_path,
+ autospec=False)
+
+ self.add_patch('cloudinit.config.cc_mounts.subp.subp',
+ 'm_subp_subp')
+
+ self.add_patch('cloudinit.config.cc_mounts.util.mounts',
+ 'mock_util_mounts',
+ return_value={
+ '/dev/sda1': {'fstype': 'ext4',
+ 'mountpoint': '/',
+ 'opts': 'rw,relatime,discard'
+ }})
+
+ self.mock_cloud = mock.Mock()
+ self.mock_log = mock.Mock()
+ self.mock_cloud.device_name_to_device = self.device_name_to_device
+
+ self.cc = {
+ 'swap': {
+ 'filename': self.swap_path,
+ 'size': '512',
+ 'maxsize': '512'}}
+
+ def _makedirs(self, directory):
+ directory = os.path.join(self.new_root, directory.lstrip('/'))
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+ def device_name_to_device(self, path):
+ if path == 'swap':
+ return self.swap_path
+ else:
+ dev = None
+
+ return dev
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_fallocate_on_xfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (4, 20)
+ m_get_mount_info.return_value = ["", "xfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_xfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (3, 18)
+ m_get_mount_info.return_value = ["", "xfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['dd', 'if=/dev/zero',
+ 'of=' + self.swap_path,
+ 'bs=1M', 'count=0'], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_btrfs(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (4, 20)
+ m_get_mount_info.return_value = ["", "btrfs"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['dd', 'if=/dev/zero',
+ 'of=' + self.swap_path,
+ 'bs=1M', 'count=0'], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.kernel_version')
+ def test_swap_creation_method_ext4(self, m_kernel_version,
+ m_get_mount_info):
+ m_kernel_version.return_value = (5, 14)
+ m_get_mount_info.return_value = ["", "ext4"]
+
+ cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
+ self.m_subp_subp.assert_has_calls([
+ mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
+ mock.call(['mkswap', self.swap_path]),
+ mock.call(['swapon', '-a'])])
+
+
+class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
+
+ swap_path = '/dev/sdb1'
+
+ def setUp(self):
+ super(TestFstabHandling, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.patchOS(self.new_root)
+
+ self.fstab_path = os.path.join(self.new_root, 'etc/fstab')
+ self._makedirs('/etc')
+
+ self.add_patch('cloudinit.config.cc_mounts.FSTAB_PATH',
+ 'mock_fstab_path',
+ self.fstab_path,
+ autospec=False)
+
+ self.add_patch('cloudinit.config.cc_mounts._is_block_device',
+ 'mock_is_block_device',
+ return_value=True)
+
+ self.add_patch('cloudinit.config.cc_mounts.subp.subp',
+ 'm_subp_subp')
+
+ self.add_patch('cloudinit.config.cc_mounts.util.mounts',
+ 'mock_util_mounts',
+ return_value={
+ '/dev/sda1': {'fstype': 'ext4',
+ 'mountpoint': '/',
+ 'opts': 'rw,relatime,discard'
+ }})
+
+ self.mock_cloud = mock.Mock()
+ self.mock_log = mock.Mock()
+ self.mock_cloud.device_name_to_device = self.device_name_to_device
+
+ def _makedirs(self, directory):
+ directory = os.path.join(self.new_root, directory.lstrip('/'))
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+ def device_name_to_device(self, path):
+ if path == 'swap':
+ return self.swap_path
+ else:
+ dev = None
+
+ return dev
+
+ def test_no_fstab(self):
+ """ Handle images which do not include an fstab. """
+ self.assertFalse(os.path.exists(cc_mounts.FSTAB_PATH))
+ fstab_expected_content = (
+ '%s\tnone\tswap\tsw,comment=cloudconfig\t'
+ '0\t0\n' % (self.swap_path,)
+ )
+ cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
+ with open(cc_mounts.FSTAB_PATH, 'r') as fd:
+ fstab_new_content = fd.read()
+ self.assertEqual(fstab_expected_content, fstab_new_content)
+
+ def test_swap_integrity(self):
+ '''Ensure that the swap file is correctly created and can
+ swapon successfully. Fixing the corner case of:
+ kernel: swapon: swapfile has holes'''
+
+ fstab = '/swap.img swap swap defaults 0 0\n'
+
+ with open(cc_mounts.FSTAB_PATH, 'w') as fd:
+ fd.write(fstab)
+ cc = {'swap': ['filename: /swap.img', 'size: 512', 'maxsize: 512']}
+ cc_mounts.handle(None, cc, self.mock_cloud, self.mock_log, [])
+
+ def test_fstab_no_swap_device(self):
+ '''Ensure that cloud-init adds a discovered swap partition
+ to /etc/fstab.'''
+
+ fstab_original_content = ''
+ fstab_expected_content = (
+ '%s\tnone\tswap\tsw,comment=cloudconfig\t'
+ '0\t0\n' % (self.swap_path,)
+ )
+
+ with open(cc_mounts.FSTAB_PATH, 'w') as fd:
+ fd.write(fstab_original_content)
+
+ cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
+
+ with open(cc_mounts.FSTAB_PATH, 'r') as fd:
+ fstab_new_content = fd.read()
+ self.assertEqual(fstab_expected_content, fstab_new_content)
+
+ def test_fstab_same_swap_device_already_configured(self):
+ '''Ensure that cloud-init will not add a swap device if the same
+ device already exists in /etc/fstab.'''
+
+ fstab_original_content = '%s swap swap defaults 0 0\n' % (
+ self.swap_path,)
+ fstab_expected_content = fstab_original_content
+
+ with open(cc_mounts.FSTAB_PATH, 'w') as fd:
+ fd.write(fstab_original_content)
+
+ cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
+
+ with open(cc_mounts.FSTAB_PATH, 'r') as fd:
+ fstab_new_content = fd.read()
+ self.assertEqual(fstab_expected_content, fstab_new_content)
+
+ def test_fstab_alternate_swap_device_already_configured(self):
+ '''Ensure that cloud-init will add a discovered swap device to
+ /etc/fstab even when there exists a swap definition on another
+ device.'''
+
+ fstab_original_content = '/dev/sdc1 swap swap defaults 0 0\n'
+ fstab_expected_content = (
+ fstab_original_content +
+ '%s\tnone\tswap\tsw,comment=cloudconfig\t'
+ '0\t0\n' % (self.swap_path,)
+ )
+
+ with open(cc_mounts.FSTAB_PATH, 'w') as fd:
+ fd.write(fstab_original_content)
+
+ cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
+
+ with open(cc_mounts.FSTAB_PATH, 'r') as fd:
+ fstab_new_content = fd.read()
+ self.assertEqual(fstab_expected_content, fstab_new_content)
+
    def test_no_change_fstab_sets_needs_mount_all(self):
        """An fstab entry matching the config is left unchanged, but the
        mountpoints are still activated via 'mount -a'."""
        fstab_original_content = (
            'LABEL=cloudimg-rootfs / ext4 defaults 0 0\n'
            'LABEL=UEFI /boot/efi vfat defaults 0 0\n'
            '/dev/vdb /mnt auto defaults,noexec,comment=cloudconfig 0 2\n'
        )
        fstab_expected_content = fstab_original_content
        cc = {
            'mounts': [
                ['/dev/vdb', '/mnt', 'auto', 'defaults,noexec']
            ]
        }
        with open(cc_mounts.FSTAB_PATH, 'w') as fd:
            fd.write(fstab_original_content)
        with open(cc_mounts.FSTAB_PATH, 'r') as fd:
            fstab_new_content = fd.read()
        self.assertEqual(fstab_expected_content, fstab_new_content)
        cc_mounts.handle(None, cc, self.mock_cloud, self.mock_log, [])
        # Even with no fstab changes, handle() must still run mount -a
        # and reload systemd units.
        self.m_subp_subp.assert_has_calls([
            mock.call(['mount', '-a']),
            mock.call(['systemctl', 'daemon-reload'])])
+
+
class TestCreateSwapfile:
    """Tests for cc_mounts.create_swapfile with subp and mount info mocked."""

    @pytest.mark.parametrize('fstype', ('xfs', 'btrfs', 'ext4', 'other'))
    @mock.patch(M_PATH + 'util.get_mount_info')
    @mock.patch(M_PATH + 'subp.subp')
    def test_happy_path(self, m_subp, m_get_mount_info, fstype, tmpdir):
        """create_swapfile runs mkswap for every filesystem type."""
        swap_file = tmpdir.join("swap-file")
        fname = str(swap_file)

        # Some of the calls to subp.subp should create the swap file; this
        # roughly approximates that
        m_subp.side_effect = lambda *args, **kwargs: swap_file.write('')

        m_get_mount_info.return_value = (mock.ANY, fstype)

        create_swapfile(fname, '')
        assert mock.call(['mkswap', fname]) in m_subp.call_args_list

    @mock.patch(M_PATH + "util.get_mount_info")
    @mock.patch(M_PATH + "subp.subp")
    def test_fallback_from_fallocate_to_dd(
        self, m_subp, m_get_mount_info, caplog, tmpdir
    ):
        """When fallocate fails, swap creation falls back to dd."""
        swap_file = tmpdir.join("swap-file")
        fname = str(swap_file)

        def subp_side_effect(cmd, *args, **kwargs):
            # Mock fallocate failing, to initiate fallback
            if cmd[0] == "fallocate":
                raise ProcessExecutionError()

        m_subp.side_effect = subp_side_effect
        # Use ext4 so both fallocate and dd are valid swap creation methods
        m_get_mount_info.return_value = (mock.ANY, "ext4")

        create_swapfile(fname, "")

        cmds = [args[0][0] for args, _kwargs in m_subp.call_args_list]
        assert "fallocate" in cmds, "fallocate was not called"
        assert "dd" in cmds, "fallocate failure did not fallback to dd"

        # dd must only run after fallocate has been attempted.
        assert cmds.index("dd") > cmds.index(
            "fallocate"
        ), "dd ran before fallocate"

        assert mock.call(["mkswap", fname]) in m_subp.call_args_list

        msg = "fallocate swap creation failed, will attempt with dd"
        assert msg in caplog.text
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ntp.py b/tests/unittests/config/test_cc_ntp.py
new file mode 100644
index 00000000..3426533a
--- /dev/null
+++ b/tests/unittests/config/test_cc_ntp.py
@@ -0,0 +1,765 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import copy
+import os
+import shutil
+from functools import partial
+from os.path import dirname
+
+from cloudinit import (helpers, util)
+from cloudinit.config import cc_ntp
+from tests.unittests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
+
+from tests.unittests.util import get_cloud
+
+
# Minimal jinja template exercising the 'servers' and 'pools' template
# variables; the '## template: jinja' header selects the render engine.
NTP_TEMPLATE = """\
## template: jinja
servers {{servers}}
pools {{pools}}
"""

# systemd-timesyncd style template: servers and pools are merged into a
# single space-separated NTP= line under [Time].
TIMESYNCD_TEMPLATE = """\
## template:jinja
[Time]
{% if servers or pools -%}
NTP={% for host in servers|list + pools|list %}{{ host }} {% endfor -%}
{% endif -%}
"""
+
+
class TestNtp(FilesystemMockingTestCase):
    """Tests for the cc_ntp config module."""

    # Capture log output so tests can assert on emitted messages.
    with_logs = True

    def setUp(self):
        """Chroot into a temp dir and stub out snappy detection."""
        super(TestNtp, self).setUp()
        self.new_root = self.tmp_dir()
        self.add_patch('cloudinit.util.system_is_snappy', 'm_snappy')
        self.m_snappy.return_value = False
        # reRoot() gives the active chroot; templates are rendered from it.
        self.new_root = self.reRoot()
        self._get_cloud = partial(
            get_cloud,
            paths=helpers.Paths({'templates_dir': self.new_root})
        )
+
+ def _get_template_path(self, template_name, distro, basepath=None):
+ # ntp.conf.{distro} -> ntp.conf.debian.tmpl
+ template_fn = '{0}.tmpl'.format(
+ template_name.replace('{distro}', distro))
+ if not basepath:
+ basepath = self.new_root
+ path = os.path.join(basepath, template_fn)
+ return path
+
+ def _generate_template(self, template=None):
+ if not template:
+ template = NTP_TEMPLATE
+ confpath = os.path.join(self.new_root, 'client.conf')
+ template_fn = os.path.join(self.new_root, 'client.conf.tmpl')
+ util.write_file(template_fn, content=template)
+ return (confpath, template_fn)
+
+ def _mock_ntp_client_config(self, client=None, distro=None):
+ if not client:
+ client = 'ntp'
+ if not distro:
+ distro = 'ubuntu'
+ dcfg = cc_ntp.distro_ntp_client_configs(distro)
+ if client == 'systemd-timesyncd':
+ template = TIMESYNCD_TEMPLATE
+ else:
+ template = NTP_TEMPLATE
+ (confpath, _template_fn) = self._generate_template(template=template)
+ ntpconfig = copy.deepcopy(dcfg[client])
+ ntpconfig['confpath'] = confpath
+ ntpconfig['template_name'] = os.path.basename(confpath)
+ return ntpconfig
+
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install(self, mock_subp):
+ """ntp_install_client runs install_func when check_exe is absent."""
+ mock_subp.which.return_value = None # check_exe not found.
+ install_func = mock.MagicMock()
+ cc_ntp.install_ntp_client(install_func,
+ packages=['ntpx'], check_exe='ntpdx')
+ mock_subp.which.assert_called_with('ntpdx')
+ install_func.assert_called_once_with(['ntpx'])
+
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install_not_needed(self, mock_subp):
+ """ntp_install_client doesn't install when check_exe is found."""
+ client = 'chrony'
+ mock_subp.which.return_value = [client] # check_exe found.
+ install_func = mock.MagicMock()
+ cc_ntp.install_ntp_client(install_func, packages=[client],
+ check_exe=client)
+ install_func.assert_not_called()
+
+ @mock.patch("cloudinit.config.cc_ntp.subp")
+ def test_ntp_install_no_op_with_empty_pkg_list(self, mock_subp):
+ """ntp_install_client runs install_func with empty list"""
+ mock_subp.which.return_value = None # check_exe not found
+ install_func = mock.MagicMock()
+ cc_ntp.install_ntp_client(install_func, packages=[],
+ check_exe='timesyncd')
+ install_func.assert_called_once_with([])
+
+ def test_ntp_rename_ntp_conf(self):
+ """When NTP_CONF exists, rename_ntp moves it."""
+ ntpconf = self.tmp_path("ntp.conf", self.new_root)
+ util.write_file(ntpconf, "")
+ cc_ntp.rename_ntp_conf(confpath=ntpconf)
+ self.assertFalse(os.path.exists(ntpconf))
+ self.assertTrue(os.path.exists("{0}.dist".format(ntpconf)))
+
+ def test_ntp_rename_ntp_conf_skip_missing(self):
+ """When NTP_CONF doesn't exist rename_ntp doesn't create a file."""
+ ntpconf = self.tmp_path("ntp.conf", self.new_root)
+ self.assertFalse(os.path.exists(ntpconf))
+ cc_ntp.rename_ntp_conf(confpath=ntpconf)
+ self.assertFalse(os.path.exists("{0}.dist".format(ntpconf)))
+ self.assertFalse(os.path.exists(ntpconf))
+
    def test_write_ntp_config_template_uses_ntp_conf_distro_no_servers(self):
        """write_ntp_config_template reads from $client.conf.distro.tmpl"""
        servers = []
        pools = ['10.0.0.1', '10.0.0.2']
        (confpath, template_fn) = self._generate_template()
        # Keep rendering's temp files inside the chroot.
        mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
        with mock.patch(mock_path, self.new_root):
            cc_ntp.write_ntp_config_template('ubuntu',
                                             servers=servers, pools=pools,
                                             path=confpath,
                                             template_fn=template_fn,
                                             template=None)
        self.assertEqual(
            "servers []\npools ['10.0.0.1', '10.0.0.2']\n",
            util.load_file(confpath))
+
    def test_write_ntp_config_template_defaults_pools_w_empty_lists(self):
        """write_ntp_config_template defaults pools servers upon empty config.

        When both pools and servers are empty, default NR_POOL_SERVERS get
        configured.
        """
        distro = 'ubuntu'
        pools = cc_ntp.generate_server_names(distro)
        servers = []
        (confpath, template_fn) = self._generate_template()
        # Keep rendering's temp files inside the chroot.
        mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
        with mock.patch(mock_path, self.new_root):
            cc_ntp.write_ntp_config_template(distro,
                                             servers=servers, pools=pools,
                                             path=confpath,
                                             template_fn=template_fn,
                                             template=None)
        self.assertEqual(
            "servers []\npools {0}\n".format(pools),
            util.load_file(confpath))
+
    def test_defaults_pools_empty_lists_sles(self):
        """write_ntp_config_template defaults opensuse pools upon empty config.

        When both pools and servers are empty, default NR_POOL_SERVERS get
        configured.
        """
        distro = 'sles'
        default_pools = cc_ntp.generate_server_names(distro)
        (confpath, template_fn) = self._generate_template()

        cc_ntp.write_ntp_config_template(distro,
                                         servers=[], pools=[],
                                         path=confpath,
                                         template_fn=template_fn,
                                         template=None)
        # sles default pools are drawn from the opensuse pool domain.
        for pool in default_pools:
            self.assertIn('opensuse', pool)
        self.assertEqual(
            "servers []\npools {0}\n".format(default_pools),
            util.load_file(confpath))
        self.assertIn(
            "Adding distro default ntp pool servers: {0}".format(
                ",".join(default_pools)),
            self.logs.getvalue())
+
    def test_timesyncd_template(self):
        """The timesyncd template renders servers and pools on one NTP= line."""
        pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org']
        servers = ['192.168.23.3', '192.168.23.4']
        (confpath, template_fn) = self._generate_template(
            template=TIMESYNCD_TEMPLATE)
        cc_ntp.write_ntp_config_template('ubuntu',
                                         servers=servers, pools=pools,
                                         path=confpath,
                                         template_fn=template_fn,
                                         template=None)
        self.assertEqual(
            "[Time]\nNTP=%s %s \n" % (" ".join(servers), " ".join(pools)),
            util.load_file(confpath))
+
    def test_distro_ntp_client_configs(self):
        """Test we have updated ntp client configs on different distros"""
        delta = copy.deepcopy(cc_ntp.DISTRO_CLIENT_CONFIG)
        base = copy.deepcopy(cc_ntp.NTP_CLIENT_CONFIG)
        # confirm no-delta distros match the base config
        for distro in cc_ntp.distros:
            if distro not in delta:
                result = cc_ntp.distro_ntp_client_configs(distro)
                self.assertEqual(base, result)
        # for distros with delta, ensure the merged config values match
        # what is set in the delta
        for distro in delta.keys():
            result = cc_ntp.distro_ntp_client_configs(distro)
            for client in delta[distro].keys():
                for key in delta[distro][client].keys():
                    self.assertEqual(delta[distro][client][key],
                                     result[client][key])
+
+ def _get_expected_pools(self, pools, distro, client):
+ if client in ['ntp', 'chrony']:
+ if client == 'ntp' and distro == 'alpine':
+ # NTP for Alpine Linux is Busybox's ntp which does not
+ # support 'pool' lines in its configuration file.
+ expected_pools = []
+ else:
+ expected_pools = [
+ 'pool {0} iburst'.format(pool) for pool in pools]
+ elif client == 'systemd-timesyncd':
+ expected_pools = " ".join(pools)
+
+ return expected_pools
+
+ def _get_expected_servers(self, servers, distro, client):
+ if client in ['ntp', 'chrony']:
+ if client == 'ntp' and distro == 'alpine':
+ # NTP for Alpine Linux is Busybox's ntp which only supports
+ # 'server' lines without iburst option.
+ expected_servers = [
+ 'server {0}'.format(srv) for srv in servers]
+ else:
+ expected_servers = [
+ 'server {0} iburst'.format(srv) for srv in servers]
+ elif client == 'systemd-timesyncd':
+ expected_servers = " ".join(servers)
+
+ return expected_servers
+
+ def test_ntp_handler_real_distro_ntp_templates(self):
+ """Test ntp handler renders the shipped distro ntp client templates."""
+ pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org']
+ servers = ['192.168.23.3', '192.168.23.4']
+ for client in ['ntp', 'systemd-timesyncd', 'chrony']:
+ for distro in cc_ntp.distros:
+ distro_cfg = cc_ntp.distro_ntp_client_configs(distro)
+ ntpclient = distro_cfg[client]
+ confpath = (
+ os.path.join(self.new_root, ntpclient.get('confpath')[1:]))
+ template = ntpclient.get('template_name')
+ # find sourcetree template file
+ root_dir = (
+ dirname(dirname(os.path.realpath(util.__file__))) +
+ '/templates')
+ source_fn = self._get_template_path(template, distro,
+ basepath=root_dir)
+ template_fn = self._get_template_path(template, distro)
+ # don't fail if cloud-init doesn't have a template for
+ # a distro,client pair
+ if not os.path.exists(source_fn):
+ continue
+ # Create a copy in our tmp_dir
+ shutil.copy(source_fn, template_fn)
+ cc_ntp.write_ntp_config_template(distro, servers=servers,
+ pools=pools, path=confpath,
+ template_fn=template_fn)
+ content = util.load_file(confpath)
+ if client in ['ntp', 'chrony']:
+ content_lines = content.splitlines()
+ expected_servers = self._get_expected_servers(servers,
+ distro,
+ client)
+ print('distro=%s client=%s' % (distro, client))
+ for sline in expected_servers:
+ self.assertIn(sline, content_lines,
+ ('failed to render {0} conf'
+ ' for distro:{1}'.format(client,
+ distro)))
+ expected_pools = self._get_expected_pools(pools, distro,
+ client)
+ if expected_pools != []:
+ for pline in expected_pools:
+ self.assertIn(pline, content_lines,
+ ('failed to render {0} conf'
+ ' for distro:{1}'.format(client,
+ distro)))
+ elif client == 'systemd-timesyncd':
+ expected_servers = self._get_expected_servers(servers,
+ distro,
+ client)
+ expected_pools = self._get_expected_pools(pools,
+ distro,
+ client)
+ expected_content = (
+ "# cloud-init generated file\n" +
+ "# See timesyncd.conf(5) for details.\n\n" +
+ "[Time]\nNTP=%s %s \n" % (expected_servers,
+ expected_pools))
+ self.assertEqual(expected_content, content)
+
+ def test_no_ntpcfg_does_nothing(self):
+ """When no ntp section is defined handler logs a warning and noops."""
+ cc_ntp.handle('cc_ntp', {}, None, None, [])
+ self.assertEqual(
+ 'DEBUG: Skipping module named cc_ntp, '
+ 'not present or disabled by cfg\n',
+ self.logs.getvalue())
+
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_handler_schema_validation_allows_empty_ntp_config(self,
                                                                   m_select):
        """Ntp schema validation allows for an empty ntp: configuration."""
        valid_empty_configs = [{'ntp': {}}, {'ntp': None}]
        for valid_empty_config in valid_empty_configs:
            for distro in cc_ntp.distros:
                mycloud = self._get_cloud(distro)
                ntpconfig = self._mock_ntp_client_config(distro=distro)
                confpath = ntpconfig['confpath']
                m_select.return_value = ntpconfig
                cc_ntp.handle('cc_ntp', valid_empty_config, mycloud, None, [])
                if distro == 'alpine':
                    # _mock_ntp_client_config call above did not specify a
                    # client value and so it defaults to "ntp" which on
                    # Alpine Linux only supports servers and not pools.

                    servers = cc_ntp.generate_server_names(mycloud.distro.name)
                    self.assertEqual(
                        "servers {0}\npools []\n".format(servers),
                        util.load_file(confpath))
                else:
                    pools = cc_ntp.generate_server_names(mycloud.distro.name)
                    self.assertEqual(
                        "servers []\npools {0}\n".format(pools),
                        util.load_file(confpath))
        # No schema warnings may be emitted for an empty config.
        self.assertNotIn('Invalid config:', self.logs.getvalue())
+
    @skipUnlessJsonSchema()
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_handler_schema_validation_warns_non_string_item_type(self,
                                                                      m_sel):
        """Ntp schema validation warns of non-strings in pools or servers.

        Schema validation is not strict, so ntp config is still be rendered.
        """
        invalid_config = {'ntp': {'pools': [123], 'servers': ['valid', None]}}
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            ntpconfig = self._mock_ntp_client_config(distro=distro)
            confpath = ntpconfig['confpath']
            m_sel.return_value = ntpconfig
            cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
            # A warning is logged, but the invalid values still render.
            self.assertIn(
                "Invalid config:\nntp.pools.0: 123 is not of type 'string'\n"
                "ntp.servers.1: None is not of type 'string'",
                self.logs.getvalue())
            self.assertEqual("servers ['valid', None]\npools [123]\n",
                             util.load_file(confpath))
+
    @skipUnlessJsonSchema()
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_handler_schema_validation_warns_of_non_array_type(self,
                                                                   m_select):
        """Ntp schema validation warns of non-array pools or servers types.

        Schema validation is not strict, so ntp config is still be rendered.
        """
        invalid_config = {'ntp': {'pools': 123, 'servers': 'non-array'}}

        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            ntpconfig = self._mock_ntp_client_config(distro=distro)
            confpath = ntpconfig['confpath']
            m_select.return_value = ntpconfig
            cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
            # A warning is logged, but the invalid values still render.
            self.assertIn(
                "Invalid config:\nntp.pools: 123 is not of type 'array'\n"
                "ntp.servers: 'non-array' is not of type 'array'",
                self.logs.getvalue())
            self.assertEqual("servers non-array\npools 123\n",
                             util.load_file(confpath))
+
    @skipUnlessJsonSchema()
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_handler_schema_validation_warns_invalid_key_present(self,
                                                                     m_select):
        """Ntp schema validation warns of invalid keys present in ntp config.

        Schema validation is not strict, so ntp config is still be rendered.
        """
        invalid_config = {
            'ntp': {'invalidkey': 1, 'pools': ['0.mycompany.pool.ntp.org']}}
        # NOTE(review): alpine is excluded below, presumably because its
        # default client renders servers rather than pools and would not
        # match the expected content -- confirm.
        for distro in cc_ntp.distros:
            if distro != 'alpine':
                mycloud = self._get_cloud(distro)
                ntpconfig = self._mock_ntp_client_config(distro=distro)
                confpath = ntpconfig['confpath']
                m_select.return_value = ntpconfig
                cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
                self.assertIn(
                    "Invalid config:\nntp: Additional properties are not "
                    "allowed ('invalidkey' was unexpected)",
                    self.logs.getvalue())
                self.assertEqual(
                    "servers []\npools ['0.mycompany.pool.ntp.org']\n",
                    util.load_file(confpath))
+
    @skipUnlessJsonSchema()
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_handler_schema_validation_warns_of_duplicates(self, m_select):
        """Ntp schema validation warns of duplicates in servers or pools.

        Schema validation is not strict, so ntp config is still be rendered.
        """
        invalid_config = {
            'ntp': {'pools': ['0.mypool.org', '0.mypool.org'],
                    'servers': ['10.0.0.1', '10.0.0.1']}}
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            ntpconfig = self._mock_ntp_client_config(distro=distro)
            confpath = ntpconfig['confpath']
            m_select.return_value = ntpconfig
            cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
            # A warning is logged, but duplicates still render as given.
            self.assertIn(
                "Invalid config:\nntp.pools: ['0.mypool.org', '0.mypool.org']"
                " has non-unique elements\nntp.servers: "
                "['10.0.0.1', '10.0.0.1'] has non-unique elements",
                self.logs.getvalue())
            self.assertEqual(
                "servers ['10.0.0.1', '10.0.0.1']\n"
                "pools ['0.mypool.org', '0.mypool.org']\n",
                util.load_file(confpath))
+
+ @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
+ def test_ntp_handler_timesyncd(self, m_select):
+ """Test ntp handler configures timesyncd"""
+ servers = ['192.168.2.1', '192.168.2.2']
+ pools = ['0.mypool.org']
+ cfg = {'ntp': {'servers': servers, 'pools': pools}}
+ client = 'systemd-timesyncd'
+ for distro in cc_ntp.distros:
+ mycloud = self._get_cloud(distro)
+ ntpconfig = self._mock_ntp_client_config(distro=distro,
+ client=client)
+ confpath = ntpconfig['confpath']
+ m_select.return_value = ntpconfig
+ cc_ntp.handle('cc_ntp', cfg, mycloud, None, [])
+ self.assertEqual(
+ "[Time]\nNTP=192.168.2.1 192.168.2.2 0.mypool.org \n",
+ util.load_file(confpath))
+
+ @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
+ def test_ntp_handler_enabled_false(self, m_select):
+ """Test ntp handler does not run if enabled: false """
+ cfg = {'ntp': {'enabled': False}}
+ for distro in cc_ntp.distros:
+ mycloud = self._get_cloud(distro)
+ cc_ntp.handle('notimportant', cfg, mycloud, None, None)
+ self.assertEqual(0, m_select.call_count)
+
    @mock.patch("cloudinit.distros.subp")
    @mock.patch("cloudinit.config.cc_ntp.subp")
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    @mock.patch("cloudinit.distros.Distro.uses_systemd")
    def test_ntp_the_whole_package(self, m_sysd, m_select, m_subp, m_dsubp):
        """Test enabled config renders template, and restarts service """
        cfg = {'ntp': {'enabled': True}}
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            ntpconfig = self._mock_ntp_client_config(distro=distro)
            confpath = ntpconfig['confpath']
            service_name = ntpconfig['service_name']
            m_select.return_value = ntpconfig

            hosts = cc_ntp.generate_server_names(mycloud.distro.name)
            uses_systemd = True
            expected_service_call = ['systemctl', 'reload-or-restart',
                                     service_name]
            expected_content = "servers []\npools {0}\n".format(hosts)

            if distro == 'alpine':
                # Alpine uses OpenRC, not systemd, to manage services.
                uses_systemd = False
                expected_service_call = ['rc-service', service_name, 'restart']
                # _mock_ntp_client_config call above did not specify a client
                # value and so it defaults to "ntp" which on Alpine Linux only
                # supports servers and not pools.
                expected_content = "servers {0}\npools []\n".format(hosts)

            m_sysd.return_value = uses_systemd
            with mock.patch('cloudinit.config.cc_ntp.util') as m_util:
                # allow use of util.mergemanydict
                m_util.mergemanydict.side_effect = util.mergemanydict
                # default client is present
                m_subp.which.return_value = True
                # use the config 'enabled' value
                m_util.is_false.return_value = util.is_false(
                    cfg['ntp']['enabled'])
                cc_ntp.handle('notimportant', cfg, mycloud, None, None)
                # The distro-appropriate service restart must be issued.
                m_dsubp.subp.assert_called_with(
                    expected_service_call, capture=True)

            self.assertEqual(expected_content, util.load_file(confpath))
+
    @mock.patch('cloudinit.util.system_info')
    def test_opensuse_picks_chrony(self, m_sysinfo):
        """Test opensuse picks chrony or ntp on certain distro versions"""
        # < 15.0 => ntp
        m_sysinfo.return_value = {
            'dist': ('openSUSE', '13.2', 'Harlequin')
        }
        mycloud = self._get_cloud('opensuse')
        expected_client = mycloud.distro.preferred_ntp_clients[0]
        self.assertEqual('ntp', expected_client)

        # >= 15.0 and not openSUSE => chrony
        m_sysinfo.return_value = {
            'dist': ('SLES', '15.0', 'SUSE Linux Enterprise Server 15')
        }
        mycloud = self._get_cloud('sles')
        expected_client = mycloud.distro.preferred_ntp_clients[0]
        self.assertEqual('chrony', expected_client)

        # >= 15.0 and openSUSE and ver != 42 => chrony
        m_sysinfo.return_value = {
            'dist': ('openSUSE Tumbleweed', '20180326', 'timbleweed')
        }
        mycloud = self._get_cloud('opensuse')
        expected_client = mycloud.distro.preferred_ntp_clients[0]
        self.assertEqual('chrony', expected_client)
+
+ @mock.patch('cloudinit.util.system_info')
+ def test_ubuntu_xenial_picks_ntp(self, m_sysinfo):
+ """Test Ubuntu picks ntp on xenial release"""
+
+ m_sysinfo.return_value = {'dist': ('Ubuntu', '16.04', 'xenial')}
+ mycloud = self._get_cloud('ubuntu')
+ expected_client = mycloud.distro.preferred_ntp_clients[0]
+ self.assertEqual('ntp', expected_client)
+
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_snappy_system_picks_timesyncd(self, m_which):
        """Test snappy systems prefer installed clients"""

        # we are on ubuntu-core here
        self.m_snappy.return_value = True

        # ubuntu core systems will have timesyncd installed
        m_which.side_effect = iter([None, '/lib/systemd/systemd-timesyncd',
                                    None, None, None])
        distro = 'ubuntu'
        mycloud = self._get_cloud(distro)
        distro_configs = cc_ntp.distro_ntp_client_configs(distro)
        expected_client = 'systemd-timesyncd'
        expected_cfg = distro_configs[expected_client]
        expected_calls = []
        # we only get to timesyncd
        for client in mycloud.distro.preferred_ntp_clients[0:2]:
            cfg = distro_configs[client]
            expected_calls.append(mock.call(cfg['check_exe']))
        result = cc_ntp.select_ntp_client(None, mycloud.distro)
        m_which.assert_has_calls(expected_calls)
        # compare config keys; timesyncd must have been selected
        self.assertEqual(sorted(expected_cfg), sorted(cfg))
        self.assertEqual(sorted(expected_cfg), sorted(result))
+
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_ntp_distro_searches_all_preferred_clients(self, m_which):
        """Test select_ntp_client searches all distro preferred clients """
        # nothing is installed
        m_which.return_value = None
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            distro_configs = cc_ntp.distro_ntp_client_configs(distro)
            expected_client = mycloud.distro.preferred_ntp_clients[0]
            expected_cfg = distro_configs[expected_client]
            expected_calls = []
            # with nothing installed, every preferred client is probed
            for client in mycloud.distro.preferred_ntp_clients:
                cfg = distro_configs[client]
                expected_calls.append(mock.call(cfg['check_exe']))
            cc_ntp.select_ntp_client({}, mycloud.distro)
            m_which.assert_has_calls(expected_calls)
            self.assertEqual(sorted(expected_cfg), sorted(cfg))
+
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_user_cfg_ntp_client_auto_uses_distro_clients(self, m_which):
        """Test user_cfg.ntp_client='auto' defaults to distro search"""
        # nothing is installed
        m_which.return_value = None
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            distro_configs = cc_ntp.distro_ntp_client_configs(distro)
            expected_client = mycloud.distro.preferred_ntp_clients[0]
            expected_cfg = distro_configs[expected_client]
            expected_calls = []
            # 'auto' probes every preferred client in order
            for client in mycloud.distro.preferred_ntp_clients:
                cfg = distro_configs[client]
                expected_calls.append(mock.call(cfg['check_exe']))
            cc_ntp.select_ntp_client('auto', mycloud.distro)
            m_which.assert_has_calls(expected_calls)
            self.assertEqual(sorted(expected_cfg), sorted(cfg))
+
    @mock.patch('cloudinit.config.cc_ntp.write_ntp_config_template')
    @mock.patch('cloudinit.cloud.Cloud.get_template_filename')
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_ntp_custom_client_overrides_installed_clients(self, m_which,
                                                           m_tmpfn, m_write):
        """Test user client is installed despite other clients present """
        client = 'ntpdate'
        cfg = {'ntp': {'ntp_client': client}}
        for distro in cc_ntp.distros:
            # client is not installed
            m_which.side_effect = iter([None])
            mycloud = self._get_cloud(distro)
            with mock.patch.object(mycloud.distro,
                                   'install_packages') as m_install:
                cc_ntp.handle('notimportant', cfg, mycloud, None, None)
            # the named client is installed and is the only exe probed
            m_install.assert_called_with([client])
            m_which.assert_called_with(client)
+
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_ntp_system_config_overrides_distro_builtin_clients(self, m_which):
        """Test distro system_config overrides builtin preferred ntp clients"""
        system_client = 'chrony'
        sys_cfg = {'ntp_client': system_client}
        # no clients installed
        m_which.return_value = None
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro, sys_cfg=sys_cfg)
            distro_configs = cc_ntp.distro_ntp_client_configs(distro)
            expected_cfg = distro_configs[system_client]
            result = cc_ntp.select_ntp_client(None, mycloud.distro)
            self.assertEqual(sorted(expected_cfg), sorted(result))
            # no exe probing happens when system config names the client
            m_which.assert_has_calls([])
+
    @mock.patch('cloudinit.config.cc_ntp.subp.which')
    def test_ntp_user_config_overrides_system_cfg(self, m_which):
        """Test user-data overrides system_config ntp_client"""
        system_client = 'chrony'
        sys_cfg = {'ntp_client': system_client}
        user_client = 'systemd-timesyncd'
        # no clients installed
        m_which.return_value = None
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro, sys_cfg=sys_cfg)
            distro_configs = cc_ntp.distro_ntp_client_configs(distro)
            expected_cfg = distro_configs[user_client]
            result = cc_ntp.select_ntp_client(user_client, mycloud.distro)
            self.assertEqual(sorted(expected_cfg), sorted(result))
            # no exe probing happens when user config names the client
            m_which.assert_has_calls([])
+
    @mock.patch('cloudinit.config.cc_ntp.install_ntp_client')
    def test_ntp_user_provided_config_with_template(self, m_install):
        """A fully user-defined client config with inline template renders."""
        # raw string: the rendered file contains a literal backslash-n
        custom = r'\n#MyCustomTemplate'
        user_template = NTP_TEMPLATE + custom
        confpath = os.path.join(self.new_root, 'etc/myntp/myntp.conf')
        cfg = {
            'ntp': {
                'pools': ['mypool.org'],
                'ntp_client': 'myntpd',
                'config': {
                    'check_exe': 'myntpd',
                    'confpath': confpath,
                    'packages': ['myntp'],
                    'service_name': 'myntp',
                    'template': user_template,
                }
            }
        }
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            # Keep rendering's temp files inside the chroot.
            mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
            with mock.patch(mock_path, self.new_root):
                cc_ntp.handle('notimportant', cfg, mycloud, None, None)
            self.assertEqual(
                "servers []\npools ['mypool.org']\n%s" % custom,
                util.load_file(confpath))
+
    @mock.patch('cloudinit.config.cc_ntp.supplemental_schema_validation')
    @mock.patch('cloudinit.config.cc_ntp.install_ntp_client')
    @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
    def test_ntp_user_provided_config_template_only(self, m_select, m_install,
                                                    m_schema):
        """Test custom template for default client"""
        # raw string: the rendered file contains a literal backslash-n
        custom = r'\n#MyCustomTemplate'
        user_template = NTP_TEMPLATE + custom
        client = 'chrony'
        cfg = {
            'pools': ['mypool.org'],
            'ntp_client': client,
            'config': {
                'template': user_template,
            }
        }
        # user template merged over the selected client's defaults
        expected_merged_cfg = {
            'check_exe': 'chronyd',
            'confpath': '{tmpdir}/client.conf'.format(tmpdir=self.new_root),
            'template_name': 'client.conf', 'template': user_template,
            'service_name': 'chrony', 'packages': ['chrony']}
        for distro in cc_ntp.distros:
            mycloud = self._get_cloud(distro)
            ntpconfig = self._mock_ntp_client_config(client=client,
                                                     distro=distro)
            confpath = ntpconfig['confpath']
            m_select.return_value = ntpconfig
            mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
            with mock.patch(mock_path, self.new_root):
                cc_ntp.handle('notimportant',
                              {'ntp': cfg}, mycloud, None, None)
            self.assertEqual(
                "servers []\npools ['mypool.org']\n%s" % custom,
                util.load_file(confpath))
        m_schema.assert_called_with(expected_merged_cfg)
+
+
class TestSupplementalSchemaValidation(CiTestCase):
    """Tests for cc_ntp.supplemental_schema_validation error reporting.

    NOTE(review): the raw-string patterns below use r'\\n', which as a
    regex matches a literal backslash followed by 'n' (not a newline);
    this mirrors the exact text in the raised messages -- confirm against
    cc_ntp if those messages change.
    """

    def test_error_on_missing_keys(self):
        """ValueError raised reporting any missing required ntp:config keys"""
        cfg = {}
        match = (r'Invalid ntp configuration:\\nMissing required ntp:config'
                 ' keys: check_exe, confpath, packages, service_name')
        with self.assertRaisesRegex(ValueError, match):
            cc_ntp.supplemental_schema_validation(cfg)

    def test_error_requiring_either_template_or_template_name(self):
        """ValueError raised if both template and template_name are None."""
        cfg = {'confpath': 'someconf', 'check_exe': '', 'service_name': '',
               'template': None, 'template_name': None, 'packages': []}
        match = (r'Invalid ntp configuration:\\nEither ntp:config:template'
                 ' or ntp:config:template_name values are required')
        with self.assertRaisesRegex(ValueError, match):
            cc_ntp.supplemental_schema_validation(cfg)

    def test_error_on_non_list_values(self):
        """ValueError raised when packages is not of type list."""
        cfg = {'confpath': 'someconf', 'check_exe': '', 'service_name': '',
               'template': 'asdf', 'template_name': None, 'packages': 'NOPE'}
        match = (r'Invalid ntp configuration:\\nExpected a list of required'
                 ' package names for ntp:config:packages. Found \\(NOPE\\)')
        with self.assertRaisesRegex(ValueError, match):
            cc_ntp.supplemental_schema_validation(cfg)

    def test_error_on_non_string_values(self):
        """ValueError raised for any values expected as string type."""
        cfg = {'confpath': 1, 'check_exe': 2, 'service_name': 3,
               'template': 4, 'template_name': 5, 'packages': []}
        errors = [
            'Expected a config file path ntp:config:confpath. Found (1)',
            'Expected a string type for ntp:config:check_exe. Found (2)',
            'Expected a string type for ntp:config:service_name. Found (3)',
            'Expected a string type for ntp:config:template. Found (4)',
            'Expected a string type for ntp:config:template_name. Found (5)']
        with self.assertRaises(ValueError) as context_mgr:
            cc_ntp.supplemental_schema_validation(cfg)
        error_msg = str(context_mgr.exception)
        # every per-key complaint must appear in the single message
        for error in errors:
            self.assertIn(error, error_msg)
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_power_state_change.py b/tests/unittests/config/test_cc_power_state_change.py
new file mode 100644
index 00000000..e699f424
--- /dev/null
+++ b/tests/unittests/config/test_cc_power_state_change.py
@@ -0,0 +1,159 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import sys
+
+from cloudinit.config import cc_power_state_change as psc
+
+from cloudinit import distros
+from cloudinit import helpers
+
+from tests.unittests import helpers as t_help
+from tests.unittests.helpers import mock
+
+
+class TestLoadPowerState(t_help.TestCase):
+ def setUp(self):
+ super(TestLoadPowerState, self).setUp()
+ cls = distros.fetch('ubuntu')
+ paths = helpers.Paths({})
+ self.dist = cls('ubuntu', {}, paths)
+
+ def test_no_config(self):
+ # completely empty config should mean do nothing
+ (cmd, _timeout, _condition) = psc.load_power_state({}, self.dist)
+ self.assertIsNone(cmd)
+
+ def test_irrelevant_config(self):
+ # no power_state field in config should return None for cmd
+ (cmd, _timeout, _condition) = psc.load_power_state({'foo': 'bar'},
+ self.dist)
+ self.assertIsNone(cmd)
+
+ def test_invalid_mode(self):
+
+ cfg = {'power_state': {'mode': 'gibberish'}}
+ self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
+
+ cfg = {'power_state': {'mode': ''}}
+ self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
+
+ def test_empty_mode(self):
+ cfg = {'power_state': {'message': 'goodbye'}}
+ self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
+
+ def test_valid_modes(self):
+ cfg = {'power_state': {}}
+ for mode in ('halt', 'poweroff', 'reboot'):
+ cfg['power_state']['mode'] = mode
+ check_lps_ret(psc.load_power_state(cfg, self.dist), mode=mode)
+
+ def test_invalid_delay(self):
+ cfg = {'power_state': {'mode': 'poweroff', 'delay': 'goodbye'}}
+ self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
+
+ def test_valid_delay(self):
+ cfg = {'power_state': {'mode': 'poweroff', 'delay': ''}}
+ for delay in ("now", "+1", "+30"):
+ cfg['power_state']['delay'] = delay
+ check_lps_ret(psc.load_power_state(cfg, self.dist))
+
+ def test_message_present(self):
+ cfg = {'power_state': {'mode': 'poweroff', 'message': 'GOODBYE'}}
+ ret = psc.load_power_state(cfg, self.dist)
+ check_lps_ret(psc.load_power_state(cfg, self.dist))
+ self.assertIn(cfg['power_state']['message'], ret[0])
+
+ def test_no_message(self):
+ # if message is not present, then no argument should be passed for it
+ cfg = {'power_state': {'mode': 'poweroff'}}
+ (cmd, _timeout, _condition) = psc.load_power_state(cfg, self.dist)
+ self.assertNotIn("", cmd)
+ check_lps_ret(psc.load_power_state(cfg, self.dist))
+ self.assertTrue(len(cmd) == 3)
+
+ def test_condition_null_raises(self):
+ cfg = {'power_state': {'mode': 'poweroff', 'condition': None}}
+ self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
+
+ def test_condition_default_is_true(self):
+ cfg = {'power_state': {'mode': 'poweroff'}}
+ _cmd, _timeout, cond = psc.load_power_state(cfg, self.dist)
+ self.assertEqual(cond, True)
+
+ def test_freebsd_poweroff_uses_lowercase_p(self):
+ cls = distros.fetch('freebsd')
+ paths = helpers.Paths({})
+ freebsd = cls('freebsd', {}, paths)
+ cfg = {'power_state': {'mode': 'poweroff'}}
+ ret = psc.load_power_state(cfg, freebsd)
+ self.assertIn('-p', ret[0])
+
+ def test_alpine_delay(self):
+ # alpine takes delay in seconds.
+ cls = distros.fetch('alpine')
+ paths = helpers.Paths({})
+ alpine = cls('alpine', {}, paths)
+ cfg = {'power_state': {'mode': 'poweroff', 'delay': ''}}
+ for delay, value in (('now', 0), ("+1", 60), ("+30", 1800)):
+ cfg['power_state']['delay'] = delay
+ ret = psc.load_power_state(cfg, alpine)
+ self.assertEqual('-d', ret[0][1])
+ self.assertEqual(str(value), ret[0][2])
+
+
+class TestCheckCondition(t_help.TestCase):
+ def cmd_with_exit(self, rc):
+ return([sys.executable, '-c', 'import sys; sys.exit(%s)' % rc])
+
+ def test_true_is_true(self):
+ self.assertEqual(psc.check_condition(True), True)
+
+ def test_false_is_false(self):
+ self.assertEqual(psc.check_condition(False), False)
+
+ def test_cmd_exit_zero_true(self):
+ self.assertEqual(psc.check_condition(self.cmd_with_exit(0)), True)
+
+ def test_cmd_exit_one_false(self):
+ self.assertEqual(psc.check_condition(self.cmd_with_exit(1)), False)
+
+ def test_cmd_exit_nonzero_warns(self):
+ mocklog = mock.Mock()
+ self.assertEqual(
+ psc.check_condition(self.cmd_with_exit(2), mocklog), False)
+ self.assertEqual(mocklog.warning.call_count, 1)
+
+
+def check_lps_ret(psc_return, mode=None):
+ if len(psc_return) != 3:
+ raise TypeError("length returned = %d" % len(psc_return))
+
+ errs = []
+ cmd = psc_return[0]
+ timeout = psc_return[1]
+ condition = psc_return[2]
+
+ if 'shutdown' not in psc_return[0][0]:
+ errs.append("string 'shutdown' not in cmd")
+
+ if condition is None:
+ errs.append("condition was not returned")
+
+ if mode is not None:
+ opt = {'halt': '-H', 'poweroff': '-P', 'reboot': '-r'}[mode]
+ if opt not in psc_return[0]:
+ errs.append("opt '%s' not in cmd: %s" % (opt, cmd))
+
+ if len(cmd) != 3 and len(cmd) != 4:
+ errs.append("Invalid command length: %s" % len(cmd))
+
+ try:
+ float(timeout)
+ except Exception:
+ errs.append("timeout failed convert to float")
+
+ if len(errs):
+ lines = ["Errors in result: %s" % str(psc_return)] + errs
+ raise Exception('\n'.join(lines))
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_puppet.py b/tests/unittests/config/test_cc_puppet.py
new file mode 100644
index 00000000..1f67dc4c
--- /dev/null
+++ b/tests/unittests/config/test_cc_puppet.py
@@ -0,0 +1,380 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import textwrap
+
+from cloudinit.config import cc_puppet
+from cloudinit import util
+from tests.unittests.helpers import CiTestCase, HttprettyTestCase, mock
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+@mock.patch('cloudinit.config.cc_puppet.subp.subp')
+@mock.patch('cloudinit.config.cc_puppet.os')
+class TestAutostartPuppet(CiTestCase):
+
+ def test_wb_autostart_puppet_updates_puppet_default(self, m_os, m_subp):
+ """Update /etc/default/puppet to autostart if it exists."""
+
+ def _fake_exists(path):
+ return path == '/etc/default/puppet'
+
+ m_os.path.exists.side_effect = _fake_exists
+ cc_puppet._autostart_puppet(LOG)
+ self.assertEqual(
+ [mock.call(['sed', '-i', '-e', 's/^START=.*/START=yes/',
+ '/etc/default/puppet'], capture=False)],
+ m_subp.call_args_list)
+
+ def test_wb_autostart_pupppet_enables_puppet_systemctl(self, m_os, m_subp):
+ """If systemctl is present, enable puppet via systemctl."""
+
+ def _fake_exists(path):
+ return path == '/bin/systemctl'
+
+ m_os.path.exists.side_effect = _fake_exists
+ cc_puppet._autostart_puppet(LOG)
+ expected_calls = [mock.call(
+ ['/bin/systemctl', 'enable', 'puppet.service'], capture=False)]
+ self.assertEqual(expected_calls, m_subp.call_args_list)
+
+ def test_wb_autostart_pupppet_enables_puppet_chkconfig(self, m_os, m_subp):
+ """If chkconfig is present, enable puppet via checkcfg."""
+
+ def _fake_exists(path):
+ return path == '/sbin/chkconfig'
+
+ m_os.path.exists.side_effect = _fake_exists
+ cc_puppet._autostart_puppet(LOG)
+ expected_calls = [mock.call(
+ ['/sbin/chkconfig', 'puppet', 'on'], capture=False)]
+ self.assertEqual(expected_calls, m_subp.call_args_list)
+
+
+@mock.patch('cloudinit.config.cc_puppet._autostart_puppet')
+class TestPuppetHandle(CiTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestPuppetHandle, self).setUp()
+ self.new_root = self.tmp_dir()
+ self.conf = self.tmp_path('puppet.conf')
+ self.csr_attributes_path = self.tmp_path(
+ 'csr_attributes.yaml')
+ self.cloud = get_cloud()
+
+ def test_skips_missing_puppet_key_in_cloudconfig(self, m_auto):
+ """Cloud-config containing no 'puppet' key is skipped."""
+
+ cfg = {}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertIn(
+ "no 'puppet' configuration found", self.logs.getvalue())
+ self.assertEqual(0, m_auto.call_count)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_starts_puppet_service(self, m_subp, m_auto):
+ """Cloud-config 'puppet' configuration starts puppet."""
+
+ cfg = {'puppet': {'install': False}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(1, m_auto.call_count)
+ self.assertIn(
+ [mock.call(['service', 'puppet', 'start'], capture=False)],
+ m_subp.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_empty_puppet_config_installs_puppet(self, m_subp, m_auto):
+ """Cloud-config empty 'puppet' configuration installs latest puppet."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(
+ [mock.call(('puppet', None))],
+ self.cloud.distro.install_packages.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_on_true(self, m_subp, _):
+ """Cloud-config with 'puppet' key installs when 'install' is True."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'install': True}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(
+ [mock.call(('puppet', None))],
+ self.cloud.distro.install_packages.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_aio(self, m_subp, m_aio, _):
+ """Cloud-config with 'puppet' key installs
+ when 'install_type' is 'aio'."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'install': True, 'install_type': 'aio'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ m_aio.assert_called_with(
+ cc_puppet.AIO_INSTALL_URL,
+ None, None, True)
+
+ @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_aio_with_version(self,
+ m_subp, m_aio, _):
+ """Cloud-config with 'puppet' key installs
+ when 'install_type' is 'aio' and 'version' is specified."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'install': True,
+ 'version': '6.24.0', 'install_type': 'aio'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ m_aio.assert_called_with(
+ cc_puppet.AIO_INSTALL_URL,
+ '6.24.0', None, True)
+
+ @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_aio_with_collection(self,
+ m_subp,
+ m_aio, _):
+ """Cloud-config with 'puppet' key installs
+ when 'install_type' is 'aio' and 'collection' is specified."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'install': True,
+ 'collection': 'puppet6', 'install_type': 'aio'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ m_aio.assert_called_with(
+ cc_puppet.AIO_INSTALL_URL,
+ None, 'puppet6', True)
+
+ @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_aio_with_custom_url(self,
+ m_subp,
+ m_aio, _):
+ """Cloud-config with 'puppet' key installs
+ when 'install_type' is 'aio' and 'aio_install_url' is specified."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet':
+ {'install': True,
+ 'aio_install_url': 'http://test.url/path/to/script.sh',
+ 'install_type': 'aio'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ m_aio.assert_called_with(
+ 'http://test.url/path/to/script.sh', None, None, True)
+
+ @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_aio_without_cleanup(self,
+ m_subp,
+ m_aio, _):
+ """Cloud-config with 'puppet' key installs
+ when 'install_type' is 'aio' and no cleanup."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'install': True,
+ 'cleanup': False, 'install_type': 'aio'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ m_aio.assert_called_with(
+ cc_puppet.AIO_INSTALL_URL,
+ None, None, False)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_installs_puppet_version(self, m_subp, _):
+ """Cloud-config 'puppet' configuration can specify a version."""
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {'puppet': {'version': '3.8'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(
+ [mock.call(('puppet', '3.8'))],
+ self.cloud.distro.install_packages.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.get_config_value')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_config_updates_puppet_conf(self,
+ m_subp, m_default, m_auto):
+ """When 'conf' is provided update values in PUPPET_CONF_PATH."""
+
+ def _fake_get_config_value(puppet_bin, setting):
+ return self.conf
+
+ m_default.side_effect = _fake_get_config_value
+
+ cfg = {
+ 'puppet': {
+ 'conf': {'agent': {'server': 'puppetserver.example.org'}}}}
+ util.write_file(
+ self.conf, '[agent]\nserver = origpuppet\nother = 3')
+ self.cloud.distro = mock.MagicMock()
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ content = util.load_file(self.conf)
+ expected = '[agent]\nserver = puppetserver.example.org\nother = 3\n\n'
+ self.assertEqual(expected, content)
+
+ @mock.patch('cloudinit.config.cc_puppet.get_config_value')
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp')
+ def test_puppet_writes_csr_attributes_file(self,
+ m_subp, m_default, m_auto):
+ """When csr_attributes is provided
+ creates file in PUPPET_CSR_ATTRIBUTES_PATH."""
+
+ def _fake_get_config_value(puppet_bin, setting):
+ return self.csr_attributes_path
+
+ m_default.side_effect = _fake_get_config_value
+
+ self.cloud.distro = mock.MagicMock()
+ cfg = {
+ 'puppet': {
+ 'csr_attributes': {
+ 'custom_attributes': {
+ '1.2.840.113549.1.9.7':
+ '342thbjkt82094y0uthhor289jnqthpc2290'
+ },
+ 'extension_requests': {
+ 'pp_uuid': 'ED803750-E3C7-44F5-BB08-41A04433FE2E',
+ 'pp_image_name': 'my_ami_image',
+ 'pp_preshared_key':
+ '342thbjkt82094y0uthhor289jnqthpc2290'
+ }
+ }
+ }
+ }
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ content = util.load_file(self.csr_attributes_path)
+ expected = textwrap.dedent("""\
+ custom_attributes:
+ 1.2.840.113549.1.9.7: 342thbjkt82094y0uthhor289jnqthpc2290
+ extension_requests:
+ pp_image_name: my_ami_image
+ pp_preshared_key: 342thbjkt82094y0uthhor289jnqthpc2290
+ pp_uuid: ED803750-E3C7-44F5-BB08-41A04433FE2E
+ """)
+ self.assertEqual(expected, content)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_runs_puppet_if_requested(self, m_subp, m_auto):
+ """Run puppet with default args if 'exec' is set to True."""
+
+ cfg = {'puppet': {'exec': True}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(1, m_auto.call_count)
+ self.assertIn(
+ [mock.call(['puppet', 'agent', '--test'], capture=False)],
+ m_subp.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_starts_puppetd(self, m_subp, m_auto):
+ """Run puppet with default args if 'exec' is set to True."""
+
+ cfg = {'puppet': {}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(1, m_auto.call_count)
+ self.assertIn(
+ [mock.call(['service', 'puppet', 'start'], capture=False)],
+ m_subp.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_skips_puppetd(self, m_subp, m_auto):
+ """Run puppet with default args if 'exec' is set to True."""
+
+ cfg = {'puppet': {'start_service': False}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(0, m_auto.call_count)
+ self.assertNotIn(
+ [mock.call(['service', 'puppet', 'start'], capture=False)],
+ m_subp.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_runs_puppet_with_args_list_if_requested(self,
+ m_subp, m_auto):
+ """Run puppet with 'exec_args' list if 'exec' is set to True."""
+
+ cfg = {'puppet': {'exec': True, 'exec_args': [
+ '--onetime', '--detailed-exitcodes']}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(1, m_auto.call_count)
+ self.assertIn(
+ [mock.call(
+ ['puppet', 'agent', '--onetime', '--detailed-exitcodes'],
+ capture=False)],
+ m_subp.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
+ def test_puppet_runs_puppet_with_args_string_if_requested(self,
+ m_subp, m_auto):
+ """Run puppet with 'exec_args' string if 'exec' is set to True."""
+
+ cfg = {'puppet': {'exec': True,
+ 'exec_args': '--onetime --detailed-exitcodes'}}
+ cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
+ self.assertEqual(1, m_auto.call_count)
+ self.assertIn(
+ [mock.call(
+ ['puppet', 'agent', '--onetime', '--detailed-exitcodes'],
+ capture=False)],
+ m_subp.call_args_list)
+
+
+URL_MOCK = mock.Mock()
+URL_MOCK.contents = b'#!/bin/bash\necho "Hi Mom"'
+
+
+@mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=(None, None))
+@mock.patch(
+ 'cloudinit.config.cc_puppet.url_helper.readurl',
+ return_value=URL_MOCK, autospec=True,
+)
+class TestInstallPuppetAio(HttprettyTestCase):
+ def test_install_with_default_arguments(self, m_readurl, m_subp):
+ """Install AIO with no arguments"""
+ cc_puppet.install_puppet_aio()
+
+ self.assertEqual(
+ [mock.call([mock.ANY, '--cleanup'], capture=False)],
+ m_subp.call_args_list)
+
+ def test_install_with_custom_url(self, m_readurl, m_subp):
+ """Install AIO from custom URL"""
+ cc_puppet.install_puppet_aio('http://custom.url/path/to/script.sh')
+ m_readurl.assert_called_with(
+ url='http://custom.url/path/to/script.sh',
+ retries=5)
+
+ self.assertEqual(
+ [mock.call([mock.ANY, '--cleanup'], capture=False)],
+ m_subp.call_args_list)
+
+ def test_install_with_version(self, m_readurl, m_subp):
+ """Install AIO with specific version"""
+ cc_puppet.install_puppet_aio(cc_puppet.AIO_INSTALL_URL, '7.6.0')
+
+ self.assertEqual(
+ [mock.call([mock.ANY, '-v', '7.6.0', '--cleanup'], capture=False)],
+ m_subp.call_args_list)
+
+ def test_install_with_collection(self, m_readurl, m_subp):
+ """Install AIO with specific collection"""
+ cc_puppet.install_puppet_aio(
+ cc_puppet.AIO_INSTALL_URL, None, 'puppet6-nightly')
+
+ self.assertEqual(
+ [mock.call([mock.ANY, '-c', 'puppet6-nightly', '--cleanup'],
+ capture=False)],
+ m_subp.call_args_list)
+
+ def test_install_with_no_cleanup(self, m_readurl, m_subp):
+ """Install AIO with no cleanup"""
+ cc_puppet.install_puppet_aio(
+ cc_puppet.AIO_INSTALL_URL, None, None, False)
+
+ self.assertEqual(
+ [mock.call([mock.ANY], capture=False)],
+ m_subp.call_args_list)
diff --git a/tests/unittests/config/test_cc_refresh_rmc_and_interface.py b/tests/unittests/config/test_cc_refresh_rmc_and_interface.py
new file mode 100644
index 00000000..522de23d
--- /dev/null
+++ b/tests/unittests/config/test_cc_refresh_rmc_and_interface.py
@@ -0,0 +1,109 @@
+from cloudinit.config import cc_refresh_rmc_and_interface as ccrmci
+
+from cloudinit import util
+
+from tests.unittests import helpers as t_help
+from tests.unittests.helpers import mock
+
+from textwrap import dedent
+import logging
+
+LOG = logging.getLogger(__name__)
+MPATH = "cloudinit.config.cc_refresh_rmc_and_interface"
+NET_INFO = {
+ 'lo': {'ipv4': [{'ip': '127.0.0.1',
+ 'bcast': '', 'mask': '255.0.0.0',
+ 'scope': 'host'}],
+ 'ipv6': [{'ip': '::1/128',
+ 'scope6': 'host'}], 'hwaddr': '',
+ 'up': 'True'},
+ 'env2': {'ipv4': [{'ip': '8.0.0.19',
+ 'bcast': '8.0.0.255', 'mask': '255.255.255.0',
+ 'scope': 'global'}],
+ 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8220/64',
+ 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:20',
+ 'up': 'True'},
+ 'env3': {'ipv4': [{'ip': '90.0.0.14',
+ 'bcast': '90.0.0.255', 'mask': '255.255.255.0',
+ 'scope': 'global'}],
+ 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8221/64',
+ 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:21',
+ 'up': 'True'},
+ 'env4': {'ipv4': [{'ip': '9.114.23.7',
+ 'bcast': '9.114.23.255', 'mask': '255.255.255.0',
+ 'scope': 'global'}],
+ 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8222/64',
+ 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:22',
+ 'up': 'True'},
+ 'env5': {'ipv4': [],
+ 'ipv6': [{'ip': 'fe80::9c26:c3ff:fea4:62c8/64',
+ 'scope6': 'link'}], 'hwaddr': '42:20:86:df:fa:4c',
+ 'up': 'True'}}
+
+
+class TestRsctNodeFile(t_help.CiTestCase):
+ def test_disable_ipv6_interface(self):
+ """test parsing of iface files."""
+ fname = self.tmp_path("iface-eth5")
+ util.write_file(fname, dedent("""\
+ BOOTPROTO=static
+ DEVICE=eth5
+ HWADDR=42:20:86:df:fa:4c
+ IPV6INIT=yes
+ IPADDR6=fe80::9c26:c3ff:fea4:62c8/64
+ IPV6ADDR=fe80::9c26:c3ff:fea4:62c8/64
+ NM_CONTROLLED=yes
+ ONBOOT=yes
+ STARTMODE=auto
+ TYPE=Ethernet
+ USERCTL=no
+ """))
+
+ ccrmci.disable_ipv6(fname)
+ self.assertEqual(dedent("""\
+ BOOTPROTO=static
+ DEVICE=eth5
+ HWADDR=42:20:86:df:fa:4c
+ ONBOOT=yes
+ STARTMODE=auto
+ TYPE=Ethernet
+ USERCTL=no
+ NM_CONTROLLED=no
+ """), util.load_file(fname))
+
+ @mock.patch(MPATH + '.refresh_rmc')
+ @mock.patch(MPATH + '.restart_network_manager')
+ @mock.patch(MPATH + '.disable_ipv6')
+ @mock.patch(MPATH + '.refresh_ipv6')
+ @mock.patch(MPATH + '.netinfo.netdev_info')
+ @mock.patch(MPATH + '.subp.which')
+ def test_handle(self, m_refresh_rmc,
+ m_netdev_info, m_refresh_ipv6, m_disable_ipv6,
+ m_restart_nm, m_which):
+ """Basic test of handle."""
+ m_netdev_info.return_value = NET_INFO
+ m_which.return_value = '/opt/rsct/bin/rmcctrl'
+ ccrmci.handle(
+ "refresh_rmc_and_interface", None, None, None, None)
+ self.assertEqual(1, m_netdev_info.call_count)
+ m_refresh_ipv6.assert_called_with('env5')
+ m_disable_ipv6.assert_called_with(
+ '/etc/sysconfig/network-scripts/ifcfg-env5')
+ self.assertEqual(1, m_restart_nm.call_count)
+ self.assertEqual(1, m_refresh_rmc.call_count)
+
+ @mock.patch(MPATH + '.netinfo.netdev_info')
+ def test_find_ipv6(self, m_netdev_info):
+ """find_ipv6_ifaces parses netdev_info returning those with ipv6"""
+ m_netdev_info.return_value = NET_INFO
+ found = ccrmci.find_ipv6_ifaces()
+ self.assertEqual(['env5'], found)
+
+ @mock.patch(MPATH + '.subp.subp')
+ def test_refresh_ipv6(self, m_subp):
+ """refresh_ipv6 should ip down and up the interface."""
+ iface = "myeth0"
+ ccrmci.refresh_ipv6(iface)
+ m_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', iface, 'down']),
+ mock.call(['ip', 'link', 'set', iface, 'up'])])
diff --git a/tests/unittests/config/test_cc_resizefs.py b/tests/unittests/config/test_cc_resizefs.py
new file mode 100644
index 00000000..1f9e24da
--- /dev/null
+++ b/tests/unittests/config/test_cc_resizefs.py
@@ -0,0 +1,398 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config.cc_resizefs import (
+ can_skip_resize, handle, maybe_get_writable_device_path, _resize_btrfs,
+ _resize_zfs, _resize_xfs, _resize_ext, _resize_ufs)
+
+from collections import namedtuple
+import logging
+
+from cloudinit.subp import ProcessExecutionError
+from tests.unittests.helpers import (
+ CiTestCase, mock, skipUnlessJsonSchema, util, wrap_and_call)
+
+
+LOG = logging.getLogger(__name__)
+
+
+class TestResizefs(CiTestCase):
+ with_logs = True
+
+ def setUp(self):
+ super(TestResizefs, self).setUp()
+ self.name = "resizefs"
+
+ @mock.patch('cloudinit.subp.subp')
+ def test_skip_ufs_resize(self, m_subp):
+ fs_type = "ufs"
+ resize_what = "/"
+ devpth = "/dev/da0p2"
+ err = ("growfs: requested size 2.0GB is not larger than the "
+ "current filesystem size 2.0GB\n")
+ exception = ProcessExecutionError(stderr=err, exit_code=1)
+ m_subp.side_effect = exception
+ res = can_skip_resize(fs_type, resize_what, devpth)
+ self.assertTrue(res)
+
+ @mock.patch('cloudinit.subp.subp')
+ def test_cannot_skip_ufs_resize(self, m_subp):
+ fs_type = "ufs"
+ resize_what = "/"
+ devpth = "/dev/da0p2"
+ m_subp.return_value = (
+ ("stdout: super-block backups (for fsck_ffs -b #) at:\n\n"),
+ ("growfs: no room to allocate last cylinder group; "
+ "leaving 364KB unused\n")
+ )
+ res = can_skip_resize(fs_type, resize_what, devpth)
+ self.assertFalse(res)
+
+ @mock.patch('cloudinit.subp.subp')
+ def test_cannot_skip_ufs_growfs_exception(self, m_subp):
+ fs_type = "ufs"
+ resize_what = "/"
+ devpth = "/dev/da0p2"
+ err = "growfs: /dev/da0p2 is not clean - run fsck.\n"
+ exception = ProcessExecutionError(stderr=err, exit_code=1)
+ m_subp.side_effect = exception
+ with self.assertRaises(ProcessExecutionError):
+ can_skip_resize(fs_type, resize_what, devpth)
+
+ def test_can_skip_resize_ext(self):
+ self.assertFalse(can_skip_resize('ext', '/', '/dev/sda1'))
+
+ def test_handle_noops_on_disabled(self):
+ """The handle function logs when the configuration disables resize."""
+ cfg = {'resize_rootfs': False}
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+ self.assertIn(
+ 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n',
+ self.logs.getvalue())
+
+ @skipUnlessJsonSchema()
+ def test_handle_schema_validation_logs_invalid_resize_rootfs_value(self):
+ """The handle reports json schema violations as a warning.
+
+ Invalid values for resize_rootfs result in disabling the module.
+ """
+ cfg = {'resize_rootfs': 'junk'}
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+ logs = self.logs.getvalue()
+ self.assertIn(
+ "WARNING: Invalid config:\nresize_rootfs: 'junk' is not one of"
+ " [True, False, 'noblock']",
+ logs)
+ self.assertIn(
+ 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n',
+ logs)
+
+ @mock.patch('cloudinit.config.cc_resizefs.util.get_mount_info')
+ def test_handle_warns_on_unknown_mount_info(self, m_get_mount_info):
+ """handle warns when get_mount_info sees unknown filesystem for /."""
+ m_get_mount_info.return_value = None
+ cfg = {'resize_rootfs': True}
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+ logs = self.logs.getvalue()
+ self.assertNotIn("WARNING: Invalid config:\nresize_rootfs:", logs)
+ self.assertIn(
+ 'WARNING: Could not determine filesystem type of /\n',
+ logs)
+ self.assertEqual(
+ [mock.call('/', LOG)],
+ m_get_mount_info.call_args_list)
+
+ def test_handle_warns_on_undiscoverable_root_path_in_commandline(self):
+ """handle noops when the root path is not found on the commandline."""
+ cfg = {'resize_rootfs': True}
+ exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists'
+
+ def fake_mount_info(path, log):
+ self.assertEqual('/', path)
+ self.assertEqual(LOG, log)
+ return ('/dev/root', 'ext4', '/')
+
+ with mock.patch(exists_mock_path) as m_exists:
+ m_exists.return_value = False
+ wrap_and_call(
+ 'cloudinit.config.cc_resizefs.util',
+ {'is_container': {'return_value': False},
+ 'get_mount_info': {'side_effect': fake_mount_info},
+ 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}},
+ handle, 'cc_resizefs', cfg, _cloud=None, log=LOG,
+ args=[])
+ logs = self.logs.getvalue()
+ self.assertIn("WARNING: Unable to find device '/dev/root'", logs)
+
+ def test_resize_zfs_cmd_return(self):
+ zpool = 'zroot'
+ devpth = 'gpt/system'
+ self.assertEqual(('zpool', 'online', '-e', zpool, devpth),
+ _resize_zfs(zpool, devpth))
+
+ def test_resize_xfs_cmd_return(self):
+ mount_point = '/mnt/test'
+ devpth = '/dev/sda1'
+ self.assertEqual(('xfs_growfs', mount_point),
+ _resize_xfs(mount_point, devpth))
+
+ def test_resize_ext_cmd_return(self):
+ mount_point = '/'
+ devpth = '/dev/sdb1'
+ self.assertEqual(('resize2fs', devpth),
+ _resize_ext(mount_point, devpth))
+
+ def test_resize_ufs_cmd_return(self):
+ mount_point = '/'
+ devpth = '/dev/sda2'
+ self.assertEqual(('growfs', '-y', mount_point),
+ _resize_ufs(mount_point, devpth))
+
+ @mock.patch('cloudinit.util.is_container', return_value=False)
+ @mock.patch('cloudinit.util.parse_mount')
+ @mock.patch('cloudinit.util.get_device_info_from_zpool')
+ @mock.patch('cloudinit.util.get_mount_info')
+ def test_handle_zfs_root(self, mount_info, zpool_info, parse_mount,
+ is_container):
+ devpth = 'vmzroot/ROOT/freebsd'
+ disk = 'gpt/system'
+ fs_type = 'zfs'
+ mount_point = '/'
+
+ mount_info.return_value = (devpth, fs_type, mount_point)
+ zpool_info.return_value = disk
+ parse_mount.return_value = (devpth, fs_type, mount_point)
+
+ cfg = {'resize_rootfs': True}
+
+ with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+ ret = dresize.call_args[0][0]
+
+ self.assertEqual(('zpool', 'online', '-e', 'vmzroot', disk), ret)
+
+ @mock.patch('cloudinit.util.is_container', return_value=False)
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.get_device_info_from_zpool')
+ @mock.patch('cloudinit.util.parse_mount')
+ def test_handle_modern_zfsroot(self, mount_info, zpool_info, parse_mount,
+ is_container):
+ devpth = 'zroot/ROOT/default'
+ disk = 'da0p3'
+ fs_type = 'zfs'
+ mount_point = '/'
+
+ mount_info.return_value = (devpth, fs_type, mount_point)
+ zpool_info.return_value = disk
+ parse_mount.return_value = (devpth, fs_type, mount_point)
+
+ cfg = {'resize_rootfs': True}
+
+ def fake_stat(devpath):
+ if devpath == disk:
+ raise OSError("not here")
+ FakeStat = namedtuple(
+ 'FakeStat', ['st_mode', 'st_size', 'st_mtime']) # minimal stat
+ return FakeStat(25008, 0, 1) # fake char block device
+
+ with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
+ with mock.patch('cloudinit.config.cc_resizefs.os.stat') as m_stat:
+ m_stat.side_effect = fake_stat
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+
+ self.assertEqual(('zpool', 'online', '-e', 'zroot', '/dev/' + disk),
+ dresize.call_args[0][0])
+
+
+class TestRootDevFromCmdline(CiTestCase):
+
+ def test_rootdev_from_cmdline_with_no_root(self):
+ """Return None from rootdev_from_cmdline when root is not present."""
+ invalid_cases = [
+ 'BOOT_IMAGE=/adsf asdfa werasef root adf', 'BOOT_IMAGE=/adsf', '']
+ for case in invalid_cases:
+ self.assertIsNone(util.rootdev_from_cmdline(case))
+
+ def test_rootdev_from_cmdline_with_root_startswith_dev(self):
+ """Return the cmdline root when the path starts with /dev."""
+ self.assertEqual(
+ '/dev/this', util.rootdev_from_cmdline('asdf root=/dev/this'))
+
+ def test_rootdev_from_cmdline_with_root_without_dev_prefix(self):
+ """Add /dev prefix to cmdline root when the path lacks the prefix."""
+ self.assertEqual(
+ '/dev/this', util.rootdev_from_cmdline('asdf root=this'))
+
+ def test_rootdev_from_cmdline_with_root_with_label(self):
+ """When cmdline root contains a LABEL, our root is disk/by-label."""
+ self.assertEqual(
+ '/dev/disk/by-label/unique',
+ util.rootdev_from_cmdline('asdf root=LABEL=unique'))
+
+ def test_rootdev_from_cmdline_with_root_with_uuid(self):
+ """When cmdline root contains a UUID, our root is disk/by-uuid."""
+ self.assertEqual(
+ '/dev/disk/by-uuid/adsfdsaf-adsf',
+ util.rootdev_from_cmdline('asdf root=UUID=adsfdsaf-adsf'))
+
+
class TestMaybeGetDevicePathAsWritableBlock(CiTestCase):
    """Tests for maybe_get_writable_device_path and _resize_btrfs."""

    with_logs = True

    def test_maybe_get_writable_device_path_none_on_overlayroot(self):
        """When devpath is overlayroot (on MAAS), is_dev_writable is False."""
        info = 'does not matter'
        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs.util',
            {'is_container': {'return_value': False}},
            maybe_get_writable_device_path, 'overlayroot', info, LOG)
        self.assertIsNone(devpath)
        self.assertIn(
            "Not attempting to resize devpath 'overlayroot'",
            self.logs.getvalue())

    def test_maybe_get_writable_device_path_warns_missing_cmdline_root(self):
        """When the root device does not exist and the kernel cmdline has no
        usable root= entry, a warning is logged and None returned."""
        info = 'does not matter'

        def fake_mount_info(path, log):
            self.assertEqual('/', path)
            self.assertEqual(LOG, log)
            return ('/dev/root', 'ext4', '/')

        exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists'
        with mock.patch(exists_mock_path) as m_exists:
            m_exists.return_value = False
            devpath = wrap_and_call(
                'cloudinit.config.cc_resizefs.util',
                {'is_container': {'return_value': False},
                 'get_mount_info': {'side_effect': fake_mount_info},
                 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}},
                maybe_get_writable_device_path, '/dev/root', info, LOG)
        self.assertIsNone(devpath)
        logs = self.logs.getvalue()
        self.assertIn("WARNING: Unable to find device '/dev/root'", logs)

    def test_maybe_get_writable_device_path_does_not_exist(self):
        """When devpath does not exist, a warning is logged."""
        info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs.util',
            {'is_container': {'return_value': False}},
            maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
        self.assertIsNone(devpath)
        self.assertIn(
            "WARNING: Device '/dev/I/dont/exist' did not exist."
            ' cannot resize: %s' % info,
            self.logs.getvalue())

    def test_maybe_get_writable_device_path_does_not_exist_in_container(self):
        """When devpath does not exist in a container, log a debug message."""
        info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs.util',
            {'is_container': {'return_value': True}},
            maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
        self.assertIsNone(devpath)
        self.assertIn(
            "DEBUG: Device '/dev/I/dont/exist' did not exist in container."
            ' cannot resize: %s' % info,
            self.logs.getvalue())

    def test_maybe_get_writable_device_path_raises_oserror(self):
        """When an unexpected OSError is raised by os.stat it is reraised."""
        info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
        with self.assertRaises(OSError) as context_manager:
            wrap_and_call(
                'cloudinit.config.cc_resizefs',
                {'util.is_container': {'return_value': True},
                 'os.stat': {'side_effect': OSError('Something unexpected')}},
                maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
        self.assertEqual(
            'Something unexpected', str(context_manager.exception))

    def test_maybe_get_writable_device_path_non_block(self):
        """When device is not a block device, emit warning and return None."""
        fake_devpath = self.tmp_path('dev/readwrite')
        util.write_file(fake_devpath, '', mode=0o600)  # read-write
        info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath)

        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs.util',
            {'is_container': {'return_value': False}},
            maybe_get_writable_device_path, fake_devpath, info, LOG)
        self.assertIsNone(devpath)
        self.assertIn(
            "WARNING: device '{0}' not a block device. cannot resize".format(
                fake_devpath),
            self.logs.getvalue())

    def test_maybe_get_writable_device_path_non_block_on_container(self):
        """When device is non-block device in container, emit debug log."""
        fake_devpath = self.tmp_path('dev/readwrite')
        util.write_file(fake_devpath, '', mode=0o600)  # read-write
        info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath)

        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs.util',
            {'is_container': {'return_value': True}},
            maybe_get_writable_device_path, fake_devpath, info, LOG)
        self.assertIsNone(devpath)
        self.assertIn(
            "DEBUG: device '{0}' not a block device in container."
            ' cannot resize'.format(fake_devpath),
            self.logs.getvalue())

    def test_maybe_get_writable_device_path_returns_cmdline_root(self):
        """When root device is UUID in kernel commandline, update devpath."""
        # XXX Long-term we want to use FilesystemMocking test to avoid
        # touching os.stat.
        FakeStat = namedtuple(
            'FakeStat', ['st_mode', 'st_size', 'st_mtime'])  # minimal def.
        info = 'dev=/dev/root mnt_point=/ path=/does/not/matter'
        devpath = wrap_and_call(
            'cloudinit.config.cc_resizefs',
            {'util.get_cmdline': {'return_value': 'asdf root=UUID=my-uuid'},
             'util.is_container': False,
             'os.path.exists': False,  # /dev/root doesn't exist
             'os.stat': {
                 'return_value': FakeStat(25008, 0, 1)}  # char block device
             },
            maybe_get_writable_device_path, '/dev/root', info, LOG)
        self.assertEqual('/dev/disk/by-uuid/my-uuid', devpath)
        self.assertIn(
            "DEBUG: Converted /dev/root to '/dev/disk/by-uuid/my-uuid'"
            " per kernel cmdline",
            self.logs.getvalue())

    @mock.patch('cloudinit.util.mount_is_read_write')
    @mock.patch('cloudinit.config.cc_resizefs.os.path.isdir')
    def test_resize_btrfs_mount_is_ro(self, m_is_dir, m_is_rw):
        """Do not resize / directly if it is read-only. (LP: #1734787)."""
        m_is_rw.return_value = False
        m_is_dir.return_value = True
        self.assertEqual(
            ('btrfs', 'filesystem', 'resize', 'max', '//.snapshots'),
            _resize_btrfs("/", "/dev/sda1"))

    @mock.patch('cloudinit.util.mount_is_read_write')
    @mock.patch('cloudinit.config.cc_resizefs.os.path.isdir')
    def test_resize_btrfs_mount_is_rw(self, m_is_dir, m_is_rw):
        """Resize / directly when it is mounted read-write. (LP: #1734787)."""
        m_is_rw.return_value = True
        m_is_dir.return_value = True
        self.assertEqual(
            ('btrfs', 'filesystem', 'resize', 'max', '/'),
            _resize_btrfs("/", "/dev/sda1"))

    @mock.patch('cloudinit.util.is_container', return_value=True)
    @mock.patch('cloudinit.util.is_FreeBSD')
    def test_maybe_get_writable_device_path_zfs_freebsd(self, freebsd,
                                                        m_is_container):
        """On FreeBSD a zfs/gpt device path is returned unchanged."""
        freebsd.return_value = True
        info = 'dev=gpt/system mnt_point=/ path=/'
        devpth = maybe_get_writable_device_path('gpt/system', info, LOG)
        self.assertEqual('gpt/system', devpth)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_resolv_conf.py b/tests/unittests/config/test_cc_resolv_conf.py
new file mode 100644
index 00000000..0aa90a23
--- /dev/null
+++ b/tests/unittests/config/test_cc_resolv_conf.py
@@ -0,0 +1,193 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import logging
+import os
+import shutil
+import tempfile
+import pytest
+from copy import deepcopy
+from unittest import mock
+
+from cloudinit import cloud
+from cloudinit import distros
+from cloudinit import helpers
+from cloudinit import util
+
+from tests.unittests import helpers as t_help
+from tests.unittests.util import MockDistro
+from cloudinit.config import cc_resolv_conf
+from cloudinit.config.cc_resolv_conf import generate_resolv_conf
+
+LOG = logging.getLogger(__name__)
+EXPECTED_HEADER = """\
+# Your system has been configured with 'manage-resolv-conf' set to true.
+# As a result, cloud-init has written this file with configuration data
+# that it has been provided. Cloud-init, by default, will write this file
+# a single time (PER_ONCE).
+#\n\n"""
+
+
class TestResolvConf(t_help.FilesystemMockingTestCase):
    """Tests for the cc_resolv_conf module handler."""

    with_logs = True
    # Base config enabling management; tests deepcopy and mutate this.
    cfg = {'manage_resolv_conf': True, 'resolv_conf': {}}

    def setUp(self):
        super(TestResolvConf, self).setUp()
        self.tmp = tempfile.mkdtemp()
        util.ensure_dir(os.path.join(self.tmp, 'data'))
        self.addCleanup(shutil.rmtree, self.tmp)

    def _fetch_distro(self, kind, conf=None):
        """Instantiate a distro of the given kind with optional config."""
        cls = distros.fetch(kind)
        paths = helpers.Paths({'cloud_dir': self.tmp})
        conf = {} if conf is None else conf
        return cls(kind, conf, paths)

    def call_resolv_conf_handler(self, distro_name, conf, cc=None):
        """Invoke cc_resolv_conf.handle, building a Cloud when none given."""
        if not cc:
            ds = None
            distro = self._fetch_distro(distro_name, conf)
            paths = helpers.Paths({'cloud_dir': self.tmp})
            cc = cloud.Cloud(ds, paths, {}, distro, None)
        cc_resolv_conf.handle('cc_resolv_conf', conf, cc, LOG, [])

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_resolv_conf_systemd_resolved(self, m_render_to_file):
        """photon renders to /etc/systemd/resolved.conf."""
        self.call_resolv_conf_handler('photon', self.cfg)

        assert [
            mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
        ] == m_render_to_file.call_args_list

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_resolv_conf_no_param(self, m_render_to_file):
        """Missing resolv_conf key logs a warning; nothing is rendered."""
        tmp = deepcopy(self.cfg)
        self.logs.truncate(0)
        tmp.pop('resolv_conf')
        self.call_resolv_conf_handler('photon', tmp)

        self.assertIn('manage_resolv_conf True but no parameters provided',
                      self.logs.getvalue())
        assert [
            mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
        ] not in m_render_to_file.call_args_list

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_resolv_conf_manage_resolv_conf_false(self, m_render_to_file):
        """manage_resolv_conf False logs a message and skips rendering."""
        tmp = deepcopy(self.cfg)
        self.logs.truncate(0)
        tmp['manage_resolv_conf'] = False
        self.call_resolv_conf_handler('photon', tmp)
        self.assertIn("'manage_resolv_conf' present but set to False",
                      self.logs.getvalue())
        assert [
            mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
        ] not in m_render_to_file.call_args_list

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_resolv_conf_etc_resolv_conf(self, m_render_to_file):
        """rhel renders to /etc/resolv.conf."""
        self.call_resolv_conf_handler('rhel', self.cfg)

        assert [
            mock.call(mock.ANY, '/etc/resolv.conf', mock.ANY)
        ] == m_render_to_file.call_args_list

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_resolv_conf_invalid_resolve_conf_fn(self, m_render_to_file):
        """A bogus resolve_conf_fn finds no template and renders nothing."""
        ds = None
        distro = self._fetch_distro('rhel', self.cfg)
        paths = helpers.Paths({'cloud_dir': self.tmp})
        cc = cloud.Cloud(ds, paths, {}, distro, None)
        cc.distro.resolve_conf_fn = 'bla'

        self.logs.truncate(0)
        self.call_resolv_conf_handler('rhel', self.cfg, cc)

        self.assertIn('No template found, not rendering resolve configs',
                      self.logs.getvalue())

        assert [
            mock.call(mock.ANY, '/etc/resolv.conf', mock.ANY)
        ] not in m_render_to_file.call_args_list
+
+
class TestGenerateResolvConf:
    """pytest-style tests for generate_resolv_conf rendering."""

    dist = MockDistro()
    tmpl_fn = "templates/resolv.conf.tmpl"

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_dist_resolv_conf_fn(self, m_render_to_file):
        """The distro's resolve_conf_fn is used as the render target."""
        self.dist.resolve_conf_fn = "/tmp/resolv-test.conf"
        generate_resolv_conf(self.tmpl_fn,
                             mock.MagicMock(),
                             self.dist.resolve_conf_fn)

        assert [
            mock.call(mock.ANY, self.dist.resolve_conf_fn, mock.ANY)
        ] == m_render_to_file.call_args_list

    @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
    def test_target_fname_is_used_if_passed(self, m_render_to_file):
        """An explicitly passed target path is rendered to as given."""
        path = "/use/this/path"
        generate_resolv_conf(self.tmpl_fn, mock.MagicMock(), path)

        assert [
            mock.call(mock.ANY, path, mock.ANY)
        ] == m_render_to_file.call_args_list

    # Patch in templater so we can assert on the actual generated content
    @mock.patch("cloudinit.templater.util.write_file")
    # Parameterise with the value to be passed to generate_resolv_conf as the
    # params parameter, and the expected line after the header as
    # expected_extra_line.
    @pytest.mark.parametrize(
        "params,expected_extra_line",
        [
            # No options
            ({}, None),
            # Just a true flag
            ({"options": {"foo": True}}, "options foo"),
            # Just a false flag
            ({"options": {"foo": False}}, None),
            # Just an option
            ({"options": {"foo": "some_value"}}, "options foo:some_value"),
            # A true flag and an option
            (
                {"options": {"foo": "some_value", "bar": True}},
                "options bar foo:some_value",
            ),
            # Two options
            (
                {"options": {"foo": "some_value", "bar": "other_value"}},
                "options bar:other_value foo:some_value",
            ),
            # Everything
            (
                {
                    "options": {
                        "foo": "some_value",
                        "bar": "other_value",
                        "baz": False,
                        "spam": True,
                    }
                },
                "options spam bar:other_value foo:some_value",
            ),
        ],
    )
    def test_flags_and_options(
        self, m_write_file, params, expected_extra_line
    ):
        """Flags and options are rendered after the expected header."""
        target_fn = "/etc/resolv.conf"
        generate_resolv_conf(self.tmpl_fn, params, target_fn)

        expected_content = EXPECTED_HEADER
        if expected_extra_line is not None:
            # If we have any extra lines, expect a trailing newline
            expected_content += "\n".join([expected_extra_line, ""])
        assert [
            mock.call(mock.ANY, expected_content, mode=mock.ANY)
        ] == m_write_file.call_args_list
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_rh_subscription.py b/tests/unittests/config/test_cc_rh_subscription.py
new file mode 100644
index 00000000..bd7ebc98
--- /dev/null
+++ b/tests/unittests/config/test_cc_rh_subscription.py
@@ -0,0 +1,234 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Tests for registering RHEL subscription via rh_subscription."""
+
+import copy
+import logging
+
+from cloudinit.config import cc_rh_subscription
+from cloudinit import subp
+
+from tests.unittests.helpers import CiTestCase, mock
+
+SUBMGR = cc_rh_subscription.SubscriptionManager
+SUB_MAN_CLI = 'cloudinit.config.cc_rh_subscription._sub_man_cli'
+
+
@mock.patch(SUB_MAN_CLI)
class GoodTests(CiTestCase):
    """Tests of successful rh_subscription registration paths."""

    with_logs = True

    def setUp(self):
        super(GoodTests, self).setUp()
        self.name = "cc_rh_subscription"
        self.cloud_init = None
        self.log = logging.getLogger("good_tests")
        self.args = []
        self.handle = cc_rh_subscription.handle

        # Minimal registration config: username/password only.
        self.config = {'rh_subscription':
                       {'username': 'scooby@do.com',
                        'password': 'scooby-snacks'
                        }}
        # Full config exercising auto-attach, service-level, pool and
        # repo management.
        self.config_full = {'rh_subscription':
                            {'username': 'scooby@do.com',
                             'password': 'scooby-snacks',
                             'auto-attach': True,
                             'service-level': 'self-support',
                             'add-pool': ['pool1', 'pool2', 'pool3'],
                             'enable-repo': ['repo1', 'repo2', 'repo3'],
                             'disable-repo': ['repo4', 'repo5']
                             }}

    def test_already_registered(self, m_sman_cli):
        """Emulates a system that is already registered.

        Only the 'identity' call is made and the handler logs that the
        system is already registered.
        """
        self.handle(self.name, self.config, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 1)
        self.assertIn('System is already registered', self.logs.getvalue())

    def test_simple_registration(self, m_sman_cli):
        """Simple registration with username and password."""
        reg = "The system has been registered with ID:" \
              " 12345678-abde-abcde-1234-1234567890abc"
        # First call (identity) fails -> not registered; second registers.
        m_sman_cli.side_effect = [subp.ProcessExecutionError, (reg, 'bar')]
        self.handle(self.name, self.config, self.cloud_init,
                    self.log, self.args)
        self.assertIn(mock.call(['identity']), m_sman_cli.call_args_list)
        self.assertIn(mock.call(['register', '--username=scooby@do.com',
                                 '--password=scooby-snacks'],
                                logstring_val=True),
                      m_sman_cli.call_args_list)
        self.assertIn('rh_subscription plugin completed successfully',
                      self.logs.getvalue())
        self.assertEqual(m_sman_cli.call_count, 2)

    @mock.patch.object(cc_rh_subscription.SubscriptionManager, "_getRepos")
    def test_update_repos_disable_with_none(self, m_get_repos, m_sman_cli):
        """A disable-repo value of None is tolerated; enables still apply."""
        cfg = copy.deepcopy(self.config)
        m_get_repos.return_value = ([], ['repo1'])
        cfg['rh_subscription'].update(
            {'enable-repo': ['repo1'], 'disable-repo': None})
        mysm = cc_rh_subscription.SubscriptionManager(cfg)
        self.assertEqual(True, mysm.update_repos())
        m_get_repos.assert_called_with()
        self.assertEqual(m_sman_cli.call_args_list,
                         [mock.call(['repos', '--enable=repo1'])])

    def test_full_registration(self, m_sman_cli):
        """Registration with auto-attach, service-level, adding pools,
        and enabling and disabling yum repos."""
        call_lists = []
        call_lists.append(['attach', '--pool=pool1', '--pool=pool3'])
        call_lists.append(['repos', '--disable=repo5', '--enable=repo2',
                           '--enable=repo3'])
        call_lists.append(['attach', '--auto', '--servicelevel=self-support'])
        reg = "The system has been registered with ID:" \
              " 12345678-abde-abcde-1234-1234567890abc"
        # Scripted CLI responses: identity fails, register succeeds, then
        # service level, pool listings, and repo listings.
        m_sman_cli.side_effect = [
            subp.ProcessExecutionError,
            (reg, 'bar'),
            ('Service level set to: self-support', ''),
            ('pool1\npool3\n', ''), ('pool2\n', ''), ('', ''),
            ('Repo ID: repo1\nRepo ID: repo5\n', ''),
            ('Repo ID: repo2\nRepo ID: repo3\nRepo ID: repo4', ''),
            ('', '')]
        self.handle(self.name, self.config_full, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 9)
        for call in call_lists:
            self.assertIn(mock.call(call), m_sman_cli.call_args_list)
        self.assertIn("rh_subscription plugin completed successfully",
                      self.logs.getvalue())
+
+
@mock.patch(SUB_MAN_CLI)
class TestBadInput(CiTestCase):
    """Tests of invalid rh_subscription configs; each should warn and not
    complete registration."""

    with_logs = True
    name = "cc_rh_subscription"
    cloud_init = None
    log = logging.getLogger("bad_tests")
    args = []
    SM = cc_rh_subscription.SubscriptionManager
    reg = "The system has been registered with ID:" \
          " 12345678-abde-abcde-1234-1234567890abc"

    # username without a password
    config_no_password = {'rh_subscription':
                          {'username': 'scooby@do.com'
                           }}

    # activation key without an org
    config_no_key = {'rh_subscription':
                     {'activation-key': '1234abcde',
                      }}

    # service-level without auto-attach
    config_service = {'rh_subscription':
                      {'username': 'scooby@do.com',
                       'password': 'scooby-snacks',
                       'service-level': 'self-support'
                       }}

    # add-pool value that is not a list
    config_badpool = {'rh_subscription':
                      {'username': 'scooby@do.com',
                       'password': 'scooby-snacks',
                       'add-pool': 'not_a_list'
                       }}
    # enable-repo value that is not a list
    config_badrepo = {'rh_subscription':
                      {'username': 'scooby@do.com',
                       'password': 'scooby-snacks',
                       'enable-repo': 'not_a_list'
                       }}
    # unknown key 'fookey'
    config_badkey = {'rh_subscription':
                     {'activation-key': 'abcdef1234',
                      'fookey': 'bar',
                      'org': '123',
                      }}

    def setUp(self):
        super(TestBadInput, self).setUp()
        self.handle = cc_rh_subscription.handle

    def assert_logged_warnings(self, warnings):
        """Assert every expected warning string appears in captured logs."""
        logs = self.logs.getvalue()
        missing = [w for w in warnings if "WARNING: " + w not in logs]
        self.assertEqual([], missing, "Missing expected warnings.")

    def test_no_password(self, m_sman_cli):
        """Attempt to register without the password key/value."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError,
                                  (self.reg, 'bar')]
        self.handle(self.name, self.config_no_password, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 0)

    def test_no_org(self, m_sman_cli):
        """Attempt to register without the org key/value."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError]
        self.handle(self.name, self.config_no_key, self.cloud_init,
                    self.log, self.args)
        m_sman_cli.assert_called_with(['identity'])
        self.assertEqual(m_sman_cli.call_count, 1)
        self.assert_logged_warnings((
            'Unable to register system due to incomplete information.',
            'Use either activationkey and org *or* userid and password',
            'Registration failed or did not run completely',
            'rh_subscription plugin did not complete successfully'))

    def test_service_level_without_auto(self, m_sman_cli):
        """Attempt to register using service-level without auto-attach key."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError,
                                  (self.reg, 'bar')]
        self.handle(self.name, self.config_service, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 1)
        self.assert_logged_warnings((
            'The service-level key must be used in conjunction with ',
            'rh_subscription plugin did not complete successfully'))

    def test_pool_not_a_list(self, m_sman_cli):
        """Register with pools that are not in the format of a list."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError,
                                  (self.reg, 'bar')]
        self.handle(self.name, self.config_badpool, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 2)
        self.assert_logged_warnings((
            'Pools must in the format of a list',
            'rh_subscription plugin did not complete successfully'))

    def test_repo_not_a_list(self, m_sman_cli):
        """Register with repos that are not in the format of a list."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError,
                                  (self.reg, 'bar')]
        self.handle(self.name, self.config_badrepo, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 2)
        self.assert_logged_warnings((
            'Repo IDs must in the format of a list.',
            'Unable to add or remove repos',
            'rh_subscription plugin did not complete successfully'))

    def test_bad_key_value(self, m_sman_cli):
        """Attempt to register with a key that we don't know."""
        m_sman_cli.side_effect = [subp.ProcessExecutionError,
                                  (self.reg, 'bar')]
        self.handle(self.name, self.config_badkey, self.cloud_init,
                    self.log, self.args)
        self.assertEqual(m_sman_cli.call_count, 1)
        self.assert_logged_warnings((
            'fookey is not a valid key for rh_subscription. Valid keys are:',
            'rh_subscription plugin did not complete successfully'))
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_rsyslog.py b/tests/unittests/config/test_cc_rsyslog.py
new file mode 100644
index 00000000..bc147dac
--- /dev/null
+++ b/tests/unittests/config/test_cc_rsyslog.py
@@ -0,0 +1,178 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import os
+import shutil
+import tempfile
+
+from cloudinit.config.cc_rsyslog import (
+ apply_rsyslog_changes, DEF_DIR, DEF_FILENAME, DEF_RELOAD, load_config,
+ parse_remotes_line, remotes_to_rsyslog_cfg)
+from cloudinit import util
+
+from tests.unittests import helpers as t_help
+
+
class TestLoadConfig(t_help.TestCase):
    """Tests for load_config merging legacy and new-style rsyslog config."""

    def setUp(self):
        super(TestLoadConfig, self).setUp()
        # Expected defaults when no rsyslog configuration is provided.
        self.basecfg = {
            'config_filename': DEF_FILENAME,
            'config_dir': DEF_DIR,
            'service_reload_command': DEF_RELOAD,
            'configs': [],
            'remotes': {},
        }

    def test_legacy_full(self):
        # legacy flat keys (rsyslog_dir/rsyslog_filename) map into the
        # nested config form
        found = load_config({
            'rsyslog': ['*.* @192.168.1.1'],
            'rsyslog_dir': "mydir",
            'rsyslog_filename': "myfilename"})
        self.basecfg.update({
            'configs': ['*.* @192.168.1.1'],
            'config_dir': "mydir",
            'config_filename': 'myfilename',
            'service_reload_command': 'auto'}
        )

        self.assertEqual(found, self.basecfg)

    def test_legacy_defaults(self):
        found = load_config({
            'rsyslog': ['*.* @192.168.1.1']})
        self.basecfg.update({
            'configs': ['*.* @192.168.1.1']})
        self.assertEqual(found, self.basecfg)

    def test_new_defaults(self):
        self.assertEqual(load_config({}), self.basecfg)

    def test_new_configs(self):
        cfgs = ['*.* myhost', '*.* my2host']
        self.basecfg.update({'configs': cfgs})
        self.assertEqual(
            load_config({'rsyslog': {'configs': cfgs}}),
            self.basecfg)
+
+
class TestApplyChanges(t_help.TestCase):
    """Tests for apply_rsyslog_changes writing rsyslog config files."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmp)

    def test_simple(self):
        """A bare string config is written to def_fname with a newline."""
        cfgline = "*.* foohost"
        changed = apply_rsyslog_changes(
            configs=[cfgline], def_fname="foo.cfg", cfg_dir=self.tmp)

        fname = os.path.join(self.tmp, "foo.cfg")
        self.assertEqual([fname], changed)
        self.assertEqual(
            util.load_file(fname), cfgline + "\n")

    def test_multiple_files(self):
        """Dict configs may route content to named or absolute filenames."""
        configs = [
            '*.* foohost',
            {'content': 'abc', 'filename': 'my.cfg'},
            {'content': 'filefoo-content',
             'filename': os.path.join(self.tmp, 'mydir/mycfg')},
        ]

        changed = apply_rsyslog_changes(
            configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)

        expected = [
            (os.path.join(self.tmp, "default.cfg"),
             "*.* foohost\n"),
            (os.path.join(self.tmp, "my.cfg"), "abc\n"),
            (os.path.join(self.tmp, "mydir/mycfg"), "filefoo-content\n"),
        ]
        self.assertEqual([f[0] for f in expected], changed)
        # Read each written file once and compare against expectations.
        actual = [
            (fname, util.load_file(fname)) for fname, _content in expected]
        self.assertEqual(expected, actual)

    def test_repeat_def(self):
        """Multiple string configs are concatenated into def_fname."""
        configs = ['*.* foohost', "*.warn otherhost"]

        changed = apply_rsyslog_changes(
            configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)

        fname = os.path.join(self.tmp, "default.cfg")
        self.assertEqual([fname], changed)

        expected_content = '\n'.join(configs) + '\n'
        found_content = util.load_file(fname)
        self.assertEqual(expected_content, found_content)

    def test_multiline_content(self):
        """Content that already ends in a newline gets no extra newline."""
        configs = ['line1', 'line2\nline3\n']

        apply_rsyslog_changes(
            configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)

        fname = os.path.join(self.tmp, "default.cfg")
        expected_content = '\n'.join(configs)
        found_content = util.load_file(fname)
        self.assertEqual(expected_content, found_content)
+
+
class TestParseRemotesLine(t_help.TestCase):
    """Tests for parse_remotes_line syntax handling."""

    def test_valid_port(self):
        r = parse_remotes_line("foo:9")
        self.assertEqual(9, r.port)

    def test_invalid_port(self):
        # a non-numeric port must raise ValueError
        with self.assertRaises(ValueError):
            parse_remotes_line("*.* foo:abc")

    def test_valid_ipv6(self):
        r = parse_remotes_line("*.* [::1]")
        self.assertEqual("*.* @[::1]", str(r))

    def test_valid_ipv6_with_port(self):
        r = parse_remotes_line("*.* [::1]:100")
        self.assertEqual(r.port, 100)
        self.assertEqual(r.addr, "::1")
        self.assertEqual("*.* @[::1]:100", str(r))

    def test_invalid_multiple_colon(self):
        # unbracketed ipv6 with a port is ambiguous and must raise
        with self.assertRaises(ValueError):
            parse_remotes_line("*.* ::1:100")

    def test_name_in_string(self):
        # the name is rendered as a trailing comment on the line
        r = parse_remotes_line("syslog.host", name="foobar")
        self.assertEqual("*.* @syslog.host # foobar", str(r))
+
+
class TestRemotesToSyslog(t_help.TestCase):
    """Tests for remotes_to_rsyslog_cfg rendering of remotes dicts."""

    def test_simple(self):
        # str rendered line must appear in remotes_to_rsyslog_cfg return
        mycfg = "*.* myhost"
        myline = str(parse_remotes_line(mycfg, name="myname"))
        r = remotes_to_rsyslog_cfg({'myname': mycfg})
        lines = r.splitlines()
        self.assertEqual(1, len(lines))
        self.assertIn(myline, lines)

    def test_header_footer(self):
        """header and footer are the first and last rendered lines."""
        header = "#foo head"
        footer = "#foo foot"
        r = remotes_to_rsyslog_cfg(
            {'myname': "*.* myhost"}, header=header, footer=footer)
        lines = r.splitlines()
        # assertTrue(x, msg) treats the second argument as a failure
        # message and always passes for truthy x; assert placement with
        # assertEqual instead.
        self.assertEqual(header, lines[0])
        self.assertEqual(footer, lines[-1])

    def test_with_empty_or_null(self):
        """Remotes with None or empty values are dropped from the output."""
        mycfg = "*.* myhost"
        myline = str(parse_remotes_line(mycfg, name="myname"))
        r = remotes_to_rsyslog_cfg(
            {'myname': mycfg, 'removed': None, 'removed2': ""})
        lines = r.splitlines()
        self.assertEqual(1, len(lines))
        self.assertIn(myline, lines)
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_runcmd.py b/tests/unittests/config/test_cc_runcmd.py
new file mode 100644
index 00000000..01de6af0
--- /dev/null
+++ b/tests/unittests/config/test_cc_runcmd.py
@@ -0,0 +1,129 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import logging
+import os
+import stat
+from unittest.mock import patch
+
+from cloudinit.config.cc_runcmd import handle, schema
+from cloudinit import (helpers, subp, util)
+from tests.unittests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, SchemaTestCaseMixin,
+ skipUnlessJsonSchema)
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
class TestRuncmd(FilesystemMockingTestCase):
    """Tests for the cc_runcmd handler writing runcmd shell scripts."""

    with_logs = True

    def setUp(self):
        super(TestRuncmd, self).setUp()
        self.subp = subp.subp
        self.new_root = self.tmp_dir()
        self.patchUtils(self.new_root)
        self.paths = helpers.Paths({'scripts': self.new_root})

    def test_handler_skip_if_no_runcmd(self):
        """When the provided config doesn't contain runcmd, skip it."""
        cfg = {}
        mycloud = get_cloud(paths=self.paths)
        handle('notimportant', cfg, mycloud, LOG, None)
        self.assertIn(
            "Skipping module named notimportant, no 'runcmd' key",
            self.logs.getvalue())

    @patch('cloudinit.util.shellify')
    def test_runcmd_shellify_fails(self, cls):
        """When shellify fails, the exception propagates to the caller."""
        cls.side_effect = TypeError("patched shellify")
        valid_config = {'runcmd': ['echo 42']}
        cc = get_cloud(paths=self.paths)
        with self.assertRaises(TypeError) as cm:
            with self.allow_subp(['/bin/sh']):
                handle('cc_runcmd', valid_config, cc, LOG, None)
        self.assertIn("Failed to shellify", str(cm.exception))

    def test_handler_invalid_command_set(self):
        """Commands which can't be converted to shell will raise errors."""
        invalid_config = {'runcmd': 1}
        cc = get_cloud(paths=self.paths)
        with self.assertRaises(TypeError) as cm:
            handle('cc_runcmd', invalid_config, cc, LOG, [])
        self.assertIn(
            'Failed to shellify 1 into file'
            ' /var/lib/cloud/instances/iid-datasource-none/scripts/runcmd',
            str(cm.exception))

    @skipUnlessJsonSchema()
    def test_handler_schema_validation_warns_non_array_type(self):
        """Schema validation warns of non-array type for runcmd key.

        Schema validation is not strict, so runcmd attempts to shellify the
        invalid content.
        """
        invalid_config = {'runcmd': 1}
        cc = get_cloud(paths=self.paths)
        with self.assertRaises(TypeError) as cm:
            handle('cc_runcmd', invalid_config, cc, LOG, [])
        self.assertIn(
            'Invalid config:\nruncmd: 1 is not of type \'array\'',
            self.logs.getvalue())
        self.assertIn('Failed to shellify', str(cm.exception))

    @skipUnlessJsonSchema()
    def test_handler_schema_validation_warns_non_array_item_type(self):
        """Schema validation warns of non-array or string runcmd items.

        Schema validation is not strict, so runcmd attempts to shellify the
        invalid content.
        """
        invalid_config = {
            'runcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]}
        cc = get_cloud(paths=self.paths)
        with self.assertRaises(TypeError) as cm:
            handle('cc_runcmd', invalid_config, cc, LOG, [])
        expected_warnings = [
            'runcmd.1: 20 is not valid under any of the given schemas',
            'runcmd.3: {\'a\': \'n\'} is not valid under any of the given'
            ' schema'
        ]
        logs = self.logs.getvalue()
        for warning in expected_warnings:
            self.assertIn(warning, logs)
        self.assertIn('Failed to shellify', str(cm.exception))

    def test_handler_write_valid_runcmd_schema_to_file(self):
        """Valid runcmd schema is written to a runcmd shell script."""
        valid_config = {'runcmd': [['ls', '/']]}
        cc = get_cloud(paths=self.paths)
        handle('cc_runcmd', valid_config, cc, LOG, [])
        runcmd_file = os.path.join(
            self.new_root,
            'var/lib/cloud/instances/iid-datasource-none/scripts/runcmd')
        self.assertEqual("#!/bin/sh\n'ls' '/'\n", util.load_file(runcmd_file))
        # the generated script must be owner-executable only
        file_stat = os.stat(runcmd_file)
        self.assertEqual(0o700, stat.S_IMODE(file_stat.st_mode))
+
+
@skipUnlessJsonSchema()
class TestSchema(CiTestCase, SchemaTestCaseMixin):
    """Directly test schema rather than through handle."""

    # schema under test; assertSchemaValid comes from SchemaTestCaseMixin
    schema = schema

    def test_duplicates_are_fine_array_array(self):
        """Duplicated commands array/array entries are allowed."""
        self.assertSchemaValid(
            [["echo", "bye"], ["echo", "bye"]],
            "command entries can be duplicate.")

    def test_duplicates_are_fine_array_string(self):
        """Duplicated commands array/string entries are allowed."""
        self.assertSchemaValid(
            ["echo bye", "echo bye"],
            "command entries can be duplicate.")
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_seed_random.py b/tests/unittests/config/test_cc_seed_random.py
new file mode 100644
index 00000000..cfd67dce
--- /dev/null
+++ b/tests/unittests/config/test_cc_seed_random.py
@@ -0,0 +1,205 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Author: Juerg Haefliger <juerg.haefliger@hp.com>
+#
+# Based on test_handler_set_hostname.py
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+import gzip
+import logging
+import tempfile
+from io import BytesIO
+
+from cloudinit import subp
+from cloudinit import util
+from cloudinit.config import cc_seed_random
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+class TestRandomSeed(t_help.TestCase):
+ def setUp(self):
+ super(TestRandomSeed, self).setUp()
+ self._seed_file = tempfile.mktemp()
+ self.unapply = []
+
+ # by default 'which' has nothing in its path
+ self.apply_patches([(subp, 'which', self._which)])
+ self.apply_patches([(subp, 'subp', self._subp)])
+ self.subp_called = []
+ self.whichdata = {}
+
+ def tearDown(self):
+ apply_patches([i for i in reversed(self.unapply)])
+ util.del_file(self._seed_file)
+
+ def apply_patches(self, patches):
+ ret = apply_patches(patches)
+ self.unapply += ret
+
+ def _which(self, program):
+ return self.whichdata.get(program)
+
+ def _subp(self, *args, **kwargs):
+ # supports subp calling with cmd as args or kwargs
+ if 'args' not in kwargs:
+ kwargs['args'] = args[0]
+ self.subp_called.append(kwargs)
+ return
+
+ def _compress(self, text):
+ contents = BytesIO()
+ gz_fh = gzip.GzipFile(mode='wb', fileobj=contents)
+ gz_fh.write(text)
+ gz_fh.close()
+ return contents.getvalue()
+
+ def test_append_random(self):
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': 'tiny-tim-was-here',
+ }
+ }
+ cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual("tiny-tim-was-here", contents)
+
+ def test_append_random_unknown_encoding(self):
+ data = self._compress(b"tiny-toe")
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': data,
+ 'encoding': 'special_encoding',
+ }
+ }
+ self.assertRaises(IOError, cc_seed_random.handle, 'test', cfg,
+ get_cloud('ubuntu'), LOG, [])
+
+ def test_append_random_gzip(self):
+ data = self._compress(b"tiny-toe")
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': data,
+ 'encoding': 'gzip',
+ }
+ }
+ cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual("tiny-toe", contents)
+
+ def test_append_random_gz(self):
+ data = self._compress(b"big-toe")
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': data,
+ 'encoding': 'gz',
+ }
+ }
+ cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual("big-toe", contents)
+
+ def test_append_random_base64(self):
+ data = util.b64e('bubbles')
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': data,
+ 'encoding': 'base64',
+ }
+ }
+ cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual("bubbles", contents)
+
+ def test_append_random_b64(self):
+ data = util.b64e('kit-kat')
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': data,
+ 'encoding': 'b64',
+ }
+ }
+ cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual("kit-kat", contents)
+
+ def test_append_random_metadata(self):
+ cfg = {
+ 'random_seed': {
+ 'file': self._seed_file,
+ 'data': 'tiny-tim-was-here',
+ }
+ }
+ c = get_cloud('ubuntu', metadata={'random_seed': '-so-was-josh'})
+ cc_seed_random.handle('test', cfg, c, LOG, [])
+ contents = util.load_file(self._seed_file)
+ self.assertEqual('tiny-tim-was-here-so-was-josh', contents)
+
+ def test_seed_command_provided_and_available(self):
+ c = get_cloud('ubuntu')
+ self.whichdata = {'pollinate': '/usr/bin/pollinate'}
+ cfg = {'random_seed': {'command': ['pollinate', '-q']}}
+ cc_seed_random.handle('test', cfg, c, LOG, [])
+
+ subp_args = [f['args'] for f in self.subp_called]
+ self.assertIn(['pollinate', '-q'], subp_args)
+
+ def test_seed_command_not_provided(self):
+ c = get_cloud('ubuntu')
+ self.whichdata = {}
+ cc_seed_random.handle('test', {}, c, LOG, [])
+
+ # subp should not have been called as which would say not available
+ self.assertFalse(self.subp_called)
+
+ def test_unavailable_seed_command_and_required_raises_error(self):
+ c = get_cloud('ubuntu')
+ self.whichdata = {}
+ cfg = {'random_seed': {'command': ['THIS_NO_COMMAND'],
+ 'command_required': True}}
+ self.assertRaises(ValueError, cc_seed_random.handle,
+ 'test', cfg, c, LOG, [])
+
+ def test_seed_command_and_required(self):
+ c = get_cloud('ubuntu')
+ self.whichdata = {'foo': 'foo'}
+ cfg = {'random_seed': {'command_required': True, 'command': ['foo']}}
+ cc_seed_random.handle('test', cfg, c, LOG, [])
+
+ self.assertIn(['foo'], [f['args'] for f in self.subp_called])
+
+ def test_file_in_environment_for_command(self):
+ c = get_cloud('ubuntu')
+ self.whichdata = {'foo': 'foo'}
+ cfg = {'random_seed': {'command_required': True, 'command': ['foo'],
+ 'file': self._seed_file}}
+ cc_seed_random.handle('test', cfg, c, LOG, [])
+
+ # this just insists that the first time subp was called,
+ # RANDOM_SEED_FILE was set correctly in the environment
+ subp_env = [f['env'] for f in self.subp_called]
+ self.assertEqual(subp_env[0].get('RANDOM_SEED_FILE'), self._seed_file)
+
+
+def apply_patches(patches):
+ ret = []
+ for (ref, name, replace) in patches:
+ if replace is None:
+ continue
+ orig = getattr(ref, name)
+ setattr(ref, name, replace)
+ ret.append((ref, name, orig))
+ return ret
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_set_hostname.py b/tests/unittests/config/test_cc_set_hostname.py
new file mode 100644
index 00000000..b9a783a7
--- /dev/null
+++ b/tests/unittests/config/test_cc_set_hostname.py
@@ -0,0 +1,207 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config import cc_set_hostname
+
+from cloudinit import cloud
+from cloudinit import distros
+from cloudinit import helpers
+from cloudinit import util
+
+from tests.unittests import helpers as t_help
+
+from configobj import ConfigObj
+import logging
+import os
+import shutil
+import tempfile
+from io import BytesIO
+from unittest import mock
+
+LOG = logging.getLogger(__name__)
+
+
+class TestHostname(t_help.FilesystemMockingTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestHostname, self).setUp()
+ self.tmp = tempfile.mkdtemp()
+ util.ensure_dir(os.path.join(self.tmp, 'data'))
+ self.addCleanup(shutil.rmtree, self.tmp)
+
+ def _fetch_distro(self, kind, conf=None):
+ cls = distros.fetch(kind)
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ conf = {} if conf is None else conf
+ return cls(kind, conf, paths)
+
+ def test_debian_write_hostname_prefer_fqdn(self):
+ cfg = {
+ 'hostname': 'blah',
+ 'prefer_fqdn_over_hostname': True,
+ 'fqdn': 'blah.yahoo.com',
+ }
+ distro = self._fetch_distro('debian', cfg)
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle('cc_set_hostname',
+ cfg, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('blah.yahoo.com', contents.strip())
+
+ @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
+ def test_rhel_write_hostname_prefer_hostname(self, m_uses_systemd):
+ cfg = {
+ 'hostname': 'blah',
+ 'prefer_fqdn_over_hostname': False,
+ 'fqdn': 'blah.yahoo.com',
+ }
+ distro = self._fetch_distro('rhel', cfg)
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle('cc_set_hostname',
+ cfg, cc, LOG, [])
+ contents = util.load_file("/etc/sysconfig/network", decode=False)
+ n_cfg = ConfigObj(BytesIO(contents))
+ self.assertEqual(
+ {'HOSTNAME': 'blah'},
+ dict(n_cfg))
+
+ @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
+ def test_write_hostname_rhel(self, m_uses_systemd):
+ cfg = {
+ 'hostname': 'blah',
+ 'fqdn': 'blah.blah.blah.yahoo.com'
+ }
+ distro = self._fetch_distro('rhel')
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle('cc_set_hostname',
+ cfg, cc, LOG, [])
+ contents = util.load_file("/etc/sysconfig/network", decode=False)
+ n_cfg = ConfigObj(BytesIO(contents))
+ self.assertEqual(
+ {'HOSTNAME': 'blah.blah.blah.yahoo.com'},
+ dict(n_cfg))
+
+ def test_write_hostname_debian(self):
+ cfg = {
+ 'hostname': 'blah',
+ 'fqdn': 'blah.blah.blah.yahoo.com',
+ }
+ distro = self._fetch_distro('debian')
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle('cc_set_hostname',
+ cfg, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('blah', contents.strip())
+
+ @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
+ def test_write_hostname_sles(self, m_uses_systemd):
+ cfg = {
+ 'hostname': 'blah.blah.blah.suse.com',
+ }
+ distro = self._fetch_distro('sles')
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle('cc_set_hostname', cfg, cc, LOG, [])
+ contents = util.load_file(distro.hostname_conf_fn)
+ self.assertEqual('blah', contents.strip())
+
+ @mock.patch('cloudinit.distros.photon.subp.subp')
+ def test_photon_hostname(self, m_subp):
+ cfg1 = {
+ 'hostname': 'photon',
+ 'prefer_fqdn_over_hostname': True,
+ 'fqdn': 'test1.vmware.com',
+ }
+ cfg2 = {
+ 'hostname': 'photon',
+ 'prefer_fqdn_over_hostname': False,
+ 'fqdn': 'test2.vmware.com',
+ }
+
+ ds = None
+ m_subp.return_value = (None, None)
+ distro = self._fetch_distro('photon', cfg1)
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ for c in [cfg1, cfg2]:
+ cc_set_hostname.handle('cc_set_hostname', c, cc, LOG, [])
+ print("\n", m_subp.call_args_list)
+ if c['prefer_fqdn_over_hostname']:
+ assert [
+ mock.call(['hostnamectl', 'set-hostname', c['fqdn']],
+ capture=True)
+ ] in m_subp.call_args_list
+ assert [
+ mock.call(['hostnamectl', 'set-hostname', c['hostname']],
+ capture=True)
+ ] not in m_subp.call_args_list
+ else:
+ assert [
+ mock.call(['hostnamectl', 'set-hostname', c['hostname']],
+ capture=True)
+ ] in m_subp.call_args_list
+ assert [
+ mock.call(['hostnamectl', 'set-hostname', c['fqdn']],
+ capture=True)
+ ] not in m_subp.call_args_list
+
+ def test_multiple_calls_skips_unchanged_hostname(self):
+ """Only new hostname or fqdn values will generate a hostname call."""
+ distro = self._fetch_distro('debian')
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('hostname1', contents.strip())
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ self.assertIn(
+ 'DEBUG: No hostname changes. Skipping set-hostname\n',
+ self.logs.getvalue())
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname2.me.com'}, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('hostname2', contents.strip())
+ self.assertIn(
+ 'Non-persistently setting the system hostname to hostname2',
+ self.logs.getvalue())
+
+ def test_error_on_distro_set_hostname_errors(self):
+ """Raise SetHostnameError on exceptions from distro.set_hostname."""
+ distro = self._fetch_distro('debian')
+
+ def set_hostname_error(hostname, fqdn):
+ raise Exception("OOPS on: %s" % fqdn)
+
+ distro.set_hostname = set_hostname_error
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ with self.assertRaises(cc_set_hostname.SetHostnameError) as ctx_mgr:
+ cc_set_hostname.handle(
+ 'somename', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ self.assertEqual(
+ 'Failed to set the hostname to hostname1.me.com (hostname1):'
+ ' OOPS on: hostname1.me.com',
+ str(ctx_mgr.exception))
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_set_passwords.py b/tests/unittests/config/test_cc_set_passwords.py
new file mode 100644
index 00000000..9bcd0439
--- /dev/null
+++ b/tests/unittests/config/test_cc_set_passwords.py
@@ -0,0 +1,162 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from unittest import mock
+
+from cloudinit.config import cc_set_passwords as setpass
+from tests.unittests.helpers import CiTestCase
+from cloudinit import util
+
+MODPATH = "cloudinit.config.cc_set_passwords."
+
+
+class TestHandleSshPwauth(CiTestCase):
+ """Test cc_set_passwords handling of ssh_pwauth in handle_ssh_pwauth."""
+
+ with_logs = True
+
+ @mock.patch("cloudinit.distros.subp.subp")
+ def test_unknown_value_logs_warning(self, m_subp):
+ cloud = self.tmp_cloud(distro='ubuntu')
+ setpass.handle_ssh_pwauth("floo", cloud.distro)
+ self.assertIn("Unrecognized value: ssh_pwauth=floo",
+ self.logs.getvalue())
+ m_subp.assert_not_called()
+
+ @mock.patch(MODPATH + "update_ssh_config", return_value=True)
+ @mock.patch("cloudinit.distros.subp.subp")
+ def test_systemctl_as_service_cmd(self, m_subp, m_update_ssh_config):
+ """If systemctl in service cmd: systemctl restart name."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ cloud.distro.init_cmd = ['systemctl']
+ setpass.handle_ssh_pwauth(True, cloud.distro)
+ m_subp.assert_called_with(
+ ["systemctl", "restart", "ssh"], capture=True)
+
+ @mock.patch(MODPATH + "update_ssh_config", return_value=False)
+ @mock.patch("cloudinit.distros.subp.subp")
+ def test_not_restarted_if_not_updated(self, m_subp, m_update_ssh_config):
+ """If config is not updated, then no system restart should be done."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ setpass.handle_ssh_pwauth(True, cloud.distro)
+ m_subp.assert_not_called()
+ self.assertIn("No need to restart SSH", self.logs.getvalue())
+
+ @mock.patch(MODPATH + "update_ssh_config", return_value=True)
+ @mock.patch("cloudinit.distros.subp.subp")
+ def test_unchanged_does_nothing(self, m_subp, m_update_ssh_config):
+ """If 'unchanged', then no updates to config and no restart."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ setpass.handle_ssh_pwauth("unchanged", cloud.distro)
+ m_update_ssh_config.assert_not_called()
+ m_subp.assert_not_called()
+
+ @mock.patch("cloudinit.distros.subp.subp")
+ def test_valid_change_values(self, m_subp):
+ """If value is a valid change value, then update should be called."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ upname = MODPATH + "update_ssh_config"
+ optname = "PasswordAuthentication"
+ for value in util.FALSE_STRINGS + util.TRUE_STRINGS:
+ optval = "yes" if value in util.TRUE_STRINGS else "no"
+ with mock.patch(upname, return_value=False) as m_update:
+ setpass.handle_ssh_pwauth(value, cloud.distro)
+ m_update.assert_called_with({optname: optval})
+ m_subp.assert_not_called()
+
+
+class TestSetPasswordsHandle(CiTestCase):
+ """Test cc_set_passwords.handle"""
+
+ with_logs = True
+
+ def test_handle_on_empty_config(self, *args):
+ """handle logs that no password has changed when config is empty."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ setpass.handle(
+ 'IGNORED', cfg={}, cloud=cloud, log=self.logger, args=[])
+ self.assertEqual(
+ "DEBUG: Leaving SSH config 'PasswordAuthentication' unchanged. "
+ 'ssh_pwauth=None\n',
+ self.logs.getvalue())
+
+ def test_handle_on_chpasswd_list_parses_common_hashes(self):
+ """handle parses common password hashes."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ valid_hashed_pwds = [
+ 'root:$2y$10$8BQjxjVByHA/Ee.O1bCXtO8S7Y5WojbXWqnqYpUW.BrPx/'
+ 'Dlew1Va',
+ 'ubuntu:$6$5hOurLPO$naywm3Ce0UlmZg9gG2Fl9acWCVEoakMMC7dR52q'
+ 'SDexZbrN9z8yHxhUM2b.sxpguSwOlbOQSW/HpXazGGx3oo1']
+ cfg = {'chpasswd': {'list': valid_hashed_pwds}}
+ with mock.patch.object(setpass, 'chpasswd') as chpasswd:
+ setpass.handle(
+ 'IGNORED', cfg=cfg, cloud=cloud, log=self.logger, args=[])
+ self.assertIn(
+ 'DEBUG: Handling input for chpasswd as list.',
+ self.logs.getvalue())
+ self.assertIn(
+ "DEBUG: Setting hashed password for ['root', 'ubuntu']",
+ self.logs.getvalue())
+ valid = '\n'.join(valid_hashed_pwds) + '\n'
+ called = chpasswd.call_args[0][1]
+ self.assertEqual(valid, called)
+
+ @mock.patch(MODPATH + "util.is_BSD")
+ @mock.patch(MODPATH + "subp.subp")
+ def test_bsd_calls_custom_pw_cmds_to_set_and_expire_passwords(
+ self, m_subp, m_is_bsd):
+ """BSD don't use chpasswd"""
+ m_is_bsd.return_value = True
+ cloud = self.tmp_cloud(distro='freebsd')
+ valid_pwds = ['ubuntu:passw0rd']
+ cfg = {'chpasswd': {'list': valid_pwds}}
+ setpass.handle(
+ 'IGNORED', cfg=cfg, cloud=cloud, log=self.logger, args=[])
+ self.assertEqual([
+ mock.call(['pw', 'usermod', 'ubuntu', '-h', '0'], data='passw0rd',
+ logstring="chpasswd for ubuntu"),
+ mock.call(['pw', 'usermod', 'ubuntu', '-p', '01-Jan-1970'])],
+ m_subp.call_args_list)
+
+ @mock.patch(MODPATH + "util.multi_log")
+ @mock.patch(MODPATH + "subp.subp")
+ def test_handle_on_chpasswd_list_creates_random_passwords(
+ self, m_subp, m_multi_log
+ ):
+ """handle creates random passwords for 'R'/'RANDOM' list entries."""
+ cloud = self.tmp_cloud(distro='ubuntu')
+ valid_random_pwds = [
+ 'root:R',
+ 'ubuntu:RANDOM']
+ cfg = {'chpasswd': {'expire': 'false', 'list': valid_random_pwds}}
+ with mock.patch.object(setpass, 'chpasswd') as chpasswd:
+ setpass.handle(
+ 'IGNORED', cfg=cfg, cloud=cloud, log=self.logger, args=[])
+ self.assertIn(
+ 'DEBUG: Handling input for chpasswd as list.',
+ self.logs.getvalue())
+ self.assertEqual(1, chpasswd.call_count)
+ passwords, _ = chpasswd.call_args
+ user_pass = {
+ user: password
+ for user, password
+ in (line.split(":") for line in passwords[1].splitlines())
+ }
+
+ self.assertEqual(1, m_multi_log.call_count)
+ self.assertEqual(
+ mock.call(mock.ANY, stderr=False, fallback_to_stdout=False),
+ m_multi_log.call_args
+ )
+
+ self.assertEqual(set(["root", "ubuntu"]), set(user_pass.keys()))
+ written_lines = m_multi_log.call_args[0][0].splitlines()
+ for password in user_pass.values():
+ for line in written_lines:
+ if password in line:
+ break
+ else:
+ self.fail("Password not emitted to console")
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_snap.py b/tests/unittests/config/test_cc_snap.py
new file mode 100644
index 00000000..e8113eca
--- /dev/null
+++ b/tests/unittests/config/test_cc_snap.py
@@ -0,0 +1,564 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import re
+from io import StringIO
+
+from cloudinit.config.cc_snap import (
+ ASSERTIONS_FILE, add_assertions, handle, maybe_install_squashfuse,
+ run_commands, schema)
+from cloudinit.config.schema import validate_cloudconfig_schema
+from cloudinit import util
+from tests.unittests.helpers import (
+ CiTestCase, SchemaTestCaseMixin, mock, wrap_and_call, skipUnlessJsonSchema)
+
+
+SYSTEM_USER_ASSERTION = """\
+type: system-user
+authority-id: LqvZQdfyfGlYvtep4W6Oj6pFXP9t1Ksp
+brand-id: LqvZQdfyfGlYvtep4W6Oj6pFXP9t1Ksp
+email: foo@bar.com
+password: $6$E5YiAuMIPAwX58jG$miomhVNui/vf7f/3ctB/f0RWSKFxG0YXzrJ9rtJ1ikvzt
+series:
+- 16
+since: 2016-09-10T16:34:00+03:00
+until: 2017-11-10T16:34:00+03:00
+username: baz
+sign-key-sha3-384: RuVvnp4n52GilycjfbbTCI3_L8Y6QlIE75wxMc0KzGV3AUQqVd9GuXoj
+
+AcLBXAQAAQoABgUCV/UU1wAKCRBKnlMoJQLkZVeLD/9/+hIeVywtzsDA3oxl+P+u9D13y9s6svP
+Jd6Wnf4FTw6sq1GjBE4ZA7lrwSaRCUJ9Vcsvf2q9OGPY7mOb2TBxaDe0PbUMjrSrqllSSQwhpNI
+zG+NxkkKuxsUmLzFa+k9m6cyojNbw5LFhQZBQCGlr3JYqC0tIREq/UsZxj+90TUC87lDJwkU8GF
+s4CR+rejZj4itIcDcVxCSnJH6hv6j2JrJskJmvObqTnoOlcab+JXdamXqbldSP3UIhWoyVjqzkj
++to7mXgx+cCUA9+ngNCcfUG+1huGGTWXPCYkZ78HvErcRlIdeo4d3xwtz1cl/w3vYnq9og1XwsP
+Yfetr3boig2qs1Y+j/LpsfYBYncgWjeDfAB9ZZaqQz/oc8n87tIPZDJHrusTlBfop8CqcM4xsKS
+d+wnEY8e/F24mdSOYmS1vQCIDiRU3MKb6x138Ud6oHXFlRBbBJqMMctPqWDunWzb5QJ7YR0I39q
+BrnEqv5NE0G7w6HOJ1LSPG5Hae3P4T2ea+ATgkb03RPr3KnXnzXg4TtBbW1nytdlgoNc/BafE1H
+f3NThcq9gwX4xWZ2PAWnqVPYdDMyCtzW3Ck+o6sIzx+dh4gDLPHIi/6TPe/pUuMop9CBpWwez7V
+v1z+1+URx6Xlq3Jq18y5pZ6fY3IDJ6km2nQPMzcm4Q=="""
+
+ACCOUNT_ASSERTION = """\
+type: account-key
+authority-id: canonical
+revision: 2
+public-key-sha3-384: BWDEoaqyr25nF5SNCvEv2v7QnM9QsfCc0PBMYD_i2NGSQ32EF2d4D0
+account-id: canonical
+name: store
+since: 2016-04-01T00:00:00.0Z
+body-length: 717
+sign-key-sha3-384: -CvQKAwRQ5h3Ffn10FILJoEZUXOv6km9FwA80-Rcj-f-6jadQ89VRswH
+
+AcbBTQRWhcGAARAA0KKYYQWuHOrsFVi4p4l7ZzSvX7kLgJFFeFgOkzdWKBTHEnsMKjl5mefFe9j
+qe8NlmJdfY7BenP7XeBtwKp700H/t9lLrZbpTNAPHXYxEWFJp5bPqIcJYBZ+29oLVLN1Tc5X482
+vCiDqL8+pPYqBrK2fNlyPlNNSum9wI70rDDL4r6FVvr+osTnGejibdV8JphWX+lrSQDnRSdM8KJ
+UM43vTgLGTi9W54oRhsA2OFexRfRksTrnqGoonCjqX5wO3OFSaMDzMsO2MJ/hPfLgDqw53qjzuK
+Iec9OL3k5basvu2cj5u9tKwVFDsCKK2GbKUsWWpx2KTpOifmhmiAbzkTHbH9KaoMS7p0kJwhTQG
+o9aJ9VMTWHJc/NCBx7eu451u6d46sBPCXS/OMUh2766fQmoRtO1OwCTxsRKG2kkjbMn54UdFULl
+VfzvyghMNRKIezsEkmM8wueTqGUGZWa6CEZqZKwhe/PROxOPYzqtDH18XZknbU1n5lNb7vNfem9
+2ai+3+JyFnW9UhfvpVF7gzAgdyCqNli4C6BIN43uwoS8HkykocZS/+Gv52aUQ/NZ8BKOHLw+7an
+Q0o8W9ltSLZbEMxFIPSN0stiZlkXAp6DLyvh1Y4wXSynDjUondTpej2fSvSlCz/W5v5V7qA4nIc
+vUvV7RjVzv17ut0AEQEAAQ==
+
+AcLDXAQAAQoABgUCV83k9QAKCRDUpVvql9g3IBT8IACKZ7XpiBZ3W4lqbPssY6On81WmxQLtvsM
+WTp6zZpl/wWOSt2vMNUk9pvcmrNq1jG9CuhDfWFLGXEjcrrmVkN3YuCOajMSPFCGrxsIBLSRt/b
+nrKykdLAAzMfG8rP1d82bjFFiIieE+urQ0Kcv09Jtdvavq3JT1Tek5mFyyfhHNlQEKOzWqmRWiL
+3c3VOZUs1ZD8TSlnuq/x+5T0X0YtOyGjSlVxk7UybbyMNd6MZfNaMpIG4x+mxD3KHFtBAC7O6kL
+eX3i6j5nCY5UABfA3DZEAkWP4zlmdBEOvZ9t293NaDdOpzsUHRkoi0Zez/9BHQ/kwx/uNc2WqrY
+inCmu16JGNeXqsyinnLl7Ghn2RwhvDMlLxF6RTx8xdx1yk6p3PBTwhZMUvuZGjUtN/AG8BmVJQ1
+rsGSRkkSywvnhVJRB2sudnrMBmNS2goJbzSbmJnOlBrd2WsV0T9SgNMWZBiov3LvU4o2SmAb6b+
+rYwh8H5QHcuuYJuxDjFhPswIp6Wes5T6hUicf3SWtObcDS4HSkVS4ImBjjX9YgCuFy7QdnooOWE
+aPvkRw3XCVeYq0K6w9GRsk1YFErD4XmXXZjDYY650MX9v42Sz5MmphHV8jdIY5ssbadwFSe2rCQ
+6UX08zy7RsIb19hTndE6ncvSNDChUR9eEnCm73eYaWTWTnq1cxdVP/s52r8uss++OYOkPWqh5nO
+haRn7INjH/yZX4qXjNXlTjo0PnHH0q08vNKDwLhxS+D9du+70FeacXFyLIbcWllSbJ7DmbumGpF
+yYbtj3FDDPzachFQdIG3lSt+cSUGeyfSs6wVtc3cIPka/2Urx7RprfmoWSI6+a5NcLdj0u2z8O9
+HxeIgxDpg/3gT8ZIuFKePMcLDM19Fh/p0ysCsX+84B9chNWtsMSmIaE57V+959MVtsLu7SLb9gi
+skrju0pQCwsu2wHMLTNd1f3PTHmrr49hxetTus07HSQUApMtAGKzQilF5zqFjbyaTd4xgQbd+PK
+CjFyzQTDOcUhXpuUGt/IzlqiFfsCsmbj2K4KdSNYMlqIgZ3Azu8KvZLIhsyN7v5vNIZSPfEbjde
+ClU9r0VRiJmtYBUjcSghD9LWn+yRLwOxhfQVjm0cBwIt5R/yPF/qC76yIVuWUtM5Y2/zJR1J8OF
+qWchvlImHtvDzS9FQeLyzJAOjvZ2CnWp2gILgUz0WQdOk1Dq8ax7KS9BQ42zxw9EZAEPw3PEFqR
+IQsRTONp+iVS8YxSmoYZjDlCgRMWUmawez/Fv5b9Fb/XkO5Eq4e+KfrpUujXItaipb+tV8h5v3t
+oG3Ie3WOHrVjCLXIdYslpL1O4nadqR6Xv58pHj6k"""
+
+
+class FakeCloud(object):
+ def __init__(self, distro):
+ self.distro = distro
+
+
+class TestAddAssertions(CiTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestAddAssertions, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ def test_add_assertions_on_empty_list(self, m_subp):
+ """When provided with an empty list, add_assertions does nothing."""
+ add_assertions([])
+ self.assertEqual('', self.logs.getvalue())
+ m_subp.assert_not_called()
+
+ def test_add_assertions_on_non_list_or_dict(self):
+ """When provided an invalid type, add_assertions raises an error."""
+ with self.assertRaises(TypeError) as context_manager:
+ add_assertions(assertions="I'm Not Valid")
+ self.assertEqual(
+ "assertion parameter was not a list or dict: I'm Not Valid",
+ str(context_manager.exception))
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ def test_add_assertions_adds_assertions_as_list(self, m_subp):
+ """When provided with a list, add_assertions adds all assertions."""
+ self.assertEqual(
+ ASSERTIONS_FILE, '/var/lib/cloud/instance/snapd.assertions')
+ assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
+ assertions = [SYSTEM_USER_ASSERTION, ACCOUNT_ASSERTION]
+ wrap_and_call(
+ 'cloudinit.config.cc_snap',
+ {'ASSERTIONS_FILE': {'new': assert_file}},
+ add_assertions, assertions)
+ self.assertIn(
+ 'Importing user-provided snap assertions', self.logs.getvalue())
+ self.assertIn(
+ 'sertions', self.logs.getvalue())
+ self.assertEqual(
+ [mock.call(['snap', 'ack', assert_file], capture=True)],
+ m_subp.call_args_list)
+ compare_file = self.tmp_path('comparison', dir=self.tmp)
+ util.write_file(compare_file, '\n'.join(assertions).encode('utf-8'))
+ self.assertEqual(
+ util.load_file(compare_file), util.load_file(assert_file))
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ def test_add_assertions_adds_assertions_as_dict(self, m_subp):
+ """When provided with a dict, add_assertions adds all assertions."""
+ self.assertEqual(
+ ASSERTIONS_FILE, '/var/lib/cloud/instance/snapd.assertions')
+ assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
+ assertions = {'00': SYSTEM_USER_ASSERTION, '01': ACCOUNT_ASSERTION}
+ wrap_and_call(
+ 'cloudinit.config.cc_snap',
+ {'ASSERTIONS_FILE': {'new': assert_file}},
+ add_assertions, assertions)
+ self.assertIn(
+ 'Importing user-provided snap assertions', self.logs.getvalue())
+ self.assertIn(
+ "DEBUG: Snap acking: ['type: system-user', 'authority-id: Lqv",
+ self.logs.getvalue())
+ self.assertIn(
+ "DEBUG: Snap acking: ['type: account-key', 'authority-id: canonic",
+ self.logs.getvalue())
+ self.assertEqual(
+ [mock.call(['snap', 'ack', assert_file], capture=True)],
+ m_subp.call_args_list)
+ compare_file = self.tmp_path('comparison', dir=self.tmp)
+ combined = '\n'.join(assertions.values())
+ util.write_file(compare_file, combined.encode('utf-8'))
+ self.assertEqual(
+ util.load_file(compare_file), util.load_file(assert_file))
+
+
+class TestRunCommands(CiTestCase):
+
+ with_logs = True
+ allowed_subp = [CiTestCase.SUBP_SHELL_TRUE]
+
+ def setUp(self):
+ super(TestRunCommands, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ def test_run_commands_on_empty_list(self, m_subp):
+ """When provided with an empty list, run_commands does nothing."""
+ run_commands([])
+ self.assertEqual('', self.logs.getvalue())
+ m_subp.assert_not_called()
+
+ def test_run_commands_on_non_list_or_dict(self):
+ """When provided an invalid type, run_commands raises an error."""
+ with self.assertRaises(TypeError) as context_manager:
+ run_commands(commands="I'm Not Valid")
+ self.assertEqual(
+ "commands parameter was not a list or dict: I'm Not Valid",
+ str(context_manager.exception))
+
+ def test_run_command_logs_commands_and_exit_codes_to_stderr(self):
+ """All exit codes are logged to stderr."""
+ outfile = self.tmp_path('output.log', dir=self.tmp)
+
+ cmd1 = 'echo "HI" >> %s' % outfile
+ cmd2 = 'bogus command'
+ cmd3 = 'echo "MOM" >> %s' % outfile
+ commands = [cmd1, cmd2, cmd3]
+
+ mock_path = 'cloudinit.config.cc_snap.sys.stderr'
+ with mock.patch(mock_path, new_callable=StringIO) as m_stderr:
+ with self.assertRaises(RuntimeError) as context_manager:
+ run_commands(commands=commands)
+
+ self.assertIsNotNone(
+ re.search(r'bogus: (command )?not found',
+ str(context_manager.exception)),
+ msg='Expected bogus command not found')
+ expected_stderr_log = '\n'.join([
+ 'Begin run command: {cmd}'.format(cmd=cmd1),
+ 'End run command: exit(0)',
+ 'Begin run command: {cmd}'.format(cmd=cmd2),
+ 'ERROR: End run command: exit(127)',
+ 'Begin run command: {cmd}'.format(cmd=cmd3),
+ 'End run command: exit(0)\n'])
+ self.assertEqual(expected_stderr_log, m_stderr.getvalue())
+
+ def test_run_command_as_lists(self):
+ """When commands are specified as a list, run them in order."""
+ outfile = self.tmp_path('output.log', dir=self.tmp)
+
+ cmd1 = 'echo "HI" >> %s' % outfile
+ cmd2 = 'echo "MOM" >> %s' % outfile
+ commands = [cmd1, cmd2]
+ mock_path = 'cloudinit.config.cc_snap.sys.stderr'
+ with mock.patch(mock_path, new_callable=StringIO):
+ run_commands(commands=commands)
+
+ self.assertIn(
+ 'DEBUG: Running user-provided snap commands',
+ self.logs.getvalue())
+ self.assertEqual('HI\nMOM\n', util.load_file(outfile))
+ self.assertIn(
+ 'WARNING: Non-snap commands in snap config:', self.logs.getvalue())
+
+ def test_run_command_dict_sorted_as_command_script(self):
+ """When commands are a dict, sort them and run."""
+ outfile = self.tmp_path('output.log', dir=self.tmp)
+ cmd1 = 'echo "HI" >> %s' % outfile
+ cmd2 = 'echo "MOM" >> %s' % outfile
+ commands = {'02': cmd1, '01': cmd2}
+ mock_path = 'cloudinit.config.cc_snap.sys.stderr'
+ with mock.patch(mock_path, new_callable=StringIO):
+ run_commands(commands=commands)
+
+ expected_messages = [
+ 'DEBUG: Running user-provided snap commands']
+ for message in expected_messages:
+ self.assertIn(message, self.logs.getvalue())
+ self.assertEqual('MOM\nHI\n', util.load_file(outfile))
+
+
+@skipUnlessJsonSchema()
+class TestSchema(CiTestCase, SchemaTestCaseMixin):
+
+ with_logs = True
+ schema = schema
+
+ def test_schema_warns_on_snap_not_as_dict(self):
+ """If the snap configuration is not a dict, emit a warning."""
+ validate_cloudconfig_schema({'snap': 'wrong type'}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap: 'wrong type' is not of type"
+ " 'object'\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_schema_disallows_unknown_keys(self, _):
+ """Unknown keys in the snap configuration emit warnings."""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': ['ls'], 'invalid-key': ''}}, schema)
+ self.assertIn(
+ 'WARNING: Invalid config:\nsnap: Additional properties are not'
+ " allowed ('invalid-key' was unexpected)",
+ self.logs.getvalue())
+
+ def test_warn_schema_requires_either_commands_or_assertions(self):
+ """Warn when snap configuration lacks both commands and assertions."""
+ validate_cloudconfig_schema(
+ {'snap': {}}, schema)
+ self.assertIn(
+ 'WARNING: Invalid config:\nsnap: {} does not have enough'
+ ' properties',
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_warn_schema_commands_is_not_list_or_dict(self, _):
+ """Warn when snap:commands config is not a list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': 'broken'}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap.commands: 'broken' is not of type"
+ " 'object', 'array'\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_warn_schema_when_commands_is_empty(self, _):
+ """Emit warnings when snap:commands is an empty list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': []}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'commands': {}}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap.commands: [] is too short\n"
+ "WARNING: Invalid config:\nsnap.commands: {} does not have enough"
+ " properties\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_schema_when_commands_are_list_or_dict(self, _):
+ """No warnings when snap:commands are either a list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': ['valid']}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'commands': {'01': 'also valid'}}}, schema)
+ self.assertEqual('', self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_schema_when_commands_values_are_invalid_type(self, _):
+ """Warnings when snap:commands values are invalid type (e.g. int)"""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': [123]}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'commands': {'01': 123}}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\n"
+ "snap.commands.0: 123 is not valid under any of the given"
+ " schemas\n"
+ "WARNING: Invalid config:\n"
+ "snap.commands.01: 123 is not valid under any of the given"
+ " schemas\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_schema_when_commands_list_values_are_invalid_type(self, _):
+ """Warnings when snap:commands list values are wrong type (e.g. int)"""
+ validate_cloudconfig_schema(
+ {'snap': {'commands': [["snap", "install", 123]]}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'commands': {'01': ["snap", "install", 123]}}}, schema)
+        self.assertEqual(
+            "WARNING: Invalid config:\n"
+            "snap.commands.0: ['snap', 'install', 123] is not valid under any"
+            " of the given schemas\n"
+            "WARNING: Invalid config:\n"
+            "snap.commands.01: ['snap', 'install', 123] is not valid under"
+            " any of the given schemas\n",
+            self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ def test_schema_when_assertions_values_are_invalid_type(self, _):
+ """Warnings when snap:assertions values are invalid type (e.g. int)"""
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': [123]}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': {'01': 123}}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\n"
+ "snap.assertions.0: 123 is not of type 'string'\n"
+ "WARNING: Invalid config:\n"
+ "snap.assertions.01: 123 is not of type 'string'\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.add_assertions')
+ def test_warn_schema_assertions_is_not_list_or_dict(self, _):
+ """Warn when snap:assertions config is not a list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': 'broken'}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap.assertions: 'broken' is not of"
+ " type 'object', 'array'\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.add_assertions')
+ def test_warn_schema_when_assertions_is_empty(self, _):
+ """Emit warnings when snap:assertions is an empty list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': []}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': {}}}, schema)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap.assertions: [] is too short\n"
+ "WARNING: Invalid config:\nsnap.assertions: {} does not have"
+ " enough properties\n",
+ self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.add_assertions')
+ def test_schema_when_assertions_are_list_or_dict(self, _):
+ """No warnings when snap:assertions are a list or dict."""
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': ['valid']}}, schema)
+ validate_cloudconfig_schema(
+ {'snap': {'assertions': {'01': 'also valid'}}}, schema)
+ self.assertEqual('', self.logs.getvalue())
+
+ def test_duplicates_are_fine_array_array(self):
+ """Duplicated commands array/array entries are allowed."""
+ self.assertSchemaValid(
+ {'commands': [["echo", "bye"], ["echo", "bye"]]},
+ "command entries can be duplicate.")
+
+ def test_duplicates_are_fine_array_string(self):
+ """Duplicated commands array/string entries are allowed."""
+ self.assertSchemaValid(
+ {'commands': ["echo bye", "echo bye"]},
+ "command entries can be duplicate.")
+
+ def test_duplicates_are_fine_dict_array(self):
+ """Duplicated commands dict/array entries are allowed."""
+ self.assertSchemaValid(
+ {'commands': {'00': ["echo", "bye"], '01': ["echo", "bye"]}},
+ "command entries can be duplicate.")
+
+ def test_duplicates_are_fine_dict_string(self):
+ """Duplicated commands dict/string entries are allowed."""
+ self.assertSchemaValid(
+ {'commands': {'00': "echo bye", '01': "echo bye"}},
+ "command entries can be duplicate.")
+
+
+class TestHandle(CiTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestHandle, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ @mock.patch('cloudinit.config.cc_snap.add_assertions')
+ @mock.patch('cloudinit.config.cc_snap.validate_cloudconfig_schema')
+ def test_handle_no_config(self, m_schema, m_add, m_run):
+ """When no snap-related configuration is provided, nothing happens."""
+ cfg = {}
+ handle('snap', cfg=cfg, cloud=None, log=self.logger, args=None)
+ self.assertIn(
+ "DEBUG: Skipping module named snap, no 'snap' key in config",
+ self.logs.getvalue())
+ m_schema.assert_not_called()
+ m_add.assert_not_called()
+ m_run.assert_not_called()
+
+ @mock.patch('cloudinit.config.cc_snap.run_commands')
+ @mock.patch('cloudinit.config.cc_snap.add_assertions')
+ @mock.patch('cloudinit.config.cc_snap.maybe_install_squashfuse')
+ def test_handle_skips_squashfuse_when_unconfigured(self, m_squash, m_add,
+ m_run):
+ """When squashfuse_in_container is unset, don't attempt to install."""
+ handle(
+ 'snap', cfg={'snap': {}}, cloud=None, log=self.logger, args=None)
+ handle(
+ 'snap', cfg={'snap': {'squashfuse_in_container': None}},
+ cloud=None, log=self.logger, args=None)
+ handle(
+ 'snap', cfg={'snap': {'squashfuse_in_container': False}},
+ cloud=None, log=self.logger, args=None)
+ self.assertEqual([], m_squash.call_args_list) # No calls
+ # snap configuration missing assertions and commands will default to []
+ self.assertIn(mock.call([]), m_add.call_args_list)
+ self.assertIn(mock.call([]), m_run.call_args_list)
+
+ @mock.patch('cloudinit.config.cc_snap.maybe_install_squashfuse')
+ def test_handle_tries_to_install_squashfuse(self, m_squash):
+ """If squashfuse_in_container is True, try installing squashfuse."""
+ cfg = {'snap': {'squashfuse_in_container': True}}
+ mycloud = FakeCloud(None)
+ handle('snap', cfg=cfg, cloud=mycloud, log=self.logger, args=None)
+ self.assertEqual(
+ [mock.call(mycloud)], m_squash.call_args_list)
+
+ def test_handle_runs_commands_provided(self):
+ """If commands are specified as a list, run them."""
+ outfile = self.tmp_path('output.log', dir=self.tmp)
+
+ cfg = {
+ 'snap': {'commands': ['echo "HI" >> %s' % outfile,
+ 'echo "MOM" >> %s' % outfile]}}
+ mock_path = 'cloudinit.config.cc_snap.sys.stderr'
+ with self.allow_subp([CiTestCase.SUBP_SHELL_TRUE]):
+ with mock.patch(mock_path, new_callable=StringIO):
+ handle('snap', cfg=cfg, cloud=None, log=self.logger, args=None)
+
+ self.assertEqual('HI\nMOM\n', util.load_file(outfile))
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ def test_handle_adds_assertions(self, m_subp):
+ """Any configured snap assertions are provided to add_assertions."""
+ assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
+ compare_file = self.tmp_path('comparison', dir=self.tmp)
+ cfg = {
+ 'snap': {'assertions': [SYSTEM_USER_ASSERTION, ACCOUNT_ASSERTION]}}
+ wrap_and_call(
+ 'cloudinit.config.cc_snap',
+ {'ASSERTIONS_FILE': {'new': assert_file}},
+ handle, 'snap', cfg=cfg, cloud=None, log=self.logger, args=None)
+ content = '\n'.join(cfg['snap']['assertions'])
+ util.write_file(compare_file, content.encode('utf-8'))
+ self.assertEqual(
+ util.load_file(compare_file), util.load_file(assert_file))
+
+ @mock.patch('cloudinit.config.cc_snap.subp.subp')
+ @skipUnlessJsonSchema()
+ def test_handle_validates_schema(self, m_subp):
+        """Any provided configuration runs validate_cloudconfig_schema."""
+ assert_file = self.tmp_path('snapd.assertions', dir=self.tmp)
+ cfg = {'snap': {'invalid': ''}} # Generates schema warning
+ wrap_and_call(
+ 'cloudinit.config.cc_snap',
+ {'ASSERTIONS_FILE': {'new': assert_file}},
+ handle, 'snap', cfg=cfg, cloud=None, log=self.logger, args=None)
+ self.assertEqual(
+ "WARNING: Invalid config:\nsnap: Additional properties are not"
+ " allowed ('invalid' was unexpected)\n",
+ self.logs.getvalue())
+
+
+class TestMaybeInstallSquashFuse(CiTestCase):
+
+ with_logs = True
+
+ def setUp(self):
+ super(TestMaybeInstallSquashFuse, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ @mock.patch('cloudinit.config.cc_snap.util.is_container')
+ def test_maybe_install_squashfuse_skips_non_containers(self, m_container):
+ """maybe_install_squashfuse does nothing when not on a container."""
+ m_container.return_value = False
+ maybe_install_squashfuse(cloud=FakeCloud(None))
+ self.assertEqual([mock.call()], m_container.call_args_list)
+ self.assertEqual('', self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.util.is_container')
+ def test_maybe_install_squashfuse_raises_install_errors(self, m_container):
+ """maybe_install_squashfuse logs and raises package install errors."""
+ m_container.return_value = True
+ distro = mock.MagicMock()
+        distro.install_packages.side_effect = RuntimeError(
+            'Some install error')
+        with self.assertRaises(RuntimeError) as context_manager:
+            maybe_install_squashfuse(cloud=FakeCloud(distro))
+        self.assertEqual('Some install error', str(context_manager.exception))
+        self.assertIn('Failed to install squashfuse\nTraceback', self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.util.is_container')
+ def test_maybe_install_squashfuse_raises_update_errors(self, m_container):
+ """maybe_install_squashfuse logs and raises package update errors."""
+ m_container.return_value = True
+ distro = mock.MagicMock()
+ distro.update_package_sources.side_effect = RuntimeError(
+ 'Some apt error')
+ with self.assertRaises(RuntimeError) as context_manager:
+ maybe_install_squashfuse(cloud=FakeCloud(distro))
+ self.assertEqual('Some apt error', str(context_manager.exception))
+ self.assertIn('Package update failed\nTraceback', self.logs.getvalue())
+
+ @mock.patch('cloudinit.config.cc_snap.util.is_container')
+ def test_maybe_install_squashfuse_happy_path(self, m_container):
+        """maybe_install_squashfuse installs squashfuse inside containers."""
+ m_container.return_value = True
+ distro = mock.MagicMock() # No errors raised
+ maybe_install_squashfuse(cloud=FakeCloud(distro))
+ self.assertEqual(
+ [mock.call()], distro.update_package_sources.call_args_list)
+ self.assertEqual(
+ [mock.call(['squashfuse'])],
+ distro.install_packages.call_args_list)
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_spacewalk.py b/tests/unittests/config/test_cc_spacewalk.py
new file mode 100644
index 00000000..96efccf0
--- /dev/null
+++ b/tests/unittests/config/test_cc_spacewalk.py
@@ -0,0 +1,42 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config import cc_spacewalk
+from cloudinit import subp
+
+from tests.unittests import helpers
+
+import logging
+from unittest import mock
+
+LOG = logging.getLogger(__name__)
+
+
+class TestSpacewalk(helpers.TestCase):
+ space_cfg = {
+ 'spacewalk': {
+ 'server': 'localhost',
+ 'profile_name': 'test',
+ }
+ }
+
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_not_is_registered(self, mock_subp):
+ mock_subp.side_effect = subp.ProcessExecutionError(exit_code=1)
+ self.assertFalse(cc_spacewalk.is_registered())
+
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_is_registered(self, mock_subp):
+ mock_subp.side_effect = None
+ self.assertTrue(cc_spacewalk.is_registered())
+
+ @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
+ def test_do_register(self, mock_subp):
+ cc_spacewalk.do_register(**self.space_cfg['spacewalk'])
+ mock_subp.assert_called_with([
+ 'rhnreg_ks',
+ '--serverUrl', 'https://localhost/XMLRPC',
+ '--profilename', 'test',
+ '--sslCACert', cc_spacewalk.def_ca_cert_path,
+ ], capture=False)
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ssh.py b/tests/unittests/config/test_cc_ssh.py
new file mode 100644
index 00000000..ba179bbf
--- /dev/null
+++ b/tests/unittests/config/test_cc_ssh.py
@@ -0,0 +1,405 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import os.path
+
+from cloudinit.config import cc_ssh
+from cloudinit import ssh_util
+from tests.unittests.helpers import CiTestCase, mock
+import logging
+
+LOG = logging.getLogger(__name__)
+
+MODPATH = "cloudinit.config.cc_ssh."
+KEY_NAMES_NO_DSA = [name for name in cc_ssh.GENERATE_KEY_NAMES
+                    if name != 'dsa']
+
+
+@mock.patch(MODPATH + "ssh_util.setup_user_keys")
+class TestHandleSsh(CiTestCase):
+ """Test cc_ssh handling of ssh config."""
+
+ def _publish_hostkey_test_setup(self):
+ self.test_hostkeys = {
+ 'dsa': ('ssh-dss', 'AAAAB3NzaC1kc3MAAACB'),
+ 'ecdsa': ('ecdsa-sha2-nistp256', 'AAAAE2VjZ'),
+ 'ed25519': ('ssh-ed25519', 'AAAAC3NzaC1lZDI'),
+ 'rsa': ('ssh-rsa', 'AAAAB3NzaC1yc2EAAA'),
+ }
+ self.test_hostkey_files = []
+ hostkey_tmpdir = self.tmp_dir()
+ for key_type in cc_ssh.GENERATE_KEY_NAMES:
+ key_data = self.test_hostkeys[key_type]
+ filename = 'ssh_host_%s_key.pub' % key_type
+ filepath = os.path.join(hostkey_tmpdir, filename)
+ self.test_hostkey_files.append(filepath)
+ with open(filepath, 'w') as f:
+ f.write(' '.join(key_data))
+
+ cc_ssh.KEY_FILE_TPL = os.path.join(hostkey_tmpdir, 'ssh_host_%s_key')
+
+ def test_apply_credentials_with_user(self, m_setup_keys):
+ """Apply keys for the given user and root."""
+ keys = ["key1"]
+ user = "clouduser"
+ cc_ssh.apply_credentials(keys, user, False, ssh_util.DISABLE_USER_OPTS)
+ self.assertEqual([mock.call(set(keys), user),
+ mock.call(set(keys), "root", options="")],
+ m_setup_keys.call_args_list)
+
+ def test_apply_credentials_with_no_user(self, m_setup_keys):
+ """Apply keys for root only."""
+ keys = ["key1"]
+ user = None
+ cc_ssh.apply_credentials(keys, user, False, ssh_util.DISABLE_USER_OPTS)
+ self.assertEqual([mock.call(set(keys), "root", options="")],
+ m_setup_keys.call_args_list)
+
+ def test_apply_credentials_with_user_disable_root(self, m_setup_keys):
+ """Apply keys for the given user and disable root ssh."""
+ keys = ["key1"]
+ user = "clouduser"
+ options = ssh_util.DISABLE_USER_OPTS
+ cc_ssh.apply_credentials(keys, user, True, options)
+ options = options.replace("$USER", user)
+ options = options.replace("$DISABLE_USER", "root")
+ self.assertEqual([mock.call(set(keys), user),
+ mock.call(set(keys), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ def test_apply_credentials_with_no_user_disable_root(self, m_setup_keys):
+ """Apply keys no user and disable root ssh."""
+ keys = ["key1"]
+ user = None
+ options = ssh_util.DISABLE_USER_OPTS
+ cc_ssh.apply_credentials(keys, user, True, options)
+ options = options.replace("$USER", "NONE")
+ options = options.replace("$DISABLE_USER", "root")
+ self.assertEqual([mock.call(set(keys), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_no_cfg(self, m_path_exists, m_nug,
+ m_glob, m_setup_keys):
+ """Test handle with no config ignores generating existing keyfiles."""
+ cfg = {}
+ keys = ["key1"]
+ m_glob.return_value = [] # Return no matching keys to prevent removal
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ([], {})
+ cc_ssh.PUBLISH_HOST_KEYS = False
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ options = ssh_util.DISABLE_USER_OPTS.replace("$USER", "NONE")
+ options = options.replace("$DISABLE_USER", "root")
+ m_glob.assert_called_once_with('/etc/ssh/ssh_host_*key*')
+ self.assertIn(
+ [mock.call('/etc/ssh/ssh_host_rsa_key'),
+ mock.call('/etc/ssh/ssh_host_dsa_key'),
+ mock.call('/etc/ssh/ssh_host_ecdsa_key'),
+ mock.call('/etc/ssh/ssh_host_ed25519_key')],
+ m_path_exists.call_args_list)
+ self.assertEqual([mock.call(set(keys), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_dont_allow_public_ssh_keys(self, m_path_exists, m_nug,
+ m_glob, m_setup_keys):
+ """Test allow_public_ssh_keys=False ignores ssh public keys from
+ platform.
+ """
+ cfg = {"allow_public_ssh_keys": False}
+ keys = ["key1"]
+ user = "clouduser"
+ m_glob.return_value = [] # Return no matching keys to prevent removal
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+
+ options = ssh_util.DISABLE_USER_OPTS.replace("$USER", user)
+ options = options.replace("$DISABLE_USER", "root")
+ self.assertEqual([mock.call(set(), user),
+ mock.call(set(), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_no_cfg_and_default_root(self, m_path_exists, m_nug,
+ m_glob, m_setup_keys):
+ """Test handle with no config and a default distro user."""
+ cfg = {}
+ keys = ["key1"]
+ user = "clouduser"
+ m_glob.return_value = [] # Return no matching keys to prevent removal
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+
+ options = ssh_util.DISABLE_USER_OPTS.replace("$USER", user)
+ options = options.replace("$DISABLE_USER", "root")
+ self.assertEqual([mock.call(set(keys), user),
+ mock.call(set(keys), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_cfg_with_explicit_disable_root(self, m_path_exists, m_nug,
+ m_glob, m_setup_keys):
+ """Test handle with explicit disable_root and a default distro user."""
+ # This test is identical to test_handle_no_cfg_and_default_root,
+ # except this uses an explicit cfg value
+ cfg = {"disable_root": True}
+ keys = ["key1"]
+ user = "clouduser"
+ m_glob.return_value = [] # Return no matching keys to prevent removal
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+
+ options = ssh_util.DISABLE_USER_OPTS.replace("$USER", user)
+ options = options.replace("$DISABLE_USER", "root")
+ self.assertEqual([mock.call(set(keys), user),
+ mock.call(set(keys), "root", options=options)],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_cfg_without_disable_root(self, m_path_exists, m_nug,
+ m_glob, m_setup_keys):
+ """Test handle with disable_root == False."""
+ # When disable_root == False, the ssh redirect for root is skipped
+ cfg = {"disable_root": False}
+ keys = ["key1"]
+ user = "clouduser"
+ m_glob.return_value = [] # Return no matching keys to prevent removal
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.get_public_ssh_keys = mock.Mock(return_value=keys)
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+
+ self.assertEqual([mock.call(set(keys), user),
+ mock.call(set(keys), "root", options="")],
+ m_setup_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_publish_hostkeys_default(
+ self, m_path_exists, m_nug, m_glob, m_setup_keys):
+ """Test handle with various configs for ssh_publish_hostkeys."""
+ self._publish_hostkey_test_setup()
+ cc_ssh.PUBLISH_HOST_KEYS = True
+ keys = ["key1"]
+ user = "clouduser"
+ # Return no matching keys for first glob, test keys for second.
+ m_glob.side_effect = iter([
+ [],
+ self.test_hostkey_files,
+ ])
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.datasource.publish_host_keys = mock.Mock()
+
+ cfg = {}
+ expected_call = [self.test_hostkeys[key_type] for key_type
+ in KEY_NAMES_NO_DSA]
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ self.assertEqual([mock.call(expected_call)],
+ cloud.datasource.publish_host_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_publish_hostkeys_config_enable(
+ self, m_path_exists, m_nug, m_glob, m_setup_keys):
+ """Test handle with various configs for ssh_publish_hostkeys."""
+ self._publish_hostkey_test_setup()
+ cc_ssh.PUBLISH_HOST_KEYS = False
+ keys = ["key1"]
+ user = "clouduser"
+ # Return no matching keys for first glob, test keys for second.
+ m_glob.side_effect = iter([
+ [],
+ self.test_hostkey_files,
+ ])
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.datasource.publish_host_keys = mock.Mock()
+
+ cfg = {'ssh_publish_hostkeys': {'enabled': True}}
+ expected_call = [self.test_hostkeys[key_type] for key_type
+ in KEY_NAMES_NO_DSA]
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ self.assertEqual([mock.call(expected_call)],
+ cloud.datasource.publish_host_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_publish_hostkeys_config_disable(
+ self, m_path_exists, m_nug, m_glob, m_setup_keys):
+ """Test handle with various configs for ssh_publish_hostkeys."""
+ self._publish_hostkey_test_setup()
+ cc_ssh.PUBLISH_HOST_KEYS = True
+ keys = ["key1"]
+ user = "clouduser"
+ # Return no matching keys for first glob, test keys for second.
+ m_glob.side_effect = iter([
+ [],
+ self.test_hostkey_files,
+ ])
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.datasource.publish_host_keys = mock.Mock()
+
+ cfg = {'ssh_publish_hostkeys': {'enabled': False}}
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ self.assertFalse(cloud.datasource.publish_host_keys.call_args_list)
+ cloud.datasource.publish_host_keys.assert_not_called()
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_publish_hostkeys_config_blacklist(
+ self, m_path_exists, m_nug, m_glob, m_setup_keys):
+ """Test handle with various configs for ssh_publish_hostkeys."""
+ self._publish_hostkey_test_setup()
+ cc_ssh.PUBLISH_HOST_KEYS = True
+ keys = ["key1"]
+ user = "clouduser"
+ # Return no matching keys for first glob, test keys for second.
+ m_glob.side_effect = iter([
+ [],
+ self.test_hostkey_files,
+ ])
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.datasource.publish_host_keys = mock.Mock()
+
+ cfg = {'ssh_publish_hostkeys': {'enabled': True,
+ 'blacklist': ['dsa', 'rsa']}}
+ expected_call = [self.test_hostkeys[key_type] for key_type
+ in ['ecdsa', 'ed25519']]
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ self.assertEqual([mock.call(expected_call)],
+ cloud.datasource.publish_host_keys.call_args_list)
+
+ @mock.patch(MODPATH + "glob.glob")
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "os.path.exists")
+ def test_handle_publish_hostkeys_empty_blacklist(
+ self, m_path_exists, m_nug, m_glob, m_setup_keys):
+ """Test handle with various configs for ssh_publish_hostkeys."""
+ self._publish_hostkey_test_setup()
+ cc_ssh.PUBLISH_HOST_KEYS = True
+ keys = ["key1"]
+ user = "clouduser"
+ # Return no matching keys for first glob, test keys for second.
+ m_glob.side_effect = iter([
+ [],
+ self.test_hostkey_files,
+ ])
+        # Mock os.path.exists to True to short-circuit the key writing logic
+ m_path_exists.return_value = True
+ m_nug.return_value = ({user: {"default": user}}, {})
+ cloud = self.tmp_cloud(
+ distro='ubuntu', metadata={'public-keys': keys})
+ cloud.datasource.publish_host_keys = mock.Mock()
+
+ cfg = {'ssh_publish_hostkeys': {'enabled': True,
+ 'blacklist': []}}
+ expected_call = [self.test_hostkeys[key_type] for key_type
+ in cc_ssh.GENERATE_KEY_NAMES]
+ cc_ssh.handle("name", cfg, cloud, LOG, None)
+ self.assertEqual([mock.call(expected_call)],
+ cloud.datasource.publish_host_keys.call_args_list)
+
+ @mock.patch(MODPATH + "ug_util.normalize_users_groups")
+ @mock.patch(MODPATH + "util.write_file")
+ def test_handle_ssh_keys_in_cfg(self, m_write_file, m_nug, m_setup_keys):
+ """Test handle with ssh keys and certificate."""
+ # Populate a config dictionary to pass to handle() as well
+ # as the expected file-writing calls.
+ cfg = {"ssh_keys": {}}
+
+ expected_calls = []
+ for key_type in cc_ssh.GENERATE_KEY_NAMES:
+ private_name = "{}_private".format(key_type)
+ public_name = "{}_public".format(key_type)
+ cert_name = "{}_certificate".format(key_type)
+
+            # Actual key contents don't have to be realistic
+ private_value = "{}_PRIVATE_KEY".format(key_type)
+ public_value = "{}_PUBLIC_KEY".format(key_type)
+ cert_value = "{}_CERT_KEY".format(key_type)
+
+ cfg["ssh_keys"][private_name] = private_value
+ cfg["ssh_keys"][public_name] = public_value
+ cfg["ssh_keys"][cert_name] = cert_value
+
+ expected_calls.extend([
+ mock.call(
+ '/etc/ssh/ssh_host_{}_key'.format(key_type),
+ private_value,
+ 384
+ ),
+ mock.call(
+ '/etc/ssh/ssh_host_{}_key.pub'.format(key_type),
+ public_value,
+ 384
+ ),
+ mock.call(
+ '/etc/ssh/ssh_host_{}_key-cert.pub'.format(key_type),
+ cert_value,
+ 384
+ ),
+ mock.call(
+ '/etc/ssh/sshd_config',
+ ('HostCertificate /etc/ssh/ssh_host_{}_key-cert.pub'
+ '\n'.format(key_type)),
+ preserve_mode=True
+ )
+ ])
+
+ # Run the handler.
+ m_nug.return_value = ([], {})
+ with mock.patch(MODPATH + 'ssh_util.parse_ssh_config',
+ return_value=[]):
+ cc_ssh.handle("name", cfg, self.tmp_cloud(distro='ubuntu'),
+ LOG, None)
+
+ # Check that all expected output has been done.
+ for call_ in expected_calls:
+ self.assertIn(call_, m_write_file.call_args_list)
diff --git a/tests/unittests/config/test_cc_timezone.py b/tests/unittests/config/test_cc_timezone.py
new file mode 100644
index 00000000..fb6aab5f
--- /dev/null
+++ b/tests/unittests/config/test_cc_timezone.py
@@ -0,0 +1,54 @@
+# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Author: Juerg Haefliger <juerg.haefliger@hp.com>
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config import cc_timezone
+
+from cloudinit import util
+
+
+import logging
+import shutil
+import tempfile
+from configobj import ConfigObj
+from io import BytesIO
+
+from tests.unittests import helpers as t_help
+
+from tests.unittests.util import get_cloud
+
+LOG = logging.getLogger(__name__)
+
+
+class TestTimezone(t_help.FilesystemMockingTestCase):
+ def setUp(self):
+ super(TestTimezone, self).setUp()
+ self.new_root = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.new_root)
+ self.patchUtils(self.new_root)
+ self.patchOS(self.new_root)
+
+ def test_set_timezone_sles(self):
+
+ cfg = {
+ 'timezone': 'Tatooine/Bestine',
+ }
+ cc = get_cloud('sles')
+
+ # Create a dummy timezone file
+ dummy_contents = '0123456789abcdefgh'
+ util.write_file('/usr/share/zoneinfo/%s' % cfg['timezone'],
+ dummy_contents)
+
+ cc_timezone.handle('cc_timezone', cfg, cc, LOG, [])
+
+ contents = util.load_file('/etc/sysconfig/clock', decode=False)
+ n_cfg = ConfigObj(BytesIO(contents))
+ self.assertEqual({'TIMEZONE': cfg['timezone']}, dict(n_cfg))
+
+ contents = util.load_file('/etc/localtime')
+ self.assertEqual(dummy_contents, contents.strip())
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ubuntu_advantage.py b/tests/unittests/config/test_cc_ubuntu_advantage.py
new file mode 100644
index 00000000..8d0c9665
--- /dev/null
+++ b/tests/unittests/config/test_cc_ubuntu_advantage.py
@@ -0,0 +1,333 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config.cc_ubuntu_advantage import (
+ configure_ua, handle, maybe_install_ua_tools, schema)
+from cloudinit.config.schema import validate_cloudconfig_schema
+from cloudinit import subp
+from tests.unittests.helpers import (
+ CiTestCase, mock, SchemaTestCaseMixin, skipUnlessJsonSchema)
+
+
+# Module path used in mocks
+MPATH = 'cloudinit.config.cc_ubuntu_advantage'
+
+
class FakeCloud(object):
    """Minimal cloud stand-in: only carries the distro attribute."""

    def __init__(self, distro):
        # The tests only ever read .distro; nothing else is emulated.
        self.distro = distro
+
+
class TestConfigureUA(CiTestCase):
    """Tests for configure_ua(): attach behavior and service enablement."""

    with_logs = True
    allowed_subp = [CiTestCase.SUBP_SHELL_TRUE]

    def setUp(self):
        super(TestConfigureUA, self).setUp()
        self.tmp = self.tmp_dir()

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_error(self, m_subp):
        """Errors from ua attach command are raised."""
        m_subp.side_effect = subp.ProcessExecutionError(
            'Invalid token SomeToken')
        with self.assertRaises(RuntimeError) as context_manager:
            configure_ua(token='SomeToken')
        self.assertEqual(
            'Failure attaching Ubuntu Advantage:\nUnexpected error while'
            ' running command.\nCommand: -\nExit code: -\nReason: -\n'
            'Stdout: Invalid token SomeToken\nStderr: -',
            str(context_manager.exception))

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_with_token(self, m_subp):
        """When token is provided, attach the machine to ua using the token."""
        configure_ua(token='SomeToken')
        m_subp.assert_called_once_with(['ua', 'attach', 'SomeToken'])
        self.assertEqual(
            'DEBUG: Attaching to Ubuntu Advantage. ua attach SomeToken\n',
            self.logs.getvalue())

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_on_service_error(self, m_subp):
        """all services should be enabled and then any failures raised"""

        def fake_subp(cmd, capture=None):
            # Fail only the 'ua enable' calls for esm and cc; the initial
            # 'ua attach' call (capture unset) succeeds.
            fail_cmds = [['ua', 'enable', svc] for svc in ['esm', 'cc']]
            if cmd in fail_cmds and capture:
                svc = cmd[-1]
                raise subp.ProcessExecutionError(
                    'Invalid {} credentials'.format(svc.upper()))

        m_subp.side_effect = fake_subp

        with self.assertRaises(RuntimeError) as context_manager:
            configure_ua(token='SomeToken', enable=['esm', 'cc', 'fips'])
        # Every requested service is attempted even though earlier ones fail.
        self.assertEqual(
            m_subp.call_args_list,
            [mock.call(['ua', 'attach', 'SomeToken']),
             mock.call(['ua', 'enable', 'esm'], capture=True),
             mock.call(['ua', 'enable', 'cc'], capture=True),
             mock.call(['ua', 'enable', 'fips'], capture=True)])
        self.assertIn(
            'WARNING: Failure enabling "esm":\nUnexpected error'
            ' while running command.\nCommand: -\nExit code: -\nReason: -\n'
            'Stdout: Invalid ESM credentials\nStderr: -\n',
            self.logs.getvalue())
        self.assertIn(
            'WARNING: Failure enabling "cc":\nUnexpected error'
            ' while running command.\nCommand: -\nExit code: -\nReason: -\n'
            'Stdout: Invalid CC credentials\nStderr: -\n',
            self.logs.getvalue())
        self.assertEqual(
            'Failure enabling Ubuntu Advantage service(s): "esm", "cc"',
            str(context_manager.exception))

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_with_empty_services(self, m_subp):
        """When services is an empty list, do not auto-enable attach."""
        configure_ua(token='SomeToken', enable=[])
        m_subp.assert_called_once_with(['ua', 'attach', 'SomeToken'])
        self.assertEqual(
            'DEBUG: Attaching to Ubuntu Advantage. ua attach SomeToken\n',
            self.logs.getvalue())

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_with_specific_services(self, m_subp):
        """When enable is a list, only enable the named services."""
        configure_ua(token='SomeToken', enable=['fips'])
        self.assertEqual(
            m_subp.call_args_list,
            [mock.call(['ua', 'attach', 'SomeToken']),
             mock.call(['ua', 'enable', 'fips'], capture=True)])
        self.assertEqual(
            'DEBUG: Attaching to Ubuntu Advantage. ua attach SomeToken\n',
            self.logs.getvalue())

    @mock.patch('%s.maybe_install_ua_tools' % MPATH, mock.MagicMock())
    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_with_string_services(self, m_subp):
        """When enable is a string, treat it as a singleton list and warn."""
        configure_ua(token='SomeToken', enable='fips')
        self.assertEqual(
            m_subp.call_args_list,
            [mock.call(['ua', 'attach', 'SomeToken']),
             mock.call(['ua', 'enable', 'fips'], capture=True)])
        self.assertEqual(
            'WARNING: ubuntu_advantage: enable should be a list, not a'
            ' string; treating as a single enable\n'
            'DEBUG: Attaching to Ubuntu Advantage. ua attach SomeToken\n',
            self.logs.getvalue())

    @mock.patch('%s.subp.subp' % MPATH)
    def test_configure_ua_attach_with_weird_services(self, m_subp):
        """When enable is neither string nor list, warn but still attach."""
        configure_ua(token='SomeToken', enable={'deffo': 'wont work'})
        # Attach happens; no 'ua enable' call is attempted for a dict.
        self.assertEqual(
            m_subp.call_args_list,
            [mock.call(['ua', 'attach', 'SomeToken'])])
        self.assertEqual(
            'WARNING: ubuntu_advantage: enable should be a list, not a'
            ' dict; skipping enabling services\n'
            'DEBUG: Attaching to Ubuntu Advantage. ua attach SomeToken\n',
            self.logs.getvalue())
+
+
@skipUnlessJsonSchema()
class TestSchema(CiTestCase, SchemaTestCaseMixin):
    """Validate warnings emitted by the ubuntu_advantage config schema."""

    with_logs = True
    schema = schema

    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    @mock.patch('%s.configure_ua' % MPATH)
    def test_schema_warns_on_ubuntu_advantage_not_dict(self, _cfg, _):
        """If ubuntu_advantage configuration is not a dict, emit a warning."""
        validate_cloudconfig_schema({'ubuntu_advantage': 'wrong type'}, schema)
        self.assertEqual(
            "WARNING: Invalid config:\nubuntu_advantage: 'wrong type' is not"
            " of type 'object'\n",
            self.logs.getvalue())

    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    @mock.patch('%s.configure_ua' % MPATH)
    def test_schema_disallows_unknown_keys(self, _cfg, _):
        """Unknown keys in ubuntu_advantage configuration emit warnings."""
        validate_cloudconfig_schema(
            {'ubuntu_advantage': {'token': 'winner', 'invalid-key': ''}},
            schema)
        self.assertIn(
            'WARNING: Invalid config:\nubuntu_advantage: Additional properties'
            " are not allowed ('invalid-key' was unexpected)",
            self.logs.getvalue())

    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    @mock.patch('%s.configure_ua' % MPATH)
    def test_warn_schema_requires_token(self, _cfg, _):
        """Warn if ubuntu_advantage configuration lacks token."""
        validate_cloudconfig_schema(
            {'ubuntu_advantage': {'enable': ['esm']}}, schema)
        self.assertEqual(
            "WARNING: Invalid config:\nubuntu_advantage:"
            " 'token' is a required property\n", self.logs.getvalue())

    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    @mock.patch('%s.configure_ua' % MPATH)
    def test_warn_schema_services_is_not_list_or_dict(self, _cfg, _):
        """Warn when ubuntu_advantage:enable config is not a list."""
        validate_cloudconfig_schema(
            {'ubuntu_advantage': {'enable': 'needslist'}}, schema)
        # Both the missing token and the wrong enable type are reported.
        self.assertEqual(
            "WARNING: Invalid config:\nubuntu_advantage: 'token' is a"
            " required property\nubuntu_advantage.enable: 'needslist'"
            " is not of type 'array'\n",
            self.logs.getvalue())
+
+
class TestHandle(CiTestCase):
    """Tests for the cc_ubuntu_advantage handle() entry point."""

    with_logs = True

    def setUp(self):
        super(TestHandle, self).setUp()
        self.tmp = self.tmp_dir()

    @mock.patch('%s.validate_cloudconfig_schema' % MPATH)
    def test_handle_no_config(self, m_schema):
        """When no ua-related configuration is provided, nothing happens."""
        cfg = {}
        handle('ua-test', cfg=cfg, cloud=None, log=self.logger, args=None)
        self.assertIn(
            "DEBUG: Skipping module named ua-test, no 'ubuntu_advantage'"
            ' configuration found',
            self.logs.getvalue())
        # Schema validation is skipped entirely when the key is absent.
        m_schema.assert_not_called()

    @mock.patch('%s.configure_ua' % MPATH)
    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    def test_handle_tries_to_install_ubuntu_advantage_tools(
            self, m_install, m_cfg):
        """If ubuntu_advantage is provided, try installing ua-tools package."""
        cfg = {'ubuntu_advantage': {'token': 'valid'}}
        mycloud = FakeCloud(None)
        handle('nomatter', cfg=cfg, cloud=mycloud, log=self.logger, args=None)
        m_install.assert_called_once_with(mycloud)

    @mock.patch('%s.configure_ua' % MPATH)
    @mock.patch('%s.maybe_install_ua_tools' % MPATH)
    def test_handle_passes_credentials_and_services_to_configure_ua(
            self, m_install, m_configure_ua):
        """All ubuntu_advantage config keys are passed to configure_ua."""
        cfg = {'ubuntu_advantage': {'token': 'token', 'enable': ['esm']}}
        handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None)
        m_configure_ua.assert_called_once_with(
            token='token', enable=['esm'])

    @mock.patch('%s.maybe_install_ua_tools' % MPATH, mock.MagicMock())
    @mock.patch('%s.configure_ua' % MPATH)
    def test_handle_warns_on_deprecated_ubuntu_advantage_key_w_config(
            self, m_configure_ua):
        """Warning when ubuntu-advantage key is present with new config"""
        cfg = {'ubuntu-advantage': {'token': 'token', 'enable': ['esm']}}
        handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None)
        # The deprecation warning is the first logged line.
        self.assertEqual(
            'WARNING: Deprecated configuration key "ubuntu-advantage"'
            ' provided. Expected underscore delimited "ubuntu_advantage";'
            ' will attempt to continue.',
            self.logs.getvalue().splitlines()[0])
        m_configure_ua.assert_called_once_with(
            token='token', enable=['esm'])

    def test_handle_error_on_deprecated_commands_key_dashed(self):
        """Error when commands is present in ubuntu-advantage key."""
        cfg = {'ubuntu-advantage': {'commands': 'nogo'}}
        with self.assertRaises(RuntimeError) as context_manager:
            handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None)
        self.assertEqual(
            'Deprecated configuration "ubuntu-advantage: commands" provided.'
            ' Expected "token"',
            str(context_manager.exception))

    def test_handle_error_on_deprecated_commands_key_underscored(self):
        """Error when commands is present in ubuntu_advantage key."""
        cfg = {'ubuntu_advantage': {'commands': 'nogo'}}
        with self.assertRaises(RuntimeError) as context_manager:
            handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None)
        self.assertEqual(
            'Deprecated configuration "ubuntu-advantage: commands" provided.'
            ' Expected "token"',
            str(context_manager.exception))

    @mock.patch('%s.maybe_install_ua_tools' % MPATH, mock.MagicMock())
    @mock.patch('%s.configure_ua' % MPATH)
    def test_handle_prefers_new_style_config(
            self, m_configure_ua):
        """ubuntu_advantage should be preferred over ubuntu-advantage"""
        cfg = {
            'ubuntu-advantage': {'token': 'nope', 'enable': ['wrong']},
            'ubuntu_advantage': {'token': 'token', 'enable': ['esm']},
        }
        handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None)
        self.assertEqual(
            'WARNING: Deprecated configuration key "ubuntu-advantage"'
            ' provided. Expected underscore delimited "ubuntu_advantage";'
            ' will attempt to continue.',
            self.logs.getvalue().splitlines()[0])
        # Only the underscore-keyed (new style) values are used.
        m_configure_ua.assert_called_once_with(
            token='token', enable=['esm'])
+
+
class TestMaybeInstallUATools(CiTestCase):
    """Tests for maybe_install_ua_tools() package-install behavior."""

    with_logs = True

    def setUp(self):
        super(TestMaybeInstallUATools, self).setUp()
        self.tmp = self.tmp_dir()

    @mock.patch('%s.subp.which' % MPATH)
    def test_maybe_install_ua_tools_noop_when_ua_tools_present(self, m_which):
        """Do nothing if ubuntu-advantage-tools already exists."""
        m_which.return_value = '/usr/bin/ua'  # already installed
        distro = mock.MagicMock()
        # If the function did try to update, this side effect would raise.
        distro.update_package_sources.side_effect = RuntimeError(
            'Some apt error')
        maybe_install_ua_tools(cloud=FakeCloud(distro))  # No RuntimeError

    @mock.patch('%s.subp.which' % MPATH)
    def test_maybe_install_ua_tools_raises_update_errors(self, m_which):
        """maybe_install_ua_tools logs and raises apt update errors."""
        m_which.return_value = None
        distro = mock.MagicMock()
        distro.update_package_sources.side_effect = RuntimeError(
            'Some apt error')
        with self.assertRaises(RuntimeError) as context_manager:
            maybe_install_ua_tools(cloud=FakeCloud(distro))
        self.assertEqual('Some apt error', str(context_manager.exception))
        self.assertIn('Package update failed\nTraceback', self.logs.getvalue())

    @mock.patch('%s.subp.which' % MPATH)
    def test_maybe_install_ua_raises_install_errors(self, m_which):
        """maybe_install_ua_tools logs and raises package install errors."""
        m_which.return_value = None
        distro = mock.MagicMock()
        distro.update_package_sources.return_value = None
        distro.install_packages.side_effect = RuntimeError(
            'Some install error')
        with self.assertRaises(RuntimeError) as context_manager:
            maybe_install_ua_tools(cloud=FakeCloud(distro))
        self.assertEqual('Some install error', str(context_manager.exception))
        self.assertIn(
            'Failed to install ubuntu-advantage-tools\n', self.logs.getvalue())

    @mock.patch('%s.subp.which' % MPATH)
    def test_maybe_install_ua_tools_happy_path(self, m_which):
        """maybe_install_ua_tools installs ubuntu-advantage-tools."""
        m_which.return_value = None
        distro = mock.MagicMock()  # No errors raised
        maybe_install_ua_tools(cloud=FakeCloud(distro))
        distro.update_package_sources.assert_called_once_with()
        distro.install_packages.assert_called_once_with(
            ['ubuntu-advantage-tools'])
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ubuntu_drivers.py b/tests/unittests/config/test_cc_ubuntu_drivers.py
new file mode 100644
index 00000000..d341fbfd
--- /dev/null
+++ b/tests/unittests/config/test_cc_ubuntu_drivers.py
@@ -0,0 +1,244 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import copy
+import os
+
+from tests.unittests.helpers import CiTestCase, skipUnlessJsonSchema, mock
+from cloudinit.config.schema import (
+ SchemaValidationError, validate_cloudconfig_schema)
+from cloudinit.config import cc_ubuntu_drivers as drivers
+from cloudinit.subp import ProcessExecutionError
+
# Module path prefix used to build mock.patch targets for cc_ubuntu_drivers.
MPATH = "cloudinit.config.cc_ubuntu_drivers."
M_TMP_PATH = MPATH + "temp_utils.mkdtemp"
# stderr produced by ubuntu-drivers releases that predate the 'install'
# subcommand; used to verify graceful handling of old versions.
OLD_UBUNTU_DRIVERS_ERROR_STDERR = (
    "ubuntu-drivers: error: argument <command>: invalid choice: 'install' "
    "(choose from 'list', 'autoinstall', 'devices', 'debug')\n")
+
+
+# The tests in this module call helper methods which are decorated with
+# mock.patch. pylint doesn't understand that mock.patch passes parameters to
+# the decorated function, so it incorrectly reports that we aren't passing
+# values for all parameters. Instead of annotating every single call, we
+# disable it for the entire module:
+# pylint: disable=no-value-for-parameter
+
class AnyTempScriptAndDebconfFile(object):
    """Equality matcher for a two-element subp command.

    Compares equal to any ``[script, debconf_file]`` pair whose script
    lives under ``tmp_dir``, ends in ``.sh``, and whose second element is
    exactly ``debconf_file``.
    """

    def __init__(self, tmp_dir, debconf_file):
        self.tmp_dir = tmp_dir
        self.debconf_file = debconf_file

    def __eq__(self, cmd):
        # Guard clause: anything that is not a two-element command fails.
        if len(cmd) != 2:
            return False
        script, debconf_file = cmd
        return (
            script.startswith(self.tmp_dir)
            and script.endswith('.sh')
            and debconf_file == self.debconf_file
        )
+
+
class TestUbuntuDrivers(CiTestCase):
    """Tests for cc_ubuntu_drivers handle()/install_drivers()."""

    # Minimal accepted-license config and the expected install command;
    # TestUbuntuDriversWithVersion overrides both to add a version.
    cfg_accepted = {'drivers': {'nvidia': {'license-accepted': True}}}
    install_gpgpu = ['ubuntu-drivers', 'install', '--gpgpu', 'nvidia']

    with_logs = True

    @skipUnlessJsonSchema()
    def test_schema_requires_boolean_for_license_accepted(self):
        with self.assertRaisesRegex(
                SchemaValidationError, ".*license-accepted.*TRUE.*boolean"):
            validate_cloudconfig_schema(
                {'drivers': {'nvidia': {'license-accepted': "TRUE"}}},
                schema=drivers.schema, strict=True)

    @mock.patch(M_TMP_PATH)
    @mock.patch(MPATH + "subp.subp", return_value=('', ''))
    @mock.patch(MPATH + "subp.which", return_value=False)
    def _assert_happy_path_taken(
            self, config, m_which, m_subp, m_tmp):
        """Positive path test through handle. Package should be installed."""
        tdir = self.tmp_dir()
        debconf_file = os.path.join(tdir, 'nvidia.template')
        m_tmp.return_value = tdir
        myCloud = mock.MagicMock()
        drivers.handle('ubuntu_drivers', config, myCloud, None, None)
        self.assertEqual([mock.call(['ubuntu-drivers-common'])],
                         myCloud.distro.install_packages.call_args_list)
        # First a debconf seeding script runs, then the install command.
        self.assertEqual(
            [mock.call(AnyTempScriptAndDebconfFile(tdir, debconf_file)),
             mock.call(self.install_gpgpu)],
            m_subp.call_args_list)

    def test_handle_does_package_install(self):
        self._assert_happy_path_taken(self.cfg_accepted)

    def test_trueish_strings_are_considered_approval(self):
        for true_value in ['yes', 'true', 'on', '1']:
            new_config = copy.deepcopy(self.cfg_accepted)
            new_config['drivers']['nvidia']['license-accepted'] = true_value
            self._assert_happy_path_taken(new_config)

    @mock.patch(M_TMP_PATH)
    @mock.patch(MPATH + "subp.subp")
    @mock.patch(MPATH + "subp.which", return_value=False)
    def test_handle_raises_error_if_no_drivers_found(
            self, m_which, m_subp, m_tmp):
        """If ubuntu-drivers doesn't install any drivers, raise an error."""
        tdir = self.tmp_dir()
        debconf_file = os.path.join(tdir, 'nvidia.template')
        m_tmp.return_value = tdir
        myCloud = mock.MagicMock()

        def fake_subp(cmd):
            # Let the temp debconf script run; fail the install command.
            if cmd[0].startswith(tdir):
                return
            raise ProcessExecutionError(
                stdout='No drivers found for installation.\n', exit_code=1)
        m_subp.side_effect = fake_subp

        with self.assertRaises(Exception):
            drivers.handle(
                'ubuntu_drivers', self.cfg_accepted, myCloud, None, None)
        self.assertEqual([mock.call(['ubuntu-drivers-common'])],
                         myCloud.distro.install_packages.call_args_list)
        self.assertEqual(
            [mock.call(AnyTempScriptAndDebconfFile(tdir, debconf_file)),
             mock.call(self.install_gpgpu)],
            m_subp.call_args_list)
        self.assertIn('ubuntu-drivers found no drivers for installation',
                      self.logs.getvalue())

    @mock.patch(MPATH + "subp.subp", return_value=('', ''))
    @mock.patch(MPATH + "subp.which", return_value=False)
    def _assert_inert_with_config(self, config, m_which, m_subp):
        """Helper to reduce repetition when testing negative cases"""
        myCloud = mock.MagicMock()
        drivers.handle('ubuntu_drivers', config, myCloud, None, None)
        self.assertEqual(0, myCloud.distro.install_packages.call_count)
        self.assertEqual(0, m_subp.call_count)

    def test_handle_inert_if_license_not_accepted(self):
        """Ensure we don't do anything if the license is rejected."""
        self._assert_inert_with_config(
            {'drivers': {'nvidia': {'license-accepted': False}}})

    def test_handle_inert_if_garbage_in_license_field(self):
        """Ensure we don't do anything if unknown text is in license field."""
        self._assert_inert_with_config(
            {'drivers': {'nvidia': {'license-accepted': 'garbage'}}})

    def test_handle_inert_if_no_license_key(self):
        """Ensure we don't do anything if no license key."""
        self._assert_inert_with_config({'drivers': {'nvidia': {}}})

    def test_handle_inert_if_no_nvidia_key(self):
        """Ensure we don't do anything if other license accepted."""
        self._assert_inert_with_config(
            {'drivers': {'acme': {'license-accepted': True}}})

    def test_handle_inert_if_string_given(self):
        """Ensure we don't do anything if string refusal given."""
        for false_value in ['no', 'false', 'off', '0']:
            self._assert_inert_with_config(
                {'drivers': {'nvidia': {'license-accepted': false_value}}})

    @mock.patch(MPATH + "install_drivers")
    def test_handle_no_drivers_does_nothing(self, m_install_drivers):
        """If no 'drivers' key in the config, nothing should be done."""
        myCloud = mock.MagicMock()
        myLog = mock.MagicMock()
        drivers.handle('ubuntu_drivers', {'foo': 'bzr'}, myCloud, myLog, None)
        self.assertIn('Skipping module named',
                      myLog.debug.call_args_list[0][0][0])
        self.assertEqual(0, m_install_drivers.call_count)

    @mock.patch(M_TMP_PATH)
    @mock.patch(MPATH + "subp.subp", return_value=('', ''))
    @mock.patch(MPATH + "subp.which", return_value=True)
    def test_install_drivers_no_install_if_present(
            self, m_which, m_subp, m_tmp):
        """If 'ubuntu-drivers' is present, no package install should occur."""
        tdir = self.tmp_dir()
        debconf_file = os.path.join(tdir, 'nvidia.template')
        m_tmp.return_value = tdir
        pkg_install = mock.MagicMock()
        drivers.install_drivers(self.cfg_accepted['drivers'],
                                pkg_install_func=pkg_install)
        self.assertEqual(0, pkg_install.call_count)
        self.assertEqual([mock.call('ubuntu-drivers')],
                         m_which.call_args_list)
        self.assertEqual(
            [mock.call(AnyTempScriptAndDebconfFile(tdir, debconf_file)),
             mock.call(self.install_gpgpu)],
            m_subp.call_args_list)

    def test_install_drivers_rejects_invalid_config(self):
        """install_drivers should raise TypeError if not given a config dict"""
        pkg_install = mock.MagicMock()
        with self.assertRaisesRegex(TypeError, ".*expected dict.*"):
            drivers.install_drivers("mystring", pkg_install_func=pkg_install)
        self.assertEqual(0, pkg_install.call_count)

    @mock.patch(M_TMP_PATH)
    @mock.patch(MPATH + "subp.subp")
    @mock.patch(MPATH + "subp.which", return_value=False)
    def test_install_drivers_handles_old_ubuntu_drivers_gracefully(
            self, m_which, m_subp, m_tmp):
        """Older ubuntu-drivers versions should emit message and raise error"""
        tdir = self.tmp_dir()
        debconf_file = os.path.join(tdir, 'nvidia.template')
        m_tmp.return_value = tdir
        myCloud = mock.MagicMock()

        def fake_subp(cmd):
            # Simulate an old ubuntu-drivers lacking the 'install' command.
            if cmd[0].startswith(tdir):
                return
            raise ProcessExecutionError(
                stderr=OLD_UBUNTU_DRIVERS_ERROR_STDERR, exit_code=2)
        m_subp.side_effect = fake_subp

        with self.assertRaises(Exception):
            drivers.handle(
                'ubuntu_drivers', self.cfg_accepted, myCloud, None, None)
        self.assertEqual([mock.call(['ubuntu-drivers-common'])],
                         myCloud.distro.install_packages.call_args_list)
        self.assertEqual(
            [mock.call(AnyTempScriptAndDebconfFile(tdir, debconf_file)),
             mock.call(self.install_gpgpu)],
            m_subp.call_args_list)
        self.assertIn('WARNING: the available version of ubuntu-drivers is'
                      ' too old to perform requested driver installation',
                      self.logs.getvalue())
+
+
+# Sub-class TestUbuntuDrivers to run the same test cases, but with a version
class TestUbuntuDriversWithVersion(TestUbuntuDrivers):
    """Re-run the TestUbuntuDrivers cases with an explicit driver version."""

    cfg_accepted = {
        'drivers': {'nvidia': {'license-accepted': True, 'version': '123'}}}
    # A pinned version is appended to the package spec as 'nvidia:123'.
    install_gpgpu = ['ubuntu-drivers', 'install', '--gpgpu', 'nvidia:123']

    @mock.patch(M_TMP_PATH)
    @mock.patch(MPATH + "subp.subp", return_value=('', ''))
    @mock.patch(MPATH + "subp.which", return_value=False)
    def test_version_none_uses_latest(self, m_which, m_subp, m_tmp):
        """version: None falls back to the unpinned 'nvidia' target."""
        tdir = self.tmp_dir()
        debconf_file = os.path.join(tdir, 'nvidia.template')
        m_tmp.return_value = tdir
        myCloud = mock.MagicMock()
        version_none_cfg = {
            'drivers': {'nvidia': {'license-accepted': True, 'version': None}}}
        drivers.handle(
            'ubuntu_drivers', version_none_cfg, myCloud, None, None)
        self.assertEqual(
            [mock.call(AnyTempScriptAndDebconfFile(tdir, debconf_file)),
             mock.call(['ubuntu-drivers', 'install', '--gpgpu', 'nvidia'])],
            m_subp.call_args_list)

    def test_specifying_a_version_doesnt_override_license_acceptance(self):
        self._assert_inert_with_config({
            'drivers': {'nvidia': {'license-accepted': False,
                                   'version': '123'}}
        })
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_update_etc_hosts.py b/tests/unittests/config/test_cc_update_etc_hosts.py
new file mode 100644
index 00000000..77a7f78f
--- /dev/null
+++ b/tests/unittests/config/test_cc_update_etc_hosts.py
@@ -0,0 +1,70 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config import cc_update_etc_hosts
+
+from cloudinit import cloud
+from cloudinit import distros
+from cloudinit import helpers
+from cloudinit import util
+
+from tests.unittests import helpers as t_help
+
+import logging
+import os
+import shutil
+
+LOG = logging.getLogger(__name__)
+
+
class TestHostsFile(t_help.FilesystemMockingTestCase):
    """Tests for cc_update_etc_hosts management of /etc/hosts."""

    def setUp(self):
        super(TestHostsFile, self).setUp()
        self.tmp = self.tmp_dir()

    def _fetch_distro(self, kind):
        """Instantiate the distro class registered for *kind*."""
        cls = distros.fetch(kind)
        paths = helpers.Paths({})
        return cls(kind, {}, paths)

    def test_write_etc_hosts_suse_localhost(self):
        """manage_etc_hosts: localhost adds 127.0.1.1, keeps prior entries."""
        cfg = {
            'manage_etc_hosts': 'localhost',
            'hostname': 'cloud-init.test.us'
        }
        os.makedirs('%s/etc/' % self.tmp)
        hosts_content = '192.168.1.1 blah.blah.us blah\n'
        # Context manager closes the handle even if the write fails
        # (the original left a dangling open()).
        with open('%s/etc/hosts' % self.tmp, 'w') as fout:
            fout.write(hosts_content)
        distro = self._fetch_distro('sles')
        distro.hosts_fn = '%s/etc/hosts' % self.tmp
        paths = helpers.Paths({})
        ds = None
        cc = cloud.Cloud(ds, paths, {}, distro, None)
        self.patchUtils(self.tmp)
        cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
        contents = util.load_file('%s/etc/hosts' % self.tmp)
        # assertIn shows the actual file contents on failure, unlike the
        # old "if ... not in: assertIsNone('msg')" pattern.
        self.assertIn('127.0.1.1\tcloud-init.test.us\tcloud-init', contents)
        self.assertIn('192.168.1.1\tblah.blah.us\tblah', contents)

    @t_help.skipUnlessJinja()
    def test_write_etc_hosts_suse_template(self):
        """manage_etc_hosts: template renders IPv4 and IPv6 host entries."""
        cfg = {
            'manage_etc_hosts': 'template',
            'hostname': 'cloud-init.test.us'
        }
        shutil.copytree('templates', '%s/etc/cloud/templates' % self.tmp)
        distro = self._fetch_distro('sles')
        paths = helpers.Paths({})
        paths.template_tpl = '%s' % self.tmp + '/etc/cloud/templates/%s.tmpl'
        ds = None
        cc = cloud.Cloud(ds, paths, {}, distro, None)
        self.patchUtils(self.tmp)
        cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
        contents = util.load_file('%s/etc/hosts' % self.tmp)
        self.assertIn('127.0.1.1 cloud-init.test.us cloud-init', contents)
        # The original failure message wrongly mentioned 127.0.0.1 here;
        # the entry being checked is the IPv6 loopback ::1.
        self.assertIn('::1 cloud-init.test.us cloud-init', contents)
diff --git a/tests/unittests/config/test_cc_users_groups.py b/tests/unittests/config/test_cc_users_groups.py
new file mode 100644
index 00000000..4ef844cb
--- /dev/null
+++ b/tests/unittests/config/test_cc_users_groups.py
@@ -0,0 +1,172 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+
+from cloudinit.config import cc_users_groups
+from tests.unittests.helpers import CiTestCase, mock
+
+MODPATH = "cloudinit.config.cc_users_groups"
+
+
@mock.patch('cloudinit.distros.ubuntu.Distro.create_group')
@mock.patch('cloudinit.distros.ubuntu.Distro.create_user')
class TestHandleUsersGroups(CiTestCase):
    """Test cc_users_groups handling of config."""

    with_logs = True

    def test_handle_no_cfg_creates_no_users_or_groups(self, m_user, m_group):
        """Test handle with no config will not create users or groups."""
        cfg = {}  # merged cloud-config
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        m_user.assert_not_called()
        m_group.assert_not_called()

    def test_handle_users_in_cfg_calls_create_users(self, m_user, m_group):
        """When users in config, create users with distro.create_user."""
        cfg = {'users': ['default', {'name': 'me2'}]}  # merged cloud-config
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        # Both the sys_cfg default user and the user-data 'me2' are created.
        self.assertCountEqual(
            m_user.call_args_list,
            [mock.call('ubuntu', groups='lxd,sudo', lock_passwd=True,
                       shell='/bin/bash'),
             mock.call('me2', default=False)])
        m_group.assert_not_called()

    @mock.patch('cloudinit.distros.freebsd.Distro.create_group')
    @mock.patch('cloudinit.distros.freebsd.Distro.create_user')
    def test_handle_users_in_cfg_calls_create_users_on_bsd(
        self,
        m_fbsd_user,
        m_fbsd_group,
        m_linux_user,
        m_linux_group,
    ):
        """When users in config, create users with freebsd.create_user."""
        cfg = {'users': ['default', {'name': 'me2'}]}  # merged cloud-config
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'freebsd', 'lock_passwd': True,
                                    'groups': ['wheel'],
                                    'shell': '/bin/tcsh'}}
        metadata = {}
        cloud = self.tmp_cloud(
            distro='freebsd', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        # Only the freebsd distro's create_user is used; the linux (ubuntu)
        # mocks from the class decorators must stay untouched.
        self.assertCountEqual(
            m_fbsd_user.call_args_list,
            [mock.call('freebsd', groups='wheel', lock_passwd=True,
                       shell='/bin/tcsh'),
             mock.call('me2', default=False)])
        m_fbsd_group.assert_not_called()
        m_linux_group.assert_not_called()
        m_linux_user.assert_not_called()

    def test_users_with_ssh_redirect_user_passes_keys(self, m_user, m_group):
        """When ssh_redirect_user is True pass default user and cloud keys."""
        cfg = {
            'users': ['default', {'name': 'me2', 'ssh_redirect_user': True}]}
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {'public-keys': ['key1']}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        self.assertCountEqual(
            m_user.call_args_list,
            [mock.call('ubuntu', groups='lxd,sudo', lock_passwd=True,
                       shell='/bin/bash'),
             mock.call('me2', cloud_public_ssh_keys=['key1'], default=False,
                       ssh_redirect_user='ubuntu')])
        m_group.assert_not_called()

    def test_users_with_ssh_redirect_user_default_str(self, m_user, m_group):
        """When ssh_redirect_user is 'default' pass default username."""
        cfg = {
            'users': ['default', {'name': 'me2',
                                  'ssh_redirect_user': 'default'}]}
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {'public-keys': ['key1']}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        self.assertCountEqual(
            m_user.call_args_list,
            [mock.call('ubuntu', groups='lxd,sudo', lock_passwd=True,
                       shell='/bin/bash'),
             mock.call('me2', cloud_public_ssh_keys=['key1'], default=False,
                       ssh_redirect_user='ubuntu')])
        m_group.assert_not_called()

    def test_users_with_ssh_redirect_user_non_default(self, m_user, m_group):
        """Warn when ssh_redirect_user is not 'default'."""
        cfg = {
            'users': ['default', {'name': 'me2',
                                  'ssh_redirect_user': 'snowflake'}]}
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {'public-keys': ['key1']}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        with self.assertRaises(ValueError) as context_manager:
            cc_users_groups.handle('modulename', cfg, cloud, None, None)
        m_group.assert_not_called()
        self.assertEqual(
            'Not creating user me2. Invalid value of ssh_redirect_user:'
            ' snowflake. Expected values: true, default or false.',
            str(context_manager.exception))

    def test_users_with_ssh_redirect_user_default_false(self, m_user, m_group):
        """When unspecified ssh_redirect_user is false and not set up."""
        cfg = {'users': ['default', {'name': 'me2'}]}
        # System config defines a default user for the distro.
        sys_cfg = {'default_user': {'name': 'ubuntu', 'lock_passwd': True,
                                    'groups': ['lxd', 'sudo'],
                                    'shell': '/bin/bash'}}
        metadata = {'public-keys': ['key1']}
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        self.assertCountEqual(
            m_user.call_args_list,
            [mock.call('ubuntu', groups='lxd,sudo', lock_passwd=True,
                       shell='/bin/bash'),
             mock.call('me2', default=False)])
        m_group.assert_not_called()

    def test_users_ssh_redirect_user_and_no_default(self, m_user, m_group):
        """Warn when ssh_redirect_user is True and no default user present."""
        cfg = {
            'users': ['default', {'name': 'me2', 'ssh_redirect_user': True}]}
        # System config defines *no* default user for the distro.
        sys_cfg = {}
        metadata = {}  # no public-keys defined
        cloud = self.tmp_cloud(
            distro='ubuntu', sys_cfg=sys_cfg, metadata=metadata)
        cc_users_groups.handle('modulename', cfg, cloud, None, None)
        m_user.assert_called_once_with('me2', default=False)
        m_group.assert_not_called()
        self.assertEqual(
            'WARNING: Ignoring ssh_redirect_user: True for me2. No'
            ' default_user defined. Perhaps missing'
            ' cloud configuration users: [default, ..].\n',
            self.logs.getvalue())
diff --git a/tests/unittests/config/test_cc_write_files.py b/tests/unittests/config/test_cc_write_files.py
new file mode 100644
index 00000000..99248f74
--- /dev/null
+++ b/tests/unittests/config/test_cc_write_files.py
@@ -0,0 +1,246 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import base64
+import copy
+import gzip
+import io
+import shutil
+import tempfile
+
+from cloudinit.config.cc_write_files import (
+ handle, decode_perms, write_files)
+from cloudinit import log as logging
+from cloudinit import util
+
+from tests.unittests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
+
+LOG = logging.getLogger(__name__)
+
# Cloud-config YAML exercising the content forms write_files accepts:
# gzip-compressed binary, raw base64 binary and plain inline text.
YAML_TEXT = """
write_files:
 - encoding: gzip
   content: !!binary |
     H4sIAIDb/U8C/1NW1E/KzNMvzuBKTc7IV8hIzcnJVyjPL8pJ4QIA6N+MVxsAAAA=
   path: /usr/bin/hello
   permissions: '0755'
 - content: !!binary |
     Zm9vYmFyCg==
   path: /wark
   permissions: '0755'
 - content: |
     hi mom line 1
     hi mom line 2
   path: /tmp/message
"""

# Decoded contents expected on disk after feeding YAML_TEXT to write_files.
YAML_CONTENT_EXPECTED = {
    '/usr/bin/hello': "#!/bin/sh\necho hello world\n",
    '/wark': "foobar\n",
    '/tmp/message': "hi mom line 1\nhi mom line 2\n",
}

# A single write_files entry exercising every schema-defined property.
VALID_SCHEMA = {
    'write_files': [
        {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
         'path': '/some', 'permissions': '0777'}
    ]
}

INVALID_SCHEMA = {  # Dropped required path key
    'write_files': [
        {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
         'permissions': '0777'}
    ]
}
+
+
@skipUnlessJsonSchema()
@mock.patch('cloudinit.config.cc_write_files.write_files')
class TestWriteFilesSchema(CiTestCase):
    """Schema validation tests for the write_files cloud-config module."""

    with_logs = True

    def test_schema_validation_warns_missing_path(self, m_write_files):
        """The only required file item property is 'path'."""
        cc = self.tmp_cloud('ubuntu')
        valid_config = {'write_files': [{'path': '/some/path'}]}
        handle('cc_write_file', valid_config, cc, LOG, [])
        self.assertNotIn('Invalid config:', self.logs.getvalue())
        handle('cc_write_file', INVALID_SCHEMA, cc, LOG, [])
        self.assertIn('Invalid config:', self.logs.getvalue())
        self.assertIn("'path' is a required property", self.logs.getvalue())

    def test_schema_validation_warns_non_string_type_for_files(
            self, m_write_files):
        """Schema validation warns of non-string values for each file item."""
        cc = self.tmp_cloud('ubuntu')
        for key in VALID_SCHEMA['write_files'][0].keys():
            # 'append' is the only boolean property; all others are strings.
            if key == 'append':
                key_type = 'boolean'
            else:
                key_type = 'string'
            invalid_config = copy.deepcopy(VALID_SCHEMA)
            invalid_config['write_files'][0][key] = 1
            handle('cc_write_file', invalid_config, cc, LOG, [])
            # Validation only warns; the files are still passed through.
            self.assertIn(
                mock.call('cc_write_file', invalid_config['write_files']),
                m_write_files.call_args_list)
            self.assertIn(
                'write_files.0.%s: 1 is not of type \'%s\'' % (key, key_type),
                self.logs.getvalue())
        self.assertIn('Invalid config:', self.logs.getvalue())

    # Fixed typo in test name: 'propertes' -> 'properties'.
    def test_schema_validation_warns_on_additional_undefined_properties(
            self, m_write_files):
        """Schema validation warns on additional undefined file properties."""
        cc = self.tmp_cloud('ubuntu')
        invalid_config = copy.deepcopy(VALID_SCHEMA)
        invalid_config['write_files'][0]['bogus'] = 'value'
        handle('cc_write_file', invalid_config, cc, LOG, [])
        self.assertIn(
            "Invalid config:\nwrite_files.0: Additional properties"
            " are not allowed ('bogus' was unexpected)",
            self.logs.getvalue())
+
+
class TestWriteFiles(FilesystemMockingTestCase):
    """Behavioral tests for cc_write_files.write_files/handle."""

    with_logs = True

    def setUp(self):
        super(TestWriteFiles, self).setUp()
        # Throwaway dir used as the fake filesystem root by patchUtils().
        self.tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmp)

    @skipUnlessJsonSchema()
    def test_handler_schema_validation_warns_non_array_type(self):
        """Schema validation warns of non-array value."""
        invalid_config = {'write_files': 1}
        cc = self.tmp_cloud('ubuntu')
        # handle() still raises because the config is unusable downstream.
        with self.assertRaises(TypeError):
            handle('cc_write_file', invalid_config, cc, LOG, [])
        self.assertIn(
            'Invalid config:\nwrite_files: 1 is not of type \'array\'',
            self.logs.getvalue())

    def test_simple(self):
        """A single text entry is written verbatim to its path."""
        self.patchUtils(self.tmp)
        expected = "hello world\n"
        filename = "/tmp/my.file"
        write_files(
            "test_simple", [{"content": expected, "path": filename}])
        self.assertEqual(util.load_file(filename), expected)

    def test_append(self):
        """With append set, content is added to an existing file."""
        self.patchUtils(self.tmp)
        existing = "hello "
        added = "world\n"
        expected = existing + added
        filename = "/tmp/append.file"
        util.write_file(filename, existing)
        write_files(
            "test_append",
            [{"content": added, "path": filename, "append": "true"}])
        self.assertEqual(util.load_file(filename), expected)

    def test_yaml_binary(self):
        """gzip/base64 encoded YAML content decodes to the expected text."""
        self.patchUtils(self.tmp)
        data = util.load_yaml(YAML_TEXT)
        write_files("testname", data['write_files'])
        for path, content in YAML_CONTENT_EXPECTED.items():
            self.assertEqual(util.load_file(path), content)

    def test_all_decodings(self):
        """Every supported encoding alias decodes back to the raw bytes."""
        self.patchUtils(self.tmp)

        # build a 'files' array that has a dictionary of encodings
        # for 'gz', 'gzip', 'gz+base64' ...
        # NOTE: the former dead assignment `data = b"foobzr"` (immediately
        # shadowed by the loop variable below) has been removed and the
        # loop variable renamed so nothing is shadowed.
        utf8_valid = b"foobzr"
        utf8_invalid = b'ab\xaadef'
        files = []
        expected = []

        gz_aliases = ('gz', 'gzip')
        gz_b64_aliases = ('gz+base64', 'gzip+base64', 'gz+b64', 'gzip+b64')
        b64_aliases = ('base64', 'b64')

        datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))
        for name, raw in datum:
            gz = (_gzip_bytes(raw), gz_aliases)
            gz_b64 = (base64.b64encode(_gzip_bytes(raw)), gz_b64_aliases)
            b64 = (base64.b64encode(raw), b64_aliases)
            for content, aliases in (gz, gz_b64, b64):
                for enc in aliases:
                    cur = {'content': content,
                           'path': '/tmp/file-%s-%s' % (name, enc),
                           'encoding': enc}
                    files.append(cur)
                    expected.append((cur['path'], raw))

        write_files("test_decoding", files)

        for path, content in expected:
            self.assertEqual(util.load_file(path, decode=False), content)

        # make sure we actually wrote *some* files.
        flen_expected = (
            len(gz_aliases + gz_b64_aliases + b64_aliases) * len(datum))
        self.assertEqual(len(expected), flen_expected)

    def test_deferred(self):
        """Entries flagged defer: true are skipped by this handler."""
        self.patchUtils(self.tmp)
        file_path = '/tmp/deferred.file'
        config = {
            'write_files': [
                {'path': file_path, 'defer': True}
            ]
        }
        cc = self.tmp_cloud('ubuntu')
        handle('cc_write_file', config, cc, LOG, [])
        # The deferred file must not exist after the non-deferred handler.
        with self.assertRaises(FileNotFoundError):
            util.load_file(file_path)
+
+
class TestDecodePerms(CiTestCase):
    """Unit tests for cc_write_files.decode_perms."""

    with_logs = True

    def test_none_returns_default(self):
        """If None is passed as perms, then default should be returned."""
        sentinel = object()
        self.assertEqual(sentinel, decode_perms(None, sentinel))

    def test_integer(self):
        """A valid integer should return itself."""
        self.assertEqual(0o755, decode_perms(0o755, None))

    def test_valid_octal_string(self):
        """A string should be read as octal."""
        self.assertEqual(0o644, decode_perms("644", None))

    def test_invalid_octal_string_returns_default_and_warns(self):
        """A string with invalid octal should warn and return default."""
        result = decode_perms("999", None)
        self.assertIsNone(result)
        self.assertIn("WARNING: Undecodable", self.logs.getvalue())
+
+
+def _gzip_bytes(data):
+ buf = io.BytesIO()
+ fp = None
+ try:
+ fp = gzip.GzipFile(fileobj=buf, mode="wb")
+ fp.write(data)
+ fp.close()
+ return buf.getvalue()
+ finally:
+ if fp:
+ fp.close()
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_write_files_deferred.py b/tests/unittests/config/test_cc_write_files_deferred.py
new file mode 100644
index 00000000..d33d250a
--- /dev/null
+++ b/tests/unittests/config/test_cc_write_files_deferred.py
@@ -0,0 +1,77 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import tempfile
+import shutil
+
+from cloudinit.config.cc_write_files_deferred import (handle)
+from .test_cc_write_files import (VALID_SCHEMA)
+from cloudinit import log as logging
+from cloudinit import util
+
+from tests.unittests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
+
+LOG = logging.getLogger(__name__)
+
+
@skipUnlessJsonSchema()
@mock.patch('cloudinit.config.cc_write_files_deferred.write_files')
class TestWriteFilesDeferredSchema(CiTestCase):
    """Schema checks for the 'defer' key of cc_write_files_deferred."""

    with_logs = True

    def test_schema_validation_warns_invalid_value(self,
                                                   m_write_files_deferred):
        """If 'defer' is defined, it must be of type 'bool'."""

        valid_config = {
            'write_files': [
                {**VALID_SCHEMA.get('write_files')[0], 'defer': True}
            ]
        }

        # 'no' reads like a boolean but is a string, so schema must warn.
        # (The redundant str('no') wrapper was dropped.)
        invalid_config = {
            'write_files': [
                {**VALID_SCHEMA.get('write_files')[0], 'defer': 'no'}
            ]
        }

        cc = self.tmp_cloud('ubuntu')
        handle('cc_write_files_deferred', valid_config, cc, LOG, [])
        self.assertNotIn('Invalid config:', self.logs.getvalue())
        handle('cc_write_files_deferred', invalid_config, cc, LOG, [])
        self.assertIn('Invalid config:', self.logs.getvalue())
        self.assertIn("defer: 'no' is not of type 'boolean'",
                      self.logs.getvalue())
+
+
class TestWriteFilesDeferred(FilesystemMockingTestCase):
    """Behavioral tests for the deferred write_files handler."""

    with_logs = True

    def setUp(self):
        super(TestWriteFilesDeferred, self).setUp()
        self.tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmp)

    def test_filtering_deferred_files(self):
        """Only entries flagged defer: true are written by this handler."""
        self.patchUtils(self.tmp)
        expected = "hello world\n"
        deferred_entry = {
            'path': '/tmp/deferred.file',
            'defer': True,
            'content': expected,
        }
        plain_entry = {'path': '/tmp/not_deferred.file'}
        config = {'write_files': [deferred_entry, plain_entry]}
        cc = self.tmp_cloud('ubuntu')
        handle('cc_write_files_deferred', config, cc, LOG, [])
        self.assertEqual(util.load_file('/tmp/deferred.file'), expected)
        with self.assertRaises(FileNotFoundError):
            util.load_file('/tmp/not_deferred.file')
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_yum_add_repo.py b/tests/unittests/config/test_cc_yum_add_repo.py
new file mode 100644
index 00000000..2f11b96a
--- /dev/null
+++ b/tests/unittests/config/test_cc_yum_add_repo.py
@@ -0,0 +1,111 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import configparser
+import logging
+import shutil
+import tempfile
+
+from cloudinit import util
+from cloudinit.config import cc_yum_add_repo
+from tests.unittests import helpers
+
+LOG = logging.getLogger(__name__)
+
+
class TestConfig(helpers.FilesystemMockingTestCase):
    """Tests for cc_yum_add_repo writing /etc/yum.repos.d repo files."""

    def setUp(self):
        super(TestConfig, self).setUp()
        # Throwaway dir used as the fake filesystem root by patchUtils().
        self.tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmp)

    def test_bad_config(self):
        """A repo definition missing 'baseurl' must not be written."""
        cfg = {
            'yum_repos': {
                'epel-testing': {
                    'name': 'Extra Packages for Enterprise Linux 5 - Testing',
                    # Missing this should cause the repo not to be written
                    # 'baseurl': 'http://blah.org/pub/epel/testing/5/$barch',
                    'enabled': False,
                    'gpgcheck': True,
                    'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
                    'failovermethod': 'priority',
                },
            },
        }
        self.patchUtils(self.tmp)
        cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
        # Repo id 'epel-testing' is canonicalized to 'epel_testing'; since
        # baseurl was missing no file may exist at that path.
        self.assertRaises(IOError, util.load_file,
                          "/etc/yum.repos.d/epel_testing.repo")

    def test_write_config(self):
        """A complete repo definition is rendered as INI with 0/1 booleans."""
        cfg = {
            'yum_repos': {
                'epel-testing': {
                    'name': 'Extra Packages for Enterprise Linux 5 - Testing',
                    'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch',
                    'enabled': False,
                    'gpgcheck': True,
                    'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
                    'failovermethod': 'priority',
                },
            },
        }
        self.patchUtils(self.tmp)
        cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
        contents = util.load_file("/etc/yum.repos.d/epel_testing.repo")
        parser = configparser.ConfigParser()
        parser.read_string(contents)
        # Booleans are written as '0'/'1' in the rendered INI file.
        expected = {
            'epel_testing': {
                'name': 'Extra Packages for Enterprise Linux 5 - Testing',
                'failovermethod': 'priority',
                'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
                'enabled': '0',
                'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch',
                'gpgcheck': '1',
            }
        }
        for section in expected:
            self.assertTrue(parser.has_section(section),
                            "Contains section {0}".format(section))
            for k, v in expected[section].items():
                self.assertEqual(parser.get(section, k), v)

    def test_write_config_array(self):
        """A list value (multiple gpgkeys) is joined with newlines."""
        cfg = {
            'yum_repos': {
                'puppetlabs-products': {
                    'name': 'Puppet Labs Products El 6 - $basearch',
                    'baseurl':
                        'http://yum.puppetlabs.com/el/6/products/$basearch',
                    'gpgkey': [
                        'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs',
                        'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
                    ],
                    'enabled': True,
                    'gpgcheck': True,
                }
            }
        }
        self.patchUtils(self.tmp)
        cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
        contents = util.load_file("/etc/yum.repos.d/puppetlabs_products.repo")
        parser = configparser.ConfigParser()
        parser.read_string(contents)
        expected = {
            'puppetlabs_products': {
                'name': 'Puppet Labs Products El 6 - $basearch',
                'baseurl': 'http://yum.puppetlabs.com/el/6/products/$basearch',
                'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs\n'
                          'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
                'enabled': '1',
                'gpgcheck': '1',
            }
        }
        for section in expected:
            self.assertTrue(parser.has_section(section),
                            "Contains section {0}".format(section))
            for k, v in expected[section].items():
                self.assertEqual(parser.get(section, k), v)
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_zypper_add_repo.py b/tests/unittests/config/test_cc_zypper_add_repo.py
new file mode 100644
index 00000000..4af04bee
--- /dev/null
+++ b/tests/unittests/config/test_cc_zypper_add_repo.py
@@ -0,0 +1,231 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import configparser
+import glob
+import logging
+import os
+
+from cloudinit import util
+from cloudinit.config import cc_zypper_add_repo
+from tests.unittests import helpers
+from tests.unittests.helpers import mock
+
+LOG = logging.getLogger(__name__)
+
+
class TestConfig(helpers.FilesystemMockingTestCase):
    """Tests for cc_zypper_add_repo repo-file and zypp.conf writing."""

    def setUp(self):
        super(TestConfig, self).setUp()
        self.tmp = self.tmp_dir()
        # Relative path of zypp.conf beneath each test's fake root.
        self.zypp_conf = 'etc/zypp/zypp.conf'

    def test_bad_repo_config(self):
        """Config has no baseurl, no file should be written"""
        cfg = {
            'repos': [
                {
                    'id': 'foo',
                    'name': 'suse-test',
                    'enabled': '1'
                },
            ]
        }
        self.patchUtils(self.tmp)
        cc_zypper_add_repo._write_repos(cfg['repos'], '/etc/zypp/repos.d')
        self.assertRaises(IOError, util.load_file,
                          "/etc/zypp/repos.d/foo.repo")

    def test_write_repos(self):
        """Verify valid repos get written"""
        cfg = self._get_base_config_repos()
        root_d = self.tmp_dir()
        cc_zypper_add_repo._write_repos(cfg['zypper']['repos'], root_d)
        repos = glob.glob('%s/*.repo' % root_d)
        expected_repos = ['testing-foo.repo', 'testing-bar.repo']
        # BUG FIX: the original used `assert '<message string>'` which
        # asserts a non-empty literal (always true) and could never fail.
        # Use real assertions with the message as failure text instead.
        self.assertEqual(
            len(repos), 2,
            'Number of repos written is "%d" expected 2' % len(repos))
        for repo in repos:
            repo_name = os.path.basename(repo)
            self.assertIn(
                repo_name, expected_repos,
                'Found repo with name "%s"; unexpected' % repo_name)
        # Validation that the content gets properly written is in another test

    def test_write_repo(self):
        """Verify the content of a repo file"""
        cfg = {
            'repos': [
                {
                    'baseurl': 'http://foo',
                    'name': 'test-foo',
                    'id': 'testing-foo'
                },
            ]
        }
        root_d = self.tmp_dir()
        cc_zypper_add_repo._write_repos(cfg['repos'], root_d)
        contents = util.load_file("%s/testing-foo.repo" % root_d)
        parser = configparser.ConfigParser()
        parser.read_string(contents)
        # enabled/autorefresh default to '1' when not specified.
        expected = {
            'testing-foo': {
                'name': 'test-foo',
                'baseurl': 'http://foo',
                'enabled': '1',
                'autorefresh': '1'
            }
        }
        for section in expected:
            self.assertTrue(parser.has_section(section),
                            "Contains section {0}".format(section))
            for k, v in expected[section].items():
                self.assertEqual(parser.get(section, k), v)

    def test_config_write(self):
        """Write valid configuration data"""
        cfg = {
            'config': {
                'download.deltarpm': 'False',
                'reposdir': 'foo'
            }
        }
        root_d = self.tmp_dir()
        helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
        self.reRoot(root_d)
        cc_zypper_add_repo._write_zypp_config(cfg['config'])
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        expected = [
            '# Zypp config',
            '# Added via cloud.cfg',
            'download.deltarpm=False',
            'reposdir=foo'
        ]
        # assertIsNone(item) fails with the unexpected line in its message.
        for item in contents.split('\n'):
            if item not in expected:
                self.assertIsNone(item)

    @mock.patch('cloudinit.log.logging')
    def test_config_write_skip_configdir(self, mock_logging):
        """Write configuration but skip writing 'configdir' setting"""
        cfg = {
            'config': {
                'download.deltarpm': 'False',
                'reposdir': 'foo',
                'configdir': 'bar'
            }
        }
        root_d = self.tmp_dir()
        helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
        self.reRoot(root_d)
        cc_zypper_add_repo._write_zypp_config(cfg['config'])
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        expected = [
            '# Zypp config',
            '# Added via cloud.cfg',
            'download.deltarpm=False',
            'reposdir=foo'
        ]
        for item in contents.split('\n'):
            if item not in expected:
                self.assertIsNone(item)
        # Not finding the right path for mocking :(
        # assert mock_logging.warning.called

    def test_empty_config_section_no_new_data(self):
        """When the config section is empty no new data should be written to
        zypp.conf"""
        cfg = self._get_base_config_repos()
        cfg['zypper']['config'] = None
        root_d = self.tmp_dir()
        helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
        self.reRoot(root_d)
        cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        self.assertEqual(contents, '# No data')

    def test_empty_config_value_no_new_data(self):
        """When the config section is not empty but there are no values
        no new data should be written to zypp.conf"""
        cfg = self._get_base_config_repos()
        cfg['zypper']['config'] = {
            'download.deltarpm': None
        }
        root_d = self.tmp_dir()
        helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
        self.reRoot(root_d)
        cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        self.assertEqual(contents, '# No data')

    def test_handler_full_setup(self):
        """Test that the handler ends up calling the renderers"""
        cfg = self._get_base_config_repos()
        cfg['zypper']['config'] = {
            'download.deltarpm': 'False',
        }
        root_d = self.tmp_dir()
        os.makedirs('%s/etc/zypp/repos.d' % root_d)
        helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
        self.reRoot(root_d)
        cc_zypper_add_repo.handle('zypper_add_repo', cfg, None, LOG, [])
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        expected = [
            '# Zypp config',
            '# Added via cloud.cfg',
            'download.deltarpm=False',
        ]
        for item in contents.split('\n'):
            if item not in expected:
                self.assertIsNone(item)
        repos = glob.glob('%s/etc/zypp/repos.d/*.repo' % root_d)
        expected_repos = ['testing-foo.repo', 'testing-bar.repo']
        # BUG FIX: same always-true `assert '<string>'` pattern as in
        # test_write_repos; replaced with real assertions.
        self.assertEqual(
            len(repos), 2,
            'Number of repos written is "%d" expected 2' % len(repos))
        for repo in repos:
            repo_name = os.path.basename(repo)
            self.assertIn(
                repo_name, expected_repos,
                'Found repo with name "%s"; unexpected' % repo_name)

    def test_no_config_section_no_new_data(self):
        """When there is no config section no new data should be written to
        zypp.conf"""
        cfg = self._get_base_config_repos()
        root_d = self.tmp_dir()
        helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
        self.reRoot(root_d)
        cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
        cfg_out = os.path.join(root_d, self.zypp_conf)
        contents = util.load_file(cfg_out)
        self.assertEqual(contents, '# No data')

    def test_no_repo_data(self):
        """When there is no repo data nothing should happen"""
        root_d = self.tmp_dir()
        self.reRoot(root_d)
        cc_zypper_add_repo._write_repos(None, root_d)
        content = glob.glob('%s/*' % root_d)
        self.assertEqual(len(content), 0)

    def _get_base_config_repos(self):
        """Basic valid repo configuration"""
        cfg = {
            'zypper': {
                'repos': [
                    {
                        'baseurl': 'http://foo',
                        'name': 'test-foo',
                        'id': 'testing-foo'
                    },
                    {
                        'baseurl': 'http://bar',
                        'name': 'test-bar',
                        'id': 'testing-bar'
                    }
                ]
            }
        }
        return cfg
diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py
new file mode 100644
index 00000000..b01f5eea
--- /dev/null
+++ b/tests/unittests/config/test_schema.py
@@ -0,0 +1,515 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import cloudinit
+from cloudinit.config.schema import (
+ CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file,
+ get_schema_doc, get_schema, validate_cloudconfig_file,
+ validate_cloudconfig_schema, main)
+from cloudinit.util import write_file
+
+from tests.unittests.helpers import CiTestCase, mock, skipUnlessJsonSchema
+
+from copy import copy
+import itertools
+import pytest
+from pathlib import Path
+from textwrap import dedent
+from yaml import safe_load
+
+
class GetSchemaTest(CiTestCase):
    """Tests for the module-schema aggregation done by get_schema."""

    def test_get_schema_coalesces_known_schema(self):
        """Every cloudconfig module with schema is listed in allOf keyword."""
        schema = get_schema()
        self.assertCountEqual(
            [
                'cc_apk_configure',
                'cc_apt_configure',
                'cc_bootcmd',
                'cc_locale',
                'cc_ntp',
                'cc_resizefs',
                'cc_runcmd',
                'cc_snap',
                'cc_ubuntu_advantage',
                'cc_ubuntu_drivers',
                'cc_write_files',
                'cc_write_files_deferred',
                'cc_zypper_add_repo',
                'cc_chef',
                'cc_install_hotplug',
            ],
            [subschema['id'] for subschema in schema['allOf']])
        self.assertEqual('cloud-config-schema', schema['id'])
        self.assertEqual(
            'http://json-schema.org/draft-04/schema#',
            schema['$schema'])
        # FULL_SCHEMA is updated by the get_schema call
        from cloudinit.config.schema import FULL_SCHEMA
        self.assertCountEqual(['id', '$schema', 'allOf'], FULL_SCHEMA.keys())

    def test_get_schema_returns_global_when_set(self):
        """When FULL_SCHEMA global is already set, get_schema returns it."""
        m_schema_path = 'cloudinit.config.schema.FULL_SCHEMA'
        # Patching the module-level cache makes get_schema short-circuit.
        with mock.patch(m_schema_path, {'here': 'iam'}):
            self.assertEqual({'here': 'iam'}, get_schema())
+
+
class SchemaValidationErrorTest(CiTestCase):
    """Test SchemaValidationError construction and string rendering."""

    def test_schema_validation_error_expects_schema_errors(self):
        """SchemaValidationError is initialized from schema_errors."""
        errors = (
            ('key.path', 'unexpected key "junk"'),
            ('key2.path', '"-123" is not a valid "hostname" format'),
        )
        exc = SchemaValidationError(schema_errors=errors)
        self.assertIsInstance(exc, Exception)
        # It subclasses ValueError so generic error handling still works.
        self.assertTrue(isinstance(exc, ValueError))
        self.assertEqual(errors, exc.schema_errors)
        expected_msg = (
            'Cloud config schema errors: key.path: unexpected key "junk", '
            'key2.path: "-123" is not a valid "hostname" format')
        self.assertEqual(expected_msg, str(exc))
+
+
class ValidateCloudConfigSchemaTest(CiTestCase):
    """Tests for validate_cloudconfig_schema."""

    with_logs = True

    @skipUnlessJsonSchema()
    def test_validateconfig_schema_non_strict_emits_warnings(self):
        """When strict is False validate_cloudconfig_schema emits warnings."""
        schema = {'properties': {'p1': {'type': 'string'}}}
        validate_cloudconfig_schema({'p1': -1}, schema, strict=False)
        self.assertIn(
            "Invalid config:\np1: -1 is not of type 'string'\n",
            self.logs.getvalue())

    @skipUnlessJsonSchema()
    def test_validateconfig_schema_emits_warning_on_missing_jsonschema(self):
        """Warning from validate_cloudconfig_schema when missing jsonschema."""
        schema = {'properties': {'p1': {'type': 'string'}}}
        # Replacing the module entry with an ImportError instance simulates
        # the jsonschema package being absent at import time.
        with mock.patch.dict('sys.modules', **{'jsonschema': ImportError()}):
            validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
        self.assertIn(
            'Ignoring schema validation. python-jsonschema is not present',
            self.logs.getvalue())

    @skipUnlessJsonSchema()
    def test_validateconfig_schema_strict_raises_errors(self):
        """When strict is True validate_cloudconfig_schema raises errors."""
        schema = {'properties': {'p1': {'type': 'string'}}}
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
        self.assertEqual(
            "Cloud config schema errors: p1: -1 is not of type 'string'",
            str(context_mgr.exception))

    @skipUnlessJsonSchema()
    def test_validateconfig_schema_honors_formats(self):
        """With strict True, validate_cloudconfig_schema errors on format."""
        schema = {
            'properties': {'p1': {'type': 'string', 'format': 'email'}}}
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_schema({'p1': '-1'}, schema, strict=True)
        self.assertEqual(
            "Cloud config schema errors: p1: '-1' is not a 'email'",
            str(context_mgr.exception))
+ str(context_mgr.exception))
+
+
class TestCloudConfigExamples:
    """Validate each documented module example against the full schema."""

    # The unified schema aggregated from all cloud-config modules.
    schema = get_schema()
    # One (module id, example) pair per documented example. The loop
    # variable is named 'subschema' so it no longer shadows the class
    # attribute 'schema' it iterates over.
    params = [
        (subschema["id"], example)
        for subschema in schema["allOf"] for example in subschema["examples"]]

    @pytest.mark.parametrize("schema_id,example", params)
    @skipUnlessJsonSchema()
    def test_validateconfig_schema_of_example(self, schema_id, example):
        """ For a given example in a config module we test if it is valid
        according to the unified schema of all config modules
        """
        config_load = safe_load(example)
        validate_cloudconfig_schema(
            config_load, self.schema, strict=True)
+
+
class ValidateCloudConfigFileTest(CiTestCase):
    """Tests for validate_cloudconfig_file."""

    def setUp(self):
        super(ValidateCloudConfigFileTest, self).setUp()
        # Each test writes its own candidate config to this temp path.
        self.config_file = self.tmp_path('cloudcfg.yaml')

    def test_validateconfig_file_error_on_absent_file(self):
        """On absent config_path, validate_cloudconfig_file errors."""
        with self.assertRaises(RuntimeError) as context_mgr:
            validate_cloudconfig_file('/not/here', {})
        self.assertEqual(
            'Configfile /not/here does not exist',
            str(context_mgr.exception))

    def test_validateconfig_file_error_on_invalid_header(self):
        """On invalid header, validate_cloudconfig_file errors.

        A SchemaValidationError is raised when the file doesn't begin with
        CLOUD_CONFIG_HEADER.
        """
        write_file(self.config_file, '#junk')
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_file(self.config_file, {})
        self.assertEqual(
            'Cloud config schema errors: format-l1.c1: File {0} needs to begin'
            ' with "{1}"'.format(
                self.config_file, CLOUD_CONFIG_HEADER.decode()),
            str(context_mgr.exception))

    def test_validateconfig_file_error_on_non_yaml_scanner_error(self):
        """On non-yaml scan issues, validate_cloudconfig_file errors."""
        # Generate a scanner error by providing text on a single line with
        # improper indent.
        write_file(self.config_file, '#cloud-config\nasdf:\nasdf')
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_file(self.config_file, {})
        self.assertIn(
            'schema errors: format-l3.c1: File {0} is not valid yaml.'.format(
                self.config_file),
            str(context_mgr.exception))

    def test_validateconfig_file_error_on_non_yaml_parser_error(self):
        """On non-yaml parser issues, validate_cloudconfig_file errors."""
        # An unmatched brace triggers a yaml parser (not scanner) error.
        write_file(self.config_file, '#cloud-config\n{}}')
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_file(self.config_file, {})
        self.assertIn(
            'schema errors: format-l2.c3: File {0} is not valid yaml.'.format(
                self.config_file),
            str(context_mgr.exception))

    # Fixed typo in test name: 'sctrictly' -> 'strictly'.
    @skipUnlessJsonSchema()
    def test_validateconfig_file_strictly_validates_schema(self):
        """validate_cloudconfig_file raises errors on invalid schema."""
        schema = {
            'properties': {'p1': {'type': 'string', 'format': 'string'}}}
        write_file(self.config_file, '#cloud-config\np1: -1')
        with self.assertRaises(SchemaValidationError) as context_mgr:
            validate_cloudconfig_file(self.config_file, schema)
        self.assertEqual(
            "Cloud config schema errors: p1: -1 is not of type 'string'",
            str(context_mgr.exception))
+
+
class GetSchemaDocTest(CiTestCase):
    """Tests for get_schema_doc."""

    def setUp(self):
        super(GetSchemaDocTest, self).setUp()
        # Minimal metadata keys every module schema must provide; tests
        # layer 'properties'/'examples' on top of this base.
        self.required_schema = {
            'title': 'title', 'description': 'description', 'id': 'id',
            'name': 'name', 'frequency': 'frequency',
            'distros': ['debian', 'rhel']}

    def test_get_schema_doc_returns_restructured_text(self):
        """get_schema_doc returns restructured text for a cloudinit schema."""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'properties': {
                'prop1': {'type': 'array', 'description': 'prop-description',
                          'items': {'type': 'integer'}}}})
        self.assertEqual(
            dedent("""
                name
                ----
                **Summary:** title

                description

                **Internal name:** ``id``

                **Module frequency:** frequency

                **Supported distros:** debian, rhel

                **Config schema**:
                    **prop1:** (array of integer) prop-description\n\n"""),
            get_schema_doc(full_schema))

    def test_get_schema_doc_handles_multiple_types(self):
        """get_schema_doc delimits multiple property types with a '/'."""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'properties': {
                'prop1': {'type': ['string', 'integer'],
                          'description': 'prop-description'}}})
        self.assertIn(
            '**prop1:** (string/integer) prop-description',
            get_schema_doc(full_schema))

    def test_get_schema_doc_handles_enum_types(self):
        """get_schema_doc converts enum types to yaml and delimits with '/'."""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'properties': {
                'prop1': {'enum': [True, False, 'stuff'],
                          'description': 'prop-description'}}})
        self.assertIn(
            '**prop1:** (true/false/stuff) prop-description',
            get_schema_doc(full_schema))

    def test_get_schema_doc_handles_nested_oneof_property_types(self):
        """get_schema_doc describes array items oneOf declarations in type."""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'properties': {
                'prop1': {'type': 'array',
                          'items': {
                              'oneOf': [{'type': 'string'},
                                        {'type': 'integer'}]},
                          'description': 'prop-description'}}})
        self.assertIn(
            '**prop1:** (array of (string)/(integer)) prop-description',
            get_schema_doc(full_schema))

    def test_get_schema_doc_handles_string_examples(self):
        """get_schema_doc properly indented examples as a list of strings."""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'examples': ['ex1:\n    [don\'t, expand, "this"]', 'ex2: true'],
             'properties': {
                'prop1': {'type': 'array', 'description': 'prop-description',
                          'items': {'type': 'integer'}}}})
        self.assertIn(
            dedent("""
                **Config schema**:
                    **prop1:** (array of integer) prop-description

                **Examples**::

                    ex1:
                        [don't, expand, "this"]
                    # --- Example2 ---
                    ex2: true
            """),
            get_schema_doc(full_schema))

    def test_get_schema_doc_properly_parse_description(self):
        """get_schema_doc description properly formatted"""
        full_schema = copy(self.required_schema)
        full_schema.update(
            {'properties': {
                'p1': {
                    'type': 'string',
                    'description': dedent("""\
                        This item
                        has the
                        following options:

                          - option1
                          - option2
                          - option3

                        The default value is
                        option1""")
                }
            }}
        )

        self.assertIn(
            dedent("""
                **Config schema**:
                    **p1:** (string) This item has the following options:

                            - option1
                            - option2
                            - option3

                    The default value is option1
            """),
            get_schema_doc(full_schema))

    def test_get_schema_doc_raises_key_errors(self):
        """get_schema_doc raises KeyErrors on missing keys."""
        # Dropping any one required metadata key must raise, naming the key.
        for key in self.required_schema:
            invalid_schema = copy(self.required_schema)
            invalid_schema.pop(key)
            with self.assertRaises(KeyError) as context_mgr:
                get_schema_doc(invalid_schema)
            self.assertIn(key, str(context_mgr.exception))
+
+
class AnnotatedCloudconfigFileTest(CiTestCase):
    """Tests for annotated_cloudconfig_file rendering of schema errors."""

    # Show full diffs on assertion failures; annotated output is multiline.
    maxDiff = None

    def test_annotated_cloudconfig_file_no_schema_errors(self):
        """With no schema_errors, print the original content."""
        content = b'ntp:\n  pools: [ntp1.pools.com]\n'
        self.assertEqual(
            content,
            annotated_cloudconfig_file({}, content, schema_errors=[]))

    def test_annotated_cloudconfig_file_schema_annotates_and_adds_footer(self):
        """With schema_errors, error lines are annotated and a footer added."""
        content = dedent("""\
            #cloud-config
            # comment
            ntp:
              pools: [-99, 75]
            """).encode()
        expected = dedent("""\
            #cloud-config
            # comment
            ntp: # E1
              pools: [-99, 75] # E2,E3

            # Errors: -------------
            # E1: Some type error
            # E2: -99 is not a string
            # E3: 75 is not a string

            """)
        # Skip the 13-byte '#cloud-config' header before yaml-parsing.
        parsed_config = safe_load(content[13:])
        schema_errors = [
            ('ntp', 'Some type error'), ('ntp.pools.0', '-99 is not a string'),
            ('ntp.pools.1', '75 is not a string')]
        self.assertEqual(
            expected,
            annotated_cloudconfig_file(parsed_config, content, schema_errors))

    def test_annotated_cloudconfig_file_annotates_separate_line_items(self):
        """Errors are annotated for lists with items on separate lines."""
        content = dedent("""\
            #cloud-config
            # comment
            ntp:
              pools:
                - -99
                - 75
            """).encode()
        expected = dedent("""\
            ntp:
              pools:
                - -99 # E1
                - 75 # E2
            """)
        # Skip the 13-byte '#cloud-config' header before yaml-parsing.
        parsed_config = safe_load(content[13:])
        schema_errors = [
            ('ntp.pools.0', '-99 is not a string'),
            ('ntp.pools.1', '75 is not a string')]
        self.assertIn(
            expected,
            annotated_cloudconfig_file(parsed_config, content, schema_errors))
+
+
class TestMain:

    # Every pair drawn from these three options is mutually exclusive.
    exclusive_combinations = itertools.combinations(
        ["--system", "--docs all", "--config-file something"], 2
    )

    @pytest.mark.parametrize("params", exclusive_combinations)
    def test_main_exclusive_args(self, params, capsys):
        """Main exits non-zero and error on required exclusive args."""
        # Flatten e.g. "--docs all" into separate argv tokens.
        argv = ['mycmd'] + [tok for arg in params for tok in arg.split()]
        with mock.patch('sys.argv', argv):
            with pytest.raises(SystemExit) as excinfo:
                main()
        assert 1 == excinfo.value.code

        _out, err = capsys.readouterr()
        assert (
            'Expected one of --config-file, --system or --docs arguments\n'
            == err
        )

    def test_main_missing_args(self, capsys):
        """Main exits non-zero and reports an error on missing parameters."""
        with mock.patch('sys.argv', ['mycmd']):
            with pytest.raises(SystemExit) as excinfo:
                main()
        assert 1 == excinfo.value.code

        _out, err = capsys.readouterr()
        assert (
            'Expected one of --config-file, --system or --docs arguments\n'
            == err
        )

    def test_main_absent_config_file(self, capsys):
        """Main exits non-zero when config file is absent."""
        argv = ['mycmd', '--annotate', '--config-file', 'NOT_A_FILE']
        with mock.patch('sys.argv', argv):
            with pytest.raises(SystemExit) as excinfo:
                main()
        assert 1 == excinfo.value.code
        _out, err = capsys.readouterr()
        assert 'Configfile NOT_A_FILE does not exist\n' == err

    def test_main_prints_docs(self, capsys):
        """When --docs parameter is provided, main generates documentation."""
        with mock.patch('sys.argv', ['mycmd', '--docs', 'all']):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        # Spot-check two module sections in the generated RST.
        assert '\nNTP\n---\n' in out
        assert '\nRuncmd\n------\n' in out

    def test_main_validates_config_file(self, tmpdir, capsys):
        """When --config-file parameter is provided, main validates schema."""
        config_file = tmpdir.join('my.yaml')
        config_file.write(b'#cloud-config\nntp:')  # shortest ntp schema
        argv = ['mycmd', '--config-file', config_file.strpath]
        with mock.patch('sys.argv', argv):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        assert 'Valid cloud-config: {0}\n'.format(config_file) == out

    @mock.patch('cloudinit.config.schema.read_cfg_paths')
    @mock.patch('cloudinit.config.schema.os.getuid', return_value=0)
    def test_main_validates_system_userdata(
        self, m_getuid, m_read_cfg_paths, capsys, paths
    ):
        """When --system is provided, main validates system userdata."""
        m_read_cfg_paths.return_value = paths
        userdata_file = paths.get_ipath_cur("userdata_raw")
        write_file(userdata_file, b'#cloud-config\nntp:')
        with mock.patch('sys.argv', ['mycmd', '--system']):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        assert 'Valid cloud-config: system userdata\n' == out

    @mock.patch('cloudinit.config.schema.os.getuid', return_value=1000)
    def test_main_system_userdata_requires_root(self, m_getuid, capsys, paths):
        """Non-root user can't use --system param"""
        with mock.patch('sys.argv', ['mycmd', '--system']):
            with pytest.raises(SystemExit) as excinfo:
                main()
        assert 1 == excinfo.value.code
        _out, err = capsys.readouterr()
        assert (
            'Unable to read system userdata as non-root user. Try using sudo\n'
            == err
        )
+
+
def _get_schema_doc_examples():
    """Yield the doc/examples cloud-config files, skipping archive formats."""
    examples_dir = Path(cloudinit.__file__).parent.parent / 'doc' / 'examples'
    # Fail loudly if the layout assumption (cloudinit/../doc/examples) breaks.
    assert examples_dir.is_dir()
    return (
        path
        for path in examples_dir.glob('cloud-config*.txt')
        if not path.name.startswith('cloud-config-archive')
    )
+
+
class TestSchemaDocExamples:
    # Build the full cloud-config schema once at class definition and share
    # it across all parametrized example validations.
    schema = get_schema()

    @pytest.mark.parametrize("example_path", _get_schema_doc_examples())
    @skipUnlessJsonSchema()
    def test_schema_doc_examples(self, example_path):
        """Each doc/examples cloud-config file validates against the schema."""
        validate_cloudconfig_file(str(example_path), self.schema)
+
+# vi: ts=4 expandtab syntax=python