summaryrefslogtreecommitdiff
path: root/tests/unittests/test_handler
diff options
context:
space:
mode:
authorBrett Holman <bholman.devel@gmail.com>2021-12-03 13:11:46 -0700
committerGitHub <noreply@github.com>2021-12-03 13:11:46 -0700
commit039c40f9b3d88ee8158604bb18ca4bf2fb5d5e51 (patch)
tree5f1b09486ccaf98ee8159de58d9a2a1ef0af5dc1 /tests/unittests/test_handler
parentffa6fc88249aa080aa31811a45569a45e567418a (diff)
downloadvyos-cloud-init-039c40f9b3d88ee8158604bb18ca4bf2fb5d5e51.tar.gz
vyos-cloud-init-039c40f9b3d88ee8158604bb18ca4bf2fb5d5e51.zip
Reorganize unit test locations under tests/unittests (#1126)
This attempts to standardize unit test file location under test/unittests/ such that any source file located at cloudinit/path/to/file.py may have a corresponding unit test file at test/unittests/path/to/test_file.py. Noteworthy Comments: ==================== Four different duplicate test files existed: test_{gpg,util,cc_mounts,cc_resolv_conf}.py Each of these duplicate file pairs has been merged together. This is a break in git history for these files. The test suite appears to have a dependency on test order. Changing test order causes some tests to fail. This should be rectified, but for now some tests have been modified in tests/unittests/config/test_set_passwords.py. A helper class name starts with "Test" which causes pytest to try executing it as a test case, which then throws warnings "due to Class having __init__()". Silence by changing the name of the class. # helpers.py is imported in many test files, import paths change cloudinit/tests/helpers.py -> tests/unittests/helpers.py # Move directories: cloudinit/distros/tests -> tests/unittests/distros cloudinit/cmd/devel/tests -> tests/unittests/cmd/devel cloudinit/cmd/tests -> tests/unittests/cmd/ cloudinit/sources/helpers/tests -> tests/unittests/sources/helpers cloudinit/sources/tests -> tests/unittests/sources cloudinit/net/tests -> tests/unittests/net cloudinit/config/tests -> tests/unittests/config cloudinit/analyze/tests/ -> tests/unittests/analyze/ # Standardize tests already in tests/unittests/ test_datasource -> sources test_distros -> distros test_vmware -> sources/vmware test_handler -> config # this contains cloudconfig module tests test_runs -> runs
Diffstat (limited to 'tests/unittests/test_handler')
-rw-r--r--tests/unittests/test_handler/__init__.py0
-rw-r--r--tests/unittests/test_handler/test_handler_apk_configure.py299
-rw-r--r--tests/unittests/test_handler/test_handler_apt_conf_v1.py129
-rw-r--r--tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py181
-rw-r--r--tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py226
-rw-r--r--tests/unittests/test_handler/test_handler_apt_key.py137
-rw-r--r--tests/unittests/test_handler/test_handler_apt_source_v1.py651
-rw-r--r--tests/unittests/test_handler/test_handler_apt_source_v3.py1170
-rw-r--r--tests/unittests/test_handler/test_handler_bootcmd.py152
-rw-r--r--tests/unittests/test_handler/test_handler_ca_certs.py361
-rw-r--r--tests/unittests/test_handler/test_handler_chef.py271
-rw-r--r--tests/unittests/test_handler/test_handler_debug.py59
-rw-r--r--tests/unittests/test_handler/test_handler_disk_setup.py243
-rw-r--r--tests/unittests/test_handler/test_handler_etc_hosts.py70
-rw-r--r--tests/unittests/test_handler/test_handler_growpart.py309
-rw-r--r--tests/unittests/test_handler/test_handler_install_hotplug.py113
-rw-r--r--tests/unittests/test_handler/test_handler_landscape.py126
-rw-r--r--tests/unittests/test_handler/test_handler_locale.py116
-rw-r--r--tests/unittests/test_handler/test_handler_lxd.py222
-rw-r--r--tests/unittests/test_handler/test_handler_mcollective.py146
-rw-r--r--tests/unittests/test_handler/test_handler_mounts.py406
-rw-r--r--tests/unittests/test_handler/test_handler_ntp.py765
-rw-r--r--tests/unittests/test_handler/test_handler_power_state.py159
-rw-r--r--tests/unittests/test_handler/test_handler_puppet.py380
-rw-r--r--tests/unittests/test_handler/test_handler_refresh_rmc_and_interface.py109
-rw-r--r--tests/unittests/test_handler/test_handler_resizefs.py398
-rw-r--r--tests/unittests/test_handler/test_handler_resolv_conf.py105
-rw-r--r--tests/unittests/test_handler/test_handler_rsyslog.py178
-rw-r--r--tests/unittests/test_handler/test_handler_runcmd.py129
-rw-r--r--tests/unittests/test_handler/test_handler_seed_random.py205
-rw-r--r--tests/unittests/test_handler/test_handler_set_hostname.py207
-rw-r--r--tests/unittests/test_handler/test_handler_spacewalk.py42
-rw-r--r--tests/unittests/test_handler/test_handler_timezone.py54
-rw-r--r--tests/unittests/test_handler/test_handler_write_files.py246
-rw-r--r--tests/unittests/test_handler/test_handler_write_files_deferred.py77
-rw-r--r--tests/unittests/test_handler/test_handler_yum_add_repo.py111
-rw-r--r--tests/unittests/test_handler/test_handler_zypper_add_repo.py231
-rw-r--r--tests/unittests/test_handler/test_schema.py515
38 files changed, 0 insertions, 9298 deletions
diff --git a/tests/unittests/test_handler/__init__.py b/tests/unittests/test_handler/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/tests/unittests/test_handler/__init__.py
+++ /dev/null
diff --git a/tests/unittests/test_handler/test_handler_apk_configure.py b/tests/unittests/test_handler/test_handler_apk_configure.py
deleted file mode 100644
index 8acc0b33..00000000
--- a/tests/unittests/test_handler/test_handler_apk_configure.py
+++ /dev/null
@@ -1,299 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-""" test_apk_configure
-Test creation of repositories file
-"""
-
-import logging
-import os
-import textwrap
-
-from cloudinit import (cloud, helpers, util)
-
-from cloudinit.config import cc_apk_configure
-from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock)
-
-REPO_FILE = "/etc/apk/repositories"
-DEFAULT_MIRROR_URL = "https://alpine.global.ssl.fastly.net/alpine"
-CC_APK = 'cloudinit.config.cc_apk_configure'
-
-
-class TestNoConfig(FilesystemMockingTestCase):
- def setUp(self):
- super(TestNoConfig, self).setUp()
- self.add_patch(CC_APK + '._write_repositories_file', 'm_write_repos')
- self.name = "apk-configure"
- self.cloud_init = None
- self.log = logging.getLogger("TestNoConfig")
- self.args = []
-
- def test_no_config(self):
- """
- Test that nothing is done if no apk-configure
- configuration is provided.
- """
- config = util.get_builtin_cfg()
-
- cc_apk_configure.handle(self.name, config, self.cloud_init,
- self.log, self.args)
-
- self.assertEqual(0, self.m_write_repos.call_count)
-
-
-class TestConfig(FilesystemMockingTestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.new_root = self.tmp_dir()
- self.new_root = self.reRoot(root=self.new_root)
- for dirname in ['tmp', 'etc/apk']:
- util.ensure_dir(os.path.join(self.new_root, dirname))
- self.paths = helpers.Paths({'templates_dir': self.new_root})
- self.name = "apk-configure"
- self.cloud = cloud.Cloud(None, self.paths, None, None, None)
- self.log = logging.getLogger("TestNoConfig")
- self.args = []
-
- @mock.patch(CC_APK + '._write_repositories_file')
- def test_no_repo_settings(self, m_write_repos):
- """
- Test that nothing is written if the 'alpine-repo' key
- is not present.
- """
- config = {"apk_repos": {}}
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- self.assertEqual(0, m_write_repos.call_count)
-
- @mock.patch(CC_APK + '._write_repositories_file')
- def test_empty_repo_settings(self, m_write_repos):
- """
- Test that nothing is written if 'alpine_repo' list is empty.
- """
- config = {"apk_repos": {"alpine_repo": []}}
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- self.assertEqual(0, m_write_repos.call_count)
-
- def test_only_main_repo(self):
- """
- Test when only details of main repo is written to file.
- """
- alpine_version = 'v3.12'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version
- }
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
-
- """.format(DEFAULT_MIRROR_URL, alpine_version))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
- def test_main_and_community_repos(self):
- """
- Test when only details of main and community repos are
- written to file.
- """
- alpine_version = 'edge'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version,
- "community_enabled": True
- }
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
- {0}/{1}/community
-
- """.format(DEFAULT_MIRROR_URL, alpine_version))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
- def test_main_community_testing_repos(self):
- """
- Test when details of main, community and testing repos
- are written to file.
- """
- alpine_version = 'v3.12'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version,
- "community_enabled": True,
- "testing_enabled": True
- }
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
- {0}/{1}/community
- #
- # Testing - using with non-Edge installation may cause problems!
- #
- {0}/edge/testing
-
- """.format(DEFAULT_MIRROR_URL, alpine_version))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
- def test_edge_main_community_testing_repos(self):
- """
- Test when details of main, community and testing repos
- for Edge version of Alpine are written to file.
- """
- alpine_version = 'edge'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version,
- "community_enabled": True,
- "testing_enabled": True
- }
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
- {0}/{1}/community
- {0}/{1}/testing
-
- """.format(DEFAULT_MIRROR_URL, alpine_version))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
- def test_main_community_testing_local_repos(self):
- """
- Test when details of main, community, testing and
- local repos are written to file.
- """
- alpine_version = 'v3.12'
- local_repo_url = 'http://some.mirror/whereever'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version,
- "community_enabled": True,
- "testing_enabled": True
- },
- "local_repo_base_url": local_repo_url
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
- {0}/{1}/community
- #
- # Testing - using with non-Edge installation may cause problems!
- #
- {0}/edge/testing
-
- #
- # Local repo
- #
- {2}/{1}
-
- """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
- def test_edge_main_community_testing_local_repos(self):
- """
- Test when details of main, community, testing and local repos
- for Edge version of Alpine are written to file.
- """
- alpine_version = 'edge'
- local_repo_url = 'http://some.mirror/whereever'
- config = {
- "apk_repos": {
- "alpine_repo": {
- "version": alpine_version,
- "community_enabled": True,
- "testing_enabled": True
- },
- "local_repo_base_url": local_repo_url
- }
- }
-
- cc_apk_configure.handle(self.name, config, self.cloud, self.log,
- self.args)
-
- expected_content = textwrap.dedent("""\
- #
- # Created by cloud-init
- #
- # This file is written on first boot of an instance
- #
-
- {0}/{1}/main
- {0}/{1}/community
- {0}/edge/testing
-
- #
- # Local repo
- #
- {2}/{1}
-
- """.format(DEFAULT_MIRROR_URL, alpine_version, local_repo_url))
-
- self.assertEqual(expected_content, util.load_file(REPO_FILE))
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_conf_v1.py b/tests/unittests/test_handler/test_handler_apt_conf_v1.py
deleted file mode 100644
index 6a4b03ee..00000000
--- a/tests/unittests/test_handler/test_handler_apt_conf_v1.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_apt_configure
-from cloudinit import util
-
-from cloudinit.tests.helpers import TestCase
-
-import copy
-import os
-import re
-import shutil
-import tempfile
-
-
-class TestAptProxyConfig(TestCase):
- def setUp(self):
- super(TestAptProxyConfig, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
- self.pfile = os.path.join(self.tmp, "proxy.cfg")
- self.cfile = os.path.join(self.tmp, "config.cfg")
-
- def _search_apt_config(self, contents, ptype, value):
- return re.search(
- r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
- contents, flags=re.IGNORECASE)
-
- def test_apt_proxy_written(self):
- cfg = {'proxy': 'myproxy'}
- cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
-
- self.assertTrue(os.path.isfile(self.pfile))
- self.assertFalse(os.path.isfile(self.cfile))
-
- contents = util.load_file(self.pfile)
- self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
-
- def test_apt_http_proxy_written(self):
- cfg = {'http_proxy': 'myproxy'}
- cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
-
- self.assertTrue(os.path.isfile(self.pfile))
- self.assertFalse(os.path.isfile(self.cfile))
-
- contents = util.load_file(self.pfile)
- self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
-
- def test_apt_all_proxy_written(self):
- cfg = {'http_proxy': 'myproxy_http_proxy',
- 'https_proxy': 'myproxy_https_proxy',
- 'ftp_proxy': 'myproxy_ftp_proxy'}
-
- values = {'http': cfg['http_proxy'],
- 'https': cfg['https_proxy'],
- 'ftp': cfg['ftp_proxy'],
- }
-
- cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
-
- self.assertTrue(os.path.isfile(self.pfile))
- self.assertFalse(os.path.isfile(self.cfile))
-
- contents = util.load_file(self.pfile)
-
- for ptype, pval in values.items():
- self.assertTrue(self._search_apt_config(contents, ptype, pval))
-
- def test_proxy_deleted(self):
- util.write_file(self.cfile, "content doesnt matter")
- cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
- self.assertFalse(os.path.isfile(self.pfile))
- self.assertFalse(os.path.isfile(self.cfile))
-
- def test_proxy_replaced(self):
- util.write_file(self.cfile, "content doesnt matter")
- cc_apt_configure.apply_apt_config({'proxy': "foo"},
- self.pfile, self.cfile)
- self.assertTrue(os.path.isfile(self.pfile))
- contents = util.load_file(self.pfile)
- self.assertTrue(self._search_apt_config(contents, "http", "foo"))
-
- def test_config_written(self):
- payload = 'this is my apt config'
- cfg = {'conf': payload}
-
- cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
-
- self.assertTrue(os.path.isfile(self.cfile))
- self.assertFalse(os.path.isfile(self.pfile))
-
- self.assertEqual(util.load_file(self.cfile), payload)
-
- def test_config_replaced(self):
- util.write_file(self.pfile, "content doesnt matter")
- cc_apt_configure.apply_apt_config({'conf': "foo"},
- self.pfile, self.cfile)
- self.assertTrue(os.path.isfile(self.cfile))
- self.assertEqual(util.load_file(self.cfile), "foo")
-
- def test_config_deleted(self):
- # if no 'conf' is provided, delete any previously written file
- util.write_file(self.pfile, "content doesnt matter")
- cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
- self.assertFalse(os.path.isfile(self.pfile))
- self.assertFalse(os.path.isfile(self.cfile))
-
-
-class TestConversion(TestCase):
- def test_convert_with_apt_mirror_as_empty_string(self):
- # an empty apt_mirror is the same as no apt_mirror
- empty_m_found = cc_apt_configure.convert_to_v3_apt_format(
- {'apt_mirror': ''})
- default_found = cc_apt_configure.convert_to_v3_apt_format({})
- self.assertEqual(default_found, empty_m_found)
-
- def test_convert_with_apt_mirror(self):
- mirror = 'http://my.mirror/ubuntu'
- f = cc_apt_configure.convert_to_v3_apt_format({'apt_mirror': mirror})
- self.assertIn(mirror, set(m['uri'] for m in f['apt']['primary']))
-
- def test_no_old_content(self):
- mirror = 'http://my.mirror/ubuntu'
- mydata = {'apt': {'primary': {'arches': ['default'], 'uri': mirror}}}
- expected = copy.deepcopy(mydata)
- self.assertEqual(expected,
- cc_apt_configure.convert_to_v3_apt_format(mydata))
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py
deleted file mode 100644
index d69916f9..00000000
--- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-""" test_handler_apt_configure_sources_list
-Test templating of sources list
-"""
-import logging
-import os
-import shutil
-import tempfile
-from unittest import mock
-
-from cloudinit import templater
-from cloudinit import subp
-from cloudinit import util
-
-from cloudinit.config import cc_apt_configure
-
-from cloudinit.distros.debian import Distro
-
-from cloudinit.tests import helpers as t_help
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-YAML_TEXT_CUSTOM_SL = """
-apt_mirror: http://archive.ubuntu.com/ubuntu/
-apt_custom_sources_list: |
- ## template:jinja
- ## Note, this file is written by cloud-init on first boot of an instance
- ## modifications made here will not survive a re-bundle.
- ## if you wish to make changes you can:
- ## a.) add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg
- ## or do the same in user-data
- ## b.) add sources in /etc/apt/sources.list.d
- ## c.) make changes to template file /etc/cloud/templates/sources.list.tmpl
-
- # See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
- # newer versions of the distribution.
- deb {{mirror}} {{codename}} main restricted
- deb-src {{mirror}} {{codename}} main restricted
- # FIND_SOMETHING_SPECIAL
-"""
-
-EXPECTED_CONVERTED_CONTENT = (
- """## Note, this file is written by cloud-init on first boot of an instance
-## modifications made here will not survive a re-bundle.
-## if you wish to make changes you can:
-## a.) add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg
-## or do the same in user-data
-## b.) add sources in /etc/apt/sources.list.d
-## c.) make changes to template file /etc/cloud/templates/sources.list.tmpl
-
-# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
-# newer versions of the distribution.
-deb http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
-# FIND_SOMETHING_SPECIAL
-""")
-
-
-class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
- """TestAptSourceConfigSourceList
- Main Class to test sources list rendering
- """
- def setUp(self):
- super(TestAptSourceConfigSourceList, self).setUp()
- self.subp = subp.subp
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
-
- rpatcher = mock.patch("cloudinit.util.lsb_release")
- get_rel = rpatcher.start()
- get_rel.return_value = {'codename': "fakerelease"}
- self.addCleanup(rpatcher.stop)
- apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
- get_arch = apatcher.start()
- get_arch.return_value = 'amd64'
- self.addCleanup(apatcher.stop)
-
- def apt_source_list(self, distro, mirror, mirrorcheck=None):
- """apt_source_list
- Test rendering of a source.list from template for a given distro
- """
- if mirrorcheck is None:
- mirrorcheck = mirror
-
- if isinstance(mirror, list):
- cfg = {'apt_mirror_search': mirror}
- else:
- cfg = {'apt_mirror': mirror}
-
- mycloud = get_cloud(distro)
-
- with mock.patch.object(util, 'write_file') as mockwf:
- with mock.patch.object(util, 'load_file',
- return_value="faketmpl") as mocklf:
- with mock.patch.object(os.path, 'isfile',
- return_value=True) as mockisfile:
- with mock.patch.object(
- templater, 'render_string',
- return_value='fake') as mockrnd:
- with mock.patch.object(util, 'rename'):
- cc_apt_configure.handle("test", cfg, mycloud,
- LOG, None)
-
- mockisfile.assert_any_call(
- ('/etc/cloud/templates/sources.list.%s.tmpl' % distro))
- mocklf.assert_any_call(
- ('/etc/cloud/templates/sources.list.%s.tmpl' % distro))
- mockrnd.assert_called_once_with('faketmpl',
- {'RELEASE': 'fakerelease',
- 'PRIMARY': mirrorcheck,
- 'MIRROR': mirrorcheck,
- 'SECURITY': mirrorcheck,
- 'codename': 'fakerelease',
- 'primary': mirrorcheck,
- 'mirror': mirrorcheck,
- 'security': mirrorcheck})
- mockwf.assert_called_once_with('/etc/apt/sources.list', 'fake',
- mode=0o644)
-
- def test_apt_v1_source_list_debian(self):
- """Test rendering of a source.list from template for debian"""
- self.apt_source_list('debian', 'http://httpredir.debian.org/debian')
-
- def test_apt_v1_source_list_ubuntu(self):
- """Test rendering of a source.list from template for ubuntu"""
- self.apt_source_list('ubuntu', 'http://archive.ubuntu.com/ubuntu/')
-
- @staticmethod
- def myresolve(name):
- """Fake util.is_resolvable for mirrorfail tests"""
- if name == "does.not.exist":
- print("Faking FAIL for '%s'" % name)
- return False
- else:
- print("Faking SUCCESS for '%s'" % name)
- return True
-
- def test_apt_v1_srcl_debian_mirrorfail(self):
- """Test rendering of a source.list from template for debian"""
- with mock.patch.object(util, 'is_resolvable',
- side_effect=self.myresolve) as mockresolve:
- self.apt_source_list('debian',
- ['http://does.not.exist',
- 'http://httpredir.debian.org/debian'],
- 'http://httpredir.debian.org/debian')
- mockresolve.assert_any_call("does.not.exist")
- mockresolve.assert_any_call("httpredir.debian.org")
-
- def test_apt_v1_srcl_ubuntu_mirrorfail(self):
- """Test rendering of a source.list from template for ubuntu"""
- with mock.patch.object(util, 'is_resolvable',
- side_effect=self.myresolve) as mockresolve:
- self.apt_source_list('ubuntu',
- ['http://does.not.exist',
- 'http://archive.ubuntu.com/ubuntu/'],
- 'http://archive.ubuntu.com/ubuntu/')
- mockresolve.assert_any_call("does.not.exist")
- mockresolve.assert_any_call("archive.ubuntu.com")
-
- def test_apt_v1_srcl_custom(self):
- """Test rendering from a custom source.list template"""
- cfg = util.load_yaml(YAML_TEXT_CUSTOM_SL)
- mycloud = get_cloud()
-
- # the second mock restores the original subp
- with mock.patch.object(util, 'write_file') as mockwrite:
- with mock.patch.object(subp, 'subp', self.subp):
- with mock.patch.object(Distro, 'get_primary_arch',
- return_value='amd64'):
- cc_apt_configure.handle("notimportant", cfg, mycloud,
- LOG, None)
-
- mockwrite.assert_called_once_with(
- '/etc/apt/sources.list',
- EXPECTED_CONVERTED_CONTENT,
- mode=420)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py
deleted file mode 100644
index cd6f9239..00000000
--- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-""" test_apt_custom_sources_list
-Test templating of custom sources list
-"""
-from contextlib import ExitStack
-import logging
-import os
-import shutil
-import tempfile
-from unittest import mock
-from unittest.mock import call
-
-from cloudinit import subp
-from cloudinit import util
-from cloudinit.config import cc_apt_configure
-from cloudinit.distros.debian import Distro
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-TARGET = "/"
-
-# Input and expected output for the custom template
-YAML_TEXT_CUSTOM_SL = """
-apt:
- primary:
- - arches: [default]
- uri: http://test.ubuntu.com/ubuntu/
- security:
- - arches: [default]
- uri: http://testsec.ubuntu.com/ubuntu/
- sources_list: |
-
- # Note, this file is written by cloud-init at install time. It should not
- # end up on the installed system itself.
- # See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
- # newer versions of the distribution.
- deb $MIRROR $RELEASE main restricted
- deb-src $MIRROR $RELEASE main restricted
- deb $PRIMARY $RELEASE universe restricted
- deb $SECURITY $RELEASE-security multiverse
- # FIND_SOMETHING_SPECIAL
-"""
-
-EXPECTED_CONVERTED_CONTENT = """
-# Note, this file is written by cloud-init at install time. It should not
-# end up on the installed system itself.
-# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
-# newer versions of the distribution.
-deb http://test.ubuntu.com/ubuntu/ fakerel main restricted
-deb-src http://test.ubuntu.com/ubuntu/ fakerel main restricted
-deb http://test.ubuntu.com/ubuntu/ fakerel universe restricted
-deb http://testsec.ubuntu.com/ubuntu/ fakerel-security multiverse
-# FIND_SOMETHING_SPECIAL
-"""
-
-# mocked to be independent to the unittest system
-MOCKED_APT_SRC_LIST = """
-deb http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
-deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
-"""
-
-EXPECTED_BASE_CONTENT = ("""
-deb http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
-deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
-""")
-
-EXPECTED_MIRROR_CONTENT = ("""
-deb http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
-deb http://test.ubuntu.com/ubuntu/ notouched-security main restricted
-""")
-
-EXPECTED_PRIMSEC_CONTENT = ("""
-deb http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb-src http://test.ubuntu.com/ubuntu/ notouched main restricted
-deb http://test.ubuntu.com/ubuntu/ notouched-updates main restricted
-deb http://testsec.ubuntu.com/ubuntu/ notouched-security main restricted
-""")
-
-
-class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
- """TestAptSourceConfigSourceList - Class to test sources list rendering"""
- def setUp(self):
- super(TestAptSourceConfigSourceList, self).setUp()
- self.subp = subp.subp
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
-
- rpatcher = mock.patch("cloudinit.util.lsb_release")
- get_rel = rpatcher.start()
- get_rel.return_value = {'codename': "fakerel"}
- self.addCleanup(rpatcher.stop)
- apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
- get_arch = apatcher.start()
- get_arch.return_value = 'amd64'
- self.addCleanup(apatcher.stop)
-
- def _apt_source_list(self, distro, cfg, cfg_on_empty=False):
- """_apt_source_list - Test rendering from template (generic)"""
- # entry at top level now, wrap in 'apt' key
- cfg = {'apt': cfg}
- mycloud = get_cloud(distro)
-
- with ExitStack() as stack:
- mock_writefile = stack.enter_context(mock.patch.object(
- util, 'write_file'))
- mock_loadfile = stack.enter_context(mock.patch.object(
- util, 'load_file', return_value=MOCKED_APT_SRC_LIST))
- mock_isfile = stack.enter_context(mock.patch.object(
- os.path, 'isfile', return_value=True))
- stack.enter_context(mock.patch.object(
- util, 'del_file'))
- cfg_func = ('cloudinit.config.cc_apt_configure.'
- '_should_configure_on_empty_apt')
- mock_shouldcfg = stack.enter_context(mock.patch(
- cfg_func, return_value=(cfg_on_empty, 'test')
- ))
- cc_apt_configure.handle("test", cfg, mycloud, LOG, None)
-
- return mock_writefile, mock_loadfile, mock_isfile, mock_shouldcfg
-
- def test_apt_v3_source_list_debian(self):
- """test_apt_v3_source_list_debian - without custom sources or parms"""
- cfg = {}
- distro = 'debian'
- expected = EXPECTED_BASE_CONTENT
-
- mock_writefile, mock_load_file, mock_isfile, mock_shouldcfg = (
- self._apt_source_list(distro, cfg, cfg_on_empty=True))
-
- template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
- mock_writefile.assert_called_once_with('/etc/apt/sources.list',
- expected, mode=0o644)
- mock_load_file.assert_called_with(template)
- mock_isfile.assert_any_call(template)
- self.assertEqual(1, mock_shouldcfg.call_count)
-
- def test_apt_v3_source_list_ubuntu(self):
- """test_apt_v3_source_list_ubuntu - without custom sources or parms"""
- cfg = {}
- distro = 'ubuntu'
- expected = EXPECTED_BASE_CONTENT
-
- mock_writefile, mock_load_file, mock_isfile, mock_shouldcfg = (
- self._apt_source_list(distro, cfg, cfg_on_empty=True))
-
- template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
- mock_writefile.assert_called_once_with('/etc/apt/sources.list',
- expected, mode=0o644)
- mock_load_file.assert_called_with(template)
- mock_isfile.assert_any_call(template)
- self.assertEqual(1, mock_shouldcfg.call_count)
-
- def test_apt_v3_source_list_ubuntu_snappy(self):
- """test_apt_v3_source_list_ubuntu_snappy - without custom sources or
- parms"""
- cfg = {'apt': {}}
- mycloud = get_cloud()
-
- with mock.patch.object(util, 'write_file') as mock_writefile:
- with mock.patch.object(util, 'system_is_snappy',
- return_value=True) as mock_issnappy:
- cc_apt_configure.handle("test", cfg, mycloud, LOG, None)
-
- self.assertEqual(0, mock_writefile.call_count)
- self.assertEqual(1, mock_issnappy.call_count)
-
- def test_apt_v3_source_list_centos(self):
- """test_apt_v3_source_list_centos - without custom sources or parms"""
- cfg = {}
- distro = 'rhel'
-
- mock_writefile, _, _, _ = self._apt_source_list(distro, cfg)
-
- self.assertEqual(0, mock_writefile.call_count)
-
- def test_apt_v3_source_list_psm(self):
- """test_apt_v3_source_list_psm - Test specifying prim+sec mirrors"""
- pm = 'http://test.ubuntu.com/ubuntu/'
- sm = 'http://testsec.ubuntu.com/ubuntu/'
- cfg = {'preserve_sources_list': False,
- 'primary': [{'arches': ["default"],
- 'uri': pm}],
- 'security': [{'arches': ["default"],
- 'uri': sm}]}
- distro = 'ubuntu'
- expected = EXPECTED_PRIMSEC_CONTENT
-
- mock_writefile, mock_load_file, mock_isfile, _ = (
- self._apt_source_list(distro, cfg, cfg_on_empty=True))
-
- template = '/etc/cloud/templates/sources.list.%s.tmpl' % distro
- mock_writefile.assert_called_once_with('/etc/apt/sources.list',
- expected, mode=0o644)
- mock_load_file.assert_called_with(template)
- mock_isfile.assert_any_call(template)
-
- def test_apt_v3_srcl_custom(self):
- """test_apt_v3_srcl_custom - Test rendering a custom source template"""
- cfg = util.load_yaml(YAML_TEXT_CUSTOM_SL)
- mycloud = get_cloud()
-
- # the second mock restores the original subp
- with mock.patch.object(util, 'write_file') as mockwrite:
- with mock.patch.object(subp, 'subp', self.subp):
- with mock.patch.object(Distro, 'get_primary_arch',
- return_value='amd64'):
- cc_apt_configure.handle("notimportant", cfg, mycloud,
- LOG, None)
-
- calls = [call('/etc/apt/sources.list',
- EXPECTED_CONVERTED_CONTENT,
- mode=0o644)]
- mockwrite.assert_has_calls(calls)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_key.py b/tests/unittests/test_handler/test_handler_apt_key.py
deleted file mode 100644
index 00e5a38d..00000000
--- a/tests/unittests/test_handler/test_handler_apt_key.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import os
-from unittest import mock
-
-from cloudinit.config import cc_apt_configure
-from cloudinit import subp
-from cloudinit import util
-
-TEST_KEY_HUMAN = '''
-/etc/apt/cloud-init.gpg.d/my_key.gpg
---------------------------------------------
-pub rsa4096 2021-10-22 [SC]
- 3A3E F34D FDED B3B7 F3FD F603 F83F 7712 9A5E BD85
-uid [ unknown] Brett Holman <brett.holman@canonical.com>
-sub rsa4096 2021-10-22 [A]
-sub rsa4096 2021-10-22 [E]
-'''
-
-TEST_KEY_MACHINE = '''
-tru::1:1635129362:0:3:1:5
-pub:-:4096:1:F83F77129A5EBD85:1634912922:::-:::scESCA::::::23::0:
-fpr:::::::::3A3EF34DFDEDB3B7F3FDF603F83F77129A5EBD85:
-uid:-::::1634912922::64F1F1D6FA96316752D635D7C6406C52C40713C7::Brett Holman \
-<brett.holman@canonical.com>::::::::::0:
-sub:-:4096:1:544B39C9A9141F04:1634912922::::::a::::::23:
-fpr:::::::::8BD901490D6EC986D03D6F0D544B39C9A9141F04:
-sub:-:4096:1:F45D9443F0A87092:1634912922::::::e::::::23:
-fpr:::::::::8CCCB332317324F030A45B19F45D9443F0A87092:
-'''
-
-TEST_KEY_FINGERPRINT_HUMAN = \
- '3A3E F34D FDED B3B7 F3FD F603 F83F 7712 9A5E BD85'
-
-TEST_KEY_FINGERPRINT_MACHINE = \
- '3A3EF34DFDEDB3B7F3FDF603F83F77129A5EBD85'
-
-
-class TestAptKey:
- """TestAptKey
- Class to test apt-key commands
- """
- @mock.patch.object(subp, 'subp', return_value=('fakekey', ''))
- @mock.patch.object(util, 'write_file')
- def _apt_key_add_success_helper(self, directory, *args, hardened=False):
- file = cc_apt_configure.apt_key(
- 'add',
- output_file='my-key',
- data='fakekey',
- hardened=hardened)
- assert file == directory + '/my-key.gpg'
-
- def test_apt_key_add_success(self):
- """Verify the correct directory path gets returned for unhardened case
- """
- self._apt_key_add_success_helper('/etc/apt/trusted.gpg.d')
-
- def test_apt_key_add_success_hardened(self):
- """Verify the correct directory path gets returned for hardened case
- """
- self._apt_key_add_success_helper(
- '/etc/apt/cloud-init.gpg.d',
- hardened=True)
-
- def test_apt_key_add_fail_no_file_name(self):
- """Verify that null filename gets handled correctly
- """
- file = cc_apt_configure.apt_key(
- 'add',
- output_file=None,
- data='')
- assert '/dev/null' == file
-
- def _apt_key_fail_helper(self):
- file = cc_apt_configure.apt_key(
- 'add',
- output_file='my-key',
- data='fakekey')
- assert file == '/dev/null'
-
- @mock.patch.object(subp, 'subp', side_effect=subp.ProcessExecutionError)
- def test_apt_key_add_fail_no_file_name_subproc(self, *args):
- """Verify that bad key value gets handled correctly
- """
- self._apt_key_fail_helper()
-
- @mock.patch.object(
- subp, 'subp', side_effect=UnicodeDecodeError('test', b'', 1, 1, ''))
- def test_apt_key_add_fail_no_file_name_unicode(self, *args):
- """Verify that bad key encoding gets handled correctly
- """
- self._apt_key_fail_helper()
-
- def _apt_key_list_success_helper(self, finger, key, human_output=True):
- @mock.patch.object(os, 'listdir', return_value=('/fake/dir/key.gpg',))
- @mock.patch.object(subp, 'subp', return_value=(key, ''))
- def mocked_list(*a):
-
- keys = cc_apt_configure.apt_key('list', human_output)
- assert finger in keys
- mocked_list()
-
- def test_apt_key_list_success_human(self):
- """Verify expected key output, human
- """
- self._apt_key_list_success_helper(
- TEST_KEY_FINGERPRINT_HUMAN,
- TEST_KEY_HUMAN)
-
- def test_apt_key_list_success_machine(self):
- """Verify expected key output, machine
- """
- self._apt_key_list_success_helper(
- TEST_KEY_FINGERPRINT_MACHINE,
- TEST_KEY_MACHINE, human_output=False)
-
- @mock.patch.object(os, 'listdir', return_value=())
- @mock.patch.object(subp, 'subp', return_value=('', ''))
- def test_apt_key_list_fail_no_keys(self, *args):
- """Ensure falsy output for no keys
- """
- keys = cc_apt_configure.apt_key('list')
- assert not keys
-
- @mock.patch.object(os, 'listdir', return_value=('file_not_gpg_key.txt'))
- @mock.patch.object(subp, 'subp', return_value=('', ''))
- def test_apt_key_list_fail_no_keys_file(self, *args):
- """Ensure non-gpg file is not returned.
-
- apt-key used file extensions for this, so we do too
- """
- assert not cc_apt_configure.apt_key('list')
-
- @mock.patch.object(subp, 'subp', side_effect=subp.ProcessExecutionError)
- @mock.patch.object(os, 'listdir', return_value=('bad_gpg_key.gpg'))
- def test_apt_key_list_fail_bad_key_file(self, *args):
- """Ensure bad gpg key doesn't throw exeption.
- """
- assert not cc_apt_configure.apt_key('list')
diff --git a/tests/unittests/test_handler/test_handler_apt_source_v1.py b/tests/unittests/test_handler/test_handler_apt_source_v1.py
deleted file mode 100644
index 2357d699..00000000
--- a/tests/unittests/test_handler/test_handler_apt_source_v1.py
+++ /dev/null
@@ -1,651 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-""" test_handler_apt_source_v1
-Testing various config variations of the apt_source config
-This calls all things with v1 format to stress the conversion code on top of
-the actually tested code.
-"""
-import os
-import re
-import shutil
-import tempfile
-import pathlib
-from unittest import mock
-from unittest.mock import call
-
-from cloudinit.config import cc_apt_configure
-from cloudinit import gpg
-from cloudinit import subp
-from cloudinit import util
-
-from cloudinit.tests.helpers import TestCase
-
-EXPECTEDKEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1
-
-mI0ESuZLUgEEAKkqq3idtFP7g9hzOu1a8+v8ImawQN4TrvlygfScMU1TIS1eC7UQ
-NUA8Qqgr9iUaGnejb0VciqftLrU9D6WYHSKz+EITefgdyJ6SoQxjoJdsCpJ7o9Jy
-8PQnpRttiFm4qHu6BVnKnBNxw/z3ST9YMqW5kbMQpfxbGe+obRox59NpABEBAAG0
-HUxhdW5jaHBhZCBQUEEgZm9yIFNjb3R0IE1vc2VyiLYEEwECACAFAkrmS1ICGwMG
-CwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRAGILvPA2g/d3aEA/9tVjc10HOZwV29
-OatVuTeERjjrIbxflO586GLA8cp0C9RQCwgod/R+cKYdQcHjbqVcP0HqxveLg0RZ
-FJpWLmWKamwkABErwQLGlM/Hwhjfade8VvEQutH5/0JgKHmzRsoqfR+LMO6OS+Sm
-S0ORP6HXET3+jC8BMG4tBWCTK/XEZw==
-=ACB2
------END PGP PUBLIC KEY BLOCK-----"""
-
-ADD_APT_REPO_MATCH = r"^[\w-]+:\w"
-
-
-class FakeDistro(object):
- """Fake Distro helper object"""
- def update_package_sources(self):
- """Fake update_package_sources helper method"""
- return
-
-
-class FakeDatasource:
- """Fake Datasource helper object"""
- def __init__(self):
- self.region = 'region'
-
-
-class FakeCloud(object):
- """Fake Cloud helper object"""
- def __init__(self):
- self.distro = FakeDistro()
- self.datasource = FakeDatasource()
-
-
-class TestAptSourceConfig(TestCase):
- """TestAptSourceConfig
- Main Class to test apt_source configs
- """
- release = "fantastic"
-
- def setUp(self):
- super(TestAptSourceConfig, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
- self.aptlistfile = os.path.join(self.tmp, "single-deb.list")
- self.aptlistfile2 = os.path.join(self.tmp, "single-deb2.list")
- self.aptlistfile3 = os.path.join(self.tmp, "single-deb3.list")
- self.join = os.path.join
- self.matcher = re.compile(ADD_APT_REPO_MATCH).search
- # mock fallback filename into writable tmp dir
- self.fallbackfn = os.path.join(self.tmp, "etc/apt/sources.list.d/",
- "cloud_config_sources.list")
-
- self.fakecloud = FakeCloud()
-
- rpatcher = mock.patch("cloudinit.util.lsb_release")
- get_rel = rpatcher.start()
- get_rel.return_value = {'codename': self.release}
- self.addCleanup(rpatcher.stop)
- apatcher = mock.patch("cloudinit.util.get_dpkg_architecture")
- get_arch = apatcher.start()
- get_arch.return_value = 'amd64'
- self.addCleanup(apatcher.stop)
-
- def _get_default_params(self):
- """get_default_params
- Get the most basic default mrror and release info to be used in tests
- """
- params = {}
- params['RELEASE'] = self.release
- params['MIRROR'] = "http://archive.ubuntu.com/ubuntu"
- return params
-
- def wrapv1conf(self, cfg):
- params = self._get_default_params()
- # old v1 list format under old keys, but callabe to main handler
- # disable source.list rendering and set mirror to avoid other code
- return {'apt_preserve_sources_list': True,
- 'apt_mirror': params['MIRROR'],
- 'apt_sources': cfg}
-
- def myjoin(self, *args, **kwargs):
- """myjoin - redir into writable tmpdir"""
- if (args[0] == "/etc/apt/sources.list.d/" and
- args[1] == "cloud_config_sources.list" and
- len(args) == 2):
- return self.join(self.tmp, args[0].lstrip("/"), args[1])
- else:
- return self.join(*args, **kwargs)
-
- def apt_src_basic(self, filename, cfg):
- """apt_src_basic
- Test Fix deb source string, has to overwrite mirror conf in params
- """
- cfg = self.wrapv1conf(cfg)
-
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://archive.ubuntu.com/ubuntu",
- "karmic-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_basic(self):
- """Test deb source string, overwrite mirror and filename"""
- cfg = {'source': ('deb http://archive.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted'),
- 'filename': self.aptlistfile}
- self.apt_src_basic(self.aptlistfile, [cfg])
-
- def test_apt_src_basic_dict(self):
- """Test deb source string, overwrite mirror and filename (dict)"""
- cfg = {self.aptlistfile: {'source':
- ('deb http://archive.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted')}}
- self.apt_src_basic(self.aptlistfile, cfg)
-
- def apt_src_basic_tri(self, cfg):
- """apt_src_basic_tri
- Test Fix three deb source string, has to overwrite mirror conf in
- params. Test with filenames provided in config.
- generic part to check three files with different content
- """
- self.apt_src_basic(self.aptlistfile, cfg)
-
- # extra verify on two extra files of this test
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://archive.ubuntu.com/ubuntu",
- "precise-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://archive.ubuntu.com/ubuntu",
- "lucid-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_basic_tri(self):
- """Test Fix three deb source string with filenames"""
- cfg1 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted'),
- 'filename': self.aptlistfile}
- cfg2 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
- ' precise-backports'
- ' main universe multiverse restricted'),
- 'filename': self.aptlistfile2}
- cfg3 = {'source': ('deb http://archive.ubuntu.com/ubuntu'
- ' lucid-backports'
- ' main universe multiverse restricted'),
- 'filename': self.aptlistfile3}
- self.apt_src_basic_tri([cfg1, cfg2, cfg3])
-
- def test_apt_src_basic_dict_tri(self):
- """Test Fix three deb source string with filenames (dict)"""
- cfg = {self.aptlistfile: {'source':
- ('deb http://archive.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted')},
- self.aptlistfile2: {'source':
- ('deb http://archive.ubuntu.com/ubuntu'
- ' precise-backports'
- ' main universe multiverse restricted')},
- self.aptlistfile3: {'source':
- ('deb http://archive.ubuntu.com/ubuntu'
- ' lucid-backports'
- ' main universe multiverse restricted')}}
- self.apt_src_basic_tri(cfg)
-
- def test_apt_src_basic_nofn(self):
- """Test Fix three deb source string without filenames (dict)"""
- cfg = {'source': ('deb http://archive.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted')}
- with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
- self.apt_src_basic(self.fallbackfn, [cfg])
-
- def apt_src_replacement(self, filename, cfg):
- """apt_src_replace
- Test Autoreplacement of MIRROR and RELEASE in source specs
- """
- cfg = self.wrapv1conf(cfg)
- params = self._get_default_params()
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "multiverse"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_replace(self):
- """Test Autoreplacement of MIRROR and RELEASE in source specs"""
- cfg = {'source': 'deb $MIRROR $RELEASE multiverse',
- 'filename': self.aptlistfile}
- self.apt_src_replacement(self.aptlistfile, [cfg])
-
- def apt_src_replace_tri(self, cfg):
- """apt_src_replace_tri
- Test three autoreplacements of MIRROR and RELEASE in source specs with
- generic part
- """
- self.apt_src_replacement(self.aptlistfile, cfg)
-
- # extra verify on two extra files of this test
- params = self._get_default_params()
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "main"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "universe"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_replace_tri(self):
- """Test triple Autoreplacement of MIRROR and RELEASE in source specs"""
- cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
- 'filename': self.aptlistfile}
- cfg2 = {'source': 'deb $MIRROR $RELEASE main',
- 'filename': self.aptlistfile2}
- cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
- 'filename': self.aptlistfile3}
- self.apt_src_replace_tri([cfg1, cfg2, cfg3])
-
- def test_apt_src_replace_dict_tri(self):
- """Test triple Autoreplacement in source specs (dict)"""
- cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'},
- 'notused': {'source': 'deb $MIRROR $RELEASE main',
- 'filename': self.aptlistfile2},
- self.aptlistfile3: {'source': 'deb $MIRROR $RELEASE universe'}}
- self.apt_src_replace_tri(cfg)
-
- def test_apt_src_replace_nofn(self):
- """Test Autoreplacement of MIRROR and RELEASE in source specs nofile"""
- cfg = {'source': 'deb $MIRROR $RELEASE multiverse'}
- with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
- self.apt_src_replacement(self.fallbackfn, [cfg])
-
- def apt_src_keyid(self, filename, cfg, keynum):
- """apt_src_keyid
- Test specification of a source + keyid
- """
- cfg = self.wrapv1conf(cfg)
-
- with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
-
- # check if it added the right number of keys
- calls = []
- sources = cfg['apt']['sources']
- for src in sources:
- print(sources[src])
- calls.append(call(sources[src], None))
-
- mockobj.assert_has_calls(calls, any_order=True)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "main"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_keyid(self):
- """Test specification of a source + keyid with filename being set"""
- cfg = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'keyid': "03683F77",
- 'filename': self.aptlistfile}
- self.apt_src_keyid(self.aptlistfile, [cfg], 1)
-
- def test_apt_src_keyid_tri(self):
- """Test 3x specification of a source + keyid with filename being set"""
- cfg1 = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'keyid': "03683F77",
- 'filename': self.aptlistfile}
- cfg2 = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial universe'),
- 'keyid': "03683F77",
- 'filename': self.aptlistfile2}
- cfg3 = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial multiverse'),
- 'keyid': "03683F77",
- 'filename': self.aptlistfile3}
-
- self.apt_src_keyid(self.aptlistfile, [cfg1, cfg2, cfg3], 3)
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "universe"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "multiverse"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_keyid_nofn(self):
- """Test specification of a source + keyid without filename being set"""
- cfg = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'keyid': "03683F77"}
- with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
- self.apt_src_keyid(self.fallbackfn, [cfg], 1)
-
- def apt_src_key(self, filename, cfg):
- """apt_src_key
- Test specification of a source + key
- """
- cfg = self.wrapv1conf([cfg])
-
- with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
-
- # check if it added the right amount of keys
- sources = cfg['apt']['sources']
- calls = []
- for src in sources:
- print(sources[src])
- calls.append(call(sources[src], None))
-
- mockobj.assert_has_calls(calls, any_order=True)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "main"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_src_key(self):
- """Test specification of a source + key with filename being set"""
- cfg = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'key': "fakekey 4321",
- 'filename': self.aptlistfile}
- self.apt_src_key(self.aptlistfile, cfg)
-
- def test_apt_src_key_nofn(self):
- """Test specification of a source + key without filename being set"""
- cfg = {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'key': "fakekey 4321"}
- with mock.patch.object(os.path, 'join', side_effect=self.myjoin):
- self.apt_src_key(self.fallbackfn, cfg)
-
- def test_apt_src_keyonly(self):
- """Test specifying key without source"""
- cfg = {'key': "fakekey 4242",
- 'filename': self.aptlistfile}
- cfg = self.wrapv1conf([cfg])
- with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
-
- calls = (call(
- 'add',
- output_file=pathlib.Path(self.aptlistfile).stem,
- data='fakekey 4242',
- hardened=False),)
- mockobj.assert_has_calls(calls, any_order=True)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_src_keyidonly(self):
- """Test specification of a keyid without source"""
- cfg = {'keyid': "03683F77",
- 'filename': self.aptlistfile}
- cfg = self.wrapv1conf([cfg])
-
- with mock.patch.object(subp, 'subp',
- return_value=('fakekey 1212', '')):
- with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
- cc_apt_configure.handle(
- "test",
- cfg,
- self.fakecloud,
- None,
- None)
-
- calls = (call(
- 'add',
- output_file=pathlib.Path(self.aptlistfile).stem,
- data='fakekey 1212',
- hardened=False),)
- mockobj.assert_has_calls(calls, any_order=True)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def apt_src_keyid_real(self, cfg, expectedkey, is_hardened=None):
- """apt_src_keyid_real
- Test specification of a keyid without source including
- up to addition of the key (add_apt_key_raw mocked to keep the
- environment as is)
- """
- key = cfg['keyid']
- keyserver = cfg.get('keyserver', 'keyserver.ubuntu.com')
- cfg = self.wrapv1conf([cfg])
-
- with mock.patch.object(cc_apt_configure, 'add_apt_key_raw') as mockkey:
- with mock.patch.object(gpg, 'getkeybyid',
- return_value=expectedkey) as mockgetkey:
- cc_apt_configure.handle("test", cfg, self.fakecloud,
- None, None)
- if is_hardened is not None:
- mockkey.assert_called_with(
- expectedkey,
- self.aptlistfile,
- hardened=is_hardened)
- else:
- mockkey.assert_called_with(expectedkey, self.aptlistfile)
- mockgetkey.assert_called_with(key, keyserver)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_src_keyid_real(self):
- """test_apt_src_keyid_real - Test keyid including key add"""
- keyid = "03683F77"
- cfg = {'keyid': keyid,
- 'filename': self.aptlistfile}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
-
- def test_apt_src_longkeyid_real(self):
- """test_apt_src_longkeyid_real - Test long keyid including key add"""
- keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
- cfg = {'keyid': keyid,
- 'filename': self.aptlistfile}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
-
- def test_apt_src_longkeyid_ks_real(self):
- """test_apt_src_longkeyid_ks_real - Test long keyid from other ks"""
- keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
- cfg = {'keyid': keyid,
- 'keyserver': 'keys.gnupg.net',
- 'filename': self.aptlistfile}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
-
- def test_apt_src_ppa(self):
- """Test adding a ppa"""
- cfg = {'source': 'ppa:smoser/cloud-init-test',
- 'filename': self.aptlistfile}
- cfg = self.wrapv1conf([cfg])
-
- with mock.patch.object(subp, 'subp') as mockobj:
- cc_apt_configure.handle("test", cfg, self.fakecloud, None, None)
- mockobj.assert_called_once_with(['add-apt-repository',
- 'ppa:smoser/cloud-init-test'],
- target=None)
-
- # adding ppa should ignore filename (uses add-apt-repository)
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_src_ppa_tri(self):
- """Test adding three ppa's"""
- cfg1 = {'source': 'ppa:smoser/cloud-init-test',
- 'filename': self.aptlistfile}
- cfg2 = {'source': 'ppa:smoser/cloud-init-test2',
- 'filename': self.aptlistfile2}
- cfg3 = {'source': 'ppa:smoser/cloud-init-test3',
- 'filename': self.aptlistfile3}
- cfg = self.wrapv1conf([cfg1, cfg2, cfg3])
-
- with mock.patch.object(subp, 'subp') as mockobj:
- cc_apt_configure.handle("test", cfg, self.fakecloud,
- None, None)
- calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
- target=None),
- call(['add-apt-repository', 'ppa:smoser/cloud-init-test2'],
- target=None),
- call(['add-apt-repository', 'ppa:smoser/cloud-init-test3'],
- target=None)]
- mockobj.assert_has_calls(calls, any_order=True)
-
- # adding ppa should ignore all filenames (uses add-apt-repository)
- self.assertFalse(os.path.isfile(self.aptlistfile))
- self.assertFalse(os.path.isfile(self.aptlistfile2))
- self.assertFalse(os.path.isfile(self.aptlistfile3))
-
- def test_convert_to_new_format(self):
- """Test the conversion of old to new format"""
- cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
- 'filename': self.aptlistfile}
- cfg2 = {'source': 'deb $MIRROR $RELEASE main',
- 'filename': self.aptlistfile2}
- cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
- 'filename': self.aptlistfile3}
- cfg = {'apt_sources': [cfg1, cfg2, cfg3]}
- checkcfg = {self.aptlistfile: {'filename': self.aptlistfile,
- 'source': 'deb $MIRROR $RELEASE '
- 'multiverse'},
- self.aptlistfile2: {'filename': self.aptlistfile2,
- 'source': 'deb $MIRROR $RELEASE main'},
- self.aptlistfile3: {'filename': self.aptlistfile3,
- 'source': 'deb $MIRROR $RELEASE '
- 'universe'}}
-
- newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg)
- self.assertEqual(newcfg['apt']['sources'], checkcfg)
-
- # convert again, should stay the same
- newcfg2 = cc_apt_configure.convert_to_v3_apt_format(newcfg)
- self.assertEqual(newcfg2['apt']['sources'], checkcfg)
-
- # should work without raising an exception
- cc_apt_configure.convert_to_v3_apt_format({})
-
- with self.assertRaises(ValueError):
- cc_apt_configure.convert_to_v3_apt_format({'apt_sources': 5})
-
- def test_convert_to_new_format_collision(self):
- """Test the conversion of old to new format with collisions
- That matches e.g. the MAAS case specifying old and new config"""
- cfg_1_and_3 = {'apt': {'proxy': 'http://192.168.122.1:8000/'},
- 'apt_proxy': 'http://192.168.122.1:8000/'}
- cfg_3_only = {'apt': {'proxy': 'http://192.168.122.1:8000/'}}
- cfgconflict = {'apt': {'proxy': 'http://192.168.122.1:8000/'},
- 'apt_proxy': 'ftp://192.168.122.1:8000/'}
-
- # collision (equal)
- newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3)
- self.assertEqual(newcfg, cfg_3_only)
- # collision (equal, so ok to remove)
- newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_3_only)
- self.assertEqual(newcfg, cfg_3_only)
- # collision (unequal)
- match = "Old and New.*unequal.*apt_proxy"
- with self.assertRaisesRegex(ValueError, match):
- cc_apt_configure.convert_to_v3_apt_format(cfgconflict)
-
- def test_convert_to_new_format_dict_collision(self):
- cfg1 = {'source': 'deb $MIRROR $RELEASE multiverse',
- 'filename': self.aptlistfile}
- cfg2 = {'source': 'deb $MIRROR $RELEASE main',
- 'filename': self.aptlistfile2}
- cfg3 = {'source': 'deb $MIRROR $RELEASE universe',
- 'filename': self.aptlistfile3}
- fullv3 = {self.aptlistfile: {'filename': self.aptlistfile,
- 'source': 'deb $MIRROR $RELEASE '
- 'multiverse'},
- self.aptlistfile2: {'filename': self.aptlistfile2,
- 'source': 'deb $MIRROR $RELEASE main'},
- self.aptlistfile3: {'filename': self.aptlistfile3,
- 'source': 'deb $MIRROR $RELEASE '
- 'universe'}}
- cfg_3_only = {'apt': {'sources': fullv3}}
- cfg_1_and_3 = {'apt_sources': [cfg1, cfg2, cfg3]}
- cfg_1_and_3.update(cfg_3_only)
-
- # collision (equal, so ok to remove)
- newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3)
- self.assertEqual(newcfg, cfg_3_only)
- # no old spec (same result)
- newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_3_only)
- self.assertEqual(newcfg, cfg_3_only)
-
- diff = {self.aptlistfile: {'filename': self.aptlistfile,
- 'source': 'deb $MIRROR $RELEASE '
- 'DIFFERENTVERSE'},
- self.aptlistfile2: {'filename': self.aptlistfile2,
- 'source': 'deb $MIRROR $RELEASE main'},
- self.aptlistfile3: {'filename': self.aptlistfile3,
- 'source': 'deb $MIRROR $RELEASE '
- 'universe'}}
- cfg_3_only = {'apt': {'sources': diff}}
- cfg_1_and_3_different = {'apt_sources': [cfg1, cfg2, cfg3]}
- cfg_1_and_3_different.update(cfg_3_only)
-
- # collision (unequal by dict having a different entry)
- with self.assertRaises(ValueError):
- cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3_different)
-
- missing = {self.aptlistfile: {'filename': self.aptlistfile,
- 'source': 'deb $MIRROR $RELEASE '
- 'multiverse'}}
- cfg_3_only = {'apt': {'sources': missing}}
- cfg_1_and_3_missing = {'apt_sources': [cfg1, cfg2, cfg3]}
- cfg_1_and_3_missing.update(cfg_3_only)
- # collision (unequal by dict missing an entry)
- with self.assertRaises(ValueError):
- cc_apt_configure.convert_to_v3_apt_format(cfg_1_and_3_missing)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_apt_source_v3.py b/tests/unittests/test_handler/test_handler_apt_source_v3.py
deleted file mode 100644
index 20289121..00000000
--- a/tests/unittests/test_handler/test_handler_apt_source_v3.py
+++ /dev/null
@@ -1,1170 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-"""test_handler_apt_source_v3
-Testing various config variations of the apt_source custom config
-This tries to call all in the new v3 format and cares about new features
-"""
-import glob
-import os
-import re
-import shutil
-import socket
-import tempfile
-import pathlib
-
-from unittest import TestCase, mock
-from unittest.mock import call
-
-from cloudinit import gpg
-from cloudinit import subp
-from cloudinit import util
-from cloudinit.config import cc_apt_configure
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-EXPECTEDKEY = """-----BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1
-
-mI0ESuZLUgEEAKkqq3idtFP7g9hzOu1a8+v8ImawQN4TrvlygfScMU1TIS1eC7UQ
-NUA8Qqgr9iUaGnejb0VciqftLrU9D6WYHSKz+EITefgdyJ6SoQxjoJdsCpJ7o9Jy
-8PQnpRttiFm4qHu6BVnKnBNxw/z3ST9YMqW5kbMQpfxbGe+obRox59NpABEBAAG0
-HUxhdW5jaHBhZCBQUEEgZm9yIFNjb3R0IE1vc2VyiLYEEwECACAFAkrmS1ICGwMG
-CwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRAGILvPA2g/d3aEA/9tVjc10HOZwV29
-OatVuTeERjjrIbxflO586GLA8cp0C9RQCwgod/R+cKYdQcHjbqVcP0HqxveLg0RZ
-FJpWLmWKamwkABErwQLGlM/Hwhjfade8VvEQutH5/0JgKHmzRsoqfR+LMO6OS+Sm
-S0ORP6HXET3+jC8BMG4tBWCTK/XEZw==
-=ACB2
------END PGP PUBLIC KEY BLOCK-----"""
-
-ADD_APT_REPO_MATCH = r"^[\w-]+:\w"
-
-TARGET = None
-
-MOCK_LSB_RELEASE_DATA = {
- 'id': 'Ubuntu', 'description': 'Ubuntu 18.04.1 LTS',
- 'release': '18.04', 'codename': 'bionic'}
-
-
-class FakeDatasource:
- """Fake Datasource helper object"""
- def __init__(self):
- self.region = 'region'
-
-
-class FakeCloud:
- """Fake Cloud helper object"""
- def __init__(self):
- self.datasource = FakeDatasource()
-
-
-class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
- """TestAptSourceConfig
- Main Class to test apt configs
- """
- def setUp(self):
- super(TestAptSourceConfig, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
- self.addCleanup(shutil.rmtree, self.new_root)
- self.aptlistfile = os.path.join(self.tmp, "single-deb.list")
- self.aptlistfile2 = os.path.join(self.tmp, "single-deb2.list")
- self.aptlistfile3 = os.path.join(self.tmp, "single-deb3.list")
- self.join = os.path.join
- self.matcher = re.compile(ADD_APT_REPO_MATCH).search
- self.add_patch(
- 'cloudinit.config.cc_apt_configure.util.lsb_release',
- 'm_lsb_release', return_value=MOCK_LSB_RELEASE_DATA.copy())
-
- @staticmethod
- def _add_apt_sources(*args, **kwargs):
- with mock.patch.object(cc_apt_configure, 'update_packages'):
- cc_apt_configure.add_apt_sources(*args, **kwargs)
-
- @staticmethod
- def _get_default_params():
- """get_default_params
- Get the most basic default mrror and release info to be used in tests
- """
- params = {}
- params['RELEASE'] = MOCK_LSB_RELEASE_DATA['release']
- arch = 'amd64'
- params['MIRROR'] = cc_apt_configure.\
- get_default_mirrors(arch)["PRIMARY"]
- return params
-
- def _myjoin(self, *args, **kwargs):
- """_myjoin - redir into writable tmpdir"""
- if (args[0] == "/etc/apt/sources.list.d/" and
- args[1] == "cloud_config_sources.list" and
- len(args) == 2):
- return self.join(self.tmp, args[0].lstrip("/"), args[1])
- else:
- return self.join(*args, **kwargs)
-
- def _apt_src_basic(self, filename, cfg):
- """_apt_src_basic
- Test Fix deb source string, has to overwrite mirror conf in params
- """
- params = self._get_default_params()
-
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://test.ubuntu.com/ubuntu",
- "karmic-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_basic(self):
- """test_apt_v3_src_basic - Test fix deb source string"""
- cfg = {self.aptlistfile: {'source':
- ('deb http://test.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted')}}
- self._apt_src_basic(self.aptlistfile, cfg)
-
- def test_apt_v3_src_basic_tri(self):
- """test_apt_v3_src_basic_tri - Test multiple fix deb source strings"""
- cfg = {self.aptlistfile: {'source':
- ('deb http://test.ubuntu.com/ubuntu'
- ' karmic-backports'
- ' main universe multiverse restricted')},
- self.aptlistfile2: {'source':
- ('deb http://test.ubuntu.com/ubuntu'
- ' precise-backports'
- ' main universe multiverse restricted')},
- self.aptlistfile3: {'source':
- ('deb http://test.ubuntu.com/ubuntu'
- ' lucid-backports'
- ' main universe multiverse restricted')}}
- self._apt_src_basic(self.aptlistfile, cfg)
-
- # extra verify on two extra files of this test
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://test.ubuntu.com/ubuntu",
- "precise-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", "http://test.ubuntu.com/ubuntu",
- "lucid-backports",
- "main universe multiverse restricted"),
- contents, flags=re.IGNORECASE))
-
- def _apt_src_replacement(self, filename, cfg):
- """apt_src_replace
- Test Autoreplacement of MIRROR and RELEASE in source specs
- """
- params = self._get_default_params()
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "multiverse"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_replace(self):
- """test_apt_v3_src_replace - Test replacement of MIRROR & RELEASE"""
- cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'}}
- self._apt_src_replacement(self.aptlistfile, cfg)
-
- def test_apt_v3_src_replace_fn(self):
- """test_apt_v3_src_replace_fn - Test filename overwritten in dict"""
- cfg = {'ignored': {'source': 'deb $MIRROR $RELEASE multiverse',
- 'filename': self.aptlistfile}}
- # second file should overwrite the dict key
- self._apt_src_replacement(self.aptlistfile, cfg)
-
- def _apt_src_replace_tri(self, cfg):
- """_apt_src_replace_tri
- Test three autoreplacements of MIRROR and RELEASE in source specs with
- generic part
- """
- self._apt_src_replacement(self.aptlistfile, cfg)
-
- # extra verify on two extra files of this test
- params = self._get_default_params()
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "main"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb", params['MIRROR'], params['RELEASE'],
- "universe"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_replace_tri(self):
- """test_apt_v3_src_replace_tri - Test multiple replace/overwrites"""
- cfg = {self.aptlistfile: {'source': 'deb $MIRROR $RELEASE multiverse'},
- 'notused': {'source': 'deb $MIRROR $RELEASE main',
- 'filename': self.aptlistfile2},
- self.aptlistfile3: {'source': 'deb $MIRROR $RELEASE universe'}}
- self._apt_src_replace_tri(cfg)
-
- def _apt_src_keyid(self, filename, cfg, keynum, is_hardened=None):
- """_apt_src_keyid
- Test specification of a source + keyid
- """
- params = self._get_default_params()
-
- with mock.patch.object(cc_apt_configure, 'add_apt_key') as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- # check if it added the right number of keys
- calls = []
- for key in cfg:
- if is_hardened is not None:
- calls.append(call(cfg[key], hardened=is_hardened))
- else:
- calls.append(call(cfg[key], TARGET))
-
- mockobj.assert_has_calls(calls, any_order=True)
-
- self.assertTrue(os.path.isfile(filename))
-
- contents = util.load_file(filename)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "main"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_keyid(self):
- """test_apt_v3_src_keyid - Test source + keyid with filename"""
- cfg = {self.aptlistfile: {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'filename': self.aptlistfile,
- 'keyid': "03683F77"}}
- self._apt_src_keyid(self.aptlistfile, cfg, 1)
-
- def test_apt_v3_src_keyid_tri(self):
- """test_apt_v3_src_keyid_tri - Test multiple src+key+filen writes"""
- cfg = {self.aptlistfile: {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'keyid': "03683F77"},
- 'ignored': {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial universe'),
- 'keyid': "03683F77",
- 'filename': self.aptlistfile2},
- self.aptlistfile3: {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial multiverse'),
- 'filename': self.aptlistfile3,
- 'keyid': "03683F77"}}
-
- self._apt_src_keyid(self.aptlistfile, cfg, 3)
- contents = util.load_file(self.aptlistfile2)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "universe"),
- contents, flags=re.IGNORECASE))
- contents = util.load_file(self.aptlistfile3)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "multiverse"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_key(self):
- """test_apt_v3_src_key - Test source + key"""
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'source': ('deb '
- 'http://ppa.launchpad.net/'
- 'smoser/cloud-init-test/ubuntu'
- ' xenial main'),
- 'filename': self.aptlistfile,
- 'key': "fakekey 4321"}}
-
- with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- calls = (call(
- 'add',
- output_file=pathlib.Path(self.aptlistfile).stem,
- data='fakekey 4321',
- hardened=False),)
- mockobj.assert_has_calls(calls, any_order=True)
- self.assertTrue(os.path.isfile(self.aptlistfile))
-
- contents = util.load_file(self.aptlistfile)
- self.assertTrue(re.search(r"%s %s %s %s\n" %
- ("deb",
- ('http://ppa.launchpad.net/smoser/'
- 'cloud-init-test/ubuntu'),
- "xenial", "main"),
- contents, flags=re.IGNORECASE))
-
- def test_apt_v3_src_keyonly(self):
- """test_apt_v3_src_keyonly - Test key without source"""
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'key': "fakekey 4242"}}
-
- with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- calls = (call(
- 'add',
- output_file=pathlib.Path(self.aptlistfile).stem,
- data='fakekey 4242',
- hardened=False),)
- mockobj.assert_has_calls(calls, any_order=True)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_v3_src_keyidonly(self):
- """test_apt_v3_src_keyidonly - Test keyid without source"""
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'keyid': "03683F77"}}
- with mock.patch.object(subp, 'subp',
- return_value=('fakekey 1212', '')):
- with mock.patch.object(cc_apt_configure, 'apt_key') as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- calls = (call(
- 'add',
- output_file=pathlib.Path(self.aptlistfile).stem,
- data='fakekey 1212',
- hardened=False),)
- mockobj.assert_has_calls(calls, any_order=True)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def apt_src_keyid_real(self, cfg, expectedkey, is_hardened=None):
- """apt_src_keyid_real
- Test specification of a keyid without source including
- up to addition of the key (add_apt_key_raw mocked to keep the
- environment as is)
- """
- params = self._get_default_params()
-
- with mock.patch.object(cc_apt_configure, 'add_apt_key_raw') as mockkey:
- with mock.patch.object(gpg, 'getkeybyid',
- return_value=expectedkey) as mockgetkey:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- keycfg = cfg[self.aptlistfile]
- mockgetkey.assert_called_with(keycfg['keyid'],
- keycfg.get('keyserver',
- 'keyserver.ubuntu.com'))
- if is_hardened is not None:
- mockkey.assert_called_with(
- expectedkey,
- keycfg['keyfile'],
- hardened=is_hardened)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_v3_src_keyid_real(self):
- """test_apt_v3_src_keyid_real - Test keyid including key add"""
- keyid = "03683F77"
- cfg = {self.aptlistfile: {'keyid': keyid,
- 'keyfile': self.aptlistfile}}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
-
- def test_apt_v3_src_longkeyid_real(self):
- """test_apt_v3_src_longkeyid_real Test long keyid including key add"""
- keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
- cfg = {self.aptlistfile: {'keyid': keyid,
- 'keyfile': self.aptlistfile}}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY, is_hardened=False)
-
- def test_apt_v3_src_longkeyid_ks_real(self):
- """test_apt_v3_src_longkeyid_ks_real Test long keyid from other ks"""
- keyid = "B59D 5F15 97A5 04B7 E230 6DCA 0620 BBCF 0368 3F77"
- cfg = {self.aptlistfile: {'keyid': keyid,
- 'keyfile': self.aptlistfile,
- 'keyserver': 'keys.gnupg.net'}}
-
- self.apt_src_keyid_real(cfg, EXPECTEDKEY)
-
- def test_apt_v3_src_keyid_keyserver(self):
- """test_apt_v3_src_keyid_keyserver - Test custom keyserver"""
- keyid = "03683F77"
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'keyid': keyid,
- 'keyfile': self.aptlistfile,
- 'keyserver': 'test.random.com'}}
-
- # in some test environments only *.ubuntu.com is reachable
- # so mock the call and check if the config got there
- with mock.patch.object(gpg, 'getkeybyid',
- return_value="fakekey") as mockgetkey:
- with mock.patch.object(cc_apt_configure,
- 'add_apt_key_raw') as mockadd:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
-
- mockgetkey.assert_called_with('03683F77', 'test.random.com')
- mockadd.assert_called_with('fakekey', self.aptlistfile, hardened=False)
-
- # filename should be ignored on key only
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_v3_src_ppa(self):
- """test_apt_v3_src_ppa - Test specification of a ppa"""
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'source': 'ppa:smoser/cloud-init-test'}}
-
- with mock.patch("cloudinit.subp.subp") as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
- mockobj.assert_any_call(['add-apt-repository',
- 'ppa:smoser/cloud-init-test'], target=TARGET)
-
- # adding ppa should ignore filename (uses add-apt-repository)
- self.assertFalse(os.path.isfile(self.aptlistfile))
-
- def test_apt_v3_src_ppa_tri(self):
- """test_apt_v3_src_ppa_tri - Test specification of multiple ppa's"""
- params = self._get_default_params()
- cfg = {self.aptlistfile: {'source': 'ppa:smoser/cloud-init-test'},
- self.aptlistfile2: {'source': 'ppa:smoser/cloud-init-test2'},
- self.aptlistfile3: {'source': 'ppa:smoser/cloud-init-test3'}}
-
- with mock.patch("cloudinit.subp.subp") as mockobj:
- self._add_apt_sources(cfg, TARGET, template_params=params,
- aa_repo_match=self.matcher)
- calls = [call(['add-apt-repository', 'ppa:smoser/cloud-init-test'],
- target=TARGET),
- call(['add-apt-repository', 'ppa:smoser/cloud-init-test2'],
- target=TARGET),
- call(['add-apt-repository', 'ppa:smoser/cloud-init-test3'],
- target=TARGET)]
- mockobj.assert_has_calls(calls, any_order=True)
-
- # adding ppa should ignore all filenames (uses add-apt-repository)
- self.assertFalse(os.path.isfile(self.aptlistfile))
- self.assertFalse(os.path.isfile(self.aptlistfile2))
- self.assertFalse(os.path.isfile(self.aptlistfile3))
-
- @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
- def test_apt_v3_list_rename(self, m_get_dpkg_architecture):
- """test_apt_v3_list_rename - Test find mirror and apt list renaming"""
- pre = "/var/lib/apt/lists"
- # filenames are archive dependent
-
- arch = 's390x'
- m_get_dpkg_architecture.return_value = arch
- component = "ubuntu-ports"
- archive = "ports.ubuntu.com"
-
- cfg = {'primary': [{'arches': ["default"],
- 'uri':
- 'http://test.ubuntu.com/%s/' % component}],
- 'security': [{'arches': ["default"],
- 'uri':
- 'http://testsec.ubuntu.com/%s/' % component}]}
- post = ("%s_dists_%s-updates_InRelease" %
- (component, MOCK_LSB_RELEASE_DATA['codename']))
- fromfn = ("%s/%s_%s" % (pre, archive, post))
- tofn = ("%s/test.ubuntu.com_%s" % (pre, post))
-
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
-
- self.assertEqual(mirrors['MIRROR'],
- "http://test.ubuntu.com/%s/" % component)
- self.assertEqual(mirrors['PRIMARY'],
- "http://test.ubuntu.com/%s/" % component)
- self.assertEqual(mirrors['SECURITY'],
- "http://testsec.ubuntu.com/%s/" % component)
-
- with mock.patch.object(os, 'rename') as mockren:
- with mock.patch.object(glob, 'glob',
- return_value=[fromfn]):
- cc_apt_configure.rename_apt_lists(mirrors, TARGET, arch)
-
- mockren.assert_any_call(fromfn, tofn)
-
- @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
- def test_apt_v3_list_rename_non_slash(self, m_get_dpkg_architecture):
- target = os.path.join(self.tmp, "rename_non_slash")
- apt_lists_d = os.path.join(target, "./" + cc_apt_configure.APT_LISTS)
-
- arch = 'amd64'
- m_get_dpkg_architecture.return_value = arch
-
- mirror_path = "some/random/path/"
- primary = "http://test.ubuntu.com/" + mirror_path
- security = "http://test-security.ubuntu.com/" + mirror_path
- mirrors = {'PRIMARY': primary, 'SECURITY': security}
-
- # these match default archive prefixes
- opri_pre = "archive.ubuntu.com_ubuntu_dists_xenial"
- osec_pre = "security.ubuntu.com_ubuntu_dists_xenial"
- # this one won't match and should not be renamed defaults.
- other_pre = "dl.google.com_linux_chrome_deb_dists_stable"
- # these are our new expected prefixes
- npri_pre = "test.ubuntu.com_some_random_path_dists_xenial"
- nsec_pre = "test-security.ubuntu.com_some_random_path_dists_xenial"
-
- files = [
- # orig prefix, new prefix, suffix
- (opri_pre, npri_pre, "_main_binary-amd64_Packages"),
- (opri_pre, npri_pre, "_main_binary-amd64_InRelease"),
- (opri_pre, npri_pre, "-updates_main_binary-amd64_Packages"),
- (opri_pre, npri_pre, "-updates_main_binary-amd64_InRelease"),
- (other_pre, other_pre, "_main_binary-amd64_Packages"),
- (other_pre, other_pre, "_Release"),
- (other_pre, other_pre, "_Release.gpg"),
- (osec_pre, nsec_pre, "_InRelease"),
- (osec_pre, nsec_pre, "_main_binary-amd64_Packages"),
- (osec_pre, nsec_pre, "_universe_binary-amd64_Packages"),
- ]
-
- expected = sorted([npre + suff for opre, npre, suff in files])
- # create files
- for (opre, _npre, suff) in files:
- fpath = os.path.join(apt_lists_d, opre + suff)
- util.write_file(fpath, content=fpath)
-
- cc_apt_configure.rename_apt_lists(mirrors, target, arch)
- found = sorted(os.listdir(apt_lists_d))
- self.assertEqual(expected, found)
-
- @staticmethod
- def test_apt_v3_proxy():
- """test_apt_v3_proxy - Test apt_*proxy configuration"""
- cfg = {"proxy": "foobar1",
- "http_proxy": "foobar2",
- "ftp_proxy": "foobar3",
- "https_proxy": "foobar4"}
-
- with mock.patch.object(util, 'write_file') as mockobj:
- cc_apt_configure.apply_apt_config(cfg, "proxyfn", "notused")
-
- mockobj.assert_called_with('proxyfn',
- ('Acquire::http::Proxy "foobar1";\n'
- 'Acquire::http::Proxy "foobar2";\n'
- 'Acquire::ftp::Proxy "foobar3";\n'
- 'Acquire::https::Proxy "foobar4";\n'))
-
- def test_apt_v3_mirror(self):
- """test_apt_v3_mirror - Test defining a mirror"""
- pmir = "http://us.archive.ubuntu.com/ubuntu/"
- smir = "http://security.ubuntu.com/ubuntu/"
- cfg = {"primary": [{'arches': ["default"],
- "uri": pmir}],
- "security": [{'arches': ["default"],
- "uri": smir}]}
-
- mirrors = cc_apt_configure.find_apt_mirror_info(
- cfg, FakeCloud(), 'amd64')
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- def test_apt_v3_mirror_default(self):
- """test_apt_v3_mirror_default - Test without defining a mirror"""
- arch = 'amd64'
- default_mirrors = cc_apt_configure.get_default_mirrors(arch)
- pmir = default_mirrors["PRIMARY"]
- smir = default_mirrors["SECURITY"]
- mycloud = get_cloud()
- mirrors = cc_apt_configure.find_apt_mirror_info({}, mycloud, arch)
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- def test_apt_v3_mirror_arches(self):
- """test_apt_v3_mirror_arches - Test arches selection of mirror"""
- pmir = "http://my-primary.ubuntu.com/ubuntu/"
- smir = "http://my-security.ubuntu.com/ubuntu/"
- arch = 'ppc64el'
- cfg = {"primary": [{'arches': ["default"], "uri": "notthis-primary"},
- {'arches': [arch], "uri": pmir}],
- "security": [{'arches': ["default"], "uri": "nothis-security"},
- {'arches': [arch], "uri": smir}]}
-
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(), arch)
-
- self.assertEqual(mirrors['PRIMARY'], pmir)
- self.assertEqual(mirrors['MIRROR'], pmir)
- self.assertEqual(mirrors['SECURITY'], smir)
-
- def test_apt_v3_mirror_arches_default(self):
- """test_apt_v3_mirror_arches - Test falling back to default arch"""
- pmir = "http://us.archive.ubuntu.com/ubuntu/"
- smir = "http://security.ubuntu.com/ubuntu/"
- cfg = {"primary": [{'arches': ["default"],
- "uri": pmir},
- {'arches': ["thisarchdoesntexist"],
- "uri": "notthis"}],
- "security": [{'arches': ["thisarchdoesntexist"],
- "uri": "nothat"},
- {'arches': ["default"],
- "uri": smir}]}
-
- mirrors = cc_apt_configure.find_apt_mirror_info(
- cfg, FakeCloud(), 'amd64')
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- @mock.patch("cloudinit.config.cc_apt_configure.util.get_dpkg_architecture")
- def test_apt_v3_get_def_mir_non_intel_no_arch(
- self, m_get_dpkg_architecture
- ):
- arch = 'ppc64el'
- m_get_dpkg_architecture.return_value = arch
- expected = {'PRIMARY': 'http://ports.ubuntu.com/ubuntu-ports',
- 'SECURITY': 'http://ports.ubuntu.com/ubuntu-ports'}
- self.assertEqual(expected, cc_apt_configure.get_default_mirrors())
-
- def test_apt_v3_get_default_mirrors_non_intel_with_arch(self):
- found = cc_apt_configure.get_default_mirrors('ppc64el')
-
- expected = {'PRIMARY': 'http://ports.ubuntu.com/ubuntu-ports',
- 'SECURITY': 'http://ports.ubuntu.com/ubuntu-ports'}
- self.assertEqual(expected, found)
-
- def test_apt_v3_mirror_arches_sysdefault(self):
- """test_apt_v3_mirror_arches - Test arches fallback to sys default"""
- arch = 'amd64'
- default_mirrors = cc_apt_configure.get_default_mirrors(arch)
- pmir = default_mirrors["PRIMARY"]
- smir = default_mirrors["SECURITY"]
- mycloud = get_cloud()
- cfg = {"primary": [{'arches': ["thisarchdoesntexist_64"],
- "uri": "notthis"},
- {'arches': ["thisarchdoesntexist"],
- "uri": "notthiseither"}],
- "security": [{'arches': ["thisarchdoesntexist"],
- "uri": "nothat"},
- {'arches': ["thisarchdoesntexist_64"],
- "uri": "nothateither"}]}
-
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
-
- self.assertEqual(mirrors['MIRROR'], pmir)
- self.assertEqual(mirrors['PRIMARY'], pmir)
- self.assertEqual(mirrors['SECURITY'], smir)
-
- def test_apt_v3_mirror_search(self):
- """test_apt_v3_mirror_search - Test searching mirrors in a list
- mock checks to avoid relying on network connectivity"""
- pmir = "http://us.archive.ubuntu.com/ubuntu/"
- smir = "http://security.ubuntu.com/ubuntu/"
- cfg = {"primary": [{'arches': ["default"],
- "search": ["pfailme", pmir]}],
- "security": [{'arches': ["default"],
- "search": ["sfailme", smir]}]}
-
- with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
- side_effect=[pmir, smir]) as mocksearch:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, FakeCloud(),
- 'amd64')
-
- calls = [call(["pfailme", pmir]),
- call(["sfailme", smir])]
- mocksearch.assert_has_calls(calls)
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- def test_apt_v3_mirror_search_many2(self):
- """test_apt_v3_mirror_search_many3 - Test both mirrors specs at once"""
- pmir = "http://us.archive.ubuntu.com/ubuntu/"
- smir = "http://security.ubuntu.com/ubuntu/"
- cfg = {"primary": [{'arches': ["default"],
- "uri": pmir,
- "search": ["pfailme", "foo"]}],
- "security": [{'arches': ["default"],
- "uri": smir,
- "search": ["sfailme", "bar"]}]}
-
- arch = 'amd64'
-
- # should be called only once per type, despite two mirror configs
- mycloud = None
- with mock.patch.object(cc_apt_configure, 'get_mirror',
- return_value="http://mocked/foo") as mockgm:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
- calls = [call(cfg, 'primary', arch, mycloud),
- call(cfg, 'security', arch, mycloud)]
- mockgm.assert_has_calls(calls)
-
- # should not be called, since primary is specified
- with mock.patch.object(cc_apt_configure.util,
- 'search_for_mirror') as mockse:
- mirrors = cc_apt_configure.find_apt_mirror_info(
- cfg, FakeCloud(), arch)
- mockse.assert_not_called()
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- def test_apt_v3_url_resolvable(self):
- """test_apt_v3_url_resolvable - Test resolving urls"""
-
- with mock.patch.object(util, 'is_resolvable') as mockresolve:
- util.is_resolvable_url("http://1.2.3.4/ubuntu")
- mockresolve.assert_called_with("1.2.3.4")
-
- with mock.patch.object(util, 'is_resolvable') as mockresolve:
- util.is_resolvable_url("http://us.archive.ubuntu.com/ubuntu")
- mockresolve.assert_called_with("us.archive.ubuntu.com")
-
- # former tests can leave this set (or not if the test is ran directly)
- # do a hard reset to ensure a stable result
- util._DNS_REDIRECT_IP = None
- bad = [(None, None, None, "badname", ["10.3.2.1"])]
- good = [(None, None, None, "goodname", ["10.2.3.4"])]
- with mock.patch.object(socket, 'getaddrinfo',
- side_effect=[bad, bad, bad, good,
- good]) as mocksock:
- ret = util.is_resolvable_url("http://us.archive.ubuntu.com/ubuntu")
- ret2 = util.is_resolvable_url("http://1.2.3.4/ubuntu")
- mocksock.assert_any_call('does-not-exist.example.com.', None,
- 0, 0, 1, 2)
- mocksock.assert_any_call('example.invalid.', None, 0, 0, 1, 2)
- mocksock.assert_any_call('us.archive.ubuntu.com', None)
- mocksock.assert_any_call('1.2.3.4', None)
-
- self.assertTrue(ret)
- self.assertTrue(ret2)
-
- # side effect need only bad ret after initial call
- with mock.patch.object(socket, 'getaddrinfo',
- side_effect=[bad]) as mocksock:
- ret3 = util.is_resolvable_url("http://failme.com/ubuntu")
- calls = [call('failme.com', None)]
- mocksock.assert_has_calls(calls)
- self.assertFalse(ret3)
-
- def test_apt_v3_disable_suites(self):
- """test_disable_suites - disable_suites with many configurations"""
- release = "xenial"
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
-
- # disable nothing
- disabled = []
- expect = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable release suite
- disabled = ["$RELEASE"]
- expect = """\
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable other suite
- disabled = ["$RELEASE-updates"]
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu"""
- """ xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # multi disable
- disabled = ["$RELEASE-updates", "$RELEASE-security"]
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-updates main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # multi line disable (same suite multiple times in input)
- disabled = ["$RELEASE-updates", "$RELEASE-security"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://UBUNTU.com//ubuntu xenial-updates main
-deb http://UBUNTU.COM//ubuntu xenial-updates main
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-updates main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-# suite disabled by cloud-init: deb http://UBUNTU.com//ubuntu """
- """xenial-updates main
-# suite disabled by cloud-init: deb http://UBUNTU.COM//ubuntu """
- """xenial-updates main
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # comment in input
- disabled = ["$RELEASE-updates", "$RELEASE-security"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-#foo
-#deb http://UBUNTU.com//ubuntu xenial-updates main
-deb http://UBUNTU.COM//ubuntu xenial-updates main
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-updates main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-#foo
-#deb http://UBUNTU.com//ubuntu xenial-updates main
-# suite disabled by cloud-init: deb http://UBUNTU.COM//ubuntu """
- """xenial-updates main
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable custom suite
- disabled = ["foobar"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb http://ubuntu.com/ubuntu/ foobar main"""
- expect = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-# suite disabled by cloud-init: deb http://ubuntu.com/ubuntu/ foobar main"""
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable non existing suite
- disabled = ["foobar"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb http://ubuntu.com/ubuntu/ notfoobar main"""
- expect = """deb http://ubuntu.com//ubuntu xenial main
-deb http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb http://ubuntu.com/ubuntu/ notfoobar main"""
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable suite with option
- disabled = ["$RELEASE-updates"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb [a=b] http://ubu.com//ubu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb [a=b] http://ubu.com//ubu """
- """xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable suite with more options and auto $RELEASE expansion
- disabled = ["updates"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb [a=b c=d] http://ubu.com//ubu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- expect = """deb http://ubuntu.com//ubuntu xenial main
-# suite disabled by cloud-init: deb [a=b c=d] \
-http://ubu.com//ubu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- # single disable suite while options at others
- disabled = ["$RELEASE-security"]
- orig = """deb http://ubuntu.com//ubuntu xenial main
-deb [arch=foo] http://ubuntu.com//ubuntu xenial-updates main
-deb http://ubuntu.com//ubuntu xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main"""
- expect = ("""deb http://ubuntu.com//ubuntu xenial main
-deb [arch=foo] http://ubuntu.com//ubuntu xenial-updates main
-# suite disabled by cloud-init: deb http://ubuntu.com//ubuntu """
- """xenial-security main
-deb-src http://ubuntu.com//ubuntu universe multiverse
-deb http://ubuntu.com/ubuntu/ xenial-proposed main""")
- result = cc_apt_configure.disable_suites(disabled, orig, release)
- self.assertEqual(expect, result)
-
- def test_disable_suites_blank_lines(self):
- """test_disable_suites_blank_lines - ensure blank lines allowed"""
- lines = ["deb %(repo)s %(rel)s main universe",
- "",
- "deb %(repo)s %(rel)s-updates main universe",
- " # random comment",
- "#comment here",
- ""]
- rel = "trusty"
- repo = 'http://example.com/mirrors/ubuntu'
- orig = "\n".join(lines) % {'repo': repo, 'rel': rel}
- self.assertEqual(
- orig, cc_apt_configure.disable_suites(["proposed"], orig, rel))
-
- @mock.patch("cloudinit.util.get_hostname", return_value='abc.localdomain')
- def test_apt_v3_mirror_search_dns(self, m_get_hostname):
- """test_apt_v3_mirror_search_dns - Test searching dns patterns"""
- pmir = "phit"
- smir = "shit"
- arch = 'amd64'
- mycloud = get_cloud('ubuntu')
- cfg = {"primary": [{'arches': ["default"],
- "search_dns": True}],
- "security": [{'arches': ["default"],
- "search_dns": True}]}
-
- with mock.patch.object(cc_apt_configure, 'get_mirror',
- return_value="http://mocked/foo") as mockgm:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
- calls = [call(cfg, 'primary', arch, mycloud),
- call(cfg, 'security', arch, mycloud)]
- mockgm.assert_has_calls(calls)
-
- with mock.patch.object(cc_apt_configure, 'search_for_mirror_dns',
- return_value="http://mocked/foo") as mocksdns:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
- calls = [call(True, 'primary', cfg, mycloud),
- call(True, 'security', cfg, mycloud)]
- mocksdns.assert_has_calls(calls)
-
- # first return is for the non-dns call before
- with mock.patch.object(cc_apt_configure.util, 'search_for_mirror',
- side_effect=[None, pmir, None, smir]) as mockse:
- mirrors = cc_apt_configure.find_apt_mirror_info(cfg, mycloud, arch)
-
- calls = [call(None),
- call(['http://ubuntu-mirror.localdomain/ubuntu',
- 'http://ubuntu-mirror/ubuntu']),
- call(None),
- call(['http://ubuntu-security-mirror.localdomain/ubuntu',
- 'http://ubuntu-security-mirror/ubuntu'])]
- mockse.assert_has_calls(calls)
-
- self.assertEqual(mirrors['MIRROR'],
- pmir)
- self.assertEqual(mirrors['PRIMARY'],
- pmir)
- self.assertEqual(mirrors['SECURITY'],
- smir)
-
- def test_apt_v3_add_mirror_keys(self):
- """test_apt_v3_add_mirror_keys - Test adding key for mirrors"""
- arch = 'amd64'
- cfg = {
- 'primary': [
- {'arches': [arch],
- 'uri': 'http://test.ubuntu.com/',
- 'filename': 'primary',
- 'key': 'fakekey_primary'}],
- 'security': [
- {'arches': [arch],
- 'uri': 'http://testsec.ubuntu.com/',
- 'filename': 'security',
- 'key': 'fakekey_security'}]
- }
-
- with mock.patch.object(cc_apt_configure,
- 'add_apt_key_raw') as mockadd:
- cc_apt_configure.add_mirror_keys(cfg, TARGET)
- calls = [
- mock.call('fakekey_primary', 'primary', hardened=False),
- mock.call('fakekey_security', 'security', hardened=False),
- ]
- mockadd.assert_has_calls(calls, any_order=True)
-
-
-class TestDebconfSelections(TestCase):
-
- @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
- def test_set_sel_appends_newline_if_absent(self, m_subp):
- """Automatically append a newline to debconf-set-selections config."""
- selections = b'some/setting boolean true'
- cc_apt_configure.debconf_set_selections(selections=selections)
- cc_apt_configure.debconf_set_selections(selections=selections + b'\n')
- m_call = mock.call(
- ['debconf-set-selections'], data=selections + b'\n', capture=True,
- target=None)
- self.assertEqual([m_call, m_call], m_subp.call_args_list)
-
- @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
- def test_no_set_sel_if_none_to_set(self, m_set_sel):
- cc_apt_configure.apply_debconf_selections({'foo': 'bar'})
- m_set_sel.assert_not_called()
-
- @mock.patch("cloudinit.config.cc_apt_configure."
- "debconf_set_selections")
- @mock.patch("cloudinit.config.cc_apt_configure."
- "util.get_installed_packages")
- def test_set_sel_call_has_expected_input(self, m_get_inst, m_set_sel):
- data = {
- 'set1': 'pkga pkga/q1 mybool false',
- 'set2': ('pkgb\tpkgb/b1\tstr\tthis is a string\n'
- 'pkgc\tpkgc/ip\tstring\t10.0.0.1')}
- lines = '\n'.join(data.values()).split('\n')
-
- m_get_inst.return_value = ["adduser", "apparmor"]
- m_set_sel.return_value = None
-
- cc_apt_configure.apply_debconf_selections({'debconf_selections': data})
- self.assertTrue(m_get_inst.called)
- self.assertEqual(m_set_sel.call_count, 1)
-
- # assumes called with *args value.
- selections = m_set_sel.call_args_list[0][0][0].decode()
-
- missing = [
- line for line in lines if line not in selections.splitlines()
- ]
- self.assertEqual([], missing)
-
- @mock.patch("cloudinit.config.cc_apt_configure.dpkg_reconfigure")
- @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
- @mock.patch("cloudinit.config.cc_apt_configure."
- "util.get_installed_packages")
- def test_reconfigure_if_intersection(self, m_get_inst, m_set_sel,
- m_dpkg_r):
- data = {
- 'set1': 'pkga pkga/q1 mybool false',
- 'set2': ('pkgb\tpkgb/b1\tstr\tthis is a string\n'
- 'pkgc\tpkgc/ip\tstring\t10.0.0.1'),
- 'cloud-init': ('cloud-init cloud-init/datasources'
- 'multiselect MAAS')}
-
- m_set_sel.return_value = None
- m_get_inst.return_value = ["adduser", "apparmor", "pkgb",
- "cloud-init", 'zdog']
-
- cc_apt_configure.apply_debconf_selections({'debconf_selections': data})
-
- # reconfigure should be called with the intersection
- # of (packages in config, packages installed)
- self.assertEqual(m_dpkg_r.call_count, 1)
- # assumes called with *args (dpkg_reconfigure([a,b,c], target=))
- packages = m_dpkg_r.call_args_list[0][0][0]
- self.assertEqual(set(['cloud-init', 'pkgb']), set(packages))
-
- @mock.patch("cloudinit.config.cc_apt_configure.dpkg_reconfigure")
- @mock.patch("cloudinit.config.cc_apt_configure.debconf_set_selections")
- @mock.patch("cloudinit.config.cc_apt_configure."
- "util.get_installed_packages")
- def test_reconfigure_if_no_intersection(self, m_get_inst, m_set_sel,
- m_dpkg_r):
- data = {'set1': 'pkga pkga/q1 mybool false'}
-
- m_get_inst.return_value = ["adduser", "apparmor", "pkgb",
- "cloud-init", 'zdog']
- m_set_sel.return_value = None
-
- cc_apt_configure.apply_debconf_selections({'debconf_selections': data})
-
- self.assertTrue(m_get_inst.called)
- self.assertEqual(m_dpkg_r.call_count, 0)
-
- @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
- def test_dpkg_reconfigure_does_reconfigure(self, m_subp):
- target = "/foo-target"
-
- # due to the way the cleaners are called (via dictionary reference)
- # mocking clean_cloud_init directly does not work. So we mock
- # the CONFIG_CLEANERS dictionary and assert our cleaner is called.
- ci_cleaner = mock.MagicMock()
- with mock.patch.dict(("cloudinit.config.cc_apt_configure."
- "CONFIG_CLEANERS"),
- values={'cloud-init': ci_cleaner}, clear=True):
- cc_apt_configure.dpkg_reconfigure(['pkga', 'cloud-init'],
- target=target)
- # cloud-init is actually the only package we have a cleaner for
- # so for now, its the only one that should reconfigured
- self.assertTrue(m_subp.called)
- ci_cleaner.assert_called_with(target)
- self.assertEqual(m_subp.call_count, 1)
- found = m_subp.call_args_list[0][0][0]
- expected = ['dpkg-reconfigure', '--frontend=noninteractive',
- 'cloud-init']
- self.assertEqual(expected, found)
-
- @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
- def test_dpkg_reconfigure_not_done_on_no_data(self, m_subp):
- cc_apt_configure.dpkg_reconfigure([])
- m_subp.assert_not_called()
-
- @mock.patch("cloudinit.config.cc_apt_configure.subp.subp")
- def test_dpkg_reconfigure_not_done_if_no_cleaners(self, m_subp):
- cc_apt_configure.dpkg_reconfigure(['pkgfoo', 'pkgbar'])
- m_subp.assert_not_called()
-
-#
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_bootcmd.py b/tests/unittests/test_handler/test_handler_bootcmd.py
deleted file mode 100644
index 8cd3a5e1..00000000
--- a/tests/unittests/test_handler/test_handler_bootcmd.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import tempfile
-
-from cloudinit.config.cc_bootcmd import handle, schema
-from cloudinit import (subp, util)
-from cloudinit.tests.helpers import (
- CiTestCase, mock, SchemaTestCaseMixin, skipUnlessJsonSchema)
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-class FakeExtendedTempFile(object):
- def __init__(self, suffix):
- self.suffix = suffix
- self.handle = tempfile.NamedTemporaryFile(
- prefix="ci-%s." % self.__class__.__name__, delete=False)
-
- def __enter__(self):
- return self.handle
-
- def __exit__(self, exc_type, exc_value, traceback):
- self.handle.close()
- util.del_file(self.handle.name)
-
-
-class TestBootcmd(CiTestCase):
-
- with_logs = True
-
- _etmpfile_path = ('cloudinit.config.cc_bootcmd.temp_utils.'
- 'ExtendedTemporaryFile')
-
- def setUp(self):
- super(TestBootcmd, self).setUp()
- self.subp = subp.subp
- self.new_root = self.tmp_dir()
-
- def test_handler_skip_if_no_bootcmd(self):
- """When the provided config doesn't contain bootcmd, skip it."""
- cfg = {}
- mycloud = get_cloud()
- handle('notimportant', cfg, mycloud, LOG, None)
- self.assertIn(
- "Skipping module named notimportant, no 'bootcmd' key",
- self.logs.getvalue())
-
- def test_handler_invalid_command_set(self):
- """Commands which can't be converted to shell will raise errors."""
- invalid_config = {'bootcmd': 1}
- cc = get_cloud()
- with self.assertRaises(TypeError) as context_manager:
- handle('cc_bootcmd', invalid_config, cc, LOG, [])
- self.assertIn('Failed to shellify bootcmd', self.logs.getvalue())
- self.assertEqual(
- "Input to shellify was type 'int'. Expected list or tuple.",
- str(context_manager.exception))
-
- @skipUnlessJsonSchema()
- def test_handler_schema_validation_warns_non_array_type(self):
- """Schema validation warns of non-array type for bootcmd key.
-
- Schema validation is not strict, so bootcmd attempts to shellify the
- invalid content.
- """
- invalid_config = {'bootcmd': 1}
- cc = get_cloud()
- with self.assertRaises(TypeError):
- handle('cc_bootcmd', invalid_config, cc, LOG, [])
- self.assertIn(
- 'Invalid config:\nbootcmd: 1 is not of type \'array\'',
- self.logs.getvalue())
- self.assertIn('Failed to shellify', self.logs.getvalue())
-
- @skipUnlessJsonSchema()
- def test_handler_schema_validation_warns_non_array_item_type(self):
- """Schema validation warns of non-array or string bootcmd items.
-
- Schema validation is not strict, so bootcmd attempts to shellify the
- invalid content.
- """
- invalid_config = {
- 'bootcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]}
- cc = get_cloud()
- with self.assertRaises(TypeError) as context_manager:
- handle('cc_bootcmd', invalid_config, cc, LOG, [])
- expected_warnings = [
- 'bootcmd.1: 20 is not valid under any of the given schemas',
- 'bootcmd.3: {\'a\': \'n\'} is not valid under any of the given'
- ' schema'
- ]
- logs = self.logs.getvalue()
- for warning in expected_warnings:
- self.assertIn(warning, logs)
- self.assertIn('Failed to shellify', logs)
- self.assertEqual(
- ("Unable to shellify type 'int'. Expected list, string, tuple. "
- "Got: 20"),
- str(context_manager.exception))
-
- def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self):
- """Valid schema runs a bootcmd script with INSTANCE_ID in the env."""
- cc = get_cloud()
- out_file = self.tmp_path('bootcmd.out', self.new_root)
- my_id = "b6ea0f59-e27d-49c6-9f87-79f19765a425"
- valid_config = {'bootcmd': [
- 'echo {0} $INSTANCE_ID > {1}'.format(my_id, out_file)]}
-
- with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
- with self.allow_subp(['/bin/sh']):
- handle('cc_bootcmd', valid_config, cc, LOG, [])
- self.assertEqual(my_id + ' iid-datasource-none\n',
- util.load_file(out_file))
-
- def test_handler_runs_bootcmd_script_with_error(self):
- """When a valid script generates an error, that error is raised."""
- cc = get_cloud()
- valid_config = {'bootcmd': ['exit 1']} # Script with error
-
- with mock.patch(self._etmpfile_path, FakeExtendedTempFile):
- with self.allow_subp(['/bin/sh']):
- with self.assertRaises(subp.ProcessExecutionError) as ctxt:
- handle('does-not-matter', valid_config, cc, LOG, [])
- self.assertIn(
- 'Unexpected error while running command.\n'
- "Command: ['/bin/sh',",
- str(ctxt.exception))
- self.assertIn(
- 'Failed to run bootcmd module does-not-matter',
- self.logs.getvalue())
-
-
-@skipUnlessJsonSchema()
-class TestSchema(CiTestCase, SchemaTestCaseMixin):
- """Directly test schema rather than through handle."""
-
- schema = schema
-
- def test_duplicates_are_fine_array_array(self):
- """Duplicated commands array/array entries are allowed."""
- self.assertSchemaValid(
- ["byebye", "byebye"], 'command entries can be duplicate')
-
- def test_duplicates_are_fine_array_string(self):
- """Duplicated commands array/string entries are allowed."""
- self.assertSchemaValid(
- ["echo bye", "echo bye"], "command entries can be duplicate.")
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_ca_certs.py b/tests/unittests/test_handler/test_handler_ca_certs.py
deleted file mode 100644
index 2a4ab49e..00000000
--- a/tests/unittests/test_handler/test_handler_ca_certs.py
+++ /dev/null
@@ -1,361 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import shutil
-import tempfile
-import unittest
-from contextlib import ExitStack
-from unittest import mock
-
-from cloudinit import distros
-from cloudinit.config import cc_ca_certs
-from cloudinit import helpers
-from cloudinit import subp
-from cloudinit import util
-from cloudinit.tests.helpers import TestCase
-
-from tests.unittests.util import get_cloud
-
-
-class TestNoConfig(unittest.TestCase):
- def setUp(self):
- super(TestNoConfig, self).setUp()
- self.name = "ca-certs"
- self.cloud_init = None
- self.log = logging.getLogger("TestNoConfig")
- self.args = []
-
- def test_no_config(self):
- """
- Test that nothing is done if no ca-certs configuration is provided.
- """
- config = util.get_builtin_cfg()
- with ExitStack() as mocks:
- util_mock = mocks.enter_context(
- mock.patch.object(util, 'write_file'))
- certs_mock = mocks.enter_context(
- mock.patch.object(cc_ca_certs, 'update_ca_certs'))
-
- cc_ca_certs.handle(self.name, config, self.cloud_init, self.log,
- self.args)
-
- self.assertEqual(util_mock.call_count, 0)
- self.assertEqual(certs_mock.call_count, 0)
-
-
-class TestConfig(TestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.name = "ca-certs"
- self.paths = None
- self.log = logging.getLogger("TestNoConfig")
- self.args = []
-
- def _fetch_distro(self, kind):
- cls = distros.fetch(kind)
- paths = helpers.Paths({})
- return cls(kind, {}, paths)
-
- def _mock_init(self):
- self.mocks = ExitStack()
- self.addCleanup(self.mocks.close)
-
- # Mock out the functions that actually modify the system
- self.mock_add = self.mocks.enter_context(
- mock.patch.object(cc_ca_certs, 'add_ca_certs'))
- self.mock_update = self.mocks.enter_context(
- mock.patch.object(cc_ca_certs, 'update_ca_certs'))
- self.mock_remove = self.mocks.enter_context(
- mock.patch.object(cc_ca_certs, 'remove_default_ca_certs'))
-
- def test_no_trusted_list(self):
- """
- Test that no certificates are written if the 'trusted' key is not
- present.
- """
- config = {"ca-certs": {}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.assertEqual(self.mock_add.call_count, 0)
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 0)
-
- def test_empty_trusted_list(self):
- """Test that no certificate are written if 'trusted' list is empty."""
- config = {"ca-certs": {"trusted": []}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.assertEqual(self.mock_add.call_count, 0)
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 0)
-
- def test_single_trusted(self):
- """Test that a single cert gets passed to add_ca_certs."""
- config = {"ca-certs": {"trusted": ["CERT1"]}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.mock_add.assert_called_once_with(conf, ['CERT1'])
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 0)
-
- def test_multiple_trusted(self):
- """Test that multiple certs get passed to add_ca_certs."""
- config = {"ca-certs": {"trusted": ["CERT1", "CERT2"]}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.mock_add.assert_called_once_with(conf, ['CERT1', 'CERT2'])
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 0)
-
- def test_remove_default_ca_certs(self):
- """Test remove_defaults works as expected."""
- config = {"ca-certs": {"remove-defaults": True}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.assertEqual(self.mock_add.call_count, 0)
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 1)
-
- def test_no_remove_defaults_if_false(self):
- """Test remove_defaults is not called when config value is False."""
- config = {"ca-certs": {"remove-defaults": False}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.assertEqual(self.mock_add.call_count, 0)
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 0)
-
- def test_correct_order_for_remove_then_add(self):
- """Test remove_defaults is not called when config value is False."""
- config = {"ca-certs": {"remove-defaults": True, "trusted": ["CERT1"]}}
-
- for distro_name in cc_ca_certs.distros:
- self._mock_init()
- cloud = get_cloud(distro_name)
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- cc_ca_certs.handle(self.name, config, cloud, self.log, self.args)
-
- self.mock_add.assert_called_once_with(conf, ['CERT1'])
- self.assertEqual(self.mock_update.call_count, 1)
- self.assertEqual(self.mock_remove.call_count, 1)
-
-
-class TestAddCaCerts(TestCase):
-
- def setUp(self):
- super(TestAddCaCerts, self).setUp()
- tmpdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmpdir)
- self.paths = helpers.Paths({
- 'cloud_dir': tmpdir,
- })
- self.add_patch("cloudinit.config.cc_ca_certs.os.stat", "m_stat")
-
- def _fetch_distro(self, kind):
- cls = distros.fetch(kind)
- paths = helpers.Paths({})
- return cls(kind, {}, paths)
-
- def test_no_certs_in_list(self):
- """Test that no certificate are written if not provided."""
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- with mock.patch.object(util, 'write_file') as mockobj:
- cc_ca_certs.add_ca_certs(conf, [])
- self.assertEqual(mockobj.call_count, 0)
-
- def test_single_cert_trailing_cr(self):
- """Test adding a single certificate to the trusted CAs
- when existing ca-certificates has trailing newline"""
- cert = "CERT1\nLINE2\nLINE3"
-
- ca_certs_content = "line1\nline2\ncloud-init-ca-certs.crt\nline3\n"
- expected = "line1\nline2\nline3\ncloud-init-ca-certs.crt\n"
-
- self.m_stat.return_value.st_size = 1
-
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
-
- with ExitStack() as mocks:
- mock_write = mocks.enter_context(
- mock.patch.object(util, 'write_file'))
- mock_load = mocks.enter_context(
- mock.patch.object(util, 'load_file',
- return_value=ca_certs_content))
-
- cc_ca_certs.add_ca_certs(conf, [cert])
-
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_full_path'],
- cert, mode=0o644)])
- if conf['ca_cert_config'] is not None:
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_config'],
- expected, omode="wb")])
- mock_load.assert_called_once_with(conf['ca_cert_config'])
-
- def test_single_cert_no_trailing_cr(self):
- """Test adding a single certificate to the trusted CAs
- when existing ca-certificates has no trailing newline"""
- cert = "CERT1\nLINE2\nLINE3"
-
- ca_certs_content = "line1\nline2\nline3"
-
- self.m_stat.return_value.st_size = 1
-
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
-
- with ExitStack() as mocks:
- mock_write = mocks.enter_context(
- mock.patch.object(util, 'write_file'))
- mock_load = mocks.enter_context(
- mock.patch.object(util, 'load_file',
- return_value=ca_certs_content))
-
- cc_ca_certs.add_ca_certs(conf, [cert])
-
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_full_path'],
- cert, mode=0o644)])
- if conf['ca_cert_config'] is not None:
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_config'],
- "%s\n%s\n" % (ca_certs_content,
- conf['ca_cert_filename']),
- omode="wb")])
-
- mock_load.assert_called_once_with(conf['ca_cert_config'])
-
- def test_single_cert_to_empty_existing_ca_file(self):
- """Test adding a single certificate to the trusted CAs
- when existing ca-certificates.conf is empty"""
- cert = "CERT1\nLINE2\nLINE3"
-
- expected = "cloud-init-ca-certs.crt\n"
-
- self.m_stat.return_value.st_size = 0
-
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- with mock.patch.object(util, 'write_file',
- autospec=True) as m_write:
-
- cc_ca_certs.add_ca_certs(conf, [cert])
-
- m_write.assert_has_calls([
- mock.call(conf['ca_cert_full_path'],
- cert, mode=0o644)])
- if conf['ca_cert_config'] is not None:
- m_write.assert_has_calls([
- mock.call(conf['ca_cert_config'],
- expected, omode="wb")])
-
- def test_multiple_certs(self):
- """Test adding multiple certificates to the trusted CAs."""
- certs = ["CERT1\nLINE2\nLINE3", "CERT2\nLINE2\nLINE3"]
- expected_cert_file = "\n".join(certs)
- ca_certs_content = "line1\nline2\nline3"
-
- self.m_stat.return_value.st_size = 1
-
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
-
- with ExitStack() as mocks:
- mock_write = mocks.enter_context(
- mock.patch.object(util, 'write_file'))
- mock_load = mocks.enter_context(
- mock.patch.object(util, 'load_file',
- return_value=ca_certs_content))
-
- cc_ca_certs.add_ca_certs(conf, certs)
-
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_full_path'],
- expected_cert_file, mode=0o644)])
- if conf['ca_cert_config'] is not None:
- mock_write.assert_has_calls([
- mock.call(conf['ca_cert_config'],
- "%s\n%s\n" % (ca_certs_content,
- conf['ca_cert_filename']),
- omode='wb')])
-
- mock_load.assert_called_once_with(conf['ca_cert_config'])
-
-
-class TestUpdateCaCerts(unittest.TestCase):
- def test_commands(self):
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
- with mock.patch.object(subp, 'subp') as mockobj:
- cc_ca_certs.update_ca_certs(conf)
- mockobj.assert_called_once_with(
- conf['ca_cert_update_cmd'], capture=False)
-
-
-class TestRemoveDefaultCaCerts(TestCase):
-
- def setUp(self):
- super(TestRemoveDefaultCaCerts, self).setUp()
- tmpdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmpdir)
- self.paths = helpers.Paths({
- 'cloud_dir': tmpdir,
- })
-
- def test_commands(self):
- for distro_name in cc_ca_certs.distros:
- conf = cc_ca_certs._distro_ca_certs_configs(distro_name)
-
- with ExitStack() as mocks:
- mock_delete = mocks.enter_context(
- mock.patch.object(util, 'delete_dir_contents'))
- mock_write = mocks.enter_context(
- mock.patch.object(util, 'write_file'))
- mock_subp = mocks.enter_context(
- mock.patch.object(subp, 'subp'))
-
- cc_ca_certs.remove_default_ca_certs(distro_name, conf)
-
- mock_delete.assert_has_calls([
- mock.call(conf['ca_cert_path']),
- mock.call(conf['ca_cert_system_path'])])
-
- if conf['ca_cert_config'] is not None:
- mock_write.assert_called_once_with(
- conf['ca_cert_config'], "", mode=0o644)
-
- if distro_name in ['debian', 'ubuntu']:
- mock_subp.assert_called_once_with(
- ('debconf-set-selections', '-'),
- "ca-certificates \
-ca-certificates/trust_new_crts select no")
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_chef.py b/tests/unittests/test_handler/test_handler_chef.py
deleted file mode 100644
index 0672cebc..00000000
--- a/tests/unittests/test_handler/test_handler_chef.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import httpretty
-import json
-import logging
-import os
-
-from cloudinit.config import cc_chef
-from cloudinit import util
-
-from cloudinit.tests.helpers import (
- HttprettyTestCase, FilesystemMockingTestCase, mock, skipIf)
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-CLIENT_TEMPL = os.path.sep.join(["templates", "chef_client.rb.tmpl"])
-
-# This is adjusted to use http because using with https causes issue
-# in some openssl/httpretty combinations.
-# https://github.com/gabrielfalcao/HTTPretty/issues/242
-# We saw issue in opensuse 42.3 with
-# httpretty=0.8.8-7.1 ndg-httpsclient=0.4.0-3.2 pyOpenSSL=16.0.0-4.1
-OMNIBUS_URL_HTTP = cc_chef.OMNIBUS_URL.replace("https:", "http:")
-
-
-class TestInstallChefOmnibus(HttprettyTestCase):
-
- def setUp(self):
- super(TestInstallChefOmnibus, self).setUp()
- self.new_root = self.tmp_dir()
-
- @mock.patch("cloudinit.config.cc_chef.OMNIBUS_URL", OMNIBUS_URL_HTTP)
- def test_install_chef_from_omnibus_runs_chef_url_content(self):
- """install_chef_from_omnibus calls subp_blob_in_tempfile."""
- response = b'#!/bin/bash\necho "Hi Mom"'
- httpretty.register_uri(
- httpretty.GET, cc_chef.OMNIBUS_URL, body=response, status=200)
- ret = (None, None) # stdout, stderr but capture=False
-
- with mock.patch("cloudinit.config.cc_chef.subp_blob_in_tempfile",
- return_value=ret) as m_subp_blob:
- cc_chef.install_chef_from_omnibus()
- # admittedly whitebox, but assuming subp_blob_in_tempfile works
- # this should be fine.
- self.assertEqual(
- [mock.call(blob=response, args=[], basename='chef-omnibus-install',
- capture=False)],
- m_subp_blob.call_args_list)
-
- @mock.patch('cloudinit.config.cc_chef.url_helper.readurl')
- @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
- def test_install_chef_from_omnibus_retries_url(self, m_subp_blob, m_rdurl):
- """install_chef_from_omnibus retries OMNIBUS_URL upon failure."""
-
- class FakeURLResponse(object):
- contents = '#!/bin/bash\necho "Hi Mom" > {0}/chef.out'.format(
- self.new_root)
-
- m_rdurl.return_value = FakeURLResponse()
-
- cc_chef.install_chef_from_omnibus()
- expected_kwargs = {'retries': cc_chef.OMNIBUS_URL_RETRIES,
- 'url': cc_chef.OMNIBUS_URL}
- self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[0][1])
- cc_chef.install_chef_from_omnibus(retries=10)
- expected_kwargs = {'retries': 10,
- 'url': cc_chef.OMNIBUS_URL}
- self.assertCountEqual(expected_kwargs, m_rdurl.call_args_list[1][1])
- expected_subp_kwargs = {
- 'args': ['-v', '2.0'],
- 'basename': 'chef-omnibus-install',
- 'blob': m_rdurl.return_value.contents,
- 'capture': False
- }
- self.assertCountEqual(
- expected_subp_kwargs,
- m_subp_blob.call_args_list[0][1])
-
- @mock.patch("cloudinit.config.cc_chef.OMNIBUS_URL", OMNIBUS_URL_HTTP)
- @mock.patch('cloudinit.config.cc_chef.subp_blob_in_tempfile')
- def test_install_chef_from_omnibus_has_omnibus_version(self, m_subp_blob):
- """install_chef_from_omnibus provides version arg to OMNIBUS_URL."""
- chef_outfile = self.tmp_path('chef.out', self.new_root)
- response = '#!/bin/bash\necho "Hi Mom" > {0}'.format(chef_outfile)
- httpretty.register_uri(
- httpretty.GET, cc_chef.OMNIBUS_URL, body=response)
- cc_chef.install_chef_from_omnibus(omnibus_version='2.0')
-
- called_kwargs = m_subp_blob.call_args_list[0][1]
- expected_kwargs = {
- 'args': ['-v', '2.0'],
- 'basename': 'chef-omnibus-install',
- 'blob': response,
- 'capture': False
- }
- self.assertCountEqual(expected_kwargs, called_kwargs)
-
-
-class TestChef(FilesystemMockingTestCase):
-
- def setUp(self):
- super(TestChef, self).setUp()
- self.tmp = self.tmp_dir()
-
- def test_no_config(self):
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- cfg = {}
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- for d in cc_chef.CHEF_DIRS:
- self.assertFalse(os.path.isdir(d))
-
- @skipIf(not os.path.isfile(CLIENT_TEMPL),
- CLIENT_TEMPL + " is not available")
- def test_basic_config(self):
- """
- test basic config looks sane
-
- # This should create a file of the format...
- # Created by cloud-init v. 0.7.6 on Sat, 11 Oct 2014 23:57:21 +0000
- chef_license "accept"
- log_level :info
- ssl_verify_mode :verify_none
- log_location "/var/log/chef/client.log"
- validation_client_name "bob"
- validation_key "/etc/chef/validation.pem"
- client_key "/etc/chef/client.pem"
- chef_server_url "localhost"
- environment "_default"
- node_name "iid-datasource-none"
- json_attribs "/etc/chef/firstboot.json"
- file_cache_path "/var/cache/chef"
- file_backup_path "/var/backups/chef"
- pid_file "/var/run/chef/client.pid"
- Chef::Log::Formatter.show_time = true
- encrypted_data_bag_secret "/etc/chef/encrypted_data_bag_secret"
- """
- tpl_file = util.load_file('templates/chef_client.rb.tmpl')
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
- cfg = {
- 'chef': {
- 'chef_license': "accept",
- 'server_url': 'localhost',
- 'validation_name': 'bob',
- 'validation_key': "/etc/chef/vkey.pem",
- 'validation_cert': "this is my cert",
- 'encrypted_data_bag_secret':
- '/etc/chef/encrypted_data_bag_secret'
- },
- }
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- for d in cc_chef.CHEF_DIRS:
- self.assertTrue(os.path.isdir(d))
- c = util.load_file(cc_chef.CHEF_RB_PATH)
-
- # the content of these keys is not expected to be rendered to tmpl
- unrendered_keys = ('validation_cert',)
- for k, v in cfg['chef'].items():
- if k in unrendered_keys:
- continue
- self.assertIn(v, c)
- for k, v in cc_chef.CHEF_RB_TPL_DEFAULTS.items():
- if k in unrendered_keys:
- continue
- # the value from the cfg overrides that in the default
- val = cfg['chef'].get(k, v)
- if isinstance(val, str):
- self.assertIn(val, c)
- c = util.load_file(cc_chef.CHEF_FB_PATH)
- self.assertEqual({}, json.loads(c))
-
- def test_firstboot_json(self):
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- cfg = {
- 'chef': {
- 'server_url': 'localhost',
- 'validation_name': 'bob',
- 'run_list': ['a', 'b', 'c'],
- 'initial_attributes': {
- 'c': 'd',
- }
- },
- }
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- c = util.load_file(cc_chef.CHEF_FB_PATH)
- self.assertEqual(
- {
- 'run_list': ['a', 'b', 'c'],
- 'c': 'd',
- }, json.loads(c))
-
- @skipIf(not os.path.isfile(CLIENT_TEMPL),
- CLIENT_TEMPL + " is not available")
- def test_template_deletes(self):
- tpl_file = util.load_file('templates/chef_client.rb.tmpl')
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
- cfg = {
- 'chef': {
- 'server_url': 'localhost',
- 'validation_name': 'bob',
- 'json_attribs': None,
- 'show_time': None,
- },
- }
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- c = util.load_file(cc_chef.CHEF_RB_PATH)
- self.assertNotIn('json_attribs', c)
- self.assertNotIn('Formatter.show_time', c)
-
- @skipIf(not os.path.isfile(CLIENT_TEMPL),
- CLIENT_TEMPL + " is not available")
- def test_validation_cert_and_validation_key(self):
- # test validation_cert content is written to validation_key path
- tpl_file = util.load_file('templates/chef_client.rb.tmpl')
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
- v_path = '/etc/chef/vkey.pem'
- v_cert = 'this is my cert'
- cfg = {
- 'chef': {
- 'server_url': 'localhost',
- 'validation_name': 'bob',
- 'validation_key': v_path,
- 'validation_cert': v_cert
- },
- }
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- content = util.load_file(cc_chef.CHEF_RB_PATH)
- self.assertIn(v_path, content)
- util.load_file(v_path)
- self.assertEqual(v_cert, util.load_file(v_path))
-
- def test_validation_cert_with_system(self):
- # test validation_cert content is not written over system file
- tpl_file = util.load_file('templates/chef_client.rb.tmpl')
- self.patchUtils(self.tmp)
- self.patchOS(self.tmp)
-
- v_path = '/etc/chef/vkey.pem'
- v_cert = "system"
- expected_cert = "this is the system file certificate"
- cfg = {
- 'chef': {
- 'server_url': 'localhost',
- 'validation_name': 'bob',
- 'validation_key': v_path,
- 'validation_cert': v_cert
- },
- }
- util.write_file('/etc/cloud/templates/chef_client.rb.tmpl', tpl_file)
- util.write_file(v_path, expected_cert)
- cc_chef.handle('chef', cfg, get_cloud(), LOG, [])
- content = util.load_file(cc_chef.CHEF_RB_PATH)
- self.assertIn(v_path, content)
- util.load_file(v_path)
- self.assertEqual(expected_cert, util.load_file(v_path))
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_debug.py b/tests/unittests/test_handler/test_handler_debug.py
deleted file mode 100644
index 41e9d9bd..00000000
--- a/tests/unittests/test_handler/test_handler_debug.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (C) 2014 Yahoo! Inc.
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import shutil
-import tempfile
-
-from cloudinit import util
-from cloudinit.config import cc_debug
-from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock)
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-@mock.patch('cloudinit.distros.debian.read_system_locale')
-class TestDebug(FilesystemMockingTestCase):
- def setUp(self):
- super(TestDebug, self).setUp()
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
- self.patchUtils(self.new_root)
-
- def test_debug_write(self, m_locale):
- m_locale.return_value = 'en_US.UTF-8'
- cfg = {
- 'abc': '123',
- 'c': '\u20a0',
- 'debug': {
- 'verbose': True,
- # Does not actually write here due to mocking...
- 'output': '/var/log/cloud-init-debug.log',
- },
- }
- cc = get_cloud()
- cc_debug.handle('cc_debug', cfg, cc, LOG, [])
- contents = util.load_file('/var/log/cloud-init-debug.log')
- # Some basic sanity tests...
- self.assertNotEqual(0, len(contents))
- for k in cfg.keys():
- self.assertIn(k, contents)
-
- def test_debug_no_write(self, m_locale):
- m_locale.return_value = 'en_US.UTF-8'
- cfg = {
- 'abc': '123',
- 'debug': {
- 'verbose': False,
- # Does not actually write here due to mocking...
- 'output': '/var/log/cloud-init-debug.log',
- },
- }
- cc = get_cloud()
- cc_debug.handle('cc_debug', cfg, cc, LOG, [])
- self.assertRaises(IOError,
- util.load_file, '/var/log/cloud-init-debug.log')
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_disk_setup.py b/tests/unittests/test_handler/test_handler_disk_setup.py
deleted file mode 100644
index 4f4a57fa..00000000
--- a/tests/unittests/test_handler/test_handler_disk_setup.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import random
-
-from cloudinit.config import cc_disk_setup
-from cloudinit.tests.helpers import CiTestCase, ExitStack, mock, TestCase
-
-
-class TestIsDiskUsed(TestCase):
-
- def setUp(self):
- super(TestIsDiskUsed, self).setUp()
- self.patches = ExitStack()
- mod_name = 'cloudinit.config.cc_disk_setup'
- self.enumerate_disk = self.patches.enter_context(
- mock.patch('{0}.enumerate_disk'.format(mod_name)))
- self.check_fs = self.patches.enter_context(
- mock.patch('{0}.check_fs'.format(mod_name)))
-
- def tearDown(self):
- super(TestIsDiskUsed, self).tearDown()
- self.patches.close()
-
- def test_multiple_child_nodes_returns_true(self):
- self.enumerate_disk.return_value = (mock.MagicMock() for _ in range(2))
- self.check_fs.return_value = (mock.MagicMock(), None, mock.MagicMock())
- self.assertTrue(cc_disk_setup.is_disk_used(mock.MagicMock()))
-
- def test_valid_filesystem_returns_true(self):
- self.enumerate_disk.return_value = (mock.MagicMock() for _ in range(1))
- self.check_fs.return_value = (
- mock.MagicMock(), 'ext4', mock.MagicMock())
- self.assertTrue(cc_disk_setup.is_disk_used(mock.MagicMock()))
-
- def test_one_child_nodes_and_no_fs_returns_false(self):
- self.enumerate_disk.return_value = (mock.MagicMock() for _ in range(1))
- self.check_fs.return_value = (mock.MagicMock(), None, mock.MagicMock())
- self.assertFalse(cc_disk_setup.is_disk_used(mock.MagicMock()))
-
-
-class TestGetMbrHddSize(TestCase):
-
- def setUp(self):
- super(TestGetMbrHddSize, self).setUp()
- self.patches = ExitStack()
- self.subp = self.patches.enter_context(
- mock.patch.object(cc_disk_setup.subp, 'subp'))
-
- def tearDown(self):
- super(TestGetMbrHddSize, self).tearDown()
- self.patches.close()
-
- def _configure_subp_mock(self, hdd_size_in_bytes, sector_size_in_bytes):
- def _subp(cmd, *args, **kwargs):
- self.assertEqual(3, len(cmd))
- if '--getsize64' in cmd:
- return hdd_size_in_bytes, None
- elif '--getss' in cmd:
- return sector_size_in_bytes, None
- raise Exception('Unexpected blockdev command called')
-
- self.subp.side_effect = _subp
-
- def _test_for_sector_size(self, sector_size):
- size_in_bytes = random.randint(10000, 10000000) * 512
- size_in_sectors = size_in_bytes / sector_size
- self._configure_subp_mock(size_in_bytes, sector_size)
- self.assertEqual(size_in_sectors,
- cc_disk_setup.get_hdd_size('/dev/sda1'))
-
- def test_size_for_512_byte_sectors(self):
- self._test_for_sector_size(512)
-
- def test_size_for_1024_byte_sectors(self):
- self._test_for_sector_size(1024)
-
- def test_size_for_2048_byte_sectors(self):
- self._test_for_sector_size(2048)
-
- def test_size_for_4096_byte_sectors(self):
- self._test_for_sector_size(4096)
-
-
-class TestGetPartitionMbrLayout(TestCase):
-
- def test_single_partition_using_boolean(self):
- self.assertEqual('0,',
- cc_disk_setup.get_partition_mbr_layout(1000, True))
-
- def test_single_partition_using_list(self):
- disk_size = random.randint(1000000, 1000000000000)
- self.assertEqual(
- ',,83',
- cc_disk_setup.get_partition_mbr_layout(disk_size, [100]))
-
- def test_half_and_half(self):
- disk_size = random.randint(1000000, 1000000000000)
- expected_partition_size = int(float(disk_size) / 2)
- self.assertEqual(
- ',{0},83\n,,83'.format(expected_partition_size),
- cc_disk_setup.get_partition_mbr_layout(disk_size, [50, 50]))
-
- def test_thirds_with_different_partition_type(self):
- disk_size = random.randint(1000000, 1000000000000)
- expected_partition_size = int(float(disk_size) * 0.33)
- self.assertEqual(
- ',{0},83\n,,82'.format(expected_partition_size),
- cc_disk_setup.get_partition_mbr_layout(disk_size, [33, [66, 82]]))
-
-
-class TestUpdateFsSetupDevices(TestCase):
- def test_regression_1634678(self):
- # Cf. https://bugs.launchpad.net/cloud-init/+bug/1634678
- fs_setup = {
- 'partition': 'auto',
- 'device': '/dev/xvdb1',
- 'overwrite': False,
- 'label': 'test',
- 'filesystem': 'ext4'
- }
-
- cc_disk_setup.update_fs_setup_devices([fs_setup],
- lambda device: device)
-
- self.assertEqual({
- '_origname': '/dev/xvdb1',
- 'partition': 'auto',
- 'device': '/dev/xvdb1',
- 'overwrite': False,
- 'label': 'test',
- 'filesystem': 'ext4'
- }, fs_setup)
-
- def test_dotted_devname(self):
- fs_setup = {
- 'partition': 'auto',
- 'device': 'ephemeral0.0',
- 'label': 'test2',
- 'filesystem': 'xfs'
- }
-
- cc_disk_setup.update_fs_setup_devices([fs_setup],
- lambda device: device)
-
- self.assertEqual({
- '_origname': 'ephemeral0.0',
- '_partition': 'auto',
- 'partition': '0',
- 'device': 'ephemeral0',
- 'label': 'test2',
- 'filesystem': 'xfs'
- }, fs_setup)
-
- def test_dotted_devname_populates_partition(self):
- fs_setup = {
- 'device': 'ephemeral0.1',
- 'label': 'test2',
- 'filesystem': 'xfs'
- }
- cc_disk_setup.update_fs_setup_devices([fs_setup],
- lambda device: device)
- self.assertEqual({
- '_origname': 'ephemeral0.1',
- 'device': 'ephemeral0',
- 'partition': '1',
- 'label': 'test2',
- 'filesystem': 'xfs'
- }, fs_setup)
-
-
-@mock.patch('cloudinit.config.cc_disk_setup.assert_and_settle_device',
- return_value=None)
-@mock.patch('cloudinit.config.cc_disk_setup.find_device_node',
- return_value=('/dev/xdb1', False))
-@mock.patch('cloudinit.config.cc_disk_setup.device_type', return_value=None)
-@mock.patch('cloudinit.config.cc_disk_setup.subp.subp', return_value=('', ''))
-class TestMkfsCommandHandling(CiTestCase):
-
- with_logs = True
-
- def test_with_cmd(self, subp, *args):
- """mkfs honors cmd and logs warnings when extra_opts or overwrite are
- provided."""
- cc_disk_setup.mkfs({
- 'cmd': 'mkfs -t %(filesystem)s -L %(label)s %(device)s',
- 'filesystem': 'ext4',
- 'device': '/dev/xdb1',
- 'label': 'with_cmd',
- 'extra_opts': ['should', 'generate', 'warning'],
- 'overwrite': 'should generate warning too'
- })
-
- self.assertIn(
- 'extra_opts ' +
- 'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
- '/dev/xdb1',
- self.logs.getvalue())
- self.assertIn(
- 'overwrite ' +
- 'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
- '/dev/xdb1',
- self.logs.getvalue())
-
- subp.assert_called_once_with(
- 'mkfs -t ext4 -L with_cmd /dev/xdb1', shell=True)
-
- @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
- def test_overwrite_and_extra_opts_without_cmd(self, m_which, subp, *args):
- """mkfs observes extra_opts and overwrite settings when cmd is not
- present."""
- m_which.side_effect = lambda p: {'mkfs.ext4': '/sbin/mkfs.ext4'}[p]
- cc_disk_setup.mkfs({
- 'filesystem': 'ext4',
- 'device': '/dev/xdb1',
- 'label': 'without_cmd',
- 'extra_opts': ['are', 'added'],
- 'overwrite': True
- })
-
- subp.assert_called_once_with(
- ['/sbin/mkfs.ext4', '/dev/xdb1',
- '-L', 'without_cmd', '-F', 'are', 'added'],
- shell=False)
-
- @mock.patch('cloudinit.config.cc_disk_setup.subp.which')
- def test_mkswap(self, m_which, subp, *args):
- """mkfs observes extra_opts and overwrite settings when cmd is not
- present."""
- m_which.side_effect = iter([None, '/sbin/mkswap'])
- cc_disk_setup.mkfs({
- 'filesystem': 'swap',
- 'device': '/dev/xdb1',
- 'label': 'swap',
- 'overwrite': True,
- })
-
- self.assertEqual([mock.call('mkfs.swap'), mock.call('mkswap')],
- m_which.call_args_list)
- subp.assert_called_once_with(
- ['/sbin/mkswap', '/dev/xdb1', '-L', 'swap', '-f'], shell=False)
-
-#
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_etc_hosts.py b/tests/unittests/test_handler/test_handler_etc_hosts.py
deleted file mode 100644
index e3778b11..00000000
--- a/tests/unittests/test_handler/test_handler_etc_hosts.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_update_etc_hosts
-
-from cloudinit import cloud
-from cloudinit import distros
-from cloudinit import helpers
-from cloudinit import util
-
-from cloudinit.tests import helpers as t_help
-
-import logging
-import os
-import shutil
-
-LOG = logging.getLogger(__name__)
-
-
-class TestHostsFile(t_help.FilesystemMockingTestCase):
- def setUp(self):
- super(TestHostsFile, self).setUp()
- self.tmp = self.tmp_dir()
-
- def _fetch_distro(self, kind):
- cls = distros.fetch(kind)
- paths = helpers.Paths({})
- return cls(kind, {}, paths)
-
- def test_write_etc_hosts_suse_localhost(self):
- cfg = {
- 'manage_etc_hosts': 'localhost',
- 'hostname': 'cloud-init.test.us'
- }
- os.makedirs('%s/etc/' % self.tmp)
- hosts_content = '192.168.1.1 blah.blah.us blah\n'
- fout = open('%s/etc/hosts' % self.tmp, 'w')
- fout.write(hosts_content)
- fout.close()
- distro = self._fetch_distro('sles')
- distro.hosts_fn = '%s/etc/hosts' % self.tmp
- paths = helpers.Paths({})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
- contents = util.load_file('%s/etc/hosts' % self.tmp)
- if '127.0.1.1\tcloud-init.test.us\tcloud-init' not in contents:
- self.assertIsNone('No entry for 127.0.1.1 in etc/hosts')
- if '192.168.1.1\tblah.blah.us\tblah' not in contents:
- self.assertIsNone('Default etc/hosts content modified')
-
- @t_help.skipUnlessJinja()
- def test_write_etc_hosts_suse_template(self):
- cfg = {
- 'manage_etc_hosts': 'template',
- 'hostname': 'cloud-init.test.us'
- }
- shutil.copytree('templates', '%s/etc/cloud/templates' % self.tmp)
- distro = self._fetch_distro('sles')
- paths = helpers.Paths({})
- paths.template_tpl = '%s' % self.tmp + '/etc/cloud/templates/%s.tmpl'
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_update_etc_hosts.handle('test', cfg, cc, LOG, [])
- contents = util.load_file('%s/etc/hosts' % self.tmp)
- if '127.0.1.1 cloud-init.test.us cloud-init' not in contents:
- self.assertIsNone('No entry for 127.0.1.1 in etc/hosts')
- if '::1 cloud-init.test.us cloud-init' not in contents:
- self.assertIsNone('No entry for 127.0.0.1 in etc/hosts')
diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py
deleted file mode 100644
index b7d5d7ba..00000000
--- a/tests/unittests/test_handler/test_handler_growpart.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit import cloud
-from cloudinit.config import cc_growpart
-from cloudinit import subp
-from cloudinit import temp_utils
-
-from cloudinit.tests.helpers import TestCase
-
-import errno
-import logging
-import os
-import shutil
-import re
-import unittest
-from contextlib import ExitStack
-from unittest import mock
-import stat
-
-# growpart:
-# mode: auto # off, on, auto, 'growpart'
-# devices: ['root']
-
-HELP_GROWPART_RESIZE = """
-growpart disk partition
- rewrite partition table so that partition takes up all the space it can
- options:
- -h | --help print Usage and exit
-<SNIP>
- -u | --update R update the the kernel partition table info after growing
- this requires kernel support and 'partx --update'
- R is one of:
- - 'auto' : [default] update partition if possible
-<SNIP>
- Example:
- - growpart /dev/sda 1
- Resize partition 1 on /dev/sda
-"""
-
-HELP_GROWPART_NO_RESIZE = """
-growpart disk partition
- rewrite partition table so that partition takes up all the space it can
- options:
- -h | --help print Usage and exit
-<SNIP>
- Example:
- - growpart /dev/sda 1
- Resize partition 1 on /dev/sda
-"""
-
-HELP_GPART = """
-usage: gpart add -t type [-a alignment] [-b start] <SNIP> geom
- gpart backup geom
- gpart bootcode [-b bootcode] [-p partcode -i index] [-f flags] geom
-<SNIP>
- gpart resize -i index [-a alignment] [-s size] [-f flags] geom
- gpart restore [-lF] [-f flags] provider [...]
- gpart recover [-f flags] geom
- gpart help
-<SNIP>
-"""
-
-
-class Dir:
- '''Stub object'''
- def __init__(self, name):
- self.name = name
- self.st_mode = name
-
- def is_dir(self, *args, **kwargs):
- return True
-
- def stat(self, *args, **kwargs):
- return self
-
-
-class Scanner:
- '''Stub object'''
- def __enter__(self):
- return (Dir(''), Dir(''),)
-
- def __exit__(self, *args):
- pass
-
-
-class TestDisabled(unittest.TestCase):
- def setUp(self):
- super(TestDisabled, self).setUp()
- self.name = "growpart"
- self.cloud_init = None
- self.log = logging.getLogger("TestDisabled")
- self.args = []
-
- self.handle = cc_growpart.handle
-
- def test_mode_off(self):
- # Test that nothing is done if mode is off.
-
- # this really only verifies that resizer_factory isn't called
- config = {'growpart': {'mode': 'off'}}
-
- with mock.patch.object(cc_growpart, 'resizer_factory') as mockobj:
- self.handle(self.name, config, self.cloud_init, self.log,
- self.args)
- self.assertEqual(mockobj.call_count, 0)
-
-
-class TestConfig(TestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.name = "growpart"
- self.paths = None
- self.cloud = cloud.Cloud(None, self.paths, None, None, None)
- self.log = logging.getLogger("TestConfig")
- self.args = []
-
- self.cloud_init = None
- self.handle = cc_growpart.handle
- self.tmppath = '/tmp/cloudinit-test-file'
- self.tmpdir = os.scandir('/tmp')
- self.tmpfile = open(self.tmppath, 'w')
-
- def tearDown(self):
- self.tmpfile.close()
- os.remove(self.tmppath)
-
- @mock.patch.dict("os.environ", clear=True)
- def test_no_resizers_auto_is_fine(self):
- with mock.patch.object(
- subp, 'subp',
- return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:
-
- config = {'growpart': {'mode': 'auto'}}
- self.handle(self.name, config, self.cloud_init, self.log,
- self.args)
-
- mockobj.assert_has_calls([
- mock.call(['growpart', '--help'], env={'LANG': 'C'}),
- mock.call(['gpart', 'help'], env={'LANG': 'C'}, rcs=[0, 1])])
-
- @mock.patch.dict("os.environ", clear=True)
- def test_no_resizers_mode_growpart_is_exception(self):
- with mock.patch.object(
- subp, 'subp',
- return_value=(HELP_GROWPART_NO_RESIZE, "")) as mockobj:
- config = {'growpart': {'mode': "growpart"}}
- self.assertRaises(
- ValueError, self.handle, self.name, config,
- self.cloud_init, self.log, self.args)
-
- mockobj.assert_called_once_with(
- ['growpart', '--help'], env={'LANG': 'C'})
-
- @mock.patch.dict("os.environ", clear=True)
- def test_mode_auto_prefers_growpart(self):
- with mock.patch.object(
- subp, 'subp',
- return_value=(HELP_GROWPART_RESIZE, "")) as mockobj:
- ret = cc_growpart.resizer_factory(mode="auto")
- self.assertIsInstance(ret, cc_growpart.ResizeGrowPart)
-
- mockobj.assert_called_once_with(
- ['growpart', '--help'], env={'LANG': 'C'})
-
- @mock.patch.dict("os.environ", {'LANG': 'cs_CZ.UTF-8'}, clear=True)
- @mock.patch.object(temp_utils, 'mkdtemp', return_value='/tmp/much-random')
- @mock.patch.object(stat, 'S_ISDIR', return_value=False)
- @mock.patch.object(os.path, 'samestat', return_value=True)
- @mock.patch.object(os.path, "join", return_value='/tmp')
- @mock.patch.object(os, 'scandir', return_value=Scanner())
- @mock.patch.object(os, 'mkdir')
- @mock.patch.object(os, 'unlink')
- @mock.patch.object(os, 'rmdir')
- @mock.patch.object(os, 'open', return_value=1)
- @mock.patch.object(os, 'close')
- @mock.patch.object(shutil, 'rmtree')
- @mock.patch.object(os, 'lseek', return_value=1024)
- @mock.patch.object(os, 'lstat', return_value='interesting metadata')
- def test_force_lang_check_tempfile(self, *args, **kwargs):
- with mock.patch.object(
- subp,
- 'subp',
- return_value=(HELP_GROWPART_RESIZE, "")) as mockobj:
-
- ret = cc_growpart.resizer_factory(mode="auto")
- self.assertIsInstance(ret, cc_growpart.ResizeGrowPart)
- diskdev = '/dev/sdb'
- partnum = 1
- partdev = '/dev/sdb'
- ret.resize(diskdev, partnum, partdev)
- mockobj.assert_has_calls([
- mock.call(
- ["growpart", '--dry-run', diskdev, partnum],
- env={'LANG': 'C', 'TMPDIR': '/tmp'}),
- mock.call(
- ["growpart", diskdev, partnum],
- env={'LANG': 'C', 'TMPDIR': '/tmp'}),
- ])
-
- @mock.patch.dict("os.environ", {'LANG': 'cs_CZ.UTF-8'}, clear=True)
- def test_mode_auto_falls_back_to_gpart(self):
- with mock.patch.object(
- subp, 'subp',
- return_value=("", HELP_GPART)) as mockobj:
- ret = cc_growpart.resizer_factory(mode="auto")
- self.assertIsInstance(ret, cc_growpart.ResizeGpart)
-
- mockobj.assert_has_calls([
- mock.call(['growpart', '--help'], env={'LANG': 'C'}),
- mock.call(['gpart', 'help'], env={'LANG': 'C'}, rcs=[0, 1])])
-
- def test_handle_with_no_growpart_entry(self):
- # if no 'growpart' entry in config, then mode=auto should be used
-
- myresizer = object()
- retval = (("/", cc_growpart.RESIZE.CHANGED, "my-message",),)
-
- with ExitStack() as mocks:
- factory = mocks.enter_context(
- mock.patch.object(cc_growpart, 'resizer_factory',
- return_value=myresizer))
- rsdevs = mocks.enter_context(
- mock.patch.object(cc_growpart, 'resize_devices',
- return_value=retval))
- mocks.enter_context(
- mock.patch.object(cc_growpart, 'RESIZERS',
- (('mysizer', object),)
- ))
-
- self.handle(self.name, {}, self.cloud_init, self.log, self.args)
-
- factory.assert_called_once_with('auto')
- rsdevs.assert_called_once_with(myresizer, ['/'])
-
-
-class TestResize(unittest.TestCase):
- def setUp(self):
- super(TestResize, self).setUp()
- self.name = "growpart"
- self.log = logging.getLogger("TestResize")
-
- def test_simple_devices(self):
- # test simple device list
- # this patches out devent2dev, os.stat, and device_part_info
- # so in the end, doesn't test a lot
- devs = ["/dev/XXda1", "/dev/YYda2"]
- devstat_ret = Bunch(st_mode=25008, st_ino=6078, st_dev=5,
- st_nlink=1, st_uid=0, st_gid=6, st_size=0,
- st_atime=0, st_mtime=0, st_ctime=0)
- enoent = ["/dev/NOENT"]
- real_stat = os.stat
- resize_calls = []
-
- class myresizer(object):
- def resize(self, diskdev, partnum, partdev):
- resize_calls.append((diskdev, partnum, partdev))
- if partdev == "/dev/YYda2":
- return (1024, 2048)
- return (1024, 1024) # old size, new size
-
- def mystat(path):
- if path in devs:
- return devstat_ret
- if path in enoent:
- e = OSError("%s: does not exist" % path)
- e.errno = errno.ENOENT
- raise e
- return real_stat(path)
-
- try:
- opinfo = cc_growpart.device_part_info
- cc_growpart.device_part_info = simple_device_part_info
- os.stat = mystat
-
- resized = cc_growpart.resize_devices(myresizer(), devs + enoent)
-
- def find(name, res):
- for f in res:
- if f[0] == name:
- return f
- return None
-
- self.assertEqual(cc_growpart.RESIZE.NOCHANGE,
- find("/dev/XXda1", resized)[1])
- self.assertEqual(cc_growpart.RESIZE.CHANGED,
- find("/dev/YYda2", resized)[1])
- self.assertEqual(cc_growpart.RESIZE.SKIPPED,
- find(enoent[0], resized)[1])
- # self.assertEqual(resize_calls,
- # [("/dev/XXda", "1", "/dev/XXda1"),
- # ("/dev/YYda", "2", "/dev/YYda2")])
- finally:
- cc_growpart.device_part_info = opinfo
- os.stat = real_stat
-
-
-def simple_device_part_info(devpath):
- # simple stupid return (/dev/vda, 1) for /dev/vda
- ret = re.search("([^0-9]*)([0-9]*)$", devpath)
- x = (ret.group(1), ret.group(2))
- return x
-
-
-class Bunch(object):
- def __init__(self, **kwds):
- self.__dict__.update(kwds)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_install_hotplug.py b/tests/unittests/test_handler/test_handler_install_hotplug.py
deleted file mode 100644
index 5d6b1e77..00000000
--- a/tests/unittests/test_handler/test_handler_install_hotplug.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-from collections import namedtuple
-from unittest import mock
-
-import pytest
-
-from cloudinit.config.cc_install_hotplug import (
- handle,
- HOTPLUG_UDEV_PATH,
- HOTPLUG_UDEV_RULES_TEMPLATE,
-)
-from cloudinit.event import EventScope, EventType
-
-
-@pytest.yield_fixture()
-def mocks():
- m_update_enabled = mock.patch('cloudinit.stages.update_event_enabled')
- m_write = mock.patch('cloudinit.util.write_file', autospec=True)
- m_del = mock.patch('cloudinit.util.del_file', autospec=True)
- m_subp = mock.patch('cloudinit.subp.subp')
- m_which = mock.patch('cloudinit.subp.which', return_value=None)
- m_path_exists = mock.patch('os.path.exists', return_value=False)
-
- yield namedtuple(
- 'Mocks',
- 'm_update_enabled m_write m_del m_subp m_which m_path_exists'
- )(
- m_update_enabled.start(), m_write.start(), m_del.start(),
- m_subp.start(), m_which.start(), m_path_exists.start()
- )
-
- m_update_enabled.stop()
- m_write.stop()
- m_del.stop()
- m_subp.stop()
- m_which.stop()
- m_path_exists.stop()
-
-
-class TestInstallHotplug:
- @pytest.mark.parametrize('libexec_exists', [True, False])
- def test_rules_installed_when_supported_and_enabled(
- self, mocks, libexec_exists
- ):
- mocks.m_which.return_value = 'udevadm'
- mocks.m_update_enabled.return_value = True
- m_cloud = mock.MagicMock()
- m_cloud.datasource.get_supported_events.return_value = {
- EventScope.NETWORK: {EventType.HOTPLUG}
- }
-
- if libexec_exists:
- libexecdir = "/usr/libexec/cloud-init"
- else:
- libexecdir = "/usr/lib/cloud-init"
- with mock.patch('os.path.exists', return_value=libexec_exists):
- handle(None, {}, m_cloud, mock.Mock(), None)
- mocks.m_write.assert_called_once_with(
- filename=HOTPLUG_UDEV_PATH,
- content=HOTPLUG_UDEV_RULES_TEMPLATE.format(
- libexecdir=libexecdir),
- )
- assert mocks.m_subp.call_args_list == [mock.call([
- 'udevadm', 'control', '--reload-rules',
- ])]
- assert mocks.m_del.call_args_list == []
-
- def test_rules_not_installed_when_unsupported(self, mocks):
- mocks.m_update_enabled.return_value = True
- m_cloud = mock.MagicMock()
- m_cloud.datasource.get_supported_events.return_value = {}
-
- handle(None, {}, m_cloud, mock.Mock(), None)
- assert mocks.m_write.call_args_list == []
- assert mocks.m_del.call_args_list == []
- assert mocks.m_subp.call_args_list == []
-
- def test_rules_not_installed_when_disabled(self, mocks):
- mocks.m_update_enabled.return_value = False
- m_cloud = mock.MagicMock()
- m_cloud.datasource.get_supported_events.return_value = {
- EventScope.NETWORK: {EventType.HOTPLUG}
- }
-
- handle(None, {}, m_cloud, mock.Mock(), None)
- assert mocks.m_write.call_args_list == []
- assert mocks.m_del.call_args_list == []
- assert mocks.m_subp.call_args_list == []
-
- def test_rules_uninstalled_when_disabled(self, mocks):
- mocks.m_path_exists.return_value = True
- mocks.m_update_enabled.return_value = False
- m_cloud = mock.MagicMock()
- m_cloud.datasource.get_supported_events.return_value = {}
-
- handle(None, {}, m_cloud, mock.Mock(), None)
- mocks.m_del.assert_called_with(HOTPLUG_UDEV_PATH)
- assert mocks.m_subp.call_args_list == [mock.call([
- 'udevadm', 'control', '--reload-rules',
- ])]
- assert mocks.m_write.call_args_list == []
-
- def test_rules_not_installed_when_no_udevadm(self, mocks):
- mocks.m_update_enabled.return_value = True
- m_cloud = mock.MagicMock()
- m_cloud.datasource.get_supported_events.return_value = {
- EventScope.NETWORK: {EventType.HOTPLUG}
- }
-
- handle(None, {}, m_cloud, mock.Mock(), None)
- assert mocks.m_del.call_args_list == []
- assert mocks.m_write.call_args_list == []
- assert mocks.m_subp.call_args_list == []
diff --git a/tests/unittests/test_handler/test_handler_landscape.py b/tests/unittests/test_handler/test_handler_landscape.py
deleted file mode 100644
index 1cc73ea2..00000000
--- a/tests/unittests/test_handler/test_handler_landscape.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-from configobj import ConfigObj
-
-from cloudinit.config import cc_landscape
-from cloudinit import util
-from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock,
- wrap_and_call)
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-class TestLandscape(FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestLandscape, self).setUp()
- self.new_root = self.tmp_dir()
- self.conf = self.tmp_path('client.conf', self.new_root)
- self.default_file = self.tmp_path('default_landscape', self.new_root)
- self.patchUtils(self.new_root)
- self.add_patch(
- 'cloudinit.distros.ubuntu.Distro.install_packages',
- 'm_install_packages'
- )
-
- def test_handler_skips_empty_landscape_cloudconfig(self):
- """Empty landscape cloud-config section does no work."""
- mycloud = get_cloud('ubuntu')
- mycloud.distro = mock.MagicMock()
- cfg = {'landscape': {}}
- cc_landscape.handle('notimportant', cfg, mycloud, LOG, None)
- self.assertFalse(mycloud.distro.install_packages.called)
-
- def test_handler_error_on_invalid_landscape_type(self):
- """Raise an error when landscape configuraiton option is invalid."""
- mycloud = get_cloud('ubuntu')
- cfg = {'landscape': 'wrongtype'}
- with self.assertRaises(RuntimeError) as context_manager:
- cc_landscape.handle('notimportant', cfg, mycloud, LOG, None)
- self.assertIn(
- "'landscape' key existed in config, but not a dict",
- str(context_manager.exception))
-
- @mock.patch('cloudinit.config.cc_landscape.subp')
- def test_handler_restarts_landscape_client(self, m_subp):
- """handler restarts lansdscape-client after install."""
- mycloud = get_cloud('ubuntu')
- cfg = {'landscape': {'client': {}}}
- wrap_and_call(
- 'cloudinit.config.cc_landscape',
- {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
- cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
- self.assertEqual(
- [mock.call(['service', 'landscape-client', 'restart'])],
- m_subp.subp.call_args_list)
-
- def test_handler_installs_client_and_creates_config_file(self):
- """Write landscape client.conf and install landscape-client."""
- mycloud = get_cloud('ubuntu')
- cfg = {'landscape': {'client': {}}}
- expected = {'client': {
- 'log_level': 'info',
- 'url': 'https://landscape.canonical.com/message-system',
- 'ping_url': 'http://landscape.canonical.com/ping',
- 'data_path': '/var/lib/landscape/client'}}
- mycloud.distro = mock.MagicMock()
- wrap_and_call(
- 'cloudinit.config.cc_landscape',
- {'LSC_CLIENT_CFG_FILE': {'new': self.conf},
- 'LS_DEFAULT_FILE': {'new': self.default_file}},
- cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
- self.assertEqual(
- [mock.call('landscape-client')],
- mycloud.distro.install_packages.call_args)
- self.assertEqual(expected, dict(ConfigObj(self.conf)))
- self.assertIn(
- 'Wrote landscape config file to {0}'.format(self.conf),
- self.logs.getvalue())
- default_content = util.load_file(self.default_file)
- self.assertEqual('RUN=1\n', default_content)
-
- def test_handler_writes_merged_client_config_file_with_defaults(self):
- """Merge and write options from LSC_CLIENT_CFG_FILE with defaults."""
- # Write existing sparse client.conf file
- util.write_file(self.conf, '[client]\ncomputer_title = My PC\n')
- mycloud = get_cloud('ubuntu')
- cfg = {'landscape': {'client': {}}}
- expected = {'client': {
- 'log_level': 'info',
- 'url': 'https://landscape.canonical.com/message-system',
- 'ping_url': 'http://landscape.canonical.com/ping',
- 'data_path': '/var/lib/landscape/client',
- 'computer_title': 'My PC'}}
- wrap_and_call(
- 'cloudinit.config.cc_landscape',
- {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
- cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
- self.assertEqual(expected, dict(ConfigObj(self.conf)))
- self.assertIn(
- 'Wrote landscape config file to {0}'.format(self.conf),
- self.logs.getvalue())
-
- def test_handler_writes_merged_provided_cloudconfig_with_defaults(self):
- """Merge and write options from cloud-config options with defaults."""
- # Write empty sparse client.conf file
- util.write_file(self.conf, '')
- mycloud = get_cloud('ubuntu')
- cfg = {'landscape': {'client': {'computer_title': 'My PC'}}}
- expected = {'client': {
- 'log_level': 'info',
- 'url': 'https://landscape.canonical.com/message-system',
- 'ping_url': 'http://landscape.canonical.com/ping',
- 'data_path': '/var/lib/landscape/client',
- 'computer_title': 'My PC'}}
- wrap_and_call(
- 'cloudinit.config.cc_landscape',
- {'LSC_CLIENT_CFG_FILE': {'new': self.conf}},
- cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None)
- self.assertEqual(expected, dict(ConfigObj(self.conf)))
- self.assertIn(
- 'Wrote landscape config file to {0}'.format(self.conf),
- self.logs.getvalue())
diff --git a/tests/unittests/test_handler/test_handler_locale.py b/tests/unittests/test_handler/test_handler_locale.py
deleted file mode 100644
index 3c17927e..00000000
--- a/tests/unittests/test_handler/test_handler_locale.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Author: Juerg Haefliger <juerg.haefliger@hp.com>
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import os
-import shutil
-import tempfile
-from io import BytesIO
-from configobj import ConfigObj
-from unittest import mock
-
-from cloudinit import util
-from cloudinit.config import cc_locale
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-
-LOG = logging.getLogger(__name__)
-
-
-class TestLocale(t_help.FilesystemMockingTestCase):
-
- def setUp(self):
- super(TestLocale, self).setUp()
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
- self.patchUtils(self.new_root)
-
- def test_set_locale_arch(self):
- locale = 'en_GB.UTF-8'
- locale_configfile = '/etc/invalid-locale-path'
- cfg = {
- 'locale': locale,
- 'locale_configfile': locale_configfile,
- }
- cc = get_cloud('arch')
-
- with mock.patch('cloudinit.distros.arch.subp.subp') as m_subp:
- with mock.patch('cloudinit.distros.arch.LOG.warning') as m_LOG:
- cc_locale.handle('cc_locale', cfg, cc, LOG, [])
- m_LOG.assert_called_with('Invalid locale_configfile %s, '
- 'only supported value is '
- '/etc/locale.conf',
- locale_configfile)
-
- contents = util.load_file(cc.distro.locale_gen_fn)
- self.assertIn('%s UTF-8' % locale, contents)
- m_subp.assert_called_with(['localectl',
- 'set-locale',
- locale], capture=False)
-
- def test_set_locale_sles(self):
-
- cfg = {
- 'locale': 'My.Locale',
- }
- cc = get_cloud('sles')
- cc_locale.handle('cc_locale', cfg, cc, LOG, [])
- if cc.distro.uses_systemd():
- locale_conf = cc.distro.systemd_locale_conf_fn
- else:
- locale_conf = cc.distro.locale_conf_fn
- contents = util.load_file(locale_conf, decode=False)
- n_cfg = ConfigObj(BytesIO(contents))
- if cc.distro.uses_systemd():
- self.assertEqual({'LANG': cfg['locale']}, dict(n_cfg))
- else:
- self.assertEqual({'RC_LANG': cfg['locale']}, dict(n_cfg))
-
- def test_set_locale_sles_default(self):
- cfg = {}
- cc = get_cloud('sles')
- cc_locale.handle('cc_locale', cfg, cc, LOG, [])
-
- if cc.distro.uses_systemd():
- locale_conf = cc.distro.systemd_locale_conf_fn
- keyname = 'LANG'
- else:
- locale_conf = cc.distro.locale_conf_fn
- keyname = 'RC_LANG'
-
- contents = util.load_file(locale_conf, decode=False)
- n_cfg = ConfigObj(BytesIO(contents))
- self.assertEqual({keyname: 'en_US.UTF-8'}, dict(n_cfg))
-
- def test_locale_update_config_if_different_than_default(self):
- """Test cc_locale writes updates conf if different than default"""
- locale_conf = os.path.join(self.new_root, "etc/default/locale")
- util.write_file(locale_conf, 'LANG="en_US.UTF-8"\n')
- cfg = {'locale': 'C.UTF-8'}
- cc = get_cloud('ubuntu')
- with mock.patch('cloudinit.distros.debian.subp.subp') as m_subp:
- with mock.patch('cloudinit.distros.debian.LOCALE_CONF_FN',
- locale_conf):
- cc_locale.handle('cc_locale', cfg, cc, LOG, [])
- m_subp.assert_called_with(['update-locale',
- '--locale-file=%s' % locale_conf,
- 'LANG=C.UTF-8'], capture=False)
-
- def test_locale_rhel_defaults_en_us_utf8(self):
- """Test cc_locale gets en_US.UTF-8 from distro get_locale fallback"""
- cfg = {}
- cc = get_cloud('rhel')
- update_sysconfig = 'cloudinit.distros.rhel_util.update_sysconfig_file'
- with mock.patch.object(cc.distro, 'uses_systemd') as m_use_sd:
- m_use_sd.return_value = True
- with mock.patch(update_sysconfig) as m_update_syscfg:
- cc_locale.handle('cc_locale', cfg, cc, LOG, [])
- m_update_syscfg.assert_called_with('/etc/locale.conf',
- {'LANG': 'en_US.UTF-8'})
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_lxd.py b/tests/unittests/test_handler/test_handler_lxd.py
deleted file mode 100644
index ea8b6e90..00000000
--- a/tests/unittests/test_handler/test_handler_lxd.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-from unittest import mock
-
-from cloudinit.config import cc_lxd
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-
-class TestLxd(t_help.CiTestCase):
-
- with_logs = True
-
- lxd_cfg = {
- 'lxd': {
- 'init': {
- 'network_address': '0.0.0.0',
- 'storage_backend': 'zfs',
- 'storage_pool': 'poolname',
- }
- }
- }
-
- @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.subp")
- def test_lxd_init(self, mock_subp, m_maybe_clean):
- cc = get_cloud()
- mock_subp.which.return_value = True
- m_maybe_clean.return_value = None
- cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
- self.assertTrue(mock_subp.which.called)
- # no bridge config, so maybe_cleanup should not be called.
- self.assertFalse(m_maybe_clean.called)
- self.assertEqual(
- [mock.call(['lxd', 'waitready', '--timeout=300']),
- mock.call(
- ['lxd', 'init', '--auto', '--network-address=0.0.0.0',
- '--storage-backend=zfs', '--storage-pool=poolname'])],
- mock_subp.subp.call_args_list)
-
- @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.subp")
- def test_lxd_install(self, mock_subp, m_maybe_clean):
- cc = get_cloud()
- cc.distro = mock.MagicMock()
- mock_subp.which.return_value = None
- cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
- self.assertNotIn('WARN', self.logs.getvalue())
- self.assertTrue(cc.distro.install_packages.called)
- cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, [])
- self.assertFalse(m_maybe_clean.called)
- install_pkg = cc.distro.install_packages.call_args_list[0][0][0]
- self.assertEqual(sorted(install_pkg), ['lxd', 'zfsutils-linux'])
-
- @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.subp")
- def test_no_init_does_nothing(self, mock_subp, m_maybe_clean):
- cc = get_cloud()
- cc.distro = mock.MagicMock()
- cc_lxd.handle('cc_lxd', {'lxd': {}}, cc, self.logger, [])
- self.assertFalse(cc.distro.install_packages.called)
- self.assertFalse(mock_subp.subp.called)
- self.assertFalse(m_maybe_clean.called)
-
- @mock.patch("cloudinit.config.cc_lxd.maybe_cleanup_default")
- @mock.patch("cloudinit.config.cc_lxd.subp")
- def test_no_lxd_does_nothing(self, mock_subp, m_maybe_clean):
- cc = get_cloud()
- cc.distro = mock.MagicMock()
- cc_lxd.handle('cc_lxd', {'package_update': True}, cc, self.logger, [])
- self.assertFalse(cc.distro.install_packages.called)
- self.assertFalse(mock_subp.subp.called)
- self.assertFalse(m_maybe_clean.called)
-
- def test_lxd_debconf_new_full(self):
- data = {"mode": "new",
- "name": "testbr0",
- "ipv4_address": "10.0.8.1",
- "ipv4_netmask": "24",
- "ipv4_dhcp_first": "10.0.8.2",
- "ipv4_dhcp_last": "10.0.8.254",
- "ipv4_dhcp_leases": "250",
- "ipv4_nat": "true",
- "ipv6_address": "fd98:9e0:3744::1",
- "ipv6_netmask": "64",
- "ipv6_nat": "true",
- "domain": "lxd"}
- self.assertEqual(
- cc_lxd.bridge_to_debconf(data),
- {"lxd/setup-bridge": "true",
- "lxd/bridge-name": "testbr0",
- "lxd/bridge-ipv4": "true",
- "lxd/bridge-ipv4-address": "10.0.8.1",
- "lxd/bridge-ipv4-netmask": "24",
- "lxd/bridge-ipv4-dhcp-first": "10.0.8.2",
- "lxd/bridge-ipv4-dhcp-last": "10.0.8.254",
- "lxd/bridge-ipv4-dhcp-leases": "250",
- "lxd/bridge-ipv4-nat": "true",
- "lxd/bridge-ipv6": "true",
- "lxd/bridge-ipv6-address": "fd98:9e0:3744::1",
- "lxd/bridge-ipv6-netmask": "64",
- "lxd/bridge-ipv6-nat": "true",
- "lxd/bridge-domain": "lxd"})
-
- def test_lxd_debconf_new_partial(self):
- data = {"mode": "new",
- "ipv6_address": "fd98:9e0:3744::1",
- "ipv6_netmask": "64",
- "ipv6_nat": "true"}
- self.assertEqual(
- cc_lxd.bridge_to_debconf(data),
- {"lxd/setup-bridge": "true",
- "lxd/bridge-ipv6": "true",
- "lxd/bridge-ipv6-address": "fd98:9e0:3744::1",
- "lxd/bridge-ipv6-netmask": "64",
- "lxd/bridge-ipv6-nat": "true"})
-
- def test_lxd_debconf_existing(self):
- data = {"mode": "existing",
- "name": "testbr0"}
- self.assertEqual(
- cc_lxd.bridge_to_debconf(data),
- {"lxd/setup-bridge": "false",
- "lxd/use-existing-bridge": "true",
- "lxd/bridge-name": "testbr0"})
-
- def test_lxd_debconf_none(self):
- data = {"mode": "none"}
- self.assertEqual(
- cc_lxd.bridge_to_debconf(data),
- {"lxd/setup-bridge": "false",
- "lxd/bridge-name": ""})
-
- def test_lxd_cmd_new_full(self):
- data = {"mode": "new",
- "name": "testbr0",
- "ipv4_address": "10.0.8.1",
- "ipv4_netmask": "24",
- "ipv4_dhcp_first": "10.0.8.2",
- "ipv4_dhcp_last": "10.0.8.254",
- "ipv4_dhcp_leases": "250",
- "ipv4_nat": "true",
- "ipv6_address": "fd98:9e0:3744::1",
- "ipv6_netmask": "64",
- "ipv6_nat": "true",
- "domain": "lxd"}
- self.assertEqual(
- cc_lxd.bridge_to_cmd(data),
- (["network", "create", "testbr0",
- "ipv4.address=10.0.8.1/24", "ipv4.nat=true",
- "ipv4.dhcp.ranges=10.0.8.2-10.0.8.254",
- "ipv6.address=fd98:9e0:3744::1/64",
- "ipv6.nat=true", "dns.domain=lxd"],
- ["network", "attach-profile",
- "testbr0", "default", "eth0"]))
-
- def test_lxd_cmd_new_partial(self):
- data = {"mode": "new",
- "ipv6_address": "fd98:9e0:3744::1",
- "ipv6_netmask": "64",
- "ipv6_nat": "true"}
- self.assertEqual(
- cc_lxd.bridge_to_cmd(data),
- (["network", "create", "lxdbr0", "ipv4.address=none",
- "ipv6.address=fd98:9e0:3744::1/64", "ipv6.nat=true"],
- ["network", "attach-profile",
- "lxdbr0", "default", "eth0"]))
-
- def test_lxd_cmd_existing(self):
- data = {"mode": "existing",
- "name": "testbr0"}
- self.assertEqual(
- cc_lxd.bridge_to_cmd(data),
- (None, ["network", "attach-profile",
- "testbr0", "default", "eth0"]))
-
- def test_lxd_cmd_none(self):
- data = {"mode": "none"}
- self.assertEqual(
- cc_lxd.bridge_to_cmd(data),
- (None, None))
-
-
-class TestLxdMaybeCleanupDefault(t_help.CiTestCase):
- """Test the implementation of maybe_cleanup_default."""
-
- defnet = cc_lxd._DEFAULT_NETWORK_NAME
-
- @mock.patch("cloudinit.config.cc_lxd._lxc")
- def test_network_other_than_default_not_deleted(self, m_lxc):
- """deletion or removal should only occur if bridge is default."""
- cc_lxd.maybe_cleanup_default(
- net_name="lxdbr1", did_init=True, create=True, attach=True)
- m_lxc.assert_not_called()
-
- @mock.patch("cloudinit.config.cc_lxd._lxc")
- def test_did_init_false_does_not_delete(self, m_lxc):
- """deletion or removal should only occur if did_init is True."""
- cc_lxd.maybe_cleanup_default(
- net_name=self.defnet, did_init=False, create=True, attach=True)
- m_lxc.assert_not_called()
-
- @mock.patch("cloudinit.config.cc_lxd._lxc")
- def test_network_deleted_if_create_true(self, m_lxc):
- """deletion of network should occur if create is True."""
- cc_lxd.maybe_cleanup_default(
- net_name=self.defnet, did_init=True, create=True, attach=False)
- m_lxc.assert_called_with(["network", "delete", self.defnet])
-
- @mock.patch("cloudinit.config.cc_lxd._lxc")
- def test_device_removed_if_attach_true(self, m_lxc):
- """deletion of network should occur if create is True."""
- nic_name = "my_nic"
- profile = "my_profile"
- cc_lxd.maybe_cleanup_default(
- net_name=self.defnet, did_init=True, create=False, attach=True,
- profile=profile, nic_name=nic_name)
- m_lxc.assert_called_once_with(
- ["profile", "device", "remove", profile, nic_name])
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_mcollective.py b/tests/unittests/test_handler/test_handler_mcollective.py
deleted file mode 100644
index 9cda6fbe..00000000
--- a/tests/unittests/test_handler/test_handler_mcollective.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import configobj
-import logging
-import os
-import shutil
-import tempfile
-from io import BytesIO
-
-from cloudinit import (util)
-from cloudinit.config import cc_mcollective
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-STOCK_CONFIG = """\
-main_collective = mcollective
-collectives = mcollective
-libdir = /usr/share/mcollective/plugins
-logfile = /var/log/mcollective.log
-loglevel = info
-daemonize = 1
-
-# Plugins
-securityprovider = psk
-plugin.psk = unset
-
-connector = activemq
-plugin.activemq.pool.size = 1
-plugin.activemq.pool.1.host = stomp1
-plugin.activemq.pool.1.port = 61613
-plugin.activemq.pool.1.user = mcollective
-plugin.activemq.pool.1.password = marionette
-
-# Facts
-factsource = yaml
-plugin.yaml = /etc/mcollective/facts.yaml
-"""
-
-
-class TestConfig(t_help.FilesystemMockingTestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
- # "./": make os.path.join behave correctly with abs path as second arg
- self.server_cfg = os.path.join(
- self.tmp, "./" + cc_mcollective.SERVER_CFG)
- self.pubcert_file = os.path.join(
- self.tmp, "./" + cc_mcollective.PUBCERT_FILE)
- self.pricert_file = os.path.join(
- self.tmp, self.tmp, "./" + cc_mcollective.PRICERT_FILE)
-
- def test_basic_config(self):
- cfg = {
- 'mcollective': {
- 'conf': {
- 'loglevel': 'debug',
- 'connector': 'rabbitmq',
- 'logfile': '/var/log/mcollective.log',
- 'ttl': '4294957',
- 'collectives': 'mcollective',
- 'main_collective': 'mcollective',
- 'securityprovider': 'psk',
- 'daemonize': '1',
- 'factsource': 'yaml',
- 'direct_addressing': '1',
- 'plugin.psk': 'unset',
- 'libdir': '/usr/share/mcollective/plugins',
- 'identity': '1',
- },
- },
- }
- expected = cfg['mcollective']['conf']
-
- self.patchUtils(self.tmp)
- cc_mcollective.configure(cfg['mcollective']['conf'])
- contents = util.load_file(cc_mcollective.SERVER_CFG, decode=False)
- contents = configobj.ConfigObj(BytesIO(contents))
- self.assertEqual(expected, dict(contents))
-
- def test_existing_config_is_saved(self):
- cfg = {'loglevel': 'warn'}
- util.write_file(self.server_cfg, STOCK_CONFIG)
- cc_mcollective.configure(config=cfg, server_cfg=self.server_cfg)
- self.assertTrue(os.path.exists(self.server_cfg))
- self.assertTrue(os.path.exists(self.server_cfg + ".old"))
- self.assertEqual(util.load_file(self.server_cfg + ".old"),
- STOCK_CONFIG)
-
- def test_existing_updated(self):
- cfg = {'loglevel': 'warn'}
- util.write_file(self.server_cfg, STOCK_CONFIG)
- cc_mcollective.configure(config=cfg, server_cfg=self.server_cfg)
- cfgobj = configobj.ConfigObj(self.server_cfg)
- self.assertEqual(cfg['loglevel'], cfgobj['loglevel'])
-
- def test_certificats_written(self):
- # check public-cert and private-cert keys in config get written
- cfg = {'loglevel': 'debug',
- 'public-cert': "this is my public-certificate",
- 'private-cert': "secret private certificate"}
-
- cc_mcollective.configure(
- config=cfg, server_cfg=self.server_cfg,
- pricert_file=self.pricert_file, pubcert_file=self.pubcert_file)
-
- found = configobj.ConfigObj(self.server_cfg)
-
- # make sure these didnt get written in
- self.assertFalse('public-cert' in found)
- self.assertFalse('private-cert' in found)
-
- # these need updating to the specified paths
- self.assertEqual(found['plugin.ssl_server_public'], self.pubcert_file)
- self.assertEqual(found['plugin.ssl_server_private'], self.pricert_file)
-
- # and the security provider should be ssl
- self.assertEqual(found['securityprovider'], 'ssl')
-
- self.assertEqual(
- util.load_file(self.pricert_file), cfg['private-cert'])
- self.assertEqual(
- util.load_file(self.pubcert_file), cfg['public-cert'])
-
-
-class TestHandler(t_help.TestCase):
- @t_help.mock.patch("cloudinit.config.cc_mcollective.subp")
- @t_help.mock.patch("cloudinit.config.cc_mcollective.util")
- def test_mcollective_install(self, mock_util, mock_subp):
- cc = get_cloud()
- cc.distro = t_help.mock.MagicMock()
- mock_util.load_file.return_value = b""
- mycfg = {'mcollective': {'conf': {'loglevel': 'debug'}}}
- cc_mcollective.handle('cc_mcollective', mycfg, cc, LOG, [])
- self.assertTrue(cc.distro.install_packages.called)
- install_pkg = cc.distro.install_packages.call_args_list[0][0][0]
- self.assertEqual(install_pkg, ('mcollective',))
-
- self.assertTrue(mock_subp.subp.called)
- self.assertEqual(mock_subp.subp.call_args_list[0][0][0],
- ['service', 'mcollective', 'restart'])
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_mounts.py b/tests/unittests/test_handler/test_handler_mounts.py
deleted file mode 100644
index 69e8b30d..00000000
--- a/tests/unittests/test_handler/test_handler_mounts.py
+++ /dev/null
@@ -1,406 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import os.path
-from unittest import mock
-
-from cloudinit.config import cc_mounts
-
-from cloudinit.tests import helpers as test_helpers
-
-
-class TestSanitizeDevname(test_helpers.FilesystemMockingTestCase):
-
- def setUp(self):
- super(TestSanitizeDevname, self).setUp()
- self.new_root = self.tmp_dir()
- self.patchOS(self.new_root)
-
- def _touch(self, path):
- path = os.path.join(self.new_root, path.lstrip('/'))
- basedir = os.path.dirname(path)
- if not os.path.exists(basedir):
- os.makedirs(basedir)
- open(path, 'a').close()
-
- def _makedirs(self, directory):
- directory = os.path.join(self.new_root, directory.lstrip('/'))
- if not os.path.exists(directory):
- os.makedirs(directory)
-
- def mock_existence_of_disk(self, disk_path):
- self._touch(disk_path)
- self._makedirs(os.path.join('/sys/block', disk_path.split('/')[-1]))
-
- def mock_existence_of_partition(self, disk_path, partition_number):
- self.mock_existence_of_disk(disk_path)
- self._touch(disk_path + str(partition_number))
- disk_name = disk_path.split('/')[-1]
- self._makedirs(os.path.join('/sys/block',
- disk_name,
- disk_name + str(partition_number)))
-
- def test_existent_full_disk_path_is_returned(self):
- disk_path = '/dev/sda'
- self.mock_existence_of_disk(disk_path)
- self.assertEqual(disk_path,
- cc_mounts.sanitize_devname(disk_path,
- lambda x: None,
- mock.Mock()))
-
- def test_existent_disk_name_returns_full_path(self):
- disk_name = 'sda'
- disk_path = '/dev/' + disk_name
- self.mock_existence_of_disk(disk_path)
- self.assertEqual(disk_path,
- cc_mounts.sanitize_devname(disk_name,
- lambda x: None,
- mock.Mock()))
-
- def test_existent_meta_disk_is_returned(self):
- actual_disk_path = '/dev/sda'
- self.mock_existence_of_disk(actual_disk_path)
- self.assertEqual(
- actual_disk_path,
- cc_mounts.sanitize_devname('ephemeral0',
- lambda x: actual_disk_path,
- mock.Mock()))
-
- def test_existent_meta_partition_is_returned(self):
- disk_name, partition_part = '/dev/sda', '1'
- actual_partition_path = disk_name + partition_part
- self.mock_existence_of_partition(disk_name, partition_part)
- self.assertEqual(
- actual_partition_path,
- cc_mounts.sanitize_devname('ephemeral0.1',
- lambda x: disk_name,
- mock.Mock()))
-
- def test_existent_meta_partition_with_p_is_returned(self):
- disk_name, partition_part = '/dev/sda', 'p1'
- actual_partition_path = disk_name + partition_part
- self.mock_existence_of_partition(disk_name, partition_part)
- self.assertEqual(
- actual_partition_path,
- cc_mounts.sanitize_devname('ephemeral0.1',
- lambda x: disk_name,
- mock.Mock()))
-
- def test_first_partition_returned_if_existent_disk_is_partitioned(self):
- disk_name, partition_part = '/dev/sda', '1'
- actual_partition_path = disk_name + partition_part
- self.mock_existence_of_partition(disk_name, partition_part)
- self.assertEqual(
- actual_partition_path,
- cc_mounts.sanitize_devname('ephemeral0',
- lambda x: disk_name,
- mock.Mock()))
-
- def test_nth_partition_returned_if_requested(self):
- disk_name, partition_part = '/dev/sda', '3'
- actual_partition_path = disk_name + partition_part
- self.mock_existence_of_partition(disk_name, partition_part)
- self.assertEqual(
- actual_partition_path,
- cc_mounts.sanitize_devname('ephemeral0.3',
- lambda x: disk_name,
- mock.Mock()))
-
- def test_transformer_returning_none_returns_none(self):
- self.assertIsNone(
- cc_mounts.sanitize_devname(
- 'ephemeral0', lambda x: None, mock.Mock()))
-
- def test_missing_device_returns_none(self):
- self.assertIsNone(
- cc_mounts.sanitize_devname('/dev/sda', None, mock.Mock()))
-
- def test_missing_sys_returns_none(self):
- disk_path = '/dev/sda'
- self._makedirs(disk_path)
- self.assertIsNone(
- cc_mounts.sanitize_devname(disk_path, None, mock.Mock()))
-
- def test_existent_disk_but_missing_partition_returns_none(self):
- disk_path = '/dev/sda'
- self.mock_existence_of_disk(disk_path)
- self.assertIsNone(
- cc_mounts.sanitize_devname(
- 'ephemeral0.1', lambda x: disk_path, mock.Mock()))
-
- def test_network_device_returns_network_device(self):
- disk_path = 'netdevice:/path'
- self.assertEqual(
- disk_path,
- cc_mounts.sanitize_devname(disk_path, None, mock.Mock()))
-
- def test_device_aliases_remapping(self):
- disk_path = '/dev/sda'
- self.mock_existence_of_disk(disk_path)
- self.assertEqual(disk_path,
- cc_mounts.sanitize_devname('mydata',
- lambda x: None,
- mock.Mock(),
- {'mydata': disk_path}))
-
-
-class TestSwapFileCreation(test_helpers.FilesystemMockingTestCase):
-
- def setUp(self):
- super(TestSwapFileCreation, self).setUp()
- self.new_root = self.tmp_dir()
- self.patchOS(self.new_root)
-
- self.fstab_path = os.path.join(self.new_root, 'etc/fstab')
- self.swap_path = os.path.join(self.new_root, 'swap.img')
- self._makedirs('/etc')
-
- self.add_patch('cloudinit.config.cc_mounts.FSTAB_PATH',
- 'mock_fstab_path',
- self.fstab_path,
- autospec=False)
-
- self.add_patch('cloudinit.config.cc_mounts.subp.subp',
- 'm_subp_subp')
-
- self.add_patch('cloudinit.config.cc_mounts.util.mounts',
- 'mock_util_mounts',
- return_value={
- '/dev/sda1': {'fstype': 'ext4',
- 'mountpoint': '/',
- 'opts': 'rw,relatime,discard'
- }})
-
- self.mock_cloud = mock.Mock()
- self.mock_log = mock.Mock()
- self.mock_cloud.device_name_to_device = self.device_name_to_device
-
- self.cc = {
- 'swap': {
- 'filename': self.swap_path,
- 'size': '512',
- 'maxsize': '512'}}
-
- def _makedirs(self, directory):
- directory = os.path.join(self.new_root, directory.lstrip('/'))
- if not os.path.exists(directory):
- os.makedirs(directory)
-
- def device_name_to_device(self, path):
- if path == 'swap':
- return self.swap_path
- else:
- dev = None
-
- return dev
-
- @mock.patch('cloudinit.util.get_mount_info')
- @mock.patch('cloudinit.util.kernel_version')
- def test_swap_creation_method_fallocate_on_xfs(self, m_kernel_version,
- m_get_mount_info):
- m_kernel_version.return_value = (4, 20)
- m_get_mount_info.return_value = ["", "xfs"]
-
- cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
- self.m_subp_subp.assert_has_calls([
- mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
- mock.call(['mkswap', self.swap_path]),
- mock.call(['swapon', '-a'])])
-
- @mock.patch('cloudinit.util.get_mount_info')
- @mock.patch('cloudinit.util.kernel_version')
- def test_swap_creation_method_xfs(self, m_kernel_version,
- m_get_mount_info):
- m_kernel_version.return_value = (3, 18)
- m_get_mount_info.return_value = ["", "xfs"]
-
- cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
- self.m_subp_subp.assert_has_calls([
- mock.call(['dd', 'if=/dev/zero',
- 'of=' + self.swap_path,
- 'bs=1M', 'count=0'], capture=True),
- mock.call(['mkswap', self.swap_path]),
- mock.call(['swapon', '-a'])])
-
- @mock.patch('cloudinit.util.get_mount_info')
- @mock.patch('cloudinit.util.kernel_version')
- def test_swap_creation_method_btrfs(self, m_kernel_version,
- m_get_mount_info):
- m_kernel_version.return_value = (4, 20)
- m_get_mount_info.return_value = ["", "btrfs"]
-
- cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
- self.m_subp_subp.assert_has_calls([
- mock.call(['dd', 'if=/dev/zero',
- 'of=' + self.swap_path,
- 'bs=1M', 'count=0'], capture=True),
- mock.call(['mkswap', self.swap_path]),
- mock.call(['swapon', '-a'])])
-
- @mock.patch('cloudinit.util.get_mount_info')
- @mock.patch('cloudinit.util.kernel_version')
- def test_swap_creation_method_ext4(self, m_kernel_version,
- m_get_mount_info):
- m_kernel_version.return_value = (5, 14)
- m_get_mount_info.return_value = ["", "ext4"]
-
- cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
- self.m_subp_subp.assert_has_calls([
- mock.call(['fallocate', '-l', '0M', self.swap_path], capture=True),
- mock.call(['mkswap', self.swap_path]),
- mock.call(['swapon', '-a'])])
-
-
-class TestFstabHandling(test_helpers.FilesystemMockingTestCase):
-
- swap_path = '/dev/sdb1'
-
- def setUp(self):
- super(TestFstabHandling, self).setUp()
- self.new_root = self.tmp_dir()
- self.patchOS(self.new_root)
-
- self.fstab_path = os.path.join(self.new_root, 'etc/fstab')
- self._makedirs('/etc')
-
- self.add_patch('cloudinit.config.cc_mounts.FSTAB_PATH',
- 'mock_fstab_path',
- self.fstab_path,
- autospec=False)
-
- self.add_patch('cloudinit.config.cc_mounts._is_block_device',
- 'mock_is_block_device',
- return_value=True)
-
- self.add_patch('cloudinit.config.cc_mounts.subp.subp',
- 'm_subp_subp')
-
- self.add_patch('cloudinit.config.cc_mounts.util.mounts',
- 'mock_util_mounts',
- return_value={
- '/dev/sda1': {'fstype': 'ext4',
- 'mountpoint': '/',
- 'opts': 'rw,relatime,discard'
- }})
-
- self.mock_cloud = mock.Mock()
- self.mock_log = mock.Mock()
- self.mock_cloud.device_name_to_device = self.device_name_to_device
-
- def _makedirs(self, directory):
- directory = os.path.join(self.new_root, directory.lstrip('/'))
- if not os.path.exists(directory):
- os.makedirs(directory)
-
- def device_name_to_device(self, path):
- if path == 'swap':
- return self.swap_path
- else:
- dev = None
-
- return dev
-
- def test_no_fstab(self):
- """ Handle images which do not include an fstab. """
- self.assertFalse(os.path.exists(cc_mounts.FSTAB_PATH))
- fstab_expected_content = (
- '%s\tnone\tswap\tsw,comment=cloudconfig\t'
- '0\t0\n' % (self.swap_path,)
- )
- cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
- with open(cc_mounts.FSTAB_PATH, 'r') as fd:
- fstab_new_content = fd.read()
- self.assertEqual(fstab_expected_content, fstab_new_content)
-
- def test_swap_integrity(self):
- '''Ensure that the swap file is correctly created and can
- swapon successfully. Fixing the corner case of:
- kernel: swapon: swapfile has holes'''
-
- fstab = '/swap.img swap swap defaults 0 0\n'
-
- with open(cc_mounts.FSTAB_PATH, 'w') as fd:
- fd.write(fstab)
- cc = {'swap': ['filename: /swap.img', 'size: 512', 'maxsize: 512']}
- cc_mounts.handle(None, cc, self.mock_cloud, self.mock_log, [])
-
- def test_fstab_no_swap_device(self):
- '''Ensure that cloud-init adds a discovered swap partition
- to /etc/fstab.'''
-
- fstab_original_content = ''
- fstab_expected_content = (
- '%s\tnone\tswap\tsw,comment=cloudconfig\t'
- '0\t0\n' % (self.swap_path,)
- )
-
- with open(cc_mounts.FSTAB_PATH, 'w') as fd:
- fd.write(fstab_original_content)
-
- cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
-
- with open(cc_mounts.FSTAB_PATH, 'r') as fd:
- fstab_new_content = fd.read()
- self.assertEqual(fstab_expected_content, fstab_new_content)
-
- def test_fstab_same_swap_device_already_configured(self):
- '''Ensure that cloud-init will not add a swap device if the same
- device already exists in /etc/fstab.'''
-
- fstab_original_content = '%s swap swap defaults 0 0\n' % (
- self.swap_path,)
- fstab_expected_content = fstab_original_content
-
- with open(cc_mounts.FSTAB_PATH, 'w') as fd:
- fd.write(fstab_original_content)
-
- cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
-
- with open(cc_mounts.FSTAB_PATH, 'r') as fd:
- fstab_new_content = fd.read()
- self.assertEqual(fstab_expected_content, fstab_new_content)
-
- def test_fstab_alternate_swap_device_already_configured(self):
- '''Ensure that cloud-init will add a discovered swap device to
- /etc/fstab even when there exists a swap definition on another
- device.'''
-
- fstab_original_content = '/dev/sdc1 swap swap defaults 0 0\n'
- fstab_expected_content = (
- fstab_original_content +
- '%s\tnone\tswap\tsw,comment=cloudconfig\t'
- '0\t0\n' % (self.swap_path,)
- )
-
- with open(cc_mounts.FSTAB_PATH, 'w') as fd:
- fd.write(fstab_original_content)
-
- cc_mounts.handle(None, {}, self.mock_cloud, self.mock_log, [])
-
- with open(cc_mounts.FSTAB_PATH, 'r') as fd:
- fstab_new_content = fd.read()
- self.assertEqual(fstab_expected_content, fstab_new_content)
-
- def test_no_change_fstab_sets_needs_mount_all(self):
- '''verify unchanged fstab entries are mounted if not call mount -a'''
- fstab_original_content = (
- 'LABEL=cloudimg-rootfs / ext4 defaults 0 0\n'
- 'LABEL=UEFI /boot/efi vfat defaults 0 0\n'
- '/dev/vdb /mnt auto defaults,noexec,comment=cloudconfig 0 2\n'
- )
- fstab_expected_content = fstab_original_content
- cc = {
- 'mounts': [
- ['/dev/vdb', '/mnt', 'auto', 'defaults,noexec']
- ]
- }
- with open(cc_mounts.FSTAB_PATH, 'w') as fd:
- fd.write(fstab_original_content)
- with open(cc_mounts.FSTAB_PATH, 'r') as fd:
- fstab_new_content = fd.read()
- self.assertEqual(fstab_expected_content, fstab_new_content)
- cc_mounts.handle(None, cc, self.mock_cloud, self.mock_log, [])
- self.m_subp_subp.assert_has_calls([
- mock.call(['mount', '-a']),
- mock.call(['systemctl', 'daemon-reload'])])
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py
deleted file mode 100644
index b34a18cb..00000000
--- a/tests/unittests/test_handler/test_handler_ntp.py
+++ /dev/null
@@ -1,765 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import copy
-import os
-import shutil
-from functools import partial
-from os.path import dirname
-
-from cloudinit import (helpers, util)
-from cloudinit.config import cc_ntp
-from cloudinit.tests.helpers import (
- CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
-
-from tests.unittests.util import get_cloud
-
-
-NTP_TEMPLATE = """\
-## template: jinja
-servers {{servers}}
-pools {{pools}}
-"""
-
-TIMESYNCD_TEMPLATE = """\
-## template:jinja
-[Time]
-{% if servers or pools -%}
-NTP={% for host in servers|list + pools|list %}{{ host }} {% endfor -%}
-{% endif -%}
-"""
-
-
-class TestNtp(FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestNtp, self).setUp()
- self.new_root = self.tmp_dir()
- self.add_patch('cloudinit.util.system_is_snappy', 'm_snappy')
- self.m_snappy.return_value = False
- self.new_root = self.reRoot()
- self._get_cloud = partial(
- get_cloud,
- paths=helpers.Paths({'templates_dir': self.new_root})
- )
-
- def _get_template_path(self, template_name, distro, basepath=None):
- # ntp.conf.{distro} -> ntp.conf.debian.tmpl
- template_fn = '{0}.tmpl'.format(
- template_name.replace('{distro}', distro))
- if not basepath:
- basepath = self.new_root
- path = os.path.join(basepath, template_fn)
- return path
-
- def _generate_template(self, template=None):
- if not template:
- template = NTP_TEMPLATE
- confpath = os.path.join(self.new_root, 'client.conf')
- template_fn = os.path.join(self.new_root, 'client.conf.tmpl')
- util.write_file(template_fn, content=template)
- return (confpath, template_fn)
-
- def _mock_ntp_client_config(self, client=None, distro=None):
- if not client:
- client = 'ntp'
- if not distro:
- distro = 'ubuntu'
- dcfg = cc_ntp.distro_ntp_client_configs(distro)
- if client == 'systemd-timesyncd':
- template = TIMESYNCD_TEMPLATE
- else:
- template = NTP_TEMPLATE
- (confpath, _template_fn) = self._generate_template(template=template)
- ntpconfig = copy.deepcopy(dcfg[client])
- ntpconfig['confpath'] = confpath
- ntpconfig['template_name'] = os.path.basename(confpath)
- return ntpconfig
-
- @mock.patch("cloudinit.config.cc_ntp.subp")
- def test_ntp_install(self, mock_subp):
- """ntp_install_client runs install_func when check_exe is absent."""
- mock_subp.which.return_value = None # check_exe not found.
- install_func = mock.MagicMock()
- cc_ntp.install_ntp_client(install_func,
- packages=['ntpx'], check_exe='ntpdx')
- mock_subp.which.assert_called_with('ntpdx')
- install_func.assert_called_once_with(['ntpx'])
-
- @mock.patch("cloudinit.config.cc_ntp.subp")
- def test_ntp_install_not_needed(self, mock_subp):
- """ntp_install_client doesn't install when check_exe is found."""
- client = 'chrony'
- mock_subp.which.return_value = [client] # check_exe found.
- install_func = mock.MagicMock()
- cc_ntp.install_ntp_client(install_func, packages=[client],
- check_exe=client)
- install_func.assert_not_called()
-
- @mock.patch("cloudinit.config.cc_ntp.subp")
- def test_ntp_install_no_op_with_empty_pkg_list(self, mock_subp):
- """ntp_install_client runs install_func with empty list"""
- mock_subp.which.return_value = None # check_exe not found
- install_func = mock.MagicMock()
- cc_ntp.install_ntp_client(install_func, packages=[],
- check_exe='timesyncd')
- install_func.assert_called_once_with([])
-
- def test_ntp_rename_ntp_conf(self):
- """When NTP_CONF exists, rename_ntp moves it."""
- ntpconf = self.tmp_path("ntp.conf", self.new_root)
- util.write_file(ntpconf, "")
- cc_ntp.rename_ntp_conf(confpath=ntpconf)
- self.assertFalse(os.path.exists(ntpconf))
- self.assertTrue(os.path.exists("{0}.dist".format(ntpconf)))
-
- def test_ntp_rename_ntp_conf_skip_missing(self):
- """When NTP_CONF doesn't exist rename_ntp doesn't create a file."""
- ntpconf = self.tmp_path("ntp.conf", self.new_root)
- self.assertFalse(os.path.exists(ntpconf))
- cc_ntp.rename_ntp_conf(confpath=ntpconf)
- self.assertFalse(os.path.exists("{0}.dist".format(ntpconf)))
- self.assertFalse(os.path.exists(ntpconf))
-
- def test_write_ntp_config_template_uses_ntp_conf_distro_no_servers(self):
- """write_ntp_config_template reads from $client.conf.distro.tmpl"""
- servers = []
- pools = ['10.0.0.1', '10.0.0.2']
- (confpath, template_fn) = self._generate_template()
- mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
- with mock.patch(mock_path, self.new_root):
- cc_ntp.write_ntp_config_template('ubuntu',
- servers=servers, pools=pools,
- path=confpath,
- template_fn=template_fn,
- template=None)
- self.assertEqual(
- "servers []\npools ['10.0.0.1', '10.0.0.2']\n",
- util.load_file(confpath))
-
- def test_write_ntp_config_template_defaults_pools_w_empty_lists(self):
- """write_ntp_config_template defaults pools servers upon empty config.
-
- When both pools and servers are empty, default NR_POOL_SERVERS get
- configured.
- """
- distro = 'ubuntu'
- pools = cc_ntp.generate_server_names(distro)
- servers = []
- (confpath, template_fn) = self._generate_template()
- mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
- with mock.patch(mock_path, self.new_root):
- cc_ntp.write_ntp_config_template(distro,
- servers=servers, pools=pools,
- path=confpath,
- template_fn=template_fn,
- template=None)
- self.assertEqual(
- "servers []\npools {0}\n".format(pools),
- util.load_file(confpath))
-
- def test_defaults_pools_empty_lists_sles(self):
- """write_ntp_config_template defaults opensuse pools upon empty config.
-
- When both pools and servers are empty, default NR_POOL_SERVERS get
- configured.
- """
- distro = 'sles'
- default_pools = cc_ntp.generate_server_names(distro)
- (confpath, template_fn) = self._generate_template()
-
- cc_ntp.write_ntp_config_template(distro,
- servers=[], pools=[],
- path=confpath,
- template_fn=template_fn,
- template=None)
- for pool in default_pools:
- self.assertIn('opensuse', pool)
- self.assertEqual(
- "servers []\npools {0}\n".format(default_pools),
- util.load_file(confpath))
- self.assertIn(
- "Adding distro default ntp pool servers: {0}".format(
- ",".join(default_pools)),
- self.logs.getvalue())
-
- def test_timesyncd_template(self):
- """Test timesycnd template is correct"""
- pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org']
- servers = ['192.168.23.3', '192.168.23.4']
- (confpath, template_fn) = self._generate_template(
- template=TIMESYNCD_TEMPLATE)
- cc_ntp.write_ntp_config_template('ubuntu',
- servers=servers, pools=pools,
- path=confpath,
- template_fn=template_fn,
- template=None)
- self.assertEqual(
- "[Time]\nNTP=%s %s \n" % (" ".join(servers), " ".join(pools)),
- util.load_file(confpath))
-
- def test_distro_ntp_client_configs(self):
- """Test we have updated ntp client configs on different distros"""
- delta = copy.deepcopy(cc_ntp.DISTRO_CLIENT_CONFIG)
- base = copy.deepcopy(cc_ntp.NTP_CLIENT_CONFIG)
- # confirm no-delta distros match the base config
- for distro in cc_ntp.distros:
- if distro not in delta:
- result = cc_ntp.distro_ntp_client_configs(distro)
- self.assertEqual(base, result)
- # for distros with delta, ensure the merged config values match
- # what is set in the delta
- for distro in delta.keys():
- result = cc_ntp.distro_ntp_client_configs(distro)
- for client in delta[distro].keys():
- for key in delta[distro][client].keys():
- self.assertEqual(delta[distro][client][key],
- result[client][key])
-
- def _get_expected_pools(self, pools, distro, client):
- if client in ['ntp', 'chrony']:
- if client == 'ntp' and distro == 'alpine':
- # NTP for Alpine Linux is Busybox's ntp which does not
- # support 'pool' lines in its configuration file.
- expected_pools = []
- else:
- expected_pools = [
- 'pool {0} iburst'.format(pool) for pool in pools]
- elif client == 'systemd-timesyncd':
- expected_pools = " ".join(pools)
-
- return expected_pools
-
- def _get_expected_servers(self, servers, distro, client):
- if client in ['ntp', 'chrony']:
- if client == 'ntp' and distro == 'alpine':
- # NTP for Alpine Linux is Busybox's ntp which only supports
- # 'server' lines without iburst option.
- expected_servers = [
- 'server {0}'.format(srv) for srv in servers]
- else:
- expected_servers = [
- 'server {0} iburst'.format(srv) for srv in servers]
- elif client == 'systemd-timesyncd':
- expected_servers = " ".join(servers)
-
- return expected_servers
-
- def test_ntp_handler_real_distro_ntp_templates(self):
- """Test ntp handler renders the shipped distro ntp client templates."""
- pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org']
- servers = ['192.168.23.3', '192.168.23.4']
- for client in ['ntp', 'systemd-timesyncd', 'chrony']:
- for distro in cc_ntp.distros:
- distro_cfg = cc_ntp.distro_ntp_client_configs(distro)
- ntpclient = distro_cfg[client]
- confpath = (
- os.path.join(self.new_root, ntpclient.get('confpath')[1:]))
- template = ntpclient.get('template_name')
- # find sourcetree template file
- root_dir = (
- dirname(dirname(os.path.realpath(util.__file__))) +
- '/templates')
- source_fn = self._get_template_path(template, distro,
- basepath=root_dir)
- template_fn = self._get_template_path(template, distro)
- # don't fail if cloud-init doesn't have a template for
- # a distro,client pair
- if not os.path.exists(source_fn):
- continue
- # Create a copy in our tmp_dir
- shutil.copy(source_fn, template_fn)
- cc_ntp.write_ntp_config_template(distro, servers=servers,
- pools=pools, path=confpath,
- template_fn=template_fn)
- content = util.load_file(confpath)
- if client in ['ntp', 'chrony']:
- content_lines = content.splitlines()
- expected_servers = self._get_expected_servers(servers,
- distro,
- client)
- print('distro=%s client=%s' % (distro, client))
- for sline in expected_servers:
- self.assertIn(sline, content_lines,
- ('failed to render {0} conf'
- ' for distro:{1}'.format(client,
- distro)))
- expected_pools = self._get_expected_pools(pools, distro,
- client)
- if expected_pools != []:
- for pline in expected_pools:
- self.assertIn(pline, content_lines,
- ('failed to render {0} conf'
- ' for distro:{1}'.format(client,
- distro)))
- elif client == 'systemd-timesyncd':
- expected_servers = self._get_expected_servers(servers,
- distro,
- client)
- expected_pools = self._get_expected_pools(pools,
- distro,
- client)
- expected_content = (
- "# cloud-init generated file\n" +
- "# See timesyncd.conf(5) for details.\n\n" +
- "[Time]\nNTP=%s %s \n" % (expected_servers,
- expected_pools))
- self.assertEqual(expected_content, content)
-
- def test_no_ntpcfg_does_nothing(self):
- """When no ntp section is defined handler logs a warning and noops."""
- cc_ntp.handle('cc_ntp', {}, None, None, [])
- self.assertEqual(
- 'DEBUG: Skipping module named cc_ntp, '
- 'not present or disabled by cfg\n',
- self.logs.getvalue())
-
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_schema_validation_allows_empty_ntp_config(self,
- m_select):
- """Ntp schema validation allows for an empty ntp: configuration."""
- valid_empty_configs = [{'ntp': {}}, {'ntp': None}]
- for valid_empty_config in valid_empty_configs:
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', valid_empty_config, mycloud, None, [])
- if distro == 'alpine':
- # _mock_ntp_client_config call above did not specify a
- # client value and so it defaults to "ntp" which on
- # Alpine Linux only supports servers and not pools.
-
- servers = cc_ntp.generate_server_names(mycloud.distro.name)
- self.assertEqual(
- "servers {0}\npools []\n".format(servers),
- util.load_file(confpath))
- else:
- pools = cc_ntp.generate_server_names(mycloud.distro.name)
- self.assertEqual(
- "servers []\npools {0}\n".format(pools),
- util.load_file(confpath))
- self.assertNotIn('Invalid config:', self.logs.getvalue())
-
- @skipUnlessJsonSchema()
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_schema_validation_warns_non_string_item_type(self,
- m_sel):
- """Ntp schema validation warns of non-strings in pools or servers.
-
- Schema validation is not strict, so ntp config is still be rendered.
- """
- invalid_config = {'ntp': {'pools': [123], 'servers': ['valid', None]}}
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_sel.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
- self.assertIn(
- "Invalid config:\nntp.pools.0: 123 is not of type 'string'\n"
- "ntp.servers.1: None is not of type 'string'",
- self.logs.getvalue())
- self.assertEqual("servers ['valid', None]\npools [123]\n",
- util.load_file(confpath))
-
- @skipUnlessJsonSchema()
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_schema_validation_warns_of_non_array_type(self,
- m_select):
- """Ntp schema validation warns of non-array pools or servers types.
-
- Schema validation is not strict, so ntp config is still be rendered.
- """
- invalid_config = {'ntp': {'pools': 123, 'servers': 'non-array'}}
-
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
- self.assertIn(
- "Invalid config:\nntp.pools: 123 is not of type 'array'\n"
- "ntp.servers: 'non-array' is not of type 'array'",
- self.logs.getvalue())
- self.assertEqual("servers non-array\npools 123\n",
- util.load_file(confpath))
-
- @skipUnlessJsonSchema()
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_schema_validation_warns_invalid_key_present(self,
- m_select):
- """Ntp schema validation warns of invalid keys present in ntp config.
-
- Schema validation is not strict, so ntp config is still be rendered.
- """
- invalid_config = {
- 'ntp': {'invalidkey': 1, 'pools': ['0.mycompany.pool.ntp.org']}}
- for distro in cc_ntp.distros:
- if distro != 'alpine':
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
- self.assertIn(
- "Invalid config:\nntp: Additional properties are not "
- "allowed ('invalidkey' was unexpected)",
- self.logs.getvalue())
- self.assertEqual(
- "servers []\npools ['0.mycompany.pool.ntp.org']\n",
- util.load_file(confpath))
-
- @skipUnlessJsonSchema()
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_schema_validation_warns_of_duplicates(self, m_select):
- """Ntp schema validation warns of duplicates in servers or pools.
-
- Schema validation is not strict, so ntp config is still be rendered.
- """
- invalid_config = {
- 'ntp': {'pools': ['0.mypool.org', '0.mypool.org'],
- 'servers': ['10.0.0.1', '10.0.0.1']}}
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', invalid_config, mycloud, None, [])
- self.assertIn(
- "Invalid config:\nntp.pools: ['0.mypool.org', '0.mypool.org']"
- " has non-unique elements\nntp.servers: "
- "['10.0.0.1', '10.0.0.1'] has non-unique elements",
- self.logs.getvalue())
- self.assertEqual(
- "servers ['10.0.0.1', '10.0.0.1']\n"
- "pools ['0.mypool.org', '0.mypool.org']\n",
- util.load_file(confpath))
-
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_timesyncd(self, m_select):
- """Test ntp handler configures timesyncd"""
- servers = ['192.168.2.1', '192.168.2.2']
- pools = ['0.mypool.org']
- cfg = {'ntp': {'servers': servers, 'pools': pools}}
- client = 'systemd-timesyncd'
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro,
- client=client)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- cc_ntp.handle('cc_ntp', cfg, mycloud, None, [])
- self.assertEqual(
- "[Time]\nNTP=192.168.2.1 192.168.2.2 0.mypool.org \n",
- util.load_file(confpath))
-
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_handler_enabled_false(self, m_select):
- """Test ntp handler does not run if enabled: false """
- cfg = {'ntp': {'enabled': False}}
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- cc_ntp.handle('notimportant', cfg, mycloud, None, None)
- self.assertEqual(0, m_select.call_count)
-
- @mock.patch("cloudinit.distros.subp")
- @mock.patch("cloudinit.config.cc_ntp.subp")
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- @mock.patch("cloudinit.distros.Distro.uses_systemd")
- def test_ntp_the_whole_package(self, m_sysd, m_select, m_subp, m_dsubp):
- """Test enabled config renders template, and restarts service """
- cfg = {'ntp': {'enabled': True}}
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(distro=distro)
- confpath = ntpconfig['confpath']
- service_name = ntpconfig['service_name']
- m_select.return_value = ntpconfig
-
- hosts = cc_ntp.generate_server_names(mycloud.distro.name)
- uses_systemd = True
- expected_service_call = ['systemctl', 'reload-or-restart',
- service_name]
- expected_content = "servers []\npools {0}\n".format(hosts)
-
- if distro == 'alpine':
- uses_systemd = False
- expected_service_call = ['rc-service', service_name, 'restart']
- # _mock_ntp_client_config call above did not specify a client
- # value and so it defaults to "ntp" which on Alpine Linux only
- # supports servers and not pools.
- expected_content = "servers {0}\npools []\n".format(hosts)
-
- m_sysd.return_value = uses_systemd
- with mock.patch('cloudinit.config.cc_ntp.util') as m_util:
- # allow use of util.mergemanydict
- m_util.mergemanydict.side_effect = util.mergemanydict
- # default client is present
- m_subp.which.return_value = True
- # use the config 'enabled' value
- m_util.is_false.return_value = util.is_false(
- cfg['ntp']['enabled'])
- cc_ntp.handle('notimportant', cfg, mycloud, None, None)
- m_dsubp.subp.assert_called_with(
- expected_service_call, capture=True)
-
- self.assertEqual(expected_content, util.load_file(confpath))
-
- @mock.patch('cloudinit.util.system_info')
- def test_opensuse_picks_chrony(self, m_sysinfo):
- """Test opensuse picks chrony or ntp on certain distro versions"""
- # < 15.0 => ntp
- m_sysinfo.return_value = {
- 'dist': ('openSUSE', '13.2', 'Harlequin')
- }
- mycloud = self._get_cloud('opensuse')
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- self.assertEqual('ntp', expected_client)
-
- # >= 15.0 and not openSUSE => chrony
- m_sysinfo.return_value = {
- 'dist': ('SLES', '15.0', 'SUSE Linux Enterprise Server 15')
- }
- mycloud = self._get_cloud('sles')
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- self.assertEqual('chrony', expected_client)
-
- # >= 15.0 and openSUSE and ver != 42 => chrony
- m_sysinfo.return_value = {
- 'dist': ('openSUSE Tumbleweed', '20180326', 'timbleweed')
- }
- mycloud = self._get_cloud('opensuse')
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- self.assertEqual('chrony', expected_client)
-
- @mock.patch('cloudinit.util.system_info')
- def test_ubuntu_xenial_picks_ntp(self, m_sysinfo):
- """Test Ubuntu picks ntp on xenial release"""
-
- m_sysinfo.return_value = {'dist': ('Ubuntu', '16.04', 'xenial')}
- mycloud = self._get_cloud('ubuntu')
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- self.assertEqual('ntp', expected_client)
-
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_snappy_system_picks_timesyncd(self, m_which):
- """Test snappy systems prefer installed clients"""
-
- # we are on ubuntu-core here
- self.m_snappy.return_value = True
-
- # ubuntu core systems will have timesyncd installed
- m_which.side_effect = iter([None, '/lib/systemd/systemd-timesyncd',
- None, None, None])
- distro = 'ubuntu'
- mycloud = self._get_cloud(distro)
- distro_configs = cc_ntp.distro_ntp_client_configs(distro)
- expected_client = 'systemd-timesyncd'
- expected_cfg = distro_configs[expected_client]
- expected_calls = []
- # we only get to timesyncd
- for client in mycloud.distro.preferred_ntp_clients[0:2]:
- cfg = distro_configs[client]
- expected_calls.append(mock.call(cfg['check_exe']))
- result = cc_ntp.select_ntp_client(None, mycloud.distro)
- m_which.assert_has_calls(expected_calls)
- self.assertEqual(sorted(expected_cfg), sorted(cfg))
- self.assertEqual(sorted(expected_cfg), sorted(result))
-
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_ntp_distro_searches_all_preferred_clients(self, m_which):
- """Test select_ntp_client search all distro perferred clients """
- # nothing is installed
- m_which.return_value = None
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- distro_configs = cc_ntp.distro_ntp_client_configs(distro)
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- expected_cfg = distro_configs[expected_client]
- expected_calls = []
- for client in mycloud.distro.preferred_ntp_clients:
- cfg = distro_configs[client]
- expected_calls.append(mock.call(cfg['check_exe']))
- cc_ntp.select_ntp_client({}, mycloud.distro)
- m_which.assert_has_calls(expected_calls)
- self.assertEqual(sorted(expected_cfg), sorted(cfg))
-
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_user_cfg_ntp_client_auto_uses_distro_clients(self, m_which):
- """Test user_cfg.ntp_client='auto' defaults to distro search"""
- # nothing is installed
- m_which.return_value = None
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- distro_configs = cc_ntp.distro_ntp_client_configs(distro)
- expected_client = mycloud.distro.preferred_ntp_clients[0]
- expected_cfg = distro_configs[expected_client]
- expected_calls = []
- for client in mycloud.distro.preferred_ntp_clients:
- cfg = distro_configs[client]
- expected_calls.append(mock.call(cfg['check_exe']))
- cc_ntp.select_ntp_client('auto', mycloud.distro)
- m_which.assert_has_calls(expected_calls)
- self.assertEqual(sorted(expected_cfg), sorted(cfg))
-
- @mock.patch('cloudinit.config.cc_ntp.write_ntp_config_template')
- @mock.patch('cloudinit.cloud.Cloud.get_template_filename')
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_ntp_custom_client_overrides_installed_clients(self, m_which,
- m_tmpfn, m_write):
- """Test user client is installed despite other clients present """
- client = 'ntpdate'
- cfg = {'ntp': {'ntp_client': client}}
- for distro in cc_ntp.distros:
- # client is not installed
- m_which.side_effect = iter([None])
- mycloud = self._get_cloud(distro)
- with mock.patch.object(mycloud.distro,
- 'install_packages') as m_install:
- cc_ntp.handle('notimportant', cfg, mycloud, None, None)
- m_install.assert_called_with([client])
- m_which.assert_called_with(client)
-
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_ntp_system_config_overrides_distro_builtin_clients(self, m_which):
- """Test distro system_config overrides builtin preferred ntp clients"""
- system_client = 'chrony'
- sys_cfg = {'ntp_client': system_client}
- # no clients installed
- m_which.return_value = None
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro, sys_cfg=sys_cfg)
- distro_configs = cc_ntp.distro_ntp_client_configs(distro)
- expected_cfg = distro_configs[system_client]
- result = cc_ntp.select_ntp_client(None, mycloud.distro)
- self.assertEqual(sorted(expected_cfg), sorted(result))
- m_which.assert_has_calls([])
-
- @mock.patch('cloudinit.config.cc_ntp.subp.which')
- def test_ntp_user_config_overrides_system_cfg(self, m_which):
- """Test user-data overrides system_config ntp_client"""
- system_client = 'chrony'
- sys_cfg = {'ntp_client': system_client}
- user_client = 'systemd-timesyncd'
- # no clients installed
- m_which.return_value = None
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro, sys_cfg=sys_cfg)
- distro_configs = cc_ntp.distro_ntp_client_configs(distro)
- expected_cfg = distro_configs[user_client]
- result = cc_ntp.select_ntp_client(user_client, mycloud.distro)
- self.assertEqual(sorted(expected_cfg), sorted(result))
- m_which.assert_has_calls([])
-
- @mock.patch('cloudinit.config.cc_ntp.install_ntp_client')
- def test_ntp_user_provided_config_with_template(self, m_install):
- custom = r'\n#MyCustomTemplate'
- user_template = NTP_TEMPLATE + custom
- confpath = os.path.join(self.new_root, 'etc/myntp/myntp.conf')
- cfg = {
- 'ntp': {
- 'pools': ['mypool.org'],
- 'ntp_client': 'myntpd',
- 'config': {
- 'check_exe': 'myntpd',
- 'confpath': confpath,
- 'packages': ['myntp'],
- 'service_name': 'myntp',
- 'template': user_template,
- }
- }
- }
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
- with mock.patch(mock_path, self.new_root):
- cc_ntp.handle('notimportant', cfg, mycloud, None, None)
- self.assertEqual(
- "servers []\npools ['mypool.org']\n%s" % custom,
- util.load_file(confpath))
-
- @mock.patch('cloudinit.config.cc_ntp.supplemental_schema_validation')
- @mock.patch('cloudinit.config.cc_ntp.install_ntp_client')
- @mock.patch('cloudinit.config.cc_ntp.select_ntp_client')
- def test_ntp_user_provided_config_template_only(self, m_select, m_install,
- m_schema):
- """Test custom template for default client"""
- custom = r'\n#MyCustomTemplate'
- user_template = NTP_TEMPLATE + custom
- client = 'chrony'
- cfg = {
- 'pools': ['mypool.org'],
- 'ntp_client': client,
- 'config': {
- 'template': user_template,
- }
- }
- expected_merged_cfg = {
- 'check_exe': 'chronyd',
- 'confpath': '{tmpdir}/client.conf'.format(tmpdir=self.new_root),
- 'template_name': 'client.conf', 'template': user_template,
- 'service_name': 'chrony', 'packages': ['chrony']}
- for distro in cc_ntp.distros:
- mycloud = self._get_cloud(distro)
- ntpconfig = self._mock_ntp_client_config(client=client,
- distro=distro)
- confpath = ntpconfig['confpath']
- m_select.return_value = ntpconfig
- mock_path = 'cloudinit.config.cc_ntp.temp_utils._TMPDIR'
- with mock.patch(mock_path, self.new_root):
- cc_ntp.handle('notimportant',
- {'ntp': cfg}, mycloud, None, None)
- self.assertEqual(
- "servers []\npools ['mypool.org']\n%s" % custom,
- util.load_file(confpath))
- m_schema.assert_called_with(expected_merged_cfg)
-
-
-class TestSupplementalSchemaValidation(CiTestCase):
-
- def test_error_on_missing_keys(self):
- """ValueError raised reporting any missing required ntp:config keys"""
- cfg = {}
- match = (r'Invalid ntp configuration:\\nMissing required ntp:config'
- ' keys: check_exe, confpath, packages, service_name')
- with self.assertRaisesRegex(ValueError, match):
- cc_ntp.supplemental_schema_validation(cfg)
-
- def test_error_requiring_either_template_or_template_name(self):
- """ValueError raised if both template not template_name are None."""
- cfg = {'confpath': 'someconf', 'check_exe': '', 'service_name': '',
- 'template': None, 'template_name': None, 'packages': []}
- match = (r'Invalid ntp configuration:\\nEither ntp:config:template'
- ' or ntp:config:template_name values are required')
- with self.assertRaisesRegex(ValueError, match):
- cc_ntp.supplemental_schema_validation(cfg)
-
- def test_error_on_non_list_values(self):
- """ValueError raised when packages is not of type list."""
- cfg = {'confpath': 'someconf', 'check_exe': '', 'service_name': '',
- 'template': 'asdf', 'template_name': None, 'packages': 'NOPE'}
- match = (r'Invalid ntp configuration:\\nExpected a list of required'
- ' package names for ntp:config:packages. Found \\(NOPE\\)')
- with self.assertRaisesRegex(ValueError, match):
- cc_ntp.supplemental_schema_validation(cfg)
-
- def test_error_on_non_string_values(self):
- """ValueError raised for any values expected as string type."""
- cfg = {'confpath': 1, 'check_exe': 2, 'service_name': 3,
- 'template': 4, 'template_name': 5, 'packages': []}
- errors = [
- 'Expected a config file path ntp:config:confpath. Found (1)',
- 'Expected a string type for ntp:config:check_exe. Found (2)',
- 'Expected a string type for ntp:config:service_name. Found (3)',
- 'Expected a string type for ntp:config:template. Found (4)',
- 'Expected a string type for ntp:config:template_name. Found (5)']
- with self.assertRaises(ValueError) as context_mgr:
- cc_ntp.supplemental_schema_validation(cfg)
- error_msg = str(context_mgr.exception)
- for error in errors:
- self.assertIn(error, error_msg)
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_power_state.py b/tests/unittests/test_handler/test_handler_power_state.py
deleted file mode 100644
index 4ac49424..00000000
--- a/tests/unittests/test_handler/test_handler_power_state.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import sys
-
-from cloudinit.config import cc_power_state_change as psc
-
-from cloudinit import distros
-from cloudinit import helpers
-
-from cloudinit.tests import helpers as t_help
-from cloudinit.tests.helpers import mock
-
-
-class TestLoadPowerState(t_help.TestCase):
- def setUp(self):
- super(TestLoadPowerState, self).setUp()
- cls = distros.fetch('ubuntu')
- paths = helpers.Paths({})
- self.dist = cls('ubuntu', {}, paths)
-
- def test_no_config(self):
- # completely empty config should mean do nothing
- (cmd, _timeout, _condition) = psc.load_power_state({}, self.dist)
- self.assertIsNone(cmd)
-
- def test_irrelevant_config(self):
- # no power_state field in config should return None for cmd
- (cmd, _timeout, _condition) = psc.load_power_state({'foo': 'bar'},
- self.dist)
- self.assertIsNone(cmd)
-
- def test_invalid_mode(self):
-
- cfg = {'power_state': {'mode': 'gibberish'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
-
- cfg = {'power_state': {'mode': ''}}
- self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
-
- def test_empty_mode(self):
- cfg = {'power_state': {'message': 'goodbye'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
-
- def test_valid_modes(self):
- cfg = {'power_state': {}}
- for mode in ('halt', 'poweroff', 'reboot'):
- cfg['power_state']['mode'] = mode
- check_lps_ret(psc.load_power_state(cfg, self.dist), mode=mode)
-
- def test_invalid_delay(self):
- cfg = {'power_state': {'mode': 'poweroff', 'delay': 'goodbye'}}
- self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
-
- def test_valid_delay(self):
- cfg = {'power_state': {'mode': 'poweroff', 'delay': ''}}
- for delay in ("now", "+1", "+30"):
- cfg['power_state']['delay'] = delay
- check_lps_ret(psc.load_power_state(cfg, self.dist))
-
- def test_message_present(self):
- cfg = {'power_state': {'mode': 'poweroff', 'message': 'GOODBYE'}}
- ret = psc.load_power_state(cfg, self.dist)
- check_lps_ret(psc.load_power_state(cfg, self.dist))
- self.assertIn(cfg['power_state']['message'], ret[0])
-
- def test_no_message(self):
- # if message is not present, then no argument should be passed for it
- cfg = {'power_state': {'mode': 'poweroff'}}
- (cmd, _timeout, _condition) = psc.load_power_state(cfg, self.dist)
- self.assertNotIn("", cmd)
- check_lps_ret(psc.load_power_state(cfg, self.dist))
- self.assertTrue(len(cmd) == 3)
-
- def test_condition_null_raises(self):
- cfg = {'power_state': {'mode': 'poweroff', 'condition': None}}
- self.assertRaises(TypeError, psc.load_power_state, cfg, self.dist)
-
- def test_condition_default_is_true(self):
- cfg = {'power_state': {'mode': 'poweroff'}}
- _cmd, _timeout, cond = psc.load_power_state(cfg, self.dist)
- self.assertEqual(cond, True)
-
- def test_freebsd_poweroff_uses_lowercase_p(self):
- cls = distros.fetch('freebsd')
- paths = helpers.Paths({})
- freebsd = cls('freebsd', {}, paths)
- cfg = {'power_state': {'mode': 'poweroff'}}
- ret = psc.load_power_state(cfg, freebsd)
- self.assertIn('-p', ret[0])
-
- def test_alpine_delay(self):
- # alpine takes delay in seconds.
- cls = distros.fetch('alpine')
- paths = helpers.Paths({})
- alpine = cls('alpine', {}, paths)
- cfg = {'power_state': {'mode': 'poweroff', 'delay': ''}}
- for delay, value in (('now', 0), ("+1", 60), ("+30", 1800)):
- cfg['power_state']['delay'] = delay
- ret = psc.load_power_state(cfg, alpine)
- self.assertEqual('-d', ret[0][1])
- self.assertEqual(str(value), ret[0][2])
-
-
-class TestCheckCondition(t_help.TestCase):
- def cmd_with_exit(self, rc):
- return([sys.executable, '-c', 'import sys; sys.exit(%s)' % rc])
-
- def test_true_is_true(self):
- self.assertEqual(psc.check_condition(True), True)
-
- def test_false_is_false(self):
- self.assertEqual(psc.check_condition(False), False)
-
- def test_cmd_exit_zero_true(self):
- self.assertEqual(psc.check_condition(self.cmd_with_exit(0)), True)
-
- def test_cmd_exit_one_false(self):
- self.assertEqual(psc.check_condition(self.cmd_with_exit(1)), False)
-
- def test_cmd_exit_nonzero_warns(self):
- mocklog = mock.Mock()
- self.assertEqual(
- psc.check_condition(self.cmd_with_exit(2), mocklog), False)
- self.assertEqual(mocklog.warning.call_count, 1)
-
-
-def check_lps_ret(psc_return, mode=None):
- if len(psc_return) != 3:
- raise TypeError("length returned = %d" % len(psc_return))
-
- errs = []
- cmd = psc_return[0]
- timeout = psc_return[1]
- condition = psc_return[2]
-
- if 'shutdown' not in psc_return[0][0]:
- errs.append("string 'shutdown' not in cmd")
-
- if condition is None:
- errs.append("condition was not returned")
-
- if mode is not None:
- opt = {'halt': '-H', 'poweroff': '-P', 'reboot': '-r'}[mode]
- if opt not in psc_return[0]:
- errs.append("opt '%s' not in cmd: %s" % (opt, cmd))
-
- if len(cmd) != 3 and len(cmd) != 4:
- errs.append("Invalid command length: %s" % len(cmd))
-
- try:
- float(timeout)
- except Exception:
- errs.append("timeout failed convert to float")
-
- if len(errs):
- lines = ["Errors in result: %s" % str(psc_return)] + errs
- raise Exception('\n'.join(lines))
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_puppet.py b/tests/unittests/test_handler/test_handler_puppet.py
deleted file mode 100644
index 8d99f535..00000000
--- a/tests/unittests/test_handler/test_handler_puppet.py
+++ /dev/null
@@ -1,380 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import textwrap
-
-from cloudinit.config import cc_puppet
-from cloudinit import util
-from cloudinit.tests.helpers import CiTestCase, HttprettyTestCase, mock
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-@mock.patch('cloudinit.config.cc_puppet.subp.subp')
-@mock.patch('cloudinit.config.cc_puppet.os')
-class TestAutostartPuppet(CiTestCase):
-
- def test_wb_autostart_puppet_updates_puppet_default(self, m_os, m_subp):
- """Update /etc/default/puppet to autostart if it exists."""
-
- def _fake_exists(path):
- return path == '/etc/default/puppet'
-
- m_os.path.exists.side_effect = _fake_exists
- cc_puppet._autostart_puppet(LOG)
- self.assertEqual(
- [mock.call(['sed', '-i', '-e', 's/^START=.*/START=yes/',
- '/etc/default/puppet'], capture=False)],
- m_subp.call_args_list)
-
- def test_wb_autostart_pupppet_enables_puppet_systemctl(self, m_os, m_subp):
- """If systemctl is present, enable puppet via systemctl."""
-
- def _fake_exists(path):
- return path == '/bin/systemctl'
-
- m_os.path.exists.side_effect = _fake_exists
- cc_puppet._autostart_puppet(LOG)
- expected_calls = [mock.call(
- ['/bin/systemctl', 'enable', 'puppet.service'], capture=False)]
- self.assertEqual(expected_calls, m_subp.call_args_list)
-
- def test_wb_autostart_pupppet_enables_puppet_chkconfig(self, m_os, m_subp):
- """If chkconfig is present, enable puppet via checkcfg."""
-
- def _fake_exists(path):
- return path == '/sbin/chkconfig'
-
- m_os.path.exists.side_effect = _fake_exists
- cc_puppet._autostart_puppet(LOG)
- expected_calls = [mock.call(
- ['/sbin/chkconfig', 'puppet', 'on'], capture=False)]
- self.assertEqual(expected_calls, m_subp.call_args_list)
-
-
-@mock.patch('cloudinit.config.cc_puppet._autostart_puppet')
-class TestPuppetHandle(CiTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestPuppetHandle, self).setUp()
- self.new_root = self.tmp_dir()
- self.conf = self.tmp_path('puppet.conf')
- self.csr_attributes_path = self.tmp_path(
- 'csr_attributes.yaml')
- self.cloud = get_cloud()
-
- def test_skips_missing_puppet_key_in_cloudconfig(self, m_auto):
- """Cloud-config containing no 'puppet' key is skipped."""
-
- cfg = {}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertIn(
- "no 'puppet' configuration found", self.logs.getvalue())
- self.assertEqual(0, m_auto.call_count)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_starts_puppet_service(self, m_subp, m_auto):
- """Cloud-config 'puppet' configuration starts puppet."""
-
- cfg = {'puppet': {'install': False}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(1, m_auto.call_count)
- self.assertIn(
- [mock.call(['service', 'puppet', 'start'], capture=False)],
- m_subp.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_empty_puppet_config_installs_puppet(self, m_subp, m_auto):
- """Cloud-config empty 'puppet' configuration installs latest puppet."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(
- [mock.call(('puppet', None))],
- self.cloud.distro.install_packages.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_on_true(self, m_subp, _):
- """Cloud-config with 'puppet' key installs when 'install' is True."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'install': True}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(
- [mock.call(('puppet', None))],
- self.cloud.distro.install_packages.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_aio(self, m_subp, m_aio, _):
- """Cloud-config with 'puppet' key installs
- when 'install_type' is 'aio'."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'install': True, 'install_type': 'aio'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- m_aio.assert_called_with(
- cc_puppet.AIO_INSTALL_URL,
- None, None, True)
-
- @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_aio_with_version(self,
- m_subp, m_aio, _):
- """Cloud-config with 'puppet' key installs
- when 'install_type' is 'aio' and 'version' is specified."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'install': True,
- 'version': '6.24.0', 'install_type': 'aio'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- m_aio.assert_called_with(
- cc_puppet.AIO_INSTALL_URL,
- '6.24.0', None, True)
-
- @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_aio_with_collection(self,
- m_subp,
- m_aio, _):
- """Cloud-config with 'puppet' key installs
- when 'install_type' is 'aio' and 'collection' is specified."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'install': True,
- 'collection': 'puppet6', 'install_type': 'aio'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- m_aio.assert_called_with(
- cc_puppet.AIO_INSTALL_URL,
- None, 'puppet6', True)
-
- @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_aio_with_custom_url(self,
- m_subp,
- m_aio, _):
- """Cloud-config with 'puppet' key installs
- when 'install_type' is 'aio' and 'aio_install_url' is specified."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet':
- {'install': True,
- 'aio_install_url': 'http://test.url/path/to/script.sh',
- 'install_type': 'aio'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- m_aio.assert_called_with(
- 'http://test.url/path/to/script.sh', None, None, True)
-
- @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True)
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_aio_without_cleanup(self,
- m_subp,
- m_aio, _):
- """Cloud-config with 'puppet' key installs
- when 'install_type' is 'aio' and no cleanup."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'install': True,
- 'cleanup': False, 'install_type': 'aio'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- m_aio.assert_called_with(
- cc_puppet.AIO_INSTALL_URL,
- None, None, False)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_installs_puppet_version(self, m_subp, _):
- """Cloud-config 'puppet' configuration can specify a version."""
-
- self.cloud.distro = mock.MagicMock()
- cfg = {'puppet': {'version': '3.8'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(
- [mock.call(('puppet', '3.8'))],
- self.cloud.distro.install_packages.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.get_config_value')
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_config_updates_puppet_conf(self,
- m_subp, m_default, m_auto):
- """When 'conf' is provided update values in PUPPET_CONF_PATH."""
-
- def _fake_get_config_value(puppet_bin, setting):
- return self.conf
-
- m_default.side_effect = _fake_get_config_value
-
- cfg = {
- 'puppet': {
- 'conf': {'agent': {'server': 'puppetserver.example.org'}}}}
- util.write_file(
- self.conf, '[agent]\nserver = origpuppet\nother = 3')
- self.cloud.distro = mock.MagicMock()
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- content = util.load_file(self.conf)
- expected = '[agent]\nserver = puppetserver.example.org\nother = 3\n\n'
- self.assertEqual(expected, content)
-
- @mock.patch('cloudinit.config.cc_puppet.get_config_value')
- @mock.patch('cloudinit.config.cc_puppet.subp.subp')
- def test_puppet_writes_csr_attributes_file(self,
- m_subp, m_default, m_auto):
- """When csr_attributes is provided
- creates file in PUPPET_CSR_ATTRIBUTES_PATH."""
-
- def _fake_get_config_value(puppet_bin, setting):
- return self.csr_attributes_path
-
- m_default.side_effect = _fake_get_config_value
-
- self.cloud.distro = mock.MagicMock()
- cfg = {
- 'puppet': {
- 'csr_attributes': {
- 'custom_attributes': {
- '1.2.840.113549.1.9.7':
- '342thbjkt82094y0uthhor289jnqthpc2290'
- },
- 'extension_requests': {
- 'pp_uuid': 'ED803750-E3C7-44F5-BB08-41A04433FE2E',
- 'pp_image_name': 'my_ami_image',
- 'pp_preshared_key':
- '342thbjkt82094y0uthhor289jnqthpc2290'
- }
- }
- }
- }
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- content = util.load_file(self.csr_attributes_path)
- expected = textwrap.dedent("""\
- custom_attributes:
- 1.2.840.113549.1.9.7: 342thbjkt82094y0uthhor289jnqthpc2290
- extension_requests:
- pp_image_name: my_ami_image
- pp_preshared_key: 342thbjkt82094y0uthhor289jnqthpc2290
- pp_uuid: ED803750-E3C7-44F5-BB08-41A04433FE2E
- """)
- self.assertEqual(expected, content)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_runs_puppet_if_requested(self, m_subp, m_auto):
- """Run puppet with default args if 'exec' is set to True."""
-
- cfg = {'puppet': {'exec': True}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(1, m_auto.call_count)
- self.assertIn(
- [mock.call(['puppet', 'agent', '--test'], capture=False)],
- m_subp.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_starts_puppetd(self, m_subp, m_auto):
- """Run puppet with default args if 'exec' is set to True."""
-
- cfg = {'puppet': {}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(1, m_auto.call_count)
- self.assertIn(
- [mock.call(['service', 'puppet', 'start'], capture=False)],
- m_subp.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_skips_puppetd(self, m_subp, m_auto):
- """Run puppet with default args if 'exec' is set to True."""
-
- cfg = {'puppet': {'start_service': False}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(0, m_auto.call_count)
- self.assertNotIn(
- [mock.call(['service', 'puppet', 'start'], capture=False)],
- m_subp.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_runs_puppet_with_args_list_if_requested(self,
- m_subp, m_auto):
- """Run puppet with 'exec_args' list if 'exec' is set to True."""
-
- cfg = {'puppet': {'exec': True, 'exec_args': [
- '--onetime', '--detailed-exitcodes']}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(1, m_auto.call_count)
- self.assertIn(
- [mock.call(
- ['puppet', 'agent', '--onetime', '--detailed-exitcodes'],
- capture=False)],
- m_subp.call_args_list)
-
- @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", ""))
- def test_puppet_runs_puppet_with_args_string_if_requested(self,
- m_subp, m_auto):
- """Run puppet with 'exec_args' string if 'exec' is set to True."""
-
- cfg = {'puppet': {'exec': True,
- 'exec_args': '--onetime --detailed-exitcodes'}}
- cc_puppet.handle('notimportant', cfg, self.cloud, LOG, None)
- self.assertEqual(1, m_auto.call_count)
- self.assertIn(
- [mock.call(
- ['puppet', 'agent', '--onetime', '--detailed-exitcodes'],
- capture=False)],
- m_subp.call_args_list)
-
-
-URL_MOCK = mock.Mock()
-URL_MOCK.contents = b'#!/bin/bash\necho "Hi Mom"'
-
-
-@mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=(None, None))
-@mock.patch(
- 'cloudinit.config.cc_puppet.url_helper.readurl',
- return_value=URL_MOCK, autospec=True,
-)
-class TestInstallPuppetAio(HttprettyTestCase):
- def test_install_with_default_arguments(self, m_readurl, m_subp):
- """Install AIO with no arguments"""
- cc_puppet.install_puppet_aio()
-
- self.assertEqual(
- [mock.call([mock.ANY, '--cleanup'], capture=False)],
- m_subp.call_args_list)
-
- def test_install_with_custom_url(self, m_readurl, m_subp):
- """Install AIO from custom URL"""
- cc_puppet.install_puppet_aio('http://custom.url/path/to/script.sh')
- m_readurl.assert_called_with(
- url='http://custom.url/path/to/script.sh',
- retries=5)
-
- self.assertEqual(
- [mock.call([mock.ANY, '--cleanup'], capture=False)],
- m_subp.call_args_list)
-
- def test_install_with_version(self, m_readurl, m_subp):
- """Install AIO with specific version"""
- cc_puppet.install_puppet_aio(cc_puppet.AIO_INSTALL_URL, '7.6.0')
-
- self.assertEqual(
- [mock.call([mock.ANY, '-v', '7.6.0', '--cleanup'], capture=False)],
- m_subp.call_args_list)
-
- def test_install_with_collection(self, m_readurl, m_subp):
- """Install AIO with specific collection"""
- cc_puppet.install_puppet_aio(
- cc_puppet.AIO_INSTALL_URL, None, 'puppet6-nightly')
-
- self.assertEqual(
- [mock.call([mock.ANY, '-c', 'puppet6-nightly', '--cleanup'],
- capture=False)],
- m_subp.call_args_list)
-
- def test_install_with_no_cleanup(self, m_readurl, m_subp):
- """Install AIO with no cleanup"""
- cc_puppet.install_puppet_aio(
- cc_puppet.AIO_INSTALL_URL, None, None, False)
-
- self.assertEqual(
- [mock.call([mock.ANY], capture=False)],
- m_subp.call_args_list)
diff --git a/tests/unittests/test_handler/test_handler_refresh_rmc_and_interface.py b/tests/unittests/test_handler/test_handler_refresh_rmc_and_interface.py
deleted file mode 100644
index e13b7793..00000000
--- a/tests/unittests/test_handler/test_handler_refresh_rmc_and_interface.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from cloudinit.config import cc_refresh_rmc_and_interface as ccrmci
-
-from cloudinit import util
-
-from cloudinit.tests import helpers as t_help
-from cloudinit.tests.helpers import mock
-
-from textwrap import dedent
-import logging
-
-LOG = logging.getLogger(__name__)
-MPATH = "cloudinit.config.cc_refresh_rmc_and_interface"
-NET_INFO = {
- 'lo': {'ipv4': [{'ip': '127.0.0.1',
- 'bcast': '', 'mask': '255.0.0.0',
- 'scope': 'host'}],
- 'ipv6': [{'ip': '::1/128',
- 'scope6': 'host'}], 'hwaddr': '',
- 'up': 'True'},
- 'env2': {'ipv4': [{'ip': '8.0.0.19',
- 'bcast': '8.0.0.255', 'mask': '255.255.255.0',
- 'scope': 'global'}],
- 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8220/64',
- 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:20',
- 'up': 'True'},
- 'env3': {'ipv4': [{'ip': '90.0.0.14',
- 'bcast': '90.0.0.255', 'mask': '255.255.255.0',
- 'scope': 'global'}],
- 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8221/64',
- 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:21',
- 'up': 'True'},
- 'env4': {'ipv4': [{'ip': '9.114.23.7',
- 'bcast': '9.114.23.255', 'mask': '255.255.255.0',
- 'scope': 'global'}],
- 'ipv6': [{'ip': 'fe80::f896:c2ff:fe81:8222/64',
- 'scope6': 'link'}], 'hwaddr': 'fa:96:c2:81:82:22',
- 'up': 'True'},
- 'env5': {'ipv4': [],
- 'ipv6': [{'ip': 'fe80::9c26:c3ff:fea4:62c8/64',
- 'scope6': 'link'}], 'hwaddr': '42:20:86:df:fa:4c',
- 'up': 'True'}}
-
-
-class TestRsctNodeFile(t_help.CiTestCase):
- def test_disable_ipv6_interface(self):
- """test parsing of iface files."""
- fname = self.tmp_path("iface-eth5")
- util.write_file(fname, dedent("""\
- BOOTPROTO=static
- DEVICE=eth5
- HWADDR=42:20:86:df:fa:4c
- IPV6INIT=yes
- IPADDR6=fe80::9c26:c3ff:fea4:62c8/64
- IPV6ADDR=fe80::9c26:c3ff:fea4:62c8/64
- NM_CONTROLLED=yes
- ONBOOT=yes
- STARTMODE=auto
- TYPE=Ethernet
- USERCTL=no
- """))
-
- ccrmci.disable_ipv6(fname)
- self.assertEqual(dedent("""\
- BOOTPROTO=static
- DEVICE=eth5
- HWADDR=42:20:86:df:fa:4c
- ONBOOT=yes
- STARTMODE=auto
- TYPE=Ethernet
- USERCTL=no
- NM_CONTROLLED=no
- """), util.load_file(fname))
-
- @mock.patch(MPATH + '.refresh_rmc')
- @mock.patch(MPATH + '.restart_network_manager')
- @mock.patch(MPATH + '.disable_ipv6')
- @mock.patch(MPATH + '.refresh_ipv6')
- @mock.patch(MPATH + '.netinfo.netdev_info')
- @mock.patch(MPATH + '.subp.which')
- def test_handle(self, m_refresh_rmc,
- m_netdev_info, m_refresh_ipv6, m_disable_ipv6,
- m_restart_nm, m_which):
- """Basic test of handle."""
- m_netdev_info.return_value = NET_INFO
- m_which.return_value = '/opt/rsct/bin/rmcctrl'
- ccrmci.handle(
- "refresh_rmc_and_interface", None, None, None, None)
- self.assertEqual(1, m_netdev_info.call_count)
- m_refresh_ipv6.assert_called_with('env5')
- m_disable_ipv6.assert_called_with(
- '/etc/sysconfig/network-scripts/ifcfg-env5')
- self.assertEqual(1, m_restart_nm.call_count)
- self.assertEqual(1, m_refresh_rmc.call_count)
-
- @mock.patch(MPATH + '.netinfo.netdev_info')
- def test_find_ipv6(self, m_netdev_info):
- """find_ipv6_ifaces parses netdev_info returning those with ipv6"""
- m_netdev_info.return_value = NET_INFO
- found = ccrmci.find_ipv6_ifaces()
- self.assertEqual(['env5'], found)
-
- @mock.patch(MPATH + '.subp.subp')
- def test_refresh_ipv6(self, m_subp):
- """refresh_ipv6 should ip down and up the interface."""
- iface = "myeth0"
- ccrmci.refresh_ipv6(iface)
- m_subp.assert_has_calls([
- mock.call(['ip', 'link', 'set', iface, 'down']),
- mock.call(['ip', 'link', 'set', iface, 'up'])])
diff --git a/tests/unittests/test_handler/test_handler_resizefs.py b/tests/unittests/test_handler/test_handler_resizefs.py
deleted file mode 100644
index 28d55072..00000000
--- a/tests/unittests/test_handler/test_handler_resizefs.py
+++ /dev/null
@@ -1,398 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config.cc_resizefs import (
- can_skip_resize, handle, maybe_get_writable_device_path, _resize_btrfs,
- _resize_zfs, _resize_xfs, _resize_ext, _resize_ufs)
-
-from collections import namedtuple
-import logging
-
-from cloudinit.subp import ProcessExecutionError
-from cloudinit.tests.helpers import (
- CiTestCase, mock, skipUnlessJsonSchema, util, wrap_and_call)
-
-
-LOG = logging.getLogger(__name__)
-
-
-class TestResizefs(CiTestCase):
- with_logs = True
-
- def setUp(self):
- super(TestResizefs, self).setUp()
- self.name = "resizefs"
-
- @mock.patch('cloudinit.subp.subp')
- def test_skip_ufs_resize(self, m_subp):
- fs_type = "ufs"
- resize_what = "/"
- devpth = "/dev/da0p2"
- err = ("growfs: requested size 2.0GB is not larger than the "
- "current filesystem size 2.0GB\n")
- exception = ProcessExecutionError(stderr=err, exit_code=1)
- m_subp.side_effect = exception
- res = can_skip_resize(fs_type, resize_what, devpth)
- self.assertTrue(res)
-
- @mock.patch('cloudinit.subp.subp')
- def test_cannot_skip_ufs_resize(self, m_subp):
- fs_type = "ufs"
- resize_what = "/"
- devpth = "/dev/da0p2"
- m_subp.return_value = (
- ("stdout: super-block backups (for fsck_ffs -b #) at:\n\n"),
- ("growfs: no room to allocate last cylinder group; "
- "leaving 364KB unused\n")
- )
- res = can_skip_resize(fs_type, resize_what, devpth)
- self.assertFalse(res)
-
- @mock.patch('cloudinit.subp.subp')
- def test_cannot_skip_ufs_growfs_exception(self, m_subp):
- fs_type = "ufs"
- resize_what = "/"
- devpth = "/dev/da0p2"
- err = "growfs: /dev/da0p2 is not clean - run fsck.\n"
- exception = ProcessExecutionError(stderr=err, exit_code=1)
- m_subp.side_effect = exception
- with self.assertRaises(ProcessExecutionError):
- can_skip_resize(fs_type, resize_what, devpth)
-
- def test_can_skip_resize_ext(self):
- self.assertFalse(can_skip_resize('ext', '/', '/dev/sda1'))
-
- def test_handle_noops_on_disabled(self):
- """The handle function logs when the configuration disables resize."""
- cfg = {'resize_rootfs': False}
- handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
- self.assertIn(
- 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n',
- self.logs.getvalue())
-
- @skipUnlessJsonSchema()
- def test_handle_schema_validation_logs_invalid_resize_rootfs_value(self):
- """The handle reports json schema violations as a warning.
-
- Invalid values for resize_rootfs result in disabling the module.
- """
- cfg = {'resize_rootfs': 'junk'}
- handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
- logs = self.logs.getvalue()
- self.assertIn(
- "WARNING: Invalid config:\nresize_rootfs: 'junk' is not one of"
- " [True, False, 'noblock']",
- logs)
- self.assertIn(
- 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n',
- logs)
-
- @mock.patch('cloudinit.config.cc_resizefs.util.get_mount_info')
- def test_handle_warns_on_unknown_mount_info(self, m_get_mount_info):
- """handle warns when get_mount_info sees unknown filesystem for /."""
- m_get_mount_info.return_value = None
- cfg = {'resize_rootfs': True}
- handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
- logs = self.logs.getvalue()
- self.assertNotIn("WARNING: Invalid config:\nresize_rootfs:", logs)
- self.assertIn(
- 'WARNING: Could not determine filesystem type of /\n',
- logs)
- self.assertEqual(
- [mock.call('/', LOG)],
- m_get_mount_info.call_args_list)
-
- def test_handle_warns_on_undiscoverable_root_path_in_commandline(self):
- """handle noops when the root path is not found on the commandline."""
- cfg = {'resize_rootfs': True}
- exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists'
-
- def fake_mount_info(path, log):
- self.assertEqual('/', path)
- self.assertEqual(LOG, log)
- return ('/dev/root', 'ext4', '/')
-
- with mock.patch(exists_mock_path) as m_exists:
- m_exists.return_value = False
- wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': False},
- 'get_mount_info': {'side_effect': fake_mount_info},
- 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}},
- handle, 'cc_resizefs', cfg, _cloud=None, log=LOG,
- args=[])
- logs = self.logs.getvalue()
- self.assertIn("WARNING: Unable to find device '/dev/root'", logs)
-
- def test_resize_zfs_cmd_return(self):
- zpool = 'zroot'
- devpth = 'gpt/system'
- self.assertEqual(('zpool', 'online', '-e', zpool, devpth),
- _resize_zfs(zpool, devpth))
-
- def test_resize_xfs_cmd_return(self):
- mount_point = '/mnt/test'
- devpth = '/dev/sda1'
- self.assertEqual(('xfs_growfs', mount_point),
- _resize_xfs(mount_point, devpth))
-
- def test_resize_ext_cmd_return(self):
- mount_point = '/'
- devpth = '/dev/sdb1'
- self.assertEqual(('resize2fs', devpth),
- _resize_ext(mount_point, devpth))
-
- def test_resize_ufs_cmd_return(self):
- mount_point = '/'
- devpth = '/dev/sda2'
- self.assertEqual(('growfs', '-y', mount_point),
- _resize_ufs(mount_point, devpth))
-
- @mock.patch('cloudinit.util.is_container', return_value=False)
- @mock.patch('cloudinit.util.parse_mount')
- @mock.patch('cloudinit.util.get_device_info_from_zpool')
- @mock.patch('cloudinit.util.get_mount_info')
- def test_handle_zfs_root(self, mount_info, zpool_info, parse_mount,
- is_container):
- devpth = 'vmzroot/ROOT/freebsd'
- disk = 'gpt/system'
- fs_type = 'zfs'
- mount_point = '/'
-
- mount_info.return_value = (devpth, fs_type, mount_point)
- zpool_info.return_value = disk
- parse_mount.return_value = (devpth, fs_type, mount_point)
-
- cfg = {'resize_rootfs': True}
-
- with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
- handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
- ret = dresize.call_args[0][0]
-
- self.assertEqual(('zpool', 'online', '-e', 'vmzroot', disk), ret)
-
- @mock.patch('cloudinit.util.is_container', return_value=False)
- @mock.patch('cloudinit.util.get_mount_info')
- @mock.patch('cloudinit.util.get_device_info_from_zpool')
- @mock.patch('cloudinit.util.parse_mount')
- def test_handle_modern_zfsroot(self, mount_info, zpool_info, parse_mount,
- is_container):
- devpth = 'zroot/ROOT/default'
- disk = 'da0p3'
- fs_type = 'zfs'
- mount_point = '/'
-
- mount_info.return_value = (devpth, fs_type, mount_point)
- zpool_info.return_value = disk
- parse_mount.return_value = (devpth, fs_type, mount_point)
-
- cfg = {'resize_rootfs': True}
-
- def fake_stat(devpath):
- if devpath == disk:
- raise OSError("not here")
- FakeStat = namedtuple(
- 'FakeStat', ['st_mode', 'st_size', 'st_mtime']) # minimal stat
- return FakeStat(25008, 0, 1) # fake char block device
-
- with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
- with mock.patch('cloudinit.config.cc_resizefs.os.stat') as m_stat:
- m_stat.side_effect = fake_stat
- handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
-
- self.assertEqual(('zpool', 'online', '-e', 'zroot', '/dev/' + disk),
- dresize.call_args[0][0])
-
-
-class TestRootDevFromCmdline(CiTestCase):
-
- def test_rootdev_from_cmdline_with_no_root(self):
- """Return None from rootdev_from_cmdline when root is not present."""
- invalid_cases = [
- 'BOOT_IMAGE=/adsf asdfa werasef root adf', 'BOOT_IMAGE=/adsf', '']
- for case in invalid_cases:
- self.assertIsNone(util.rootdev_from_cmdline(case))
-
- def test_rootdev_from_cmdline_with_root_startswith_dev(self):
- """Return the cmdline root when the path starts with /dev."""
- self.assertEqual(
- '/dev/this', util.rootdev_from_cmdline('asdf root=/dev/this'))
-
- def test_rootdev_from_cmdline_with_root_without_dev_prefix(self):
- """Add /dev prefix to cmdline root when the path lacks the prefix."""
- self.assertEqual(
- '/dev/this', util.rootdev_from_cmdline('asdf root=this'))
-
- def test_rootdev_from_cmdline_with_root_with_label(self):
- """When cmdline root contains a LABEL, our root is disk/by-label."""
- self.assertEqual(
- '/dev/disk/by-label/unique',
- util.rootdev_from_cmdline('asdf root=LABEL=unique'))
-
- def test_rootdev_from_cmdline_with_root_with_uuid(self):
- """When cmdline root contains a UUID, our root is disk/by-uuid."""
- self.assertEqual(
- '/dev/disk/by-uuid/adsfdsaf-adsf',
- util.rootdev_from_cmdline('asdf root=UUID=adsfdsaf-adsf'))
-
-
-class TestMaybeGetDevicePathAsWritableBlock(CiTestCase):
-
- with_logs = True
-
- def test_maybe_get_writable_device_path_none_on_overlayroot(self):
- """When devpath is overlayroot (on MAAS), is_dev_writable is False."""
- info = 'does not matter'
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': False}},
- maybe_get_writable_device_path, 'overlayroot', info, LOG)
- self.assertIsNone(devpath)
- self.assertIn(
- "Not attempting to resize devpath 'overlayroot'",
- self.logs.getvalue())
-
- def test_maybe_get_writable_device_path_warns_missing_cmdline_root(self):
- """When root does not exist isn't in the cmdline, log warning."""
- info = 'does not matter'
-
- def fake_mount_info(path, log):
- self.assertEqual('/', path)
- self.assertEqual(LOG, log)
- return ('/dev/root', 'ext4', '/')
-
- exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists'
- with mock.patch(exists_mock_path) as m_exists:
- m_exists.return_value = False
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': False},
- 'get_mount_info': {'side_effect': fake_mount_info},
- 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}},
- maybe_get_writable_device_path, '/dev/root', info, LOG)
- self.assertIsNone(devpath)
- logs = self.logs.getvalue()
- self.assertIn("WARNING: Unable to find device '/dev/root'", logs)
-
- def test_maybe_get_writable_device_path_does_not_exist(self):
- """When devpath does not exist, a warning is logged."""
- info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': False}},
- maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
- self.assertIsNone(devpath)
- self.assertIn(
- "WARNING: Device '/dev/I/dont/exist' did not exist."
- ' cannot resize: %s' % info,
- self.logs.getvalue())
-
- def test_maybe_get_writable_device_path_does_not_exist_in_container(self):
- """When devpath does not exist in a container, log a debug message."""
- info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': True}},
- maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
- self.assertIsNone(devpath)
- self.assertIn(
- "DEBUG: Device '/dev/I/dont/exist' did not exist in container."
- ' cannot resize: %s' % info,
- self.logs.getvalue())
-
- def test_maybe_get_writable_device_path_raises_oserror(self):
- """When unexpected OSError is raises by os.stat it is reraised."""
- info = 'dev=/dev/I/dont/exist mnt_point=/ path=/dev/none'
- with self.assertRaises(OSError) as context_manager:
- wrap_and_call(
- 'cloudinit.config.cc_resizefs',
- {'util.is_container': {'return_value': True},
- 'os.stat': {'side_effect': OSError('Something unexpected')}},
- maybe_get_writable_device_path, '/dev/I/dont/exist', info, LOG)
- self.assertEqual(
- 'Something unexpected', str(context_manager.exception))
-
- def test_maybe_get_writable_device_path_non_block(self):
- """When device is not a block device, emit warning return False."""
- fake_devpath = self.tmp_path('dev/readwrite')
- util.write_file(fake_devpath, '', mode=0o600) # read-write
- info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath)
-
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': False}},
- maybe_get_writable_device_path, fake_devpath, info, LOG)
- self.assertIsNone(devpath)
- self.assertIn(
- "WARNING: device '{0}' not a block device. cannot resize".format(
- fake_devpath),
- self.logs.getvalue())
-
- def test_maybe_get_writable_device_path_non_block_on_container(self):
- """When device is non-block device in container, emit debug log."""
- fake_devpath = self.tmp_path('dev/readwrite')
- util.write_file(fake_devpath, '', mode=0o600) # read-write
- info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath)
-
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs.util',
- {'is_container': {'return_value': True}},
- maybe_get_writable_device_path, fake_devpath, info, LOG)
- self.assertIsNone(devpath)
- self.assertIn(
- "DEBUG: device '{0}' not a block device in container."
- ' cannot resize'.format(fake_devpath),
- self.logs.getvalue())
-
- def test_maybe_get_writable_device_path_returns_cmdline_root(self):
- """When root device is UUID in kernel commandline, update devpath."""
- # XXX Long-term we want to use FilesystemMocking test to avoid
- # touching os.stat.
- FakeStat = namedtuple(
- 'FakeStat', ['st_mode', 'st_size', 'st_mtime']) # minimal def.
- info = 'dev=/dev/root mnt_point=/ path=/does/not/matter'
- devpath = wrap_and_call(
- 'cloudinit.config.cc_resizefs',
- {'util.get_cmdline': {'return_value': 'asdf root=UUID=my-uuid'},
- 'util.is_container': False,
- 'os.path.exists': False, # /dev/root doesn't exist
- 'os.stat': {
- 'return_value': FakeStat(25008, 0, 1)} # char block device
- },
- maybe_get_writable_device_path, '/dev/root', info, LOG)
- self.assertEqual('/dev/disk/by-uuid/my-uuid', devpath)
- self.assertIn(
- "DEBUG: Converted /dev/root to '/dev/disk/by-uuid/my-uuid'"
- " per kernel cmdline",
- self.logs.getvalue())
-
- @mock.patch('cloudinit.util.mount_is_read_write')
- @mock.patch('cloudinit.config.cc_resizefs.os.path.isdir')
- def test_resize_btrfs_mount_is_ro(self, m_is_dir, m_is_rw):
- """Do not resize / directly if it is read-only. (LP: #1734787)."""
- m_is_rw.return_value = False
- m_is_dir.return_value = True
- self.assertEqual(
- ('btrfs', 'filesystem', 'resize', 'max', '//.snapshots'),
- _resize_btrfs("/", "/dev/sda1"))
-
- @mock.patch('cloudinit.util.mount_is_read_write')
- @mock.patch('cloudinit.config.cc_resizefs.os.path.isdir')
- def test_resize_btrfs_mount_is_rw(self, m_is_dir, m_is_rw):
- """Do not resize / directly if it is read-only. (LP: #1734787)."""
- m_is_rw.return_value = True
- m_is_dir.return_value = True
- self.assertEqual(
- ('btrfs', 'filesystem', 'resize', 'max', '/'),
- _resize_btrfs("/", "/dev/sda1"))
-
- @mock.patch('cloudinit.util.is_container', return_value=True)
- @mock.patch('cloudinit.util.is_FreeBSD')
- def test_maybe_get_writable_device_path_zfs_freebsd(self, freebsd,
- m_is_container):
- freebsd.return_value = True
- info = 'dev=gpt/system mnt_point=/ path=/'
- devpth = maybe_get_writable_device_path('gpt/system', info, LOG)
- self.assertEqual('gpt/system', devpth)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_resolv_conf.py b/tests/unittests/test_handler/test_handler_resolv_conf.py
deleted file mode 100644
index 96139001..00000000
--- a/tests/unittests/test_handler/test_handler_resolv_conf.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_resolv_conf
-
-from cloudinit import cloud
-from cloudinit import distros
-from cloudinit import helpers
-from cloudinit import util
-from copy import deepcopy
-
-from cloudinit.tests import helpers as t_help
-
-import logging
-import os
-import shutil
-import tempfile
-from unittest import mock
-
-LOG = logging.getLogger(__name__)
-
-
-class TestResolvConf(t_help.FilesystemMockingTestCase):
- with_logs = True
- cfg = {'manage_resolv_conf': True, 'resolv_conf': {}}
-
- def setUp(self):
- super(TestResolvConf, self).setUp()
- self.tmp = tempfile.mkdtemp()
- util.ensure_dir(os.path.join(self.tmp, 'data'))
- self.addCleanup(shutil.rmtree, self.tmp)
-
- def _fetch_distro(self, kind, conf=None):
- cls = distros.fetch(kind)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- conf = {} if conf is None else conf
- return cls(kind, conf, paths)
-
- def call_resolv_conf_handler(self, distro_name, conf, cc=None):
- if not cc:
- ds = None
- distro = self._fetch_distro(distro_name, conf)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- cc_resolv_conf.handle('cc_resolv_conf', conf, cc, LOG, [])
-
- @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
- def test_resolv_conf_systemd_resolved(self, m_render_to_file):
- self.call_resolv_conf_handler('photon', self.cfg)
-
- assert [
- mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
- ] == m_render_to_file.call_args_list
-
- @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
- def test_resolv_conf_no_param(self, m_render_to_file):
- tmp = deepcopy(self.cfg)
- self.logs.truncate(0)
- tmp.pop('resolv_conf')
- self.call_resolv_conf_handler('photon', tmp)
-
- self.assertIn('manage_resolv_conf True but no parameters provided',
- self.logs.getvalue())
- assert [
- mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
- ] not in m_render_to_file.call_args_list
-
- @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
- def test_resolv_conf_manage_resolv_conf_false(self, m_render_to_file):
- tmp = deepcopy(self.cfg)
- self.logs.truncate(0)
- tmp['manage_resolv_conf'] = False
- self.call_resolv_conf_handler('photon', tmp)
- self.assertIn("'manage_resolv_conf' present but set to False",
- self.logs.getvalue())
- assert [
- mock.call(mock.ANY, '/etc/systemd/resolved.conf', mock.ANY)
- ] not in m_render_to_file.call_args_list
-
- @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
- def test_resolv_conf_etc_resolv_conf(self, m_render_to_file):
- self.call_resolv_conf_handler('rhel', self.cfg)
-
- assert [
- mock.call(mock.ANY, '/etc/resolv.conf', mock.ANY)
- ] == m_render_to_file.call_args_list
-
- @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file")
- def test_resolv_conf_invalid_resolve_conf_fn(self, m_render_to_file):
- ds = None
- distro = self._fetch_distro('rhel', self.cfg)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- cc.distro.resolve_conf_fn = 'bla'
-
- self.logs.truncate(0)
- self.call_resolv_conf_handler('rhel', self.cfg, cc)
-
- self.assertIn('No template found, not rendering resolve configs',
- self.logs.getvalue())
-
- assert [
- mock.call(mock.ANY, '/etc/resolv.conf', mock.ANY)
- ] not in m_render_to_file.call_args_list
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_rsyslog.py b/tests/unittests/test_handler/test_handler_rsyslog.py
deleted file mode 100644
index 8c8e2838..00000000
--- a/tests/unittests/test_handler/test_handler_rsyslog.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import os
-import shutil
-import tempfile
-
-from cloudinit.config.cc_rsyslog import (
- apply_rsyslog_changes, DEF_DIR, DEF_FILENAME, DEF_RELOAD, load_config,
- parse_remotes_line, remotes_to_rsyslog_cfg)
-from cloudinit import util
-
-from cloudinit.tests import helpers as t_help
-
-
-class TestLoadConfig(t_help.TestCase):
- def setUp(self):
- super(TestLoadConfig, self).setUp()
- self.basecfg = {
- 'config_filename': DEF_FILENAME,
- 'config_dir': DEF_DIR,
- 'service_reload_command': DEF_RELOAD,
- 'configs': [],
- 'remotes': {},
- }
-
- def test_legacy_full(self):
- found = load_config({
- 'rsyslog': ['*.* @192.168.1.1'],
- 'rsyslog_dir': "mydir",
- 'rsyslog_filename': "myfilename"})
- self.basecfg.update({
- 'configs': ['*.* @192.168.1.1'],
- 'config_dir': "mydir",
- 'config_filename': 'myfilename',
- 'service_reload_command': 'auto'}
- )
-
- self.assertEqual(found, self.basecfg)
-
- def test_legacy_defaults(self):
- found = load_config({
- 'rsyslog': ['*.* @192.168.1.1']})
- self.basecfg.update({
- 'configs': ['*.* @192.168.1.1']})
- self.assertEqual(found, self.basecfg)
-
- def test_new_defaults(self):
- self.assertEqual(load_config({}), self.basecfg)
-
- def test_new_configs(self):
- cfgs = ['*.* myhost', '*.* my2host']
- self.basecfg.update({'configs': cfgs})
- self.assertEqual(
- load_config({'rsyslog': {'configs': cfgs}}),
- self.basecfg)
-
-
-class TestApplyChanges(t_help.TestCase):
- def setUp(self):
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
-
- def test_simple(self):
- cfgline = "*.* foohost"
- changed = apply_rsyslog_changes(
- configs=[cfgline], def_fname="foo.cfg", cfg_dir=self.tmp)
-
- fname = os.path.join(self.tmp, "foo.cfg")
- self.assertEqual([fname], changed)
- self.assertEqual(
- util.load_file(fname), cfgline + "\n")
-
- def test_multiple_files(self):
- configs = [
- '*.* foohost',
- {'content': 'abc', 'filename': 'my.cfg'},
- {'content': 'filefoo-content',
- 'filename': os.path.join(self.tmp, 'mydir/mycfg')},
- ]
-
- changed = apply_rsyslog_changes(
- configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)
-
- expected = [
- (os.path.join(self.tmp, "default.cfg"),
- "*.* foohost\n"),
- (os.path.join(self.tmp, "my.cfg"), "abc\n"),
- (os.path.join(self.tmp, "mydir/mycfg"), "filefoo-content\n"),
- ]
- self.assertEqual([f[0] for f in expected], changed)
- actual = []
- for fname, _content in expected:
- util.load_file(fname)
- actual.append((fname, util.load_file(fname),))
- self.assertEqual(expected, actual)
-
- def test_repeat_def(self):
- configs = ['*.* foohost', "*.warn otherhost"]
-
- changed = apply_rsyslog_changes(
- configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)
-
- fname = os.path.join(self.tmp, "default.cfg")
- self.assertEqual([fname], changed)
-
- expected_content = '\n'.join([c for c in configs]) + '\n'
- found_content = util.load_file(fname)
- self.assertEqual(expected_content, found_content)
-
- def test_multiline_content(self):
- configs = ['line1', 'line2\nline3\n']
-
- apply_rsyslog_changes(
- configs=configs, def_fname="default.cfg", cfg_dir=self.tmp)
-
- fname = os.path.join(self.tmp, "default.cfg")
- expected_content = '\n'.join([c for c in configs])
- found_content = util.load_file(fname)
- self.assertEqual(expected_content, found_content)
-
-
-class TestParseRemotesLine(t_help.TestCase):
- def test_valid_port(self):
- r = parse_remotes_line("foo:9")
- self.assertEqual(9, r.port)
-
- def test_invalid_port(self):
- with self.assertRaises(ValueError):
- parse_remotes_line("*.* foo:abc")
-
- def test_valid_ipv6(self):
- r = parse_remotes_line("*.* [::1]")
- self.assertEqual("*.* @[::1]", str(r))
-
- def test_valid_ipv6_with_port(self):
- r = parse_remotes_line("*.* [::1]:100")
- self.assertEqual(r.port, 100)
- self.assertEqual(r.addr, "::1")
- self.assertEqual("*.* @[::1]:100", str(r))
-
- def test_invalid_multiple_colon(self):
- with self.assertRaises(ValueError):
- parse_remotes_line("*.* ::1:100")
-
- def test_name_in_string(self):
- r = parse_remotes_line("syslog.host", name="foobar")
- self.assertEqual("*.* @syslog.host # foobar", str(r))
-
-
-class TestRemotesToSyslog(t_help.TestCase):
- def test_simple(self):
- # str rendered line must appear in remotes_to_ryslog_cfg return
- mycfg = "*.* myhost"
- myline = str(parse_remotes_line(mycfg, name="myname"))
- r = remotes_to_rsyslog_cfg({'myname': mycfg})
- lines = r.splitlines()
- self.assertEqual(1, len(lines))
- self.assertTrue(myline in r.splitlines())
-
- def test_header_footer(self):
- header = "#foo head"
- footer = "#foo foot"
- r = remotes_to_rsyslog_cfg(
- {'myname': "*.* myhost"}, header=header, footer=footer)
- lines = r.splitlines()
- self.assertTrue(header, lines[0])
- self.assertTrue(footer, lines[-1])
-
- def test_with_empty_or_null(self):
- mycfg = "*.* myhost"
- myline = str(parse_remotes_line(mycfg, name="myname"))
- r = remotes_to_rsyslog_cfg(
- {'myname': mycfg, 'removed': None, 'removed2': ""})
- lines = r.splitlines()
- self.assertEqual(1, len(lines))
- self.assertTrue(myline in r.splitlines())
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_runcmd.py b/tests/unittests/test_handler/test_handler_runcmd.py
deleted file mode 100644
index 672e8093..00000000
--- a/tests/unittests/test_handler/test_handler_runcmd.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import os
-import stat
-from unittest.mock import patch
-
-from cloudinit.config.cc_runcmd import handle, schema
-from cloudinit import (helpers, subp, util)
-from cloudinit.tests.helpers import (
- CiTestCase, FilesystemMockingTestCase, SchemaTestCaseMixin,
- skipUnlessJsonSchema)
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-class TestRuncmd(FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestRuncmd, self).setUp()
- self.subp = subp.subp
- self.new_root = self.tmp_dir()
- self.patchUtils(self.new_root)
- self.paths = helpers.Paths({'scripts': self.new_root})
-
- def test_handler_skip_if_no_runcmd(self):
- """When the provided config doesn't contain runcmd, skip it."""
- cfg = {}
- mycloud = get_cloud(paths=self.paths)
- handle('notimportant', cfg, mycloud, LOG, None)
- self.assertIn(
- "Skipping module named notimportant, no 'runcmd' key",
- self.logs.getvalue())
-
- @patch('cloudinit.util.shellify')
- def test_runcmd_shellify_fails(self, cls):
- """When shellify fails throw exception"""
- cls.side_effect = TypeError("patched shellify")
- valid_config = {'runcmd': ['echo 42']}
- cc = get_cloud(paths=self.paths)
- with self.assertRaises(TypeError) as cm:
- with self.allow_subp(['/bin/sh']):
- handle('cc_runcmd', valid_config, cc, LOG, None)
- self.assertIn("Failed to shellify", str(cm.exception))
-
- def test_handler_invalid_command_set(self):
- """Commands which can't be converted to shell will raise errors."""
- invalid_config = {'runcmd': 1}
- cc = get_cloud(paths=self.paths)
- with self.assertRaises(TypeError) as cm:
- handle('cc_runcmd', invalid_config, cc, LOG, [])
- self.assertIn(
- 'Failed to shellify 1 into file'
- ' /var/lib/cloud/instances/iid-datasource-none/scripts/runcmd',
- str(cm.exception))
-
- @skipUnlessJsonSchema()
- def test_handler_schema_validation_warns_non_array_type(self):
- """Schema validation warns of non-array type for runcmd key.
-
- Schema validation is not strict, so runcmd attempts to shellify the
- invalid content.
- """
- invalid_config = {'runcmd': 1}
- cc = get_cloud(paths=self.paths)
- with self.assertRaises(TypeError) as cm:
- handle('cc_runcmd', invalid_config, cc, LOG, [])
- self.assertIn(
- 'Invalid config:\nruncmd: 1 is not of type \'array\'',
- self.logs.getvalue())
- self.assertIn('Failed to shellify', str(cm.exception))
-
- @skipUnlessJsonSchema()
- def test_handler_schema_validation_warns_non_array_item_type(self):
- """Schema validation warns of non-array or string runcmd items.
-
- Schema validation is not strict, so runcmd attempts to shellify the
- invalid content.
- """
- invalid_config = {
- 'runcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]}
- cc = get_cloud(paths=self.paths)
- with self.assertRaises(TypeError) as cm:
- handle('cc_runcmd', invalid_config, cc, LOG, [])
- expected_warnings = [
- 'runcmd.1: 20 is not valid under any of the given schemas',
- 'runcmd.3: {\'a\': \'n\'} is not valid under any of the given'
- ' schema'
- ]
- logs = self.logs.getvalue()
- for warning in expected_warnings:
- self.assertIn(warning, logs)
- self.assertIn('Failed to shellify', str(cm.exception))
-
- def test_handler_write_valid_runcmd_schema_to_file(self):
- """Valid runcmd schema is written to a runcmd shell script."""
- valid_config = {'runcmd': [['ls', '/']]}
- cc = get_cloud(paths=self.paths)
- handle('cc_runcmd', valid_config, cc, LOG, [])
- runcmd_file = os.path.join(
- self.new_root,
- 'var/lib/cloud/instances/iid-datasource-none/scripts/runcmd')
- self.assertEqual("#!/bin/sh\n'ls' '/'\n", util.load_file(runcmd_file))
- file_stat = os.stat(runcmd_file)
- self.assertEqual(0o700, stat.S_IMODE(file_stat.st_mode))
-
-
-@skipUnlessJsonSchema()
-class TestSchema(CiTestCase, SchemaTestCaseMixin):
- """Directly test schema rather than through handle."""
-
- schema = schema
-
- def test_duplicates_are_fine_array_array(self):
- """Duplicated commands array/array entries are allowed."""
- self.assertSchemaValid(
- [["echo", "bye"], ["echo", "bye"]],
- "command entries can be duplicate.")
-
- def test_duplicates_are_fine_array_string(self):
- """Duplicated commands array/string entries are allowed."""
- self.assertSchemaValid(
- ["echo bye", "echo bye"],
- "command entries can be duplicate.")
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_seed_random.py b/tests/unittests/test_handler/test_handler_seed_random.py
deleted file mode 100644
index 2ab153d2..00000000
--- a/tests/unittests/test_handler/test_handler_seed_random.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Author: Juerg Haefliger <juerg.haefliger@hp.com>
-#
-# Based on test_handler_set_hostname.py
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-import gzip
-import logging
-import tempfile
-from io import BytesIO
-
-from cloudinit import subp
-from cloudinit import util
-from cloudinit.config import cc_seed_random
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-class TestRandomSeed(t_help.TestCase):
- def setUp(self):
- super(TestRandomSeed, self).setUp()
- self._seed_file = tempfile.mktemp()
- self.unapply = []
-
- # by default 'which' has nothing in its path
- self.apply_patches([(subp, 'which', self._which)])
- self.apply_patches([(subp, 'subp', self._subp)])
- self.subp_called = []
- self.whichdata = {}
-
- def tearDown(self):
- apply_patches([i for i in reversed(self.unapply)])
- util.del_file(self._seed_file)
-
- def apply_patches(self, patches):
- ret = apply_patches(patches)
- self.unapply += ret
-
- def _which(self, program):
- return self.whichdata.get(program)
-
- def _subp(self, *args, **kwargs):
- # supports subp calling with cmd as args or kwargs
- if 'args' not in kwargs:
- kwargs['args'] = args[0]
- self.subp_called.append(kwargs)
- return
-
- def _compress(self, text):
- contents = BytesIO()
- gz_fh = gzip.GzipFile(mode='wb', fileobj=contents)
- gz_fh.write(text)
- gz_fh.close()
- return contents.getvalue()
-
- def test_append_random(self):
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': 'tiny-tim-was-here',
- }
- }
- cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual("tiny-tim-was-here", contents)
-
- def test_append_random_unknown_encoding(self):
- data = self._compress(b"tiny-toe")
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': data,
- 'encoding': 'special_encoding',
- }
- }
- self.assertRaises(IOError, cc_seed_random.handle, 'test', cfg,
- get_cloud('ubuntu'), LOG, [])
-
- def test_append_random_gzip(self):
- data = self._compress(b"tiny-toe")
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': data,
- 'encoding': 'gzip',
- }
- }
- cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual("tiny-toe", contents)
-
- def test_append_random_gz(self):
- data = self._compress(b"big-toe")
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': data,
- 'encoding': 'gz',
- }
- }
- cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual("big-toe", contents)
-
- def test_append_random_base64(self):
- data = util.b64e('bubbles')
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': data,
- 'encoding': 'base64',
- }
- }
- cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual("bubbles", contents)
-
- def test_append_random_b64(self):
- data = util.b64e('kit-kat')
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': data,
- 'encoding': 'b64',
- }
- }
- cc_seed_random.handle('test', cfg, get_cloud('ubuntu'), LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual("kit-kat", contents)
-
- def test_append_random_metadata(self):
- cfg = {
- 'random_seed': {
- 'file': self._seed_file,
- 'data': 'tiny-tim-was-here',
- }
- }
- c = get_cloud('ubuntu', metadata={'random_seed': '-so-was-josh'})
- cc_seed_random.handle('test', cfg, c, LOG, [])
- contents = util.load_file(self._seed_file)
- self.assertEqual('tiny-tim-was-here-so-was-josh', contents)
-
- def test_seed_command_provided_and_available(self):
- c = get_cloud('ubuntu')
- self.whichdata = {'pollinate': '/usr/bin/pollinate'}
- cfg = {'random_seed': {'command': ['pollinate', '-q']}}
- cc_seed_random.handle('test', cfg, c, LOG, [])
-
- subp_args = [f['args'] for f in self.subp_called]
- self.assertIn(['pollinate', '-q'], subp_args)
-
- def test_seed_command_not_provided(self):
- c = get_cloud('ubuntu')
- self.whichdata = {}
- cc_seed_random.handle('test', {}, c, LOG, [])
-
- # subp should not have been called as which would say not available
- self.assertFalse(self.subp_called)
-
- def test_unavailable_seed_command_and_required_raises_error(self):
- c = get_cloud('ubuntu')
- self.whichdata = {}
- cfg = {'random_seed': {'command': ['THIS_NO_COMMAND'],
- 'command_required': True}}
- self.assertRaises(ValueError, cc_seed_random.handle,
- 'test', cfg, c, LOG, [])
-
- def test_seed_command_and_required(self):
- c = get_cloud('ubuntu')
- self.whichdata = {'foo': 'foo'}
- cfg = {'random_seed': {'command_required': True, 'command': ['foo']}}
- cc_seed_random.handle('test', cfg, c, LOG, [])
-
- self.assertIn(['foo'], [f['args'] for f in self.subp_called])
-
- def test_file_in_environment_for_command(self):
- c = get_cloud('ubuntu')
- self.whichdata = {'foo': 'foo'}
- cfg = {'random_seed': {'command_required': True, 'command': ['foo'],
- 'file': self._seed_file}}
- cc_seed_random.handle('test', cfg, c, LOG, [])
-
- # this just instists that the first time subp was called,
- # RANDOM_SEED_FILE was in the environment set up correctly
- subp_env = [f['env'] for f in self.subp_called]
- self.assertEqual(subp_env[0].get('RANDOM_SEED_FILE'), self._seed_file)
-
-
-def apply_patches(patches):
- ret = []
- for (ref, name, replace) in patches:
- if replace is None:
- continue
- orig = getattr(ref, name)
- setattr(ref, name, replace)
- ret.append((ref, name, orig))
- return ret
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_set_hostname.py b/tests/unittests/test_handler/test_handler_set_hostname.py
deleted file mode 100644
index 1a524c7d..00000000
--- a/tests/unittests/test_handler/test_handler_set_hostname.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_set_hostname
-
-from cloudinit import cloud
-from cloudinit import distros
-from cloudinit import helpers
-from cloudinit import util
-
-from cloudinit.tests import helpers as t_help
-
-from configobj import ConfigObj
-import logging
-import os
-import shutil
-import tempfile
-from io import BytesIO
-from unittest import mock
-
-LOG = logging.getLogger(__name__)
-
-
-class TestHostname(t_help.FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestHostname, self).setUp()
- self.tmp = tempfile.mkdtemp()
- util.ensure_dir(os.path.join(self.tmp, 'data'))
- self.addCleanup(shutil.rmtree, self.tmp)
-
- def _fetch_distro(self, kind, conf=None):
- cls = distros.fetch(kind)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- conf = {} if conf is None else conf
- return cls(kind, conf, paths)
-
- def test_debian_write_hostname_prefer_fqdn(self):
- cfg = {
- 'hostname': 'blah',
- 'prefer_fqdn_over_hostname': True,
- 'fqdn': 'blah.yahoo.com',
- }
- distro = self._fetch_distro('debian', cfg)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle('cc_set_hostname',
- cfg, cc, LOG, [])
- contents = util.load_file("/etc/hostname")
- self.assertEqual('blah.yahoo.com', contents.strip())
-
- @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
- def test_rhel_write_hostname_prefer_hostname(self, m_uses_systemd):
- cfg = {
- 'hostname': 'blah',
- 'prefer_fqdn_over_hostname': False,
- 'fqdn': 'blah.yahoo.com',
- }
- distro = self._fetch_distro('rhel', cfg)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle('cc_set_hostname',
- cfg, cc, LOG, [])
- contents = util.load_file("/etc/sysconfig/network", decode=False)
- n_cfg = ConfigObj(BytesIO(contents))
- self.assertEqual(
- {'HOSTNAME': 'blah'},
- dict(n_cfg))
-
- @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
- def test_write_hostname_rhel(self, m_uses_systemd):
- cfg = {
- 'hostname': 'blah',
- 'fqdn': 'blah.blah.blah.yahoo.com'
- }
- distro = self._fetch_distro('rhel')
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle('cc_set_hostname',
- cfg, cc, LOG, [])
- contents = util.load_file("/etc/sysconfig/network", decode=False)
- n_cfg = ConfigObj(BytesIO(contents))
- self.assertEqual(
- {'HOSTNAME': 'blah.blah.blah.yahoo.com'},
- dict(n_cfg))
-
- def test_write_hostname_debian(self):
- cfg = {
- 'hostname': 'blah',
- 'fqdn': 'blah.blah.blah.yahoo.com',
- }
- distro = self._fetch_distro('debian')
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle('cc_set_hostname',
- cfg, cc, LOG, [])
- contents = util.load_file("/etc/hostname")
- self.assertEqual('blah', contents.strip())
-
- @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False)
- def test_write_hostname_sles(self, m_uses_systemd):
- cfg = {
- 'hostname': 'blah.blah.blah.suse.com',
- }
- distro = self._fetch_distro('sles')
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle('cc_set_hostname', cfg, cc, LOG, [])
- contents = util.load_file(distro.hostname_conf_fn)
- self.assertEqual('blah', contents.strip())
-
- @mock.patch('cloudinit.distros.photon.subp.subp')
- def test_photon_hostname(self, m_subp):
- cfg1 = {
- 'hostname': 'photon',
- 'prefer_fqdn_over_hostname': True,
- 'fqdn': 'test1.vmware.com',
- }
- cfg2 = {
- 'hostname': 'photon',
- 'prefer_fqdn_over_hostname': False,
- 'fqdn': 'test2.vmware.com',
- }
-
- ds = None
- m_subp.return_value = (None, None)
- distro = self._fetch_distro('photon', cfg1)
- paths = helpers.Paths({'cloud_dir': self.tmp})
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- for c in [cfg1, cfg2]:
- cc_set_hostname.handle('cc_set_hostname', c, cc, LOG, [])
- print("\n", m_subp.call_args_list)
- if c['prefer_fqdn_over_hostname']:
- assert [
- mock.call(['hostnamectl', 'set-hostname', c['fqdn']],
- capture=True)
- ] in m_subp.call_args_list
- assert [
- mock.call(['hostnamectl', 'set-hostname', c['hostname']],
- capture=True)
- ] not in m_subp.call_args_list
- else:
- assert [
- mock.call(['hostnamectl', 'set-hostname', c['hostname']],
- capture=True)
- ] in m_subp.call_args_list
- assert [
- mock.call(['hostnamectl', 'set-hostname', c['fqdn']],
- capture=True)
- ] not in m_subp.call_args_list
-
- def test_multiple_calls_skips_unchanged_hostname(self):
- """Only new hostname or fqdn values will generate a hostname call."""
- distro = self._fetch_distro('debian')
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- cc_set_hostname.handle(
- 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
- contents = util.load_file("/etc/hostname")
- self.assertEqual('hostname1', contents.strip())
- cc_set_hostname.handle(
- 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
- self.assertIn(
- 'DEBUG: No hostname changes. Skipping set-hostname\n',
- self.logs.getvalue())
- cc_set_hostname.handle(
- 'cc_set_hostname', {'hostname': 'hostname2.me.com'}, cc, LOG, [])
- contents = util.load_file("/etc/hostname")
- self.assertEqual('hostname2', contents.strip())
- self.assertIn(
- 'Non-persistently setting the system hostname to hostname2',
- self.logs.getvalue())
-
- def test_error_on_distro_set_hostname_errors(self):
- """Raise SetHostnameError on exceptions from distro.set_hostname."""
- distro = self._fetch_distro('debian')
-
- def set_hostname_error(hostname, fqdn):
- raise Exception("OOPS on: %s" % fqdn)
-
- distro.set_hostname = set_hostname_error
- paths = helpers.Paths({'cloud_dir': self.tmp})
- ds = None
- cc = cloud.Cloud(ds, paths, {}, distro, None)
- self.patchUtils(self.tmp)
- with self.assertRaises(cc_set_hostname.SetHostnameError) as ctx_mgr:
- cc_set_hostname.handle(
- 'somename', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
- self.assertEqual(
- 'Failed to set the hostname to hostname1.me.com (hostname1):'
- ' OOPS on: hostname1.me.com',
- str(ctx_mgr.exception))
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_spacewalk.py b/tests/unittests/test_handler/test_handler_spacewalk.py
deleted file mode 100644
index 26f7648f..00000000
--- a/tests/unittests/test_handler/test_handler_spacewalk.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_spacewalk
-from cloudinit import subp
-
-from cloudinit.tests import helpers
-
-import logging
-from unittest import mock
-
-LOG = logging.getLogger(__name__)
-
-
-class TestSpacewalk(helpers.TestCase):
- space_cfg = {
- 'spacewalk': {
- 'server': 'localhost',
- 'profile_name': 'test',
- }
- }
-
- @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
- def test_not_is_registered(self, mock_subp):
- mock_subp.side_effect = subp.ProcessExecutionError(exit_code=1)
- self.assertFalse(cc_spacewalk.is_registered())
-
- @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
- def test_is_registered(self, mock_subp):
- mock_subp.side_effect = None
- self.assertTrue(cc_spacewalk.is_registered())
-
- @mock.patch("cloudinit.config.cc_spacewalk.subp.subp")
- def test_do_register(self, mock_subp):
- cc_spacewalk.do_register(**self.space_cfg['spacewalk'])
- mock_subp.assert_called_with([
- 'rhnreg_ks',
- '--serverUrl', 'https://localhost/XMLRPC',
- '--profilename', 'test',
- '--sslCACert', cc_spacewalk.def_ca_cert_path,
- ], capture=False)
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_timezone.py b/tests/unittests/test_handler/test_handler_timezone.py
deleted file mode 100644
index 77cdb0c2..00000000
--- a/tests/unittests/test_handler/test_handler_timezone.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Author: Juerg Haefliger <juerg.haefliger@hp.com>
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-
-from cloudinit.config import cc_timezone
-
-from cloudinit import util
-
-
-import logging
-import shutil
-import tempfile
-from configobj import ConfigObj
-from io import BytesIO
-
-from cloudinit.tests import helpers as t_help
-
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-class TestTimezone(t_help.FilesystemMockingTestCase):
- def setUp(self):
- super(TestTimezone, self).setUp()
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
- self.patchUtils(self.new_root)
- self.patchOS(self.new_root)
-
- def test_set_timezone_sles(self):
-
- cfg = {
- 'timezone': 'Tatooine/Bestine',
- }
- cc = get_cloud('sles')
-
- # Create a dummy timezone file
- dummy_contents = '0123456789abcdefgh'
- util.write_file('/usr/share/zoneinfo/%s' % cfg['timezone'],
- dummy_contents)
-
- cc_timezone.handle('cc_timezone', cfg, cc, LOG, [])
-
- contents = util.load_file('/etc/sysconfig/clock', decode=False)
- n_cfg = ConfigObj(BytesIO(contents))
- self.assertEqual({'TIMEZONE': cfg['timezone']}, dict(n_cfg))
-
- contents = util.load_file('/etc/localtime')
- self.assertEqual(dummy_contents, contents.strip())
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_write_files.py b/tests/unittests/test_handler/test_handler_write_files.py
deleted file mode 100644
index 0af92805..00000000
--- a/tests/unittests/test_handler/test_handler_write_files.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import base64
-import copy
-import gzip
-import io
-import shutil
-import tempfile
-
-from cloudinit.config.cc_write_files import (
- handle, decode_perms, write_files)
-from cloudinit import log as logging
-from cloudinit import util
-
-from cloudinit.tests.helpers import (
- CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
-
-LOG = logging.getLogger(__name__)
-
-YAML_TEXT = """
-write_files:
- - encoding: gzip
- content: !!binary |
- H4sIAIDb/U8C/1NW1E/KzNMvzuBKTc7IV8hIzcnJVyjPL8pJ4QIA6N+MVxsAAAA=
- path: /usr/bin/hello
- permissions: '0755'
- - content: !!binary |
- Zm9vYmFyCg==
- path: /wark
- permissions: '0755'
- - content: |
- hi mom line 1
- hi mom line 2
- path: /tmp/message
-"""
-
-YAML_CONTENT_EXPECTED = {
- '/usr/bin/hello': "#!/bin/sh\necho hello world\n",
- '/wark': "foobar\n",
- '/tmp/message': "hi mom line 1\nhi mom line 2\n",
-}
-
-VALID_SCHEMA = {
- 'write_files': [
- {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
- 'path': '/some', 'permissions': '0777'}
- ]
-}
-
-INVALID_SCHEMA = { # Dropped required path key
- 'write_files': [
- {'append': False, 'content': 'a', 'encoding': 'gzip', 'owner': 'jeff',
- 'permissions': '0777'}
- ]
-}
-
-
-@skipUnlessJsonSchema()
-@mock.patch('cloudinit.config.cc_write_files.write_files')
-class TestWriteFilesSchema(CiTestCase):
-
- with_logs = True
-
- def test_schema_validation_warns_missing_path(self, m_write_files):
- """The only required file item property is 'path'."""
- cc = self.tmp_cloud('ubuntu')
- valid_config = {'write_files': [{'path': '/some/path'}]}
- handle('cc_write_file', valid_config, cc, LOG, [])
- self.assertNotIn('Invalid config:', self.logs.getvalue())
- handle('cc_write_file', INVALID_SCHEMA, cc, LOG, [])
- self.assertIn('Invalid config:', self.logs.getvalue())
- self.assertIn("'path' is a required property", self.logs.getvalue())
-
- def test_schema_validation_warns_non_string_type_for_files(
- self, m_write_files):
- """Schema validation warns of non-string values for each file item."""
- cc = self.tmp_cloud('ubuntu')
- for key in VALID_SCHEMA['write_files'][0].keys():
- if key == 'append':
- key_type = 'boolean'
- else:
- key_type = 'string'
- invalid_config = copy.deepcopy(VALID_SCHEMA)
- invalid_config['write_files'][0][key] = 1
- handle('cc_write_file', invalid_config, cc, LOG, [])
- self.assertIn(
- mock.call('cc_write_file', invalid_config['write_files']),
- m_write_files.call_args_list)
- self.assertIn(
- 'write_files.0.%s: 1 is not of type \'%s\'' % (key, key_type),
- self.logs.getvalue())
- self.assertIn('Invalid config:', self.logs.getvalue())
-
- def test_schema_validation_warns_on_additional_undefined_propertes(
- self, m_write_files):
- """Schema validation warns on additional undefined file properties."""
- cc = self.tmp_cloud('ubuntu')
- invalid_config = copy.deepcopy(VALID_SCHEMA)
- invalid_config['write_files'][0]['bogus'] = 'value'
- handle('cc_write_file', invalid_config, cc, LOG, [])
- self.assertIn(
- "Invalid config:\nwrite_files.0: Additional properties"
- " are not allowed ('bogus' was unexpected)",
- self.logs.getvalue())
-
-
-class TestWriteFiles(FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestWriteFiles, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
-
- @skipUnlessJsonSchema()
- def test_handler_schema_validation_warns_non_array_type(self):
- """Schema validation warns of non-array value."""
- invalid_config = {'write_files': 1}
- cc = self.tmp_cloud('ubuntu')
- with self.assertRaises(TypeError):
- handle('cc_write_file', invalid_config, cc, LOG, [])
- self.assertIn(
- 'Invalid config:\nwrite_files: 1 is not of type \'array\'',
- self.logs.getvalue())
-
- def test_simple(self):
- self.patchUtils(self.tmp)
- expected = "hello world\n"
- filename = "/tmp/my.file"
- write_files(
- "test_simple", [{"content": expected, "path": filename}])
- self.assertEqual(util.load_file(filename), expected)
-
- def test_append(self):
- self.patchUtils(self.tmp)
- existing = "hello "
- added = "world\n"
- expected = existing + added
- filename = "/tmp/append.file"
- util.write_file(filename, existing)
- write_files(
- "test_append",
- [{"content": added, "path": filename, "append": "true"}])
- self.assertEqual(util.load_file(filename), expected)
-
- def test_yaml_binary(self):
- self.patchUtils(self.tmp)
- data = util.load_yaml(YAML_TEXT)
- write_files("testname", data['write_files'])
- for path, content in YAML_CONTENT_EXPECTED.items():
- self.assertEqual(util.load_file(path), content)
-
- def test_all_decodings(self):
- self.patchUtils(self.tmp)
-
- # build a 'files' array that has a dictionary of encodings
- # for 'gz', 'gzip', 'gz+base64' ...
- data = b"foobzr"
- utf8_valid = b"foobzr"
- utf8_invalid = b'ab\xaadef'
- files = []
- expected = []
-
- gz_aliases = ('gz', 'gzip')
- gz_b64_aliases = ('gz+base64', 'gzip+base64', 'gz+b64', 'gzip+b64')
- b64_aliases = ('base64', 'b64')
-
- datum = (("utf8", utf8_valid), ("no-utf8", utf8_invalid))
- for name, data in datum:
- gz = (_gzip_bytes(data), gz_aliases)
- gz_b64 = (base64.b64encode(_gzip_bytes(data)), gz_b64_aliases)
- b64 = (base64.b64encode(data), b64_aliases)
- for content, aliases in (gz, gz_b64, b64):
- for enc in aliases:
- cur = {'content': content,
- 'path': '/tmp/file-%s-%s' % (name, enc),
- 'encoding': enc}
- files.append(cur)
- expected.append((cur['path'], data))
-
- write_files("test_decoding", files)
-
- for path, content in expected:
- self.assertEqual(util.load_file(path, decode=False), content)
-
- # make sure we actually wrote *some* files.
- flen_expected = (
- len(gz_aliases + gz_b64_aliases + b64_aliases) * len(datum))
- self.assertEqual(len(expected), flen_expected)
-
- def test_deferred(self):
- self.patchUtils(self.tmp)
- file_path = '/tmp/deferred.file'
- config = {
- 'write_files': [
- {'path': file_path, 'defer': True}
- ]
- }
- cc = self.tmp_cloud('ubuntu')
- handle('cc_write_file', config, cc, LOG, [])
- with self.assertRaises(FileNotFoundError):
- util.load_file(file_path)
-
-
-class TestDecodePerms(CiTestCase):
-
- with_logs = True
-
- def test_none_returns_default(self):
- """If None is passed as perms, then default should be returned."""
- default = object()
- found = decode_perms(None, default)
- self.assertEqual(default, found)
-
- def test_integer(self):
- """A valid integer should return itself."""
- found = decode_perms(0o755, None)
- self.assertEqual(0o755, found)
-
- def test_valid_octal_string(self):
- """A string should be read as octal."""
- found = decode_perms("644", None)
- self.assertEqual(0o644, found)
-
- def test_invalid_octal_string_returns_default_and_warns(self):
- """A string with invalid octal should warn and return default."""
- found = decode_perms("999", None)
- self.assertIsNone(found)
- self.assertIn("WARNING: Undecodable", self.logs.getvalue())
-
-
-def _gzip_bytes(data):
- buf = io.BytesIO()
- fp = None
- try:
- fp = gzip.GzipFile(fileobj=buf, mode="wb")
- fp.write(data)
- fp.close()
- return buf.getvalue()
- finally:
- if fp:
- fp.close()
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_write_files_deferred.py b/tests/unittests/test_handler/test_handler_write_files_deferred.py
deleted file mode 100644
index 57b6934a..00000000
--- a/tests/unittests/test_handler/test_handler_write_files_deferred.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import tempfile
-import shutil
-
-from cloudinit.config.cc_write_files_deferred import (handle)
-from .test_handler_write_files import (VALID_SCHEMA)
-from cloudinit import log as logging
-from cloudinit import util
-
-from cloudinit.tests.helpers import (
- CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
-
-LOG = logging.getLogger(__name__)
-
-
-@skipUnlessJsonSchema()
-@mock.patch('cloudinit.config.cc_write_files_deferred.write_files')
-class TestWriteFilesDeferredSchema(CiTestCase):
-
- with_logs = True
-
- def test_schema_validation_warns_invalid_value(self,
- m_write_files_deferred):
- """If 'defer' is defined, it must be of type 'bool'."""
-
- valid_config = {
- 'write_files': [
- {**VALID_SCHEMA.get('write_files')[0], 'defer': True}
- ]
- }
-
- invalid_config = {
- 'write_files': [
- {**VALID_SCHEMA.get('write_files')[0], 'defer': str('no')}
- ]
- }
-
- cc = self.tmp_cloud('ubuntu')
- handle('cc_write_files_deferred', valid_config, cc, LOG, [])
- self.assertNotIn('Invalid config:', self.logs.getvalue())
- handle('cc_write_files_deferred', invalid_config, cc, LOG, [])
- self.assertIn('Invalid config:', self.logs.getvalue())
- self.assertIn("defer: 'no' is not of type 'boolean'",
- self.logs.getvalue())
-
-
-class TestWriteFilesDeferred(FilesystemMockingTestCase):
-
- with_logs = True
-
- def setUp(self):
- super(TestWriteFilesDeferred, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
-
- def test_filtering_deferred_files(self):
- self.patchUtils(self.tmp)
- expected = "hello world\n"
- config = {
- 'write_files': [
- {
- 'path': '/tmp/deferred.file',
- 'defer': True,
- 'content': expected
- },
- {'path': '/tmp/not_deferred.file'}
- ]
- }
- cc = self.tmp_cloud('ubuntu')
- handle('cc_write_files_deferred', config, cc, LOG, [])
- self.assertEqual(util.load_file('/tmp/deferred.file'), expected)
- with self.assertRaises(FileNotFoundError):
- util.load_file('/tmp/not_deferred.file')
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_yum_add_repo.py b/tests/unittests/test_handler/test_handler_yum_add_repo.py
deleted file mode 100644
index 7c61bbf9..00000000
--- a/tests/unittests/test_handler/test_handler_yum_add_repo.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import configparser
-import logging
-import shutil
-import tempfile
-
-from cloudinit import util
-from cloudinit.config import cc_yum_add_repo
-from cloudinit.tests import helpers
-
-LOG = logging.getLogger(__name__)
-
-
-class TestConfig(helpers.FilesystemMockingTestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.tmp = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tmp)
-
- def test_bad_config(self):
- cfg = {
- 'yum_repos': {
- 'epel-testing': {
- 'name': 'Extra Packages for Enterprise Linux 5 - Testing',
- # Missing this should cause the repo not to be written
- # 'baseurl': 'http://blah.org/pub/epel/testing/5/$barch',
- 'enabled': False,
- 'gpgcheck': True,
- 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
- 'failovermethod': 'priority',
- },
- },
- }
- self.patchUtils(self.tmp)
- cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
- self.assertRaises(IOError, util.load_file,
- "/etc/yum.repos.d/epel_testing.repo")
-
- def test_write_config(self):
- cfg = {
- 'yum_repos': {
- 'epel-testing': {
- 'name': 'Extra Packages for Enterprise Linux 5 - Testing',
- 'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch',
- 'enabled': False,
- 'gpgcheck': True,
- 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
- 'failovermethod': 'priority',
- },
- },
- }
- self.patchUtils(self.tmp)
- cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
- contents = util.load_file("/etc/yum.repos.d/epel_testing.repo")
- parser = configparser.ConfigParser()
- parser.read_string(contents)
- expected = {
- 'epel_testing': {
- 'name': 'Extra Packages for Enterprise Linux 5 - Testing',
- 'failovermethod': 'priority',
- 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL',
- 'enabled': '0',
- 'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch',
- 'gpgcheck': '1',
- }
- }
- for section in expected:
- self.assertTrue(parser.has_section(section),
- "Contains section {0}".format(section))
- for k, v in expected[section].items():
- self.assertEqual(parser.get(section, k), v)
-
- def test_write_config_array(self):
- cfg = {
- 'yum_repos': {
- 'puppetlabs-products': {
- 'name': 'Puppet Labs Products El 6 - $basearch',
- 'baseurl':
- 'http://yum.puppetlabs.com/el/6/products/$basearch',
- 'gpgkey': [
- 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs',
- 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
- ],
- 'enabled': True,
- 'gpgcheck': True,
- }
- }
- }
- self.patchUtils(self.tmp)
- cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
- contents = util.load_file("/etc/yum.repos.d/puppetlabs_products.repo")
- parser = configparser.ConfigParser()
- parser.read_string(contents)
- expected = {
- 'puppetlabs_products': {
- 'name': 'Puppet Labs Products El 6 - $basearch',
- 'baseurl': 'http://yum.puppetlabs.com/el/6/products/$basearch',
- 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppetlabs\n'
- 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-puppet',
- 'enabled': '1',
- 'gpgcheck': '1',
- }
- }
- for section in expected:
- self.assertTrue(parser.has_section(section),
- "Contains section {0}".format(section))
- for k, v in expected[section].items():
- self.assertEqual(parser.get(section, k), v)
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_zypper_add_repo.py b/tests/unittests/test_handler/test_handler_zypper_add_repo.py
deleted file mode 100644
index 0fb1de1a..00000000
--- a/tests/unittests/test_handler/test_handler_zypper_add_repo.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-
-import configparser
-import glob
-import logging
-import os
-
-from cloudinit import util
-from cloudinit.config import cc_zypper_add_repo
-from cloudinit.tests import helpers
-from cloudinit.tests.helpers import mock
-
-LOG = logging.getLogger(__name__)
-
-
-class TestConfig(helpers.FilesystemMockingTestCase):
- def setUp(self):
- super(TestConfig, self).setUp()
- self.tmp = self.tmp_dir()
- self.zypp_conf = 'etc/zypp/zypp.conf'
-
- def test_bad_repo_config(self):
- """Config has no baseurl, no file should be written"""
- cfg = {
- 'repos': [
- {
- 'id': 'foo',
- 'name': 'suse-test',
- 'enabled': '1'
- },
- ]
- }
- self.patchUtils(self.tmp)
- cc_zypper_add_repo._write_repos(cfg['repos'], '/etc/zypp/repos.d')
- self.assertRaises(IOError, util.load_file,
- "/etc/zypp/repos.d/foo.repo")
-
- def test_write_repos(self):
- """Verify valid repos get written"""
- cfg = self._get_base_config_repos()
- root_d = self.tmp_dir()
- cc_zypper_add_repo._write_repos(cfg['zypper']['repos'], root_d)
- repos = glob.glob('%s/*.repo' % root_d)
- expected_repos = ['testing-foo.repo', 'testing-bar.repo']
- if len(repos) != 2:
- assert 'Number of repos written is "%d" expected 2' % len(repos)
- for repo in repos:
- repo_name = os.path.basename(repo)
- if repo_name not in expected_repos:
- assert 'Found repo with name "%s"; unexpected' % repo_name
- # Validation that the content gets properly written is in another test
-
- def test_write_repo(self):
- """Verify the content of a repo file"""
- cfg = {
- 'repos': [
- {
- 'baseurl': 'http://foo',
- 'name': 'test-foo',
- 'id': 'testing-foo'
- },
- ]
- }
- root_d = self.tmp_dir()
- cc_zypper_add_repo._write_repos(cfg['repos'], root_d)
- contents = util.load_file("%s/testing-foo.repo" % root_d)
- parser = configparser.ConfigParser()
- parser.read_string(contents)
- expected = {
- 'testing-foo': {
- 'name': 'test-foo',
- 'baseurl': 'http://foo',
- 'enabled': '1',
- 'autorefresh': '1'
- }
- }
- for section in expected:
- self.assertTrue(parser.has_section(section),
- "Contains section {0}".format(section))
- for k, v in expected[section].items():
- self.assertEqual(parser.get(section, k), v)
-
- def test_config_write(self):
- """Write valid configuration data"""
- cfg = {
- 'config': {
- 'download.deltarpm': 'False',
- 'reposdir': 'foo'
- }
- }
- root_d = self.tmp_dir()
- helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
- self.reRoot(root_d)
- cc_zypper_add_repo._write_zypp_config(cfg['config'])
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- expected = [
- '# Zypp config',
- '# Added via cloud.cfg',
- 'download.deltarpm=False',
- 'reposdir=foo'
- ]
- for item in contents.split('\n'):
- if item not in expected:
- self.assertIsNone(item)
-
- @mock.patch('cloudinit.log.logging')
- def test_config_write_skip_configdir(self, mock_logging):
- """Write configuration but skip writing 'configdir' setting"""
- cfg = {
- 'config': {
- 'download.deltarpm': 'False',
- 'reposdir': 'foo',
- 'configdir': 'bar'
- }
- }
- root_d = self.tmp_dir()
- helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
- self.reRoot(root_d)
- cc_zypper_add_repo._write_zypp_config(cfg['config'])
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- expected = [
- '# Zypp config',
- '# Added via cloud.cfg',
- 'download.deltarpm=False',
- 'reposdir=foo'
- ]
- for item in contents.split('\n'):
- if item not in expected:
- self.assertIsNone(item)
- # Not finding teh right path for mocking :(
- # assert mock_logging.warning.called
-
- def test_empty_config_section_no_new_data(self):
- """When the config section is empty no new data should be written to
- zypp.conf"""
- cfg = self._get_base_config_repos()
- cfg['zypper']['config'] = None
- root_d = self.tmp_dir()
- helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
- self.reRoot(root_d)
- cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- self.assertEqual(contents, '# No data')
-
- def test_empty_config_value_no_new_data(self):
- """When the config section is not empty but there are no values
- no new data should be written to zypp.conf"""
- cfg = self._get_base_config_repos()
- cfg['zypper']['config'] = {
- 'download.deltarpm': None
- }
- root_d = self.tmp_dir()
- helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
- self.reRoot(root_d)
- cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- self.assertEqual(contents, '# No data')
-
- def test_handler_full_setup(self):
- """Test that the handler ends up calling the renderers"""
- cfg = self._get_base_config_repos()
- cfg['zypper']['config'] = {
- 'download.deltarpm': 'False',
- }
- root_d = self.tmp_dir()
- os.makedirs('%s/etc/zypp/repos.d' % root_d)
- helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'})
- self.reRoot(root_d)
- cc_zypper_add_repo.handle('zypper_add_repo', cfg, None, LOG, [])
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- expected = [
- '# Zypp config',
- '# Added via cloud.cfg',
- 'download.deltarpm=False',
- ]
- for item in contents.split('\n'):
- if item not in expected:
- self.assertIsNone(item)
- repos = glob.glob('%s/etc/zypp/repos.d/*.repo' % root_d)
- expected_repos = ['testing-foo.repo', 'testing-bar.repo']
- if len(repos) != 2:
- assert 'Number of repos written is "%d" expected 2' % len(repos)
- for repo in repos:
- repo_name = os.path.basename(repo)
- if repo_name not in expected_repos:
- assert 'Found repo with name "%s"; unexpected' % repo_name
-
- def test_no_config_section_no_new_data(self):
- """When there is no config section no new data should be written to
- zypp.conf"""
- cfg = self._get_base_config_repos()
- root_d = self.tmp_dir()
- helpers.populate_dir(root_d, {self.zypp_conf: '# No data'})
- self.reRoot(root_d)
- cc_zypper_add_repo._write_zypp_config(cfg.get('config', {}))
- cfg_out = os.path.join(root_d, self.zypp_conf)
- contents = util.load_file(cfg_out)
- self.assertEqual(contents, '# No data')
-
- def test_no_repo_data(self):
- """When there is no repo data nothing should happen"""
- root_d = self.tmp_dir()
- self.reRoot(root_d)
- cc_zypper_add_repo._write_repos(None, root_d)
- content = glob.glob('%s/*' % root_d)
- self.assertEqual(len(content), 0)
-
- def _get_base_config_repos(self):
- """Basic valid repo configuration"""
- cfg = {
- 'zypper': {
- 'repos': [
- {
- 'baseurl': 'http://foo',
- 'name': 'test-foo',
- 'id': 'testing-foo'
- },
- {
- 'baseurl': 'http://bar',
- 'name': 'test-bar',
- 'id': 'testing-bar'
- }
- ]
- }
- }
- return cfg
diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py
deleted file mode 100644
index 1dae223d..00000000
--- a/tests/unittests/test_handler/test_schema.py
+++ /dev/null
@@ -1,515 +0,0 @@
-# This file is part of cloud-init. See LICENSE file for license information.
-import cloudinit
-from cloudinit.config.schema import (
- CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file,
- get_schema_doc, get_schema, validate_cloudconfig_file,
- validate_cloudconfig_schema, main)
-from cloudinit.util import write_file
-
-from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema
-
-from copy import copy
-import itertools
-import pytest
-from pathlib import Path
-from textwrap import dedent
-from yaml import safe_load
-
-
-class GetSchemaTest(CiTestCase):
-
- def test_get_schema_coalesces_known_schema(self):
- """Every cloudconfig module with schema is listed in allOf keyword."""
- schema = get_schema()
- self.assertCountEqual(
- [
- 'cc_apk_configure',
- 'cc_apt_configure',
- 'cc_bootcmd',
- 'cc_locale',
- 'cc_ntp',
- 'cc_resizefs',
- 'cc_runcmd',
- 'cc_snap',
- 'cc_ubuntu_advantage',
- 'cc_ubuntu_drivers',
- 'cc_write_files',
- 'cc_write_files_deferred',
- 'cc_zypper_add_repo',
- 'cc_chef',
- 'cc_install_hotplug',
- ],
- [subschema['id'] for subschema in schema['allOf']])
- self.assertEqual('cloud-config-schema', schema['id'])
- self.assertEqual(
- 'http://json-schema.org/draft-04/schema#',
- schema['$schema'])
- # FULL_SCHEMA is updated by the get_schema call
- from cloudinit.config.schema import FULL_SCHEMA
- self.assertCountEqual(['id', '$schema', 'allOf'], FULL_SCHEMA.keys())
-
- def test_get_schema_returns_global_when_set(self):
- """When FULL_SCHEMA global is already set, get_schema returns it."""
- m_schema_path = 'cloudinit.config.schema.FULL_SCHEMA'
- with mock.patch(m_schema_path, {'here': 'iam'}):
- self.assertEqual({'here': 'iam'}, get_schema())
-
-
-class SchemaValidationErrorTest(CiTestCase):
- """Test validate_cloudconfig_schema"""
-
- def test_schema_validation_error_expects_schema_errors(self):
- """SchemaValidationError is initialized from schema_errors."""
- errors = (('key.path', 'unexpected key "junk"'),
- ('key2.path', '"-123" is not a valid "hostname" format'))
- exception = SchemaValidationError(schema_errors=errors)
- self.assertIsInstance(exception, Exception)
- self.assertEqual(exception.schema_errors, errors)
- self.assertEqual(
- 'Cloud config schema errors: key.path: unexpected key "junk", '
- 'key2.path: "-123" is not a valid "hostname" format',
- str(exception))
- self.assertTrue(isinstance(exception, ValueError))
-
-
-class ValidateCloudConfigSchemaTest(CiTestCase):
- """Tests for validate_cloudconfig_schema."""
-
- with_logs = True
-
- @skipUnlessJsonSchema()
- def test_validateconfig_schema_non_strict_emits_warnings(self):
- """When strict is False validate_cloudconfig_schema emits warnings."""
- schema = {'properties': {'p1': {'type': 'string'}}}
- validate_cloudconfig_schema({'p1': -1}, schema, strict=False)
- self.assertIn(
- "Invalid config:\np1: -1 is not of type 'string'\n",
- self.logs.getvalue())
-
- @skipUnlessJsonSchema()
- def test_validateconfig_schema_emits_warning_on_missing_jsonschema(self):
- """Warning from validate_cloudconfig_schema when missing jsonschema."""
- schema = {'properties': {'p1': {'type': 'string'}}}
- with mock.patch.dict('sys.modules', **{'jsonschema': ImportError()}):
- validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
- self.assertIn(
- 'Ignoring schema validation. python-jsonschema is not present',
- self.logs.getvalue())
-
- @skipUnlessJsonSchema()
- def test_validateconfig_schema_strict_raises_errors(self):
- """When strict is True validate_cloudconfig_schema raises errors."""
- schema = {'properties': {'p1': {'type': 'string'}}}
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
- self.assertEqual(
- "Cloud config schema errors: p1: -1 is not of type 'string'",
- str(context_mgr.exception))
-
- @skipUnlessJsonSchema()
- def test_validateconfig_schema_honors_formats(self):
- """With strict True, validate_cloudconfig_schema errors on format."""
- schema = {
- 'properties': {'p1': {'type': 'string', 'format': 'email'}}}
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_schema({'p1': '-1'}, schema, strict=True)
- self.assertEqual(
- "Cloud config schema errors: p1: '-1' is not a 'email'",
- str(context_mgr.exception))
-
-
-class TestCloudConfigExamples:
- schema = get_schema()
- params = [
- (schema["id"], example)
- for schema in schema["allOf"] for example in schema["examples"]]
-
- @pytest.mark.parametrize("schema_id,example", params)
- @skipUnlessJsonSchema()
- def test_validateconfig_schema_of_example(self, schema_id, example):
- """ For a given example in a config module we test if it is valid
- according to the unified schema of all config modules
- """
- config_load = safe_load(example)
- validate_cloudconfig_schema(
- config_load, self.schema, strict=True)
-
-
-class ValidateCloudConfigFileTest(CiTestCase):
- """Tests for validate_cloudconfig_file."""
-
- def setUp(self):
- super(ValidateCloudConfigFileTest, self).setUp()
- self.config_file = self.tmp_path('cloudcfg.yaml')
-
- def test_validateconfig_file_error_on_absent_file(self):
- """On absent config_path, validate_cloudconfig_file errors."""
- with self.assertRaises(RuntimeError) as context_mgr:
- validate_cloudconfig_file('/not/here', {})
- self.assertEqual(
- 'Configfile /not/here does not exist',
- str(context_mgr.exception))
-
- def test_validateconfig_file_error_on_invalid_header(self):
- """On invalid header, validate_cloudconfig_file errors.
-
- A SchemaValidationError is raised when the file doesn't begin with
- CLOUD_CONFIG_HEADER.
- """
- write_file(self.config_file, '#junk')
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_file(self.config_file, {})
- self.assertEqual(
- 'Cloud config schema errors: format-l1.c1: File {0} needs to begin'
- ' with "{1}"'.format(
- self.config_file, CLOUD_CONFIG_HEADER.decode()),
- str(context_mgr.exception))
-
- def test_validateconfig_file_error_on_non_yaml_scanner_error(self):
- """On non-yaml scan issues, validate_cloudconfig_file errors."""
- # Generate a scanner error by providing text on a single line with
- # improper indent.
- write_file(self.config_file, '#cloud-config\nasdf:\nasdf')
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_file(self.config_file, {})
- self.assertIn(
- 'schema errors: format-l3.c1: File {0} is not valid yaml.'.format(
- self.config_file),
- str(context_mgr.exception))
-
- def test_validateconfig_file_error_on_non_yaml_parser_error(self):
- """On non-yaml parser issues, validate_cloudconfig_file errors."""
- write_file(self.config_file, '#cloud-config\n{}}')
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_file(self.config_file, {})
- self.assertIn(
- 'schema errors: format-l2.c3: File {0} is not valid yaml.'.format(
- self.config_file),
- str(context_mgr.exception))
-
- @skipUnlessJsonSchema()
- def test_validateconfig_file_sctrictly_validates_schema(self):
- """validate_cloudconfig_file raises errors on invalid schema."""
- schema = {
- 'properties': {'p1': {'type': 'string', 'format': 'string'}}}
- write_file(self.config_file, '#cloud-config\np1: -1')
- with self.assertRaises(SchemaValidationError) as context_mgr:
- validate_cloudconfig_file(self.config_file, schema)
- self.assertEqual(
- "Cloud config schema errors: p1: -1 is not of type 'string'",
- str(context_mgr.exception))
-
-
-class GetSchemaDocTest(CiTestCase):
- """Tests for get_schema_doc."""
-
- def setUp(self):
- super(GetSchemaDocTest, self).setUp()
- self.required_schema = {
- 'title': 'title', 'description': 'description', 'id': 'id',
- 'name': 'name', 'frequency': 'frequency',
- 'distros': ['debian', 'rhel']}
-
- def test_get_schema_doc_returns_restructured_text(self):
- """get_schema_doc returns restructured text for a cloudinit schema."""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'properties': {
- 'prop1': {'type': 'array', 'description': 'prop-description',
- 'items': {'type': 'integer'}}}})
- self.assertEqual(
- dedent("""
- name
- ----
- **Summary:** title
-
- description
-
- **Internal name:** ``id``
-
- **Module frequency:** frequency
-
- **Supported distros:** debian, rhel
-
- **Config schema**:
- **prop1:** (array of integer) prop-description\n\n"""),
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_handles_multiple_types(self):
- """get_schema_doc delimits multiple property types with a '/'."""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'properties': {
- 'prop1': {'type': ['string', 'integer'],
- 'description': 'prop-description'}}})
- self.assertIn(
- '**prop1:** (string/integer) prop-description',
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_handles_enum_types(self):
- """get_schema_doc converts enum types to yaml and delimits with '/'."""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'properties': {
- 'prop1': {'enum': [True, False, 'stuff'],
- 'description': 'prop-description'}}})
- self.assertIn(
- '**prop1:** (true/false/stuff) prop-description',
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_handles_nested_oneof_property_types(self):
- """get_schema_doc describes array items oneOf declarations in type."""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'properties': {
- 'prop1': {'type': 'array',
- 'items': {
- 'oneOf': [{'type': 'string'},
- {'type': 'integer'}]},
- 'description': 'prop-description'}}})
- self.assertIn(
- '**prop1:** (array of (string)/(integer)) prop-description',
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_handles_string_examples(self):
- """get_schema_doc properly indented examples as a list of strings."""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'examples': ['ex1:\n [don\'t, expand, "this"]', 'ex2: true'],
- 'properties': {
- 'prop1': {'type': 'array', 'description': 'prop-description',
- 'items': {'type': 'integer'}}}})
- self.assertIn(
- dedent("""
- **Config schema**:
- **prop1:** (array of integer) prop-description
-
- **Examples**::
-
- ex1:
- [don't, expand, "this"]
- # --- Example2 ---
- ex2: true
- """),
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_properly_parse_description(self):
- """get_schema_doc description properly formatted"""
- full_schema = copy(self.required_schema)
- full_schema.update(
- {'properties': {
- 'p1': {
- 'type': 'string',
- 'description': dedent("""\
- This item
- has the
- following options:
-
- - option1
- - option2
- - option3
-
- The default value is
- option1""")
- }
- }}
- )
-
- self.assertIn(
- dedent("""
- **Config schema**:
- **p1:** (string) This item has the following options:
-
- - option1
- - option2
- - option3
-
- The default value is option1
- """),
- get_schema_doc(full_schema))
-
- def test_get_schema_doc_raises_key_errors(self):
- """get_schema_doc raises KeyErrors on missing keys."""
- for key in self.required_schema:
- invalid_schema = copy(self.required_schema)
- invalid_schema.pop(key)
- with self.assertRaises(KeyError) as context_mgr:
- get_schema_doc(invalid_schema)
- self.assertIn(key, str(context_mgr.exception))
-
-
class AnnotatedCloudconfigFileTest(CiTestCase):
    """Tests for annotated_cloudconfig_file schema-error annotation."""

    # Show full diffs on multi-line assertion failures.
    maxDiff = None

    def test_annotated_cloudconfig_file_no_schema_errors(self):
        """With no schema_errors, print the original content."""
        content = b'ntp:\n pools: [ntp1.pools.com]\n'
        self.assertEqual(
            content,
            annotated_cloudconfig_file({}, content, schema_errors=[]))

    def test_annotated_cloudconfig_file_schema_annotates_and_adds_footer(self):
        """With schema_errors, error lines are annotated and a footer added."""
        content = dedent("""\
            #cloud-config
            # comment
            ntp:
              pools: [-99, 75]
            """).encode()
        # Each offending line gets an "# En" marker; multiple errors on one
        # line are comma-joined (E2,E3), and a footer lists all messages.
        # NOTE(review): the whitespace before each "# En" marker was
        # reconstructed — confirm against annotated_cloudconfig_file output.
        expected = dedent("""\
            #cloud-config
            # comment
            ntp: # E1
              pools: [-99, 75] # E2,E3

            # Errors: -------------
            # E1: Some type error
            # E2: -99 is not a string
            # E3: 75 is not a string

            """)
        # Strip the 13-byte "#cloud-config" header before YAML parsing.
        parsed_config = safe_load(content[13:])
        # (json-schema path, message) pairs; paths index into nested config.
        schema_errors = [
            ('ntp', 'Some type error'), ('ntp.pools.0', '-99 is not a string'),
            ('ntp.pools.1', '75 is not a string')]
        self.assertEqual(
            expected,
            annotated_cloudconfig_file(parsed_config, content, schema_errors))

    def test_annotated_cloudconfig_file_annotates_separate_line_items(self):
        """Errors are annotated for lists with items on separate lines."""
        content = dedent("""\
            #cloud-config
            # comment
            ntp:
              pools:
                - -99
                - 75
            """).encode()
        # Only the annotated region is checked (assertIn), not the footer.
        expected = dedent("""\
            ntp:
              pools:
                - -99 # E1
                - 75 # E2
            """)
        # Strip the 13-byte "#cloud-config" header before YAML parsing.
        parsed_config = safe_load(content[13:])
        schema_errors = [
            ('ntp.pools.0', '-99 is not a string'),
            ('ntp.pools.1', '75 is not a string')]
        self.assertIn(
            expected,
            annotated_cloudconfig_file(parsed_config, content, schema_errors))
-
-
class TestMain:
    """Exercise argument handling of the schema subcommand's main()."""

    # Any two of these flags supplied together are mutually exclusive and
    # must be rejected by main().
    exclusive_combinations = itertools.combinations(
        ["--system", "--docs all", "--config-file something"], 2
    )

    @pytest.mark.parametrize("params", exclusive_combinations)
    def test_main_exclusive_args(self, params, capsys):
        """Main exits non-zero and error on required exclusive args."""
        # Flatten e.g. ("--system", "--docs all") into separate argv tokens.
        params = list(
            itertools.chain.from_iterable(arg.split() for arg in params)
        )
        with mock.patch('sys.argv', ['mycmd'] + params):
            with pytest.raises(SystemExit) as exit_info:
                main()
            assert 1 == exit_info.value.code

        _out, err = capsys.readouterr()
        assert err == (
            'Expected one of --config-file, --system or --docs arguments\n'
        )

    def test_main_missing_args(self, capsys):
        """Main exits non-zero and reports an error on missing parameters."""
        with mock.patch('sys.argv', ['mycmd']):
            with pytest.raises(SystemExit) as exit_info:
                main()
            assert 1 == exit_info.value.code

        _out, err = capsys.readouterr()
        assert err == (
            'Expected one of --config-file, --system or --docs arguments\n'
        )

    def test_main_absent_config_file(self, capsys):
        """Main exits non-zero when config file is absent."""
        cli_args = ['mycmd', '--annotate', '--config-file', 'NOT_A_FILE']
        with mock.patch('sys.argv', cli_args):
            with pytest.raises(SystemExit) as exit_info:
                main()
            assert 1 == exit_info.value.code
        _out, err = capsys.readouterr()
        assert 'Configfile NOT_A_FILE does not exist\n' == err

    def test_main_prints_docs(self, capsys):
        """When --docs parameter is provided, main generates documentation."""
        cli_args = ['mycmd', '--docs', 'all']
        with mock.patch('sys.argv', cli_args):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        # Spot-check two module doc headers in the rendered output.
        assert '\nNTP\n---\n' in out
        assert '\nRuncmd\n------\n' in out

    def test_main_validates_config_file(self, tmpdir, capsys):
        """When --config-file parameter is provided, main validates schema."""
        yaml_file = tmpdir.join('my.yaml')
        cli_args = ['mycmd', '--config-file', yaml_file.strpath]
        yaml_file.write(b'#cloud-config\nntp:')  # shortest ntp schema
        with mock.patch('sys.argv', cli_args):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        assert 'Valid cloud-config: {0}\n'.format(yaml_file) == out

    @mock.patch('cloudinit.config.schema.read_cfg_paths')
    @mock.patch('cloudinit.config.schema.os.getuid', return_value=0)
    def test_main_validates_system_userdata(
        self, m_getuid, m_read_cfg_paths, capsys, paths
    ):
        """When --system is provided, main validates system userdata."""
        m_read_cfg_paths.return_value = paths
        ud_file = paths.get_ipath_cur("userdata_raw")
        write_file(ud_file, b'#cloud-config\nntp:')
        with mock.patch('sys.argv', ['mycmd', '--system']):
            assert 0 == main(), 'Expected 0 exit code'
        out, _err = capsys.readouterr()
        assert 'Valid cloud-config: system userdata\n' == out

    @mock.patch('cloudinit.config.schema.os.getuid', return_value=1000)
    def test_main_system_userdata_requires_root(self, m_getuid, capsys, paths):
        """Non-root user can't use --system param"""
        with mock.patch('sys.argv', ['mycmd', '--system']):
            with pytest.raises(SystemExit) as exit_info:
                main()
            assert 1 == exit_info.value.code
        _out, err = capsys.readouterr()
        assert err == (
            'Unable to read system userdata as non-root user. Try using sudo\n'
        )
-
-
def _get_schema_doc_examples():
    """Return an iterator over the doc/examples cloud-config files.

    Archive-format examples are excluded: they are not plain cloud-config
    documents, so they are not expected to pass schema validation.
    """
    examples_dir = Path(cloudinit.__file__).parent.parent / 'doc' / 'examples'
    # Fail loudly here (at collection time) if the tree layout changed.
    assert examples_dir.is_dir()
    return (
        path
        for path in examples_dir.glob('cloud-config*.txt')
        if not path.name.startswith('cloud-config-archive')
    )
-
-
class TestSchemaDocExamples:
    """Every example under doc/examples must satisfy the config schema."""

    # Built once at class-definition time and shared by all parametrized
    # cases to avoid re-reading every module schema per example.
    schema = get_schema()

    @pytest.mark.parametrize("example_path", _get_schema_doc_examples())
    @skipUnlessJsonSchema()
    def test_schema_doc_examples(self, example_path):
        """Validate a single documentation example against the schema."""
        config_path = str(example_path)
        validate_cloudconfig_file(config_path, self.schema)
-
-# vi: ts=4 expandtab syntax=python