From b3abcdc09b894249c8360a030d8aa3b815bd0c20 Mon Sep 17 00:00:00 2001 From: Dan Kenigsberg Date: Wed, 20 Jan 2021 21:50:10 +0200 Subject: Use proper spelling for Red Hat (#778) The company name has two distinct words. Signed-off-by: Dan Kenigsberg --- tests/cloud_tests/testcases/examples/TODO.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'tests/cloud_tests') diff --git a/tests/cloud_tests/testcases/examples/TODO.md b/tests/cloud_tests/testcases/examples/TODO.md index 8db0e98e..cde699a7 100644 --- a/tests/cloud_tests/testcases/examples/TODO.md +++ b/tests/cloud_tests/testcases/examples/TODO.md @@ -6,7 +6,7 @@ Below lists each of the issing examples and why it is not currently added. - Puppet (takes > 60 seconds to run) - Manage resolve.conf (lxd backend overrides changes) - Adding a yum repository (need centos system) - - Register RedHat Subscription (need centos system + subscription) + - Register Red Hat Subscription (need centos system + subscription) - Adjust mount points mounted (need multiple disks) - Call a url when finished (need end point) - Reboot/poweroff when finished (how to test) -- cgit v1.2.3 From f5a244960c3f1591d022c081d816bc9604512629 Mon Sep 17 00:00:00 2001 From: Paride Legovini Date: Fri, 18 Jun 2021 17:22:58 +0200 Subject: cloud_tests: add impish release definition (#927) --- tests/cloud_tests/releases.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) (limited to 'tests/cloud_tests') diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml index 6249efc5..b4915a25 100644 --- a/tests/cloud_tests/releases.yaml +++ b/tests/cloud_tests/releases.yaml @@ -133,6 +133,23 @@ features: releases: # UBUNTU ================================================================= + impish: + # EOL: July 2022 + default: + enabled: true + release: hirsute + version: "21.10" + os: ubuntu + feature_groups: + - base + - debian_base + - ubuntu_specific + lxd: + sstreams_server: https://cloud-images.ubuntu.com/daily + alias: impish + setup_overrides: null + override_templates: false + hirsute: # EOL: Jan 2022 default: -- cgit v1.2.3 From 35aa9db6f8e2ba05d366776c0e8d97f52217e930 Mon Sep 17 00:00:00 2001 From: sshedi <53473811+sshedi@users.noreply.github.com> Date: Fri, 18 Jun 2021 22:23:44 +0530 Subject: Add support for VMware PhotonOS (#909) Also added a new (currently experimental) systemd-networkd renderer, and includes a small refactor to cc_resolv_conf.py to support the resolved.conf used by systemd-resolved. 
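For orientation before the diff: the new networkd renderer is driven the same way as the existing renderers, and the unit tests added below exercise it through cloudinit.net.network_state. A minimal sketch of that flow, assuming an invented v1 network config and a throwaway target directory (neither comes from this patch), might look like:

    # Hypothetical usage of the experimental networkd renderer added below.
    # Run as root or mock util.chownbyname (as the new unit tests do), since
    # the renderer chowns the written files to the systemd-network user.
    from cloudinit.net import network_state, networkd

    sample_v1_cfg = {
        'version': 1,
        'config': [
            {'type': 'physical', 'name': 'eth0',
             'subnets': [{'type': 'dhcp'}]},
        ],
    }

    ns = network_state.parse_net_config_data(sample_v1_cfg)
    renderer = networkd.Renderer(
        config={'network_conf_dir': '/etc/systemd/network/'})
    # Writes <target>/etc/systemd/network/10-cloud-init-eth0.network with
    # [Match] Name=eth0 and [Network] DHCP=ipv4 sections.
    renderer.render_network_state(ns, target='/tmp/networkd-demo')
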
--- README.md | 2 +- cloudinit/cmd/devel/net_convert.py | 11 +- cloudinit/config/cc_ntp.py | 26 +- cloudinit/config/cc_resolv_conf.py | 25 +- cloudinit/config/cc_yum_add_repo.py | 4 +- cloudinit/config/tests/test_resolv_conf.py | 28 +- cloudinit/distros/__init__.py | 4 +- cloudinit/distros/arch.py | 1 - cloudinit/distros/gentoo.py | 1 - cloudinit/distros/opensuse.py | 1 - cloudinit/distros/photon.py | 355 +++++++++++++++++++++ cloudinit/distros/rhel.py | 1 - cloudinit/net/networkd.py | 246 ++++++++++++++ cloudinit/net/renderers.py | 4 +- cloudinit/tests/test_util.py | 20 ++ cloudinit/util.py | 4 +- config/cloud.cfg.tmpl | 52 ++- systemd/cloud-init.service.tmpl | 2 + templates/chrony.conf.photon.tmpl | 48 +++ templates/hosts.photon.tmpl | 22 ++ templates/ntp.conf.photon.tmpl | 61 ++++ templates/resolv.conf.tmpl | 2 +- templates/systemd.resolved.conf.tmpl | 15 + tests/cloud_tests/util.py | 2 +- tests/unittests/test_cli.py | 2 +- tests/unittests/test_distros/test_netconfig.py | 99 +++++- .../test_handler/test_handler_set_hostname.py | 26 ++ tests/unittests/test_net.py | 244 +++++++++++++- tests/unittests/test_render_cloudcfg.py | 3 +- tools/render-cloudcfg | 4 +- 30 files changed, 1256 insertions(+), 59 deletions(-) create mode 100644 cloudinit/distros/photon.py create mode 100644 cloudinit/net/networkd.py create mode 100644 templates/chrony.conf.photon.tmpl create mode 100644 templates/hosts.photon.tmpl create mode 100644 templates/ntp.conf.photon.tmpl create mode 100644 templates/systemd.resolved.conf.tmpl (limited to 'tests/cloud_tests') diff --git a/README.md b/README.md index 6f7e4c99..462e3204 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ get in contact with that distribution and send them our way! | Supported OSes | Supported Public Clouds | Supported Private Clouds | | --- | --- | --- | -| Alpine Linux
ArchLinux
Debian
DragonFlyBSD
Fedora
FreeBSD
Gentoo Linux
NetBSD
OpenBSD
RHEL/CentOS/AlmaLinux/Rocky
SLES/openSUSE
Ubuntu










| Amazon Web Services
Microsoft Azure
Google Cloud Platform
Oracle Cloud Infrastructure
Softlayer
Rackspace Public Cloud
IBM Cloud
DigitalOcean
Bigstep
Hetzner
Joyent
CloudSigma
Alibaba Cloud
OVH
OpenNebula
Exoscale
Scaleway
CloudStack
AltCloud
SmartOS
HyperOne
Vultr
Rootbox
| Bare metal installs
OpenStack
LXD
KVM
Metal-as-a-Service (MAAS)















| +| Alpine Linux
ArchLinux
Debian
DragonFlyBSD
Fedora
FreeBSD
Gentoo Linux
NetBSD
OpenBSD
RHEL/CentOS/AlmaLinux/Rocky/PhotonOS
SLES/openSUSE
Ubuntu










| Amazon Web Services
Microsoft Azure
Google Cloud Platform
Oracle Cloud Infrastructure
Softlayer
Rackspace Public Cloud
IBM Cloud
DigitalOcean
Bigstep
Hetzner
Joyent
CloudSigma
Alibaba Cloud
OVH
OpenNebula
Exoscale
Scaleway
CloudStack
AltCloud
SmartOS
HyperOne
Vultr
Rootbox
| Bare metal installs
OpenStack
LXD
KVM
Metal-as-a-Service (MAAS)















| ## To start developing cloud-init diff --git a/cloudinit/cmd/devel/net_convert.py b/cloudinit/cmd/devel/net_convert.py index 0668ffa3..5c649fd0 100755 --- a/cloudinit/cmd/devel/net_convert.py +++ b/cloudinit/cmd/devel/net_convert.py @@ -11,7 +11,7 @@ from cloudinit.sources import DataSourceAzure as azure from cloudinit.sources import DataSourceOVF as ovf from cloudinit import distros, safeyaml -from cloudinit.net import eni, netplan, network_state, sysconfig +from cloudinit.net import eni, netplan, networkd, network_state, sysconfig from cloudinit import log NAME = 'net-convert' @@ -51,7 +51,7 @@ def get_parser(parser=None): parser.add_argument("--debug", action='store_true', help='enable debug logging to stderr.') parser.add_argument("-O", "--output-kind", - choices=['eni', 'netplan', 'sysconfig'], + choices=['eni', 'netplan', 'networkd', 'sysconfig'], required=True, help="The network config format to emit") return parser @@ -118,9 +118,14 @@ def handle_args(name, args): config['netplan_path'] = config['netplan_path'][1:] # enable some netplan features config['features'] = ['dhcp-use-domains', 'ipv6-mtu'] - else: + elif args.output_kind == "networkd": + r_cls = networkd.Renderer + config = distro.renderer_configs.get('networkd') + elif args.output_kind == "sysconfig": r_cls = sysconfig.Renderer config = distro.renderer_configs.get('sysconfig') + else: + raise RuntimeError("Invalid output_kind") r = r_cls(config=config) sys.stderr.write(''.join([ diff --git a/cloudinit/config/cc_ntp.py b/cloudinit/config/cc_ntp.py index 70c24610..acf3251d 100644 --- a/cloudinit/config/cc_ntp.py +++ b/cloudinit/config/cc_ntp.py @@ -25,7 +25,7 @@ frequency = PER_INSTANCE NTP_CONF = '/etc/ntp.conf' NR_POOL_SERVERS = 4 distros = ['almalinux', 'alpine', 'centos', 'debian', 'fedora', 'opensuse', - 'rhel', 'rocky', 'sles', 'ubuntu'] + 'photon', 'rhel', 'rocky', 'sles', 'ubuntu'] NTP_CLIENT_CONFIG = { 'chrony': { @@ -80,24 +80,37 @@ DISTRO_CLIENT_CONFIG = { 'confpath': '/etc/chrony/chrony.conf', }, }, - 'rhel': { + 'opensuse': { + 'chrony': { + 'service_name': 'chronyd', + }, 'ntp': { + 'confpath': '/etc/ntp.conf', 'service_name': 'ntpd', }, - 'chrony': { - 'service_name': 'chronyd', + 'systemd-timesyncd': { + 'check_exe': '/usr/lib/systemd/systemd-timesyncd', }, }, - 'opensuse': { + 'photon': { 'chrony': { 'service_name': 'chronyd', }, 'ntp': { - 'confpath': '/etc/ntp.conf', 'service_name': 'ntpd', + 'confpath': '/etc/ntp.conf' }, 'systemd-timesyncd': { 'check_exe': '/usr/lib/systemd/systemd-timesyncd', + 'confpath': '/etc/systemd/timesyncd.conf', + }, + }, + 'rhel': { + 'ntp': { + 'service_name': 'ntpd', + }, + 'chrony': { + 'service_name': 'chronyd', }, }, 'sles': { @@ -551,7 +564,6 @@ def handle(name, cfg, cloud, log, _args): # Select which client is going to be used and get the configuration ntp_client_config = select_ntp_client(ntp_cfg.get('ntp_client'), cloud.distro) - # Allow user ntp config to override distro configurations ntp_client_config = util.mergemanydict( [ntp_client_config, ntp_cfg.get('config', {})], reverse=True) diff --git a/cloudinit/config/cc_resolv_conf.py b/cloudinit/config/cc_resolv_conf.py index 466dad03..c51967e2 100644 --- a/cloudinit/config/cc_resolv_conf.py +++ b/cloudinit/config/cc_resolv_conf.py @@ -30,7 +30,7 @@ are configured correctly. **Module frequency:** per instance -**Supported distros:** alpine, fedora, rhel, sles +**Supported distros:** alpine, fedora, photon, rhel, sles **Config keys**:: @@ -47,18 +47,23 @@ are configured correctly. 
""" from cloudinit import log as logging -from cloudinit.settings import PER_INSTANCE from cloudinit import templater +from cloudinit.settings import PER_INSTANCE from cloudinit import util LOG = logging.getLogger(__name__) frequency = PER_INSTANCE -distros = ['alpine', 'fedora', 'opensuse', 'rhel', 'sles'] +distros = ['alpine', 'fedora', 'opensuse', 'photon', 'rhel', 'sles'] + +RESOLVE_CONFIG_TEMPLATE_MAP = { + '/etc/resolv.conf': 'resolv.conf', + '/etc/systemd/resolved.conf': 'systemd.resolved.conf', +} -def generate_resolv_conf(template_fn, params, target_fname="/etc/resolv.conf"): +def generate_resolv_conf(template_fn, params, target_fname): flags = [] false_flags = [] @@ -104,12 +109,18 @@ def handle(name, cfg, cloud, log, _args): if "resolv_conf" not in cfg: log.warning("manage_resolv_conf True but no parameters provided!") - template_fn = cloud.get_template_filename('resolv.conf') - if not template_fn: + try: + template_fn = cloud.get_template_filename( + RESOLVE_CONFIG_TEMPLATE_MAP[cloud.distro.resolv_conf_fn]) + except KeyError: log.warning("No template found, not rendering /etc/resolv.conf") return - generate_resolv_conf(template_fn=template_fn, params=cfg["resolv_conf"]) + generate_resolv_conf( + template_fn=template_fn, + params=cfg["resolv_conf"], + target_fname=cloud.disro.resolve_conf_fn + ) return # vi: ts=4 expandtab diff --git a/cloudinit/config/cc_yum_add_repo.py b/cloudinit/config/cc_yum_add_repo.py index 7daa6bd9..67f09686 100644 --- a/cloudinit/config/cc_yum_add_repo.py +++ b/cloudinit/config/cc_yum_add_repo.py @@ -18,7 +18,7 @@ entry, the config entry will be skipped. **Module frequency:** per always -**Supported distros:** almalinux, centos, fedora, rhel, rocky +**Supported distros:** almalinux, centos, fedora, photon, rhel, rocky **Config keys**:: @@ -36,7 +36,7 @@ from configparser import ConfigParser from cloudinit import util -distros = ['almalinux', 'centos', 'fedora', 'rhel', 'rocky'] +distros = ['almalinux', 'centos', 'fedora', 'photon', 'rhel', 'rocky'] def _canonicalize_id(repo_id): diff --git a/cloudinit/config/tests/test_resolv_conf.py b/cloudinit/config/tests/test_resolv_conf.py index 6546a0b5..45a06c22 100644 --- a/cloudinit/config/tests/test_resolv_conf.py +++ b/cloudinit/config/tests/test_resolv_conf.py @@ -1,9 +1,8 @@ -from unittest import mock - import pytest +from unittest import mock from cloudinit.config.cc_resolv_conf import generate_resolv_conf - +from tests.unittests.test_distros.test_create_users import MyBaseDistro EXPECTED_HEADER = """\ # Your system has been configured with 'manage-resolv-conf' set to true. 
@@ -14,22 +13,28 @@ EXPECTED_HEADER = """\ class TestGenerateResolvConf: + + dist = MyBaseDistro() + tmpl_fn = "templates/resolv.conf.tmpl" + @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file") - def test_default_target_fname_is_etc_resolvconf(self, m_render_to_file): - generate_resolv_conf("templates/resolv.conf.tmpl", mock.MagicMock()) + def test_dist_resolv_conf_fn(self, m_render_to_file): + self.dist.resolve_conf_fn = "/tmp/resolv-test.conf" + generate_resolv_conf(self.tmpl_fn, + mock.MagicMock(), + self.dist.resolve_conf_fn) assert [ - mock.call(mock.ANY, "/etc/resolv.conf", mock.ANY) + mock.call(mock.ANY, self.dist.resolve_conf_fn, mock.ANY) ] == m_render_to_file.call_args_list @mock.patch("cloudinit.config.cc_resolv_conf.templater.render_to_file") def test_target_fname_is_used_if_passed(self, m_render_to_file): - generate_resolv_conf( - "templates/resolv.conf.tmpl", mock.MagicMock(), "/use/this/path" - ) + path = "/use/this/path" + generate_resolv_conf(self.tmpl_fn, mock.MagicMock(), path) assert [ - mock.call(mock.ANY, "/use/this/path", mock.ANY) + mock.call(mock.ANY, path, mock.ANY) ] == m_render_to_file.call_args_list # Patch in templater so we can assert on the actual generated content @@ -75,7 +80,8 @@ class TestGenerateResolvConf: def test_flags_and_options( self, m_write_file, params, expected_extra_line ): - generate_resolv_conf("templates/resolv.conf.tmpl", params) + target_fn = "/etc/resolv.conf" + generate_resolv_conf(self.tmpl_fn, params, target_fn) expected_content = EXPECTED_HEADER if expected_extra_line is not None: diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 57e33621..4991f42b 100755 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -46,7 +46,8 @@ OSFAMILIES = { 'debian': ['debian', 'ubuntu'], 'freebsd': ['freebsd'], 'gentoo': ['gentoo'], - 'redhat': ['almalinux', 'amazon', 'centos', 'fedora', 'rhel', 'rocky'], + 'redhat': ['almalinux', 'amazon', 'centos', 'fedora', 'photon', 'rhel', + 'rocky'], 'suse': ['opensuse', 'sles'], } @@ -80,6 +81,7 @@ class Distro(persistence.CloudInitPickleMixin, metaclass=abc.ABCMeta): _ci_pkl_version = 1 prefer_fqdn = False + resolve_conf_fn = "/etc/resolv.conf" def __init__(self, name, cfg, paths): self._paths = paths diff --git a/cloudinit/distros/arch.py b/cloudinit/distros/arch.py index f8385f7f..246e6fe7 100644 --- a/cloudinit/distros/arch.py +++ b/cloudinit/distros/arch.py @@ -25,7 +25,6 @@ LOG = logging.getLogger(__name__) class Distro(distros.Distro): locale_gen_fn = "/etc/locale.gen" network_conf_dir = "/etc/netctl" - resolve_conf_fn = "/etc/resolv.conf" init_cmd = ['systemctl'] # init scripts renderer_configs = { "netplan": {"netplan_path": "/etc/netplan/50-cloud-init.yaml", diff --git a/cloudinit/distros/gentoo.py b/cloudinit/distros/gentoo.py index e9b82602..68c03e7f 100644 --- a/cloudinit/distros/gentoo.py +++ b/cloudinit/distros/gentoo.py @@ -23,7 +23,6 @@ LOG = logging.getLogger(__name__) class Distro(distros.Distro): locale_conf_fn = '/etc/locale.gen' network_conf_fn = '/etc/conf.d/net' - resolve_conf_fn = '/etc/resolv.conf' hostname_conf_fn = '/etc/conf.d/hostname' init_cmd = ['rc-service'] # init scripts diff --git a/cloudinit/distros/opensuse.py b/cloudinit/distros/opensuse.py index 7ca0ef99..270cc189 100644 --- a/cloudinit/distros/opensuse.py +++ b/cloudinit/distros/opensuse.py @@ -27,7 +27,6 @@ class Distro(distros.Distro): locale_conf_fn = '/etc/sysconfig/language' network_conf_fn = '/etc/sysconfig/network/config' network_script_tpl = 
'/etc/sysconfig/network/ifcfg-%s' - resolve_conf_fn = '/etc/resolv.conf' route_conf_tpl = '/etc/sysconfig/network/ifroute-%s' systemd_hostname_conf_fn = '/etc/hostname' systemd_locale_conf_fn = '/etc/locale.conf' diff --git a/cloudinit/distros/photon.py b/cloudinit/distros/photon.py new file mode 100644 index 00000000..8b78f98f --- /dev/null +++ b/cloudinit/distros/photon.py @@ -0,0 +1,355 @@ +#!/usr/bin/env python3 +# vi: ts=4 expandtab +# +# Copyright (C) 2021 VMware Inc. +# +# This file is part of cloud-init. See LICENSE file for license information. + +from cloudinit import util +from cloudinit import subp +from cloudinit import distros +from cloudinit import helpers +from cloudinit import log as logging +from cloudinit.distros import net_util +from cloudinit.settings import PER_INSTANCE +from cloudinit.distros import rhel_util as rhutil +from cloudinit.net.network_state import mask_to_net_prefix +from cloudinit.distros.parsers.hostname import HostnameConf + +LOG = logging.getLogger(__name__) + + +class Distro(distros.Distro): + hostname_conf_fn = '/etc/hostname' + network_conf_dir = '/etc/systemd/network/' + systemd_locale_conf_fn = '/etc/locale.conf' + resolve_conf_fn = '/etc/systemd/resolved.conf' + + renderer_configs = { + 'networkd': { + 'resolv_conf_fn': resolve_conf_fn, + 'network_conf_dir': network_conf_dir, + } + } + + # Should be fqdn if we can use it + prefer_fqdn = True + + def __init__(self, name, cfg, paths): + distros.Distro.__init__(self, name, cfg, paths) + # This will be used to restrict certain + # calls from repeatly happening (when they + # should only happen say once per instance...) + self._runner = helpers.Runners(paths) + self.osfamily = 'photon' + self.init_cmd = ['systemctl'] + + def exec_cmd(self, cmd, capture=False): + LOG.debug('Attempting to run: %s', cmd) + try: + (out, err) = subp.subp(cmd, capture=capture) + if err: + LOG.warning('Running %s resulted in stderr output: %s', + cmd, err) + return True, out, err + except subp.ProcessExecutionError: + util.logexc(LOG, 'Command %s failed', cmd) + return False, None, None + + def apply_locale(self, locale, out_fn=None): + # This has a dependancy on glibc-i18n, user need to manually install it + # and enable the option in cloud.cfg + if not out_fn: + out_fn = self.systemd_locale_conf_fn + + locale_cfg = { + 'LANG': locale, + } + + rhutil.update_sysconfig_file(out_fn, locale_cfg) + + # rhutil will modify /etc/locale.conf + # For locale change to take effect, reboot is needed or we can restart + # systemd-localed. This is equivalent of localectl + cmd = ['systemctl', 'restart', 'systemd-localed'] + _ret, _out, _err = self.exec_cmd(cmd) + + def install_packages(self, pkglist): + # self.update_package_sources() + self.package_command('install', pkgs=pkglist) + + def _write_network_config(self, netconfig): + if isinstance(netconfig, str): + self._write_network_(netconfig) + return + return self._supported_write_network_config(netconfig) + + def _bring_up_interfaces(self, device_names): + cmd = ['systemctl', 'restart', 'systemd-networkd', 'systemd-resolved'] + LOG.debug('Attempting to run bring up interfaces using command %s', + cmd) + ret, _out, _err = self.exec_cmd(cmd) + return ret + + def _write_hostname(self, hostname, out_fn): + conf = None + try: + # Try to update the previous one + # Let's see if we can read it first. 
+ conf = HostnameConf(util.load_file(out_fn)) + conf.parse() + except IOError: + pass + if not conf: + conf = HostnameConf('') + conf.set_hostname(hostname) + util.write_file(out_fn, str(conf), mode=0o644) + + def _read_system_hostname(self): + sys_hostname = self._read_hostname(self.hostname_conf_fn) + return (self.hostname_conf_fn, sys_hostname) + + def _read_hostname(self, filename, default=None): + _ret, out, _err = self.exec_cmd(['hostname']) + + return out if out else default + + def _get_localhost_ip(self): + return '127.0.1.1' + + def set_timezone(self, tz): + distros.set_etc_timezone(tz=tz, tz_file=self._find_tz_file(tz)) + + def package_command(self, command, args=None, pkgs=None): + if pkgs is None: + pkgs = [] + + cmd = ['tdnf', '-y'] + if args and isinstance(args, str): + cmd.append(args) + elif args and isinstance(args, list): + cmd.extend(args) + + cmd.append(command) + + pkglist = util.expand_package_list('%s-%s', pkgs) + cmd.extend(pkglist) + + # Allow the output of this to flow outwards (ie not be captured) + _ret, _out, _err = self.exec_cmd(cmd, capture=False) + + def update_package_sources(self): + self._runner.run('update-sources', self.package_command, + ['makecache'], freq=PER_INSTANCE) + + def _generate_resolv_conf(self): + resolv_conf_fn = self.resolve_conf_fn + resolv_templ_fn = 'systemd.resolved.conf' + + return resolv_conf_fn, resolv_templ_fn + + def _write_network_(self, settings): + entries = net_util.translate_network(settings) + LOG.debug('Translated ubuntu style network settings %s into %s', + settings, entries) + route_entries = [] + route_entries = translate_routes(settings) + dev_names = entries.keys() + nameservers = [] + searchdomains = [] + # Format for systemd + for (dev, info) in entries.items(): + if 'dns-nameservers' in info: + nameservers.extend(info['dns-nameservers']) + if 'dns-search' in info: + searchdomains.extend(info['dns-search']) + if dev == 'lo': + continue + + net_fn = self.network_conf_dir + '10-cloud-init-' + dev + net_fn += '.network' + dhcp_enabled = 'no' + if info.get('bootproto') == 'dhcp': + if (settings.find('inet dhcp') >= 0 and + settings.find('inet6 dhcp') >= 0): + dhcp_enabled = 'yes' + else: + if info.get('inet6') is True: + dhcp_enabled = 'ipv6' + else: + dhcp_enabled = 'ipv4' + + net_cfg = { + 'Name': dev, + 'DHCP': dhcp_enabled, + } + + if info.get('hwaddress'): + net_cfg['MACAddress'] = info.get('hwaddress') + if info.get('address'): + net_cfg['Address'] = '%s' % (info.get('address')) + if info.get('netmask'): + net_cfg['Address'] += '/%s' % ( + mask_to_net_prefix(info.get('netmask'))) + if info.get('gateway'): + net_cfg['Gateway'] = info.get('gateway') + if info.get('dns-nameservers'): + net_cfg['DNS'] = str( + tuple(info.get('dns-nameservers'))).replace(',', '') + if info.get('dns-search'): + net_cfg['Domains'] = str( + tuple(info.get('dns-search'))).replace(',', '') + route_entry = [] + if dev in route_entries: + route_entry = route_entries[dev] + route_index = 0 + found = True + while found: + route_name = 'routes.' 
+ str(route_index) + if route_name in route_entries[dev]: + val = str(tuple(route_entries[dev][route_name])) + val = val.replace(',', '') + if val: + net_cfg[route_name] = val + else: + found = False + route_index += 1 + + if info.get('auto'): + self._write_interface_file(net_fn, net_cfg, route_entry) + + resolve_data = [] + new_resolve_data = [] + with open(self.resolve_conf_fn, 'r') as rf: + resolve_data = rf.readlines() + LOG.debug('Old Resolve Data\n') + LOG.debug('%s', resolve_data) + for item in resolve_data: + if ((nameservers and ('DNS=' in item)) or + (searchdomains and ('Domains=' in item))): + continue + else: + new_resolve_data.append(item) + + new_resolve_data = new_resolve_data + \ + convert_resolv_conf(nameservers, searchdomains) + LOG.debug('New resolve data\n') + LOG.debug('%s', new_resolve_data) + if nameservers or searchdomains: + util.write_file(self.resolve_conf_fn, ''.join(new_resolve_data)) + + return dev_names + + def _write_interface_file(self, net_fn, net_cfg, route_entry): + if not net_cfg['Name']: + return + content = '[Match]\n' + content += 'Name=%s\n' % (net_cfg['Name']) + if 'MACAddress' in net_cfg: + content += 'MACAddress=%s\n' % (net_cfg['MACAddress']) + content += '[Network]\n' + + if 'DHCP' in net_cfg and net_cfg['DHCP'] in {'yes', 'ipv4', 'ipv6'}: + content += 'DHCP=%s\n' % (net_cfg['DHCP']) + else: + if 'Address' in net_cfg: + content += 'Address=%s\n' % (net_cfg['Address']) + if 'Gateway' in net_cfg: + content += 'Gateway=%s\n' % (net_cfg['Gateway']) + if 'DHCP' in net_cfg and net_cfg['DHCP'] == 'no': + content += 'DHCP=%s\n' % (net_cfg['DHCP']) + + route_index = 0 + found = True + if route_entry: + while found: + route_name = 'routes.' + str(route_index) + if route_name in route_entry: + content += '[Route]\n' + if len(route_entry[route_name]) != 2: + continue + content += 'Gateway=%s\n' % ( + route_entry[route_name][0]) + content += 'Destination=%s\n' % ( + route_entry[route_name][1]) + else: + found = False + route_index += 1 + + util.write_file(net_fn, content) + + +def convert_resolv_conf(nameservers, searchdomains): + ''' Returns a string formatted for resolv.conf ''' + result = [] + if nameservers: + nslist = 'DNS=' + for ns in nameservers: + nslist = nslist + '%s ' % ns + nslist = nslist + '\n' + result.append(str(nslist)) + if searchdomains: + sdlist = 'Domains=' + for sd in searchdomains: + sdlist = sdlist + '%s ' % sd + sdlist = sdlist + '\n' + result.append(str(sdlist)) + return result + + +def translate_routes(settings): + entries = [] + for line in settings.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + split_up = line.split(None, 1) + if len(split_up) <= 1: + continue + entries.append(split_up) + consume = {} + ifaces = [] + for (cmd, args) in entries: + if cmd == 'iface': + if consume: + ifaces.append(consume) + consume = {} + consume[cmd] = args + else: + consume[cmd] = args + + absorb = False + for (cmd, args) in consume.items(): + if cmd == 'iface': + absorb = True + if absorb: + ifaces.append(consume) + out_ifaces = {} + for info in ifaces: + if 'iface' not in info: + continue + iface_details = info['iface'].split(None) + dev_name = None + if len(iface_details) >= 1: + dev = iface_details[0].strip().lower() + if dev: + dev_name = dev + if not dev_name: + continue + route_info = {} + route_index = 0 + found = True + while found: + route_name = 'routes.' 
+ str(route_index) + if route_name in info: + val = info[route_name].split() + if val: + route_info[route_name] = val + else: + found = False + route_index += 1 + if dev_name in out_ifaces: + out_ifaces[dev_name].update(route_info) + else: + out_ifaces[dev_name] = route_info + return out_ifaces diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index 0c00a531..80a6f1d8 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -36,7 +36,6 @@ class Distro(distros.Distro): hostname_conf_fn = "/etc/sysconfig/network" systemd_hostname_conf_fn = "/etc/hostname" network_script_tpl = '/etc/sysconfig/network-scripts/ifcfg-%s' - resolve_conf_fn = "/etc/resolv.conf" tz_local_fn = "/etc/localtime" usr_lib_exec = "/usr/libexec" renderer_configs = { diff --git a/cloudinit/net/networkd.py b/cloudinit/net/networkd.py new file mode 100644 index 00000000..71f87995 --- /dev/null +++ b/cloudinit/net/networkd.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python3 +# vi: ts=4 expandtab +# +# Copyright (C) 2021 VMware Inc. +# +# Author: Shreenidhi Shedi +# +# This file is part of cloud-init. See LICENSE file for license information. + +import os + + +from . import renderer +from cloudinit import util +from cloudinit import subp +from cloudinit import log as logging +from collections import OrderedDict + +LOG = logging.getLogger(__name__) + + +class CfgParser: + def __init__(self): + self.conf_dict = OrderedDict({ + 'Match': [], + 'Link': [], + 'Network': [], + 'DHCPv4': [], + 'DHCPv6': [], + 'Address': [], + 'Route': [], + }) + + def update_section(self, sec, key, val): + for k in self.conf_dict.keys(): + if k == sec: + self.conf_dict[k].append(key+'='+str(val)) + self.conf_dict[k].sort() + + def get_final_conf(self): + contents = '' + for k, v in self.conf_dict.items(): + if not v: + continue + contents += '['+k+']\n' + for e in v: + contents += e + '\n' + contents += '\n' + + return contents + + def dump_data(self, target_fn): + if not target_fn: + LOG.warning('Target file not given') + return + + contents = self.get_final_conf() + LOG.debug('Final content: %s', contents) + util.write_file(target_fn, contents) + + +class Renderer(renderer.Renderer): + """ + Renders network information in /etc/systemd/network + + This Renderer is currently experimental and doesn't support all the + use cases supported by the other renderers yet. 
+ """ + + def __init__(self, config=None): + if not config: + config = {} + self.resolved_conf = config.get('resolved_conf_fn', + '/etc/systemd/resolved.conf') + self.network_conf_dir = config.get('network_conf_dir', + '/etc/systemd/network/') + + def generate_match_section(self, iface, cfg): + sec = 'Match' + match_dict = { + 'name': 'Name', + 'driver': 'Driver', + 'mac_address': 'MACAddress' + } + + if not iface: + return + + for k, v in match_dict.items(): + if k in iface and iface[k]: + cfg.update_section(sec, v, iface[k]) + + return iface['name'] + + def generate_link_section(self, iface, cfg): + sec = 'Link' + + if not iface: + return + + if 'mtu' in iface and iface['mtu']: + cfg.update_section(sec, 'MTUBytes', iface['mtu']) + + def parse_routes(self, conf, cfg): + sec = 'Route' + for k, v in conf.items(): + if k == 'gateway': + cfg.update_section(sec, 'Gateway', v) + elif k == 'network': + tmp = v + if 'prefix' in conf: + tmp += '/' + str(conf['prefix']) + cfg.update_section(sec, 'Destination', tmp) + elif k == 'metric': + cfg.update_section(sec, 'Metric', v) + + def parse_subnets(self, iface, cfg): + dhcp = 'no' + for e in iface.get('subnets', []): + t = e['type'] + if t == 'dhcp4' or t == 'dhcp': + if dhcp == 'no': + dhcp = 'ipv4' + elif dhcp == 'ipv6': + dhcp = 'yes' + elif t == 'dhcp6': + if dhcp == 'no': + dhcp = 'ipv6' + elif dhcp == 'ipv4': + dhcp = 'yes' + if 'routes' in e and e['routes']: + for i in e['routes']: + self.parse_routes(i, cfg) + elif 'address' in e: + for k, v in e.items(): + if k == 'address': + tmp = v + if 'prefix' in e: + tmp += '/' + str(e['prefix']) + cfg.update_section('Address', 'Address', tmp) + elif k == 'gateway': + cfg.update_section('Route', 'Gateway', v) + elif k == 'dns_nameservers': + cfg.update_section('Network', 'DNS', ' '.join(v)) + elif k == 'dns_search': + cfg.update_section('Network', 'Domains', ' '.join(v)) + + cfg.update_section('Network', 'DHCP', dhcp) + + # This is to accommodate extra keys present in VMware config + def dhcp_domain(self, d, cfg): + for item in ['dhcp4domain', 'dhcp6domain']: + if item not in d: + continue + ret = str(d[item]).casefold() + try: + ret = util.translate_bool(ret) + ret = 'yes' if ret else 'no' + except ValueError: + if ret != 'route': + LOG.warning('Invalid dhcp4domain value - %s', ret) + ret = 'no' + if item == 'dhcp4domain': + section = 'DHCPv4' + else: + section = 'DHCPv6' + cfg.update_section(section, 'UseDomains', ret) + + def parse_dns(self, iface, cfg, ns): + sec = 'Network' + + dns_cfg_map = { + 'search': 'Domains', + 'nameservers': 'DNS', + 'addresses': 'DNS', + } + + dns = iface.get('dns') + if not dns and ns.version == 1: + dns = { + 'search': ns.dns_searchdomains, + 'nameservers': ns.dns_nameservers, + } + elif not dns and ns.version == 2: + return + + for k, v in dns_cfg_map.items(): + if k in dns and dns[k]: + cfg.update_section(sec, v, ' '.join(dns[k])) + + def create_network_file(self, link, conf, nwk_dir): + net_fn_owner = 'systemd-network' + + LOG.debug('Setting Networking Config for %s', link) + + net_fn = nwk_dir + '10-cloud-init-' + link + '.network' + util.write_file(net_fn, conf) + util.chownbyname(net_fn, net_fn_owner, net_fn_owner) + + def render_network_state(self, network_state, templates=None, target=None): + fp_nwkd = self.network_conf_dir + if target: + fp_nwkd = subp.target_path(target) + fp_nwkd + + util.ensure_dir(os.path.dirname(fp_nwkd)) + + ret_dict = self._render_content(network_state) + for k, v in ret_dict.items(): + self.create_network_file(k, v, fp_nwkd) + + def 
_render_content(self, ns): + ret_dict = {} + for iface in ns.iter_interfaces(): + cfg = CfgParser() + + link = self.generate_match_section(iface, cfg) + self.generate_link_section(iface, cfg) + self.parse_subnets(iface, cfg) + self.parse_dns(iface, cfg, ns) + + for route in ns.iter_routes(): + self.parse_routes(route, cfg) + + if ns.version == 2: + name = iface['name'] + # network state doesn't give dhcp domain info + # using ns.config as a workaround here + self.dhcp_domain(ns.config['ethernets'][name], cfg) + + ret_dict.update({link: cfg.get_final_conf()}) + + return ret_dict + + +def available(target=None): + expected = ['systemctl'] + search = ['/usr/bin', '/bin'] + for p in expected: + if not subp.which(p, search=search, target=target): + return False + return True + + +def network_state_to_networkd(ns): + renderer = Renderer({}) + return renderer._render_content(ns) diff --git a/cloudinit/net/renderers.py b/cloudinit/net/renderers.py index e2de4d55..c3931a98 100644 --- a/cloudinit/net/renderers.py +++ b/cloudinit/net/renderers.py @@ -4,6 +4,7 @@ from . import eni from . import freebsd from . import netbsd from . import netplan +from . import networkd from . import RendererNotFoundError from . import openbsd from . import sysconfig @@ -13,12 +14,13 @@ NAME_TO_RENDERER = { "freebsd": freebsd, "netbsd": netbsd, "netplan": netplan, + "networkd": networkd, "openbsd": openbsd, "sysconfig": sysconfig, } DEFAULT_PRIORITY = ["eni", "sysconfig", "netplan", "freebsd", - "netbsd", "openbsd"] + "netbsd", "openbsd", "networkd"] def search(priority=None, target=None, first=False): diff --git a/cloudinit/tests/test_util.py b/cloudinit/tests/test_util.py index f9bc31be..a1ccb1dc 100644 --- a/cloudinit/tests/test_util.py +++ b/cloudinit/tests/test_util.py @@ -177,6 +177,17 @@ OS_RELEASE_UBUNTU = dedent("""\ UBUNTU_CODENAME=xenial\n """) +OS_RELEASE_PHOTON = ("""\ + NAME="VMware Photon OS" + VERSION="4.0" + ID=photon + VERSION_ID=4.0 + PRETTY_NAME="VMware Photon OS/Linux" + ANSI_COLOR="1;34" + HOME_URL="https://vmware.github.io/photon/" + BUG_REPORT_URL="https://github.com/vmware/photon/issues" +""") + class FakeCloud(object): @@ -609,6 +620,15 @@ class TestGetLinuxDistro(CiTestCase): self.assertEqual( ('opensuse-tumbleweed', '20180920', platform.machine()), dist) + @mock.patch('cloudinit.util.load_file') + def test_get_linux_photon_os_release(self, m_os_release, m_path_exists): + """Verify we get the correct name and machine arch on PhotonOS""" + m_os_release.return_value = OS_RELEASE_PHOTON + m_path_exists.side_effect = TestGetLinuxDistro.os_release_exists + dist = util.get_linux_distro() + self.assertEqual( + ('photon', '4.0', 'VMware Photon OS/Linux'), dist) + @mock.patch('platform.system') @mock.patch('platform.dist', create=True) def test_get_linux_distro_no_data(self, m_platform_dist, diff --git a/cloudinit/util.py b/cloudinit/util.py index f95dc435..7995c6c8 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -483,6 +483,8 @@ def get_linux_distro(): # which will include both version codename and architecture # on all distributions. 
flavor = platform.machine() + elif distro_name == 'photon': + flavor = os_release.get('PRETTY_NAME', '') else: flavor = os_release.get('VERSION_CODENAME', '') if not flavor: @@ -531,7 +533,7 @@ def system_info(): linux_dist = info['dist'][0].lower() if linux_dist in ( 'almalinux', 'alpine', 'arch', 'centos', 'debian', 'fedora', - 'rhel', 'rocky', 'suse'): + 'photon', 'rhel', 'rocky', 'suse'): var = linux_dist elif linux_dist in ('ubuntu', 'linuxmint', 'mint'): var = 'ubuntu' diff --git a/config/cloud.cfg.tmpl b/config/cloud.cfg.tmpl index 586384e4..d6dbb833 100644 --- a/config/cloud.cfg.tmpl +++ b/config/cloud.cfg.tmpl @@ -11,11 +11,21 @@ syslog_fix_perms: root:root # when a 'default' entry is found it will reference the 'default_user' # from the distro configuration specified below users: +{% if variant in ["photon"] %} + - name: root + lock_passwd: false +{% else %} - default +{% endif %} + +# VMware guest customization. +{% if variant in ["photon"] %} +disable_vmware_customization: true +{% endif %} # If this is set, 'root' will not be able to ssh in and they # will get a message to login instead as the default $user -{% if variant in ["freebsd"] %} +{% if variant in ["freebsd", "photon"] %} disable_root: false {% else %} disable_root: true @@ -38,6 +48,16 @@ preserve_hostname: false # This should not be required, but leave it in place until the real cause of # not finding -any- datasources is resolved. datasource_list: ['NoCloud', 'ConfigDrive', 'Azure', 'OpenStack', 'Ec2'] +{% elif variant in ["photon"] %} +# Datasources to check for cloud-config +datasource_list: [ + NoCloud, + ConfigDrive, + OVF, + OpenStack, + VMwareGuestInfo, + None + ] {% endif %} # Example datasource config # datasource: @@ -72,11 +92,13 @@ cloud_init_modules: - set_hostname - update_hostname - update_etc_hosts -{% if variant in ["alpine"] %} +{% if variant in ["alpine", "photon"] %} - resolv_conf {% endif %} {% if not variant.endswith("bsd") %} +{% if variant not in ["photon"] %} - ca-certs +{% endif %} - rsyslog {% endif %} - users-groups @@ -90,11 +112,15 @@ cloud_config_modules: - emit_upstart - snap {% endif %} +{% if variant not in ["photon"] %} - ssh-import-id - locale +{% endif %} - set-passwords -{% if variant in ["rhel", "fedora"] %} +{% if variant in ["rhel", "fedora", "photon"] %} +{% if variant not in ["photon"] %} - spacewalk +{% endif %} - yum-add-repo {% endif %} {% if variant in ["ubuntu", "unknown", "debian"] %} @@ -155,8 +181,8 @@ cloud_final_modules: system_info: # This will affect which distro class gets used {% if variant in ["almalinux", "alpine", "amazon", "arch", "centos", "debian", - "fedora", "freebsd", "netbsd", "openbsd", "rhel", "rocky", - "suse", "ubuntu"] %} + "fedora", "freebsd", "netbsd", "openbsd", "photon", "rhel", + "rocky", "suse", "ubuntu"] %} distro: {{ variant }} {% elif variant in ["dragonfly"] %} distro: dragonflybsd @@ -276,6 +302,22 @@ system_info: groups: [wheel] sudo: ["ALL=(ALL) NOPASSWD:ALL"] shell: /bin/ksh +{% elif variant == "photon" %} + default_user: + name: photon + lock_passwd: True + gecos: PhotonOS + groups: [wheel] + sudo: ["ALL=(ALL) NOPASSWD:ALL"] + shell: /bin/bash + # Other config here will be given to the distro class and/or path classes + paths: + cloud_dir: /var/lib/cloud/ + templates_dir: /etc/cloud/templates/ + + ssh_svcname: sshd + +#manage_etc_hosts: true {% endif %} {% if variant in ["freebsd", "netbsd", "openbsd"] %} network: diff --git a/systemd/cloud-init.service.tmpl b/systemd/cloud-init.service.tmpl index 4da1a905..c773e411 100644 --- 
a/systemd/cloud-init.service.tmpl +++ b/systemd/cloud-init.service.tmpl @@ -1,7 +1,9 @@ ## template:jinja [Unit] Description=Initial cloud-init job (metadata service crawler) +{% if variant not in ["photon"] %} DefaultDependencies=no +{% endif %} Wants=cloud-init-local.service Wants=sshd-keygen.service Wants=sshd.service diff --git a/templates/chrony.conf.photon.tmpl b/templates/chrony.conf.photon.tmpl new file mode 100644 index 00000000..8551f793 --- /dev/null +++ b/templates/chrony.conf.photon.tmpl @@ -0,0 +1,48 @@ +## template:jinja +# Use public servers from the pool.ntp.org project. +# Please consider joining the pool (http://www.pool.ntp.org/join.html). +{% if pools %}# pools +{% endif %} +{% for pool in pools -%} +pool {{pool}} iburst +{% endfor %} +{%- if servers %}# servers +{% endif %} +{% for server in servers -%} +server {{server}} iburst +{% endfor %} + +# Record the rate at which the system clock gains/losses time. +driftfile /var/lib/chrony/drift + +# Allow the system clock to be stepped in the first three updates +# if its offset is larger than 1 second. +makestep 1.0 3 + +# Enable kernel synchronization of the real-time clock (RTC). +rtcsync + +# Enable hardware timestamping on all interfaces that support it. +#hwtimestamp * + +# Increase the minimum number of selectable sources required to adjust +# the system clock. +#minsources 2 + +# Allow NTP client access from local network. +#allow 192.168.0.0/16 + +# Serve time even if not synchronized to a time source. +#local stratum 10 + +# Specify file containing keys for NTP authentication. +#keyfile /etc/chrony.keys + +# Get TAI-UTC offset and leap seconds from the system tz database. +leapsectz right/UTC + +# Specify directory for log files. +logdir /var/log/chrony + +# Select which information is logged. +#log measurements statistics tracking diff --git a/templates/hosts.photon.tmpl b/templates/hosts.photon.tmpl new file mode 100644 index 00000000..0fd6f722 --- /dev/null +++ b/templates/hosts.photon.tmpl @@ -0,0 +1,22 @@ +## template:jinja +{# +This file /etc/cloud/templates/hosts.photon.tmpl is only utilized +if enabled in cloud-config. Specifically, in order to enable it +you need to add the following to config: + manage_etc_hosts: True +-#} +# Your system has configured 'manage_etc_hosts' as True. +# As a result, if you wish for changes to this file to persist +# then you will need to either +# a.) make changes to the master file in /etc/cloud/templates/hosts.photon.tmpl +# b.) change or remove the value of 'manage_etc_hosts' in +# /etc/cloud/cloud.cfg or cloud-config from user-data +# +# The following lines are desirable for IPv4 capable hosts +127.0.0.1 {{fqdn}} {{hostname}} +127.0.0.1 localhost.localdomain localhost +127.0.0.1 localhost4.localdomain4 localhost4 + +# The following lines are desirable for IPv6 capable hosts +::1 {{fqdn}} {{hostname}} +::1 localhost6.localdomain6 localhost6 diff --git a/templates/ntp.conf.photon.tmpl b/templates/ntp.conf.photon.tmpl new file mode 100644 index 00000000..4d4910d1 --- /dev/null +++ b/templates/ntp.conf.photon.tmpl @@ -0,0 +1,61 @@ +## template:jinja + +# For more information about this file, see the man pages +# ntp.conf(5), ntp_acc(5), ntp_auth(5), ntp_clock(5), ntp_misc(5), ntp_mon(5). + +driftfile /var/lib/ntp/drift + +# Permit time synchronization with our time source, but do not +# permit the source to query or modify the service on this system. 
+restrict default kod nomodify notrap nopeer noquery +restrict -6 default kod nomodify notrap nopeer noquery + +# Permit all access over the loopback interface. This could +# be tightened as well, but to do so would effect some of +# the administrative functions. +restrict 127.0.0.1 +restrict -6 ::1 + +# Hosts on local network are less restricted. +#restrict 192.168.1.0 mask 255.255.255.0 nomodify notrap + +# Use public servers from the pool.ntp.org project. +# Please consider joining the pool (http://www.pool.ntp.org/join.html). +{% if pools %}# pools +{% endif %} +{% for pool in pools -%} +pool {{pool}} iburst +{% endfor %} +{%- if servers %}# servers +{% endif %} +{% for server in servers -%} +server {{server}} iburst +{% endfor %} + +#broadcast 192.168.1.255 autokey # broadcast server +#broadcastclient # broadcast client +#broadcast 224.0.1.1 autokey # multicast server +#multicastclient 224.0.1.1 # multicast client +#manycastserver 239.255.254.254 # manycast server +#manycastclient 239.255.254.254 autokey # manycast client + +# Enable public key cryptography. +#crypto + +includefile /etc/ntp/crypto/pw + +# Key file containing the keys and key identifiers used when operating +# with symmetric key cryptography. +keys /etc/ntp/keys + +# Specify the key identifiers which are trusted. +#trustedkey 4 8 42 + +# Specify the key identifier to use with the ntpdc utility. +#requestkey 8 + +# Specify the key identifier to use with the ntpq utility. +#controlkey 8 + +# Enable writing of statistics records. +#statistics clockstats cryptostats loopstats peerstats diff --git a/templates/resolv.conf.tmpl b/templates/resolv.conf.tmpl index f870be67..72a37bf7 100644 --- a/templates/resolv.conf.tmpl +++ b/templates/resolv.conf.tmpl @@ -22,7 +22,7 @@ domain {{domain}} sortlist {% for sort in sortlist %}{{sort}} {% endfor %} {% endif %} {# - Flags and options are required to be on the + Flags and options are required to be on the same line preceded by "options" keyword #} {% if options or flags %} diff --git a/templates/systemd.resolved.conf.tmpl b/templates/systemd.resolved.conf.tmpl new file mode 100644 index 00000000..fca50d37 --- /dev/null +++ b/templates/systemd.resolved.conf.tmpl @@ -0,0 +1,15 @@ +## template:jinja +# Your system has been configured with 'manage-resolv-conf' set to true. +# As a result, cloud-init has written this file with configuration data +# that it has been provided. Cloud-init, by default, will write this file +# a single time (PER_ONCE). 
+# +[Resolve] +LLMNR=false +{% if nameservers is defined %} +DNS={% for server in nameservers %}{{server}} {% endfor %} +{% endif %} + +{% if searchdomains is defined %} +Domains={% for search in searchdomains %}{{search}} {% endfor %} +{% endif %} diff --git a/tests/cloud_tests/util.py b/tests/cloud_tests/util.py index 7dcccbdd..49baadb0 100644 --- a/tests/cloud_tests/util.py +++ b/tests/cloud_tests/util.py @@ -23,7 +23,7 @@ from tests.cloud_tests import LOG OS_FAMILY_MAPPING = { 'debian': ['debian', 'ubuntu'], - 'redhat': ['centos', 'rhel', 'fedora'], + 'redhat': ['centos', 'photon', 'rhel', 'fedora'], 'gentoo': ['gentoo'], 'freebsd': ['freebsd'], 'suse': ['sles'], diff --git a/tests/unittests/test_cli.py b/tests/unittests/test_cli.py index fbc6ec11..fdb4026c 100644 --- a/tests/unittests/test_cli.py +++ b/tests/unittests/test_cli.py @@ -225,7 +225,7 @@ class TestCLI(test_helpers.FilesystemMockingTestCase): expected_doc_sections = [ '**Supported distros:** all', ('**Supported distros:** almalinux, alpine, centos, debian, ' - 'fedora, opensuse, rhel, rocky, sles, ubuntu'), + 'fedora, opensuse, photon, rhel, rocky, sles, ubuntu'), '**Config schema**:\n **resize_rootfs:** (true/false/noblock)', '**Examples**::\n\n runcmd:\n - [ ls, -l, / ]\n' ] diff --git a/tests/unittests/test_distros/test_netconfig.py b/tests/unittests/test_distros/test_netconfig.py index a1df066a..562ee04a 100644 --- a/tests/unittests/test_distros/test_netconfig.py +++ b/tests/unittests/test_distros/test_netconfig.py @@ -2,6 +2,7 @@ import copy import os +import re from io import StringIO from textwrap import dedent from unittest import mock @@ -15,7 +16,6 @@ from cloudinit.tests.helpers import ( from cloudinit import subp from cloudinit import util - BASE_NET_CFG = ''' auto lo iface lo inet loopback @@ -771,6 +771,103 @@ class TestNetCfgDistroArch(TestNetCfgDistroBase): with_netplan=True) +class TestNetCfgDistroPhoton(TestNetCfgDistroBase): + + def setUp(self): + super(TestNetCfgDistroPhoton, self).setUp() + self.distro = self._get_distro('photon', renderers=['networkd']) + + def create_conf_dict(self, contents): + content_dict = {} + for line in contents: + if line: + line = line.strip() + if line and re.search(r'^\[(.+)\]$', line): + content_dict[line] = [] + key = line + elif line: + assert key + content_dict[key].append(line) + + return content_dict + + def compare_dicts(self, actual, expected): + for k, v in actual.items(): + self.assertEqual(sorted(expected[k]), sorted(v)) + + def _apply_and_verify(self, apply_fn, config, expected_cfgs=None, + bringup=False): + if not expected_cfgs: + raise ValueError('expected_cfg must not be None') + + tmpd = None + with mock.patch('cloudinit.net.networkd.available') as m_avail: + m_avail.return_value = True + with self.reRooted(tmpd) as tmpd: + apply_fn(config, bringup) + + results = dir2dict(tmpd) + for cfgpath, expected in expected_cfgs.items(): + actual = self.create_conf_dict(results[cfgpath].splitlines()) + self.compare_dicts(actual, expected) + self.assertEqual(0o644, get_mode(cfgpath, tmpd)) + + def nwk_file_path(self, ifname): + return '/etc/systemd/network/10-cloud-init-%s.network' % ifname + + def net_cfg_1(self, ifname): + ret = """\ + [Match] + Name=%s + [Network] + DHCP=no + [Address] + Address=192.168.1.5/24 + [Route] + Gateway=192.168.1.254""" % ifname + return ret + + def net_cfg_2(self, ifname): + ret = """\ + [Match] + Name=%s + [Network] + DHCP=ipv4""" % ifname + return ret + + def test_photon_network_config_v1(self): + tmp = 
self.net_cfg_1('eth0').splitlines() + expected_eth0 = self.create_conf_dict(tmp) + + tmp = self.net_cfg_2('eth1').splitlines() + expected_eth1 = self.create_conf_dict(tmp) + + expected_cfgs = { + self.nwk_file_path('eth0'): expected_eth0, + self.nwk_file_path('eth1'): expected_eth1, + } + + self._apply_and_verify(self.distro.apply_network_config, + V1_NET_CFG, + expected_cfgs.copy()) + + def test_photon_network_config_v2(self): + tmp = self.net_cfg_1('eth7').splitlines() + expected_eth7 = self.create_conf_dict(tmp) + + tmp = self.net_cfg_2('eth9').splitlines() + expected_eth9 = self.create_conf_dict(tmp) + + expected_cfgs = { + self.nwk_file_path('eth7'): expected_eth7, + self.nwk_file_path('eth9'): expected_eth9, + } + + self._apply_and_verify(self.distro.apply_network_config, + V2_NET_CFG, + expected_cfgs.copy()) + + def get_mode(path, target=None): return os.stat(subp.target_path(target, path)).st_mode & 0o777 diff --git a/tests/unittests/test_handler/test_handler_set_hostname.py b/tests/unittests/test_handler/test_handler_set_hostname.py index 73641b70..32ca3b7e 100644 --- a/tests/unittests/test_handler/test_handler_set_hostname.py +++ b/tests/unittests/test_handler/test_handler_set_hostname.py @@ -120,6 +120,32 @@ class TestHostname(t_help.FilesystemMockingTestCase): contents = util.load_file(distro.hostname_conf_fn) self.assertEqual('blah', contents.strip()) + @mock.patch('cloudinit.distros.Distro.uses_systemd', return_value=False) + def test_photon_hostname(self, m_uses_systemd): + cfg1 = { + 'hostname': 'photon', + 'prefer_fqdn_over_hostname': True, + 'fqdn': 'test1.vmware.com', + } + cfg2 = { + 'hostname': 'photon', + 'prefer_fqdn_over_hostname': False, + 'fqdn': 'test2.vmware.com', + } + + ds = None + distro = self._fetch_distro('photon', cfg1) + paths = helpers.Paths({'cloud_dir': self.tmp}) + cc = cloud.Cloud(ds, paths, {}, distro, None) + self.patchUtils(self.tmp) + for c in [cfg1, cfg2]: + cc_set_hostname.handle('cc_set_hostname', c, cc, LOG, []) + contents = util.load_file(distro.hostname_conf_fn, decode=True) + if c['prefer_fqdn_over_hostname']: + self.assertEqual(contents.strip(), c['fqdn']) + else: + self.assertEqual(contents.strip(), c['hostname']) + def test_multiple_calls_skips_unchanged_hostname(self): """Only new hostname or fqdn values will generate a hostname call.""" distro = self._fetch_distro('debian') diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py index b72a62b8..b2ddbf99 100644 --- a/tests/unittests/test_net.py +++ b/tests/unittests/test_net.py @@ -5,7 +5,7 @@ from cloudinit import distros from cloudinit.net import cmdline from cloudinit.net import ( eni, interface_has_own_mac, natural_sort_key, netplan, network_state, - renderers, sysconfig) + renderers, sysconfig, networkd) from cloudinit.sources.helpers import openstack from cloudinit import temp_utils from cloudinit import subp @@ -821,6 +821,28 @@ iface eth1 inet static NETWORK_CONFIGS = { 'small': { + 'expected_networkd_eth99': textwrap.dedent("""\ + [Match] + Name=eth99 + MACAddress=c0:d6:9f:2c:e8:80 + [Network] + DHCP=ipv4 + Domains=wark.maas + DNS=1.2.3.4 5.6.7.8 + [Route] + Gateway=65.61.151.37 + Destination=0.0.0.0/0 + Metric=10000 + """).rstrip(' '), + 'expected_networkd_eth1': textwrap.dedent("""\ + [Match] + Name=eth1 + MACAddress=cf:d6:af:48:e8:80 + [Network] + DHCP=no + Domains=wark.maas + DNS=1.2.3.4 5.6.7.8 + """).rstrip(' '), 'expected_eni': textwrap.dedent("""\ auto lo iface lo inet loopback @@ -938,6 +960,12 @@ NETWORK_CONFIGS = { """), }, 'v4_and_v6': { + 
'expected_networkd': textwrap.dedent("""\ + [Match] + Name=iface0 + [Network] + DHCP=yes + """).rstrip(' '), 'expected_eni': textwrap.dedent("""\ auto lo iface lo inet loopback @@ -973,6 +1001,17 @@ NETWORK_CONFIGS = { """).rstrip(' '), }, 'v4_and_v6_static': { + 'expected_networkd': textwrap.dedent("""\ + [Match] + Name=iface0 + [Link] + MTUBytes=8999 + [Network] + DHCP=no + [Address] + Address=192.168.14.2/24 + Address=2001:1::1/64 + """).rstrip(' '), 'expected_eni': textwrap.dedent("""\ auto lo iface lo inet loopback @@ -1059,6 +1098,12 @@ NETWORK_CONFIGS = { """).rstrip(' '), }, 'dhcpv6_only': { + 'expected_networkd': textwrap.dedent("""\ + [Match] + Name=iface0 + [Network] + DHCP=ipv6 + """).rstrip(' '), 'expected_eni': textwrap.dedent("""\ auto lo iface lo inet loopback @@ -4986,26 +5031,199 @@ class TestEniRoundTrip(CiTestCase): files['/etc/network/interfaces'].splitlines()) +class TestNetworkdNetRendering(CiTestCase): + + def create_conf_dict(self, contents): + content_dict = {} + for line in contents: + if line: + line = line.strip() + if line and re.search(r'^\[(.+)\]$', line): + content_dict[line] = [] + key = line + elif line: + content_dict[key].append(line) + + return content_dict + + def compare_dicts(self, actual, expected): + for k, v in actual.items(): + self.assertEqual(sorted(expected[k]), sorted(v)) + + @mock.patch("cloudinit.net.util.chownbyname", return_value=True) + @mock.patch("cloudinit.net.util.get_cmdline", return_value="root=myroot") + @mock.patch("cloudinit.net.sys_dev_path") + @mock.patch("cloudinit.net.read_sys_net") + @mock.patch("cloudinit.net.get_devicelist") + def test_networkd_default_generation(self, mock_get_devicelist, + mock_read_sys_net, + mock_sys_dev_path, + m_get_cmdline, + m_chown): + tmp_dir = self.tmp_dir() + _setup_test(tmp_dir, mock_get_devicelist, + mock_read_sys_net, mock_sys_dev_path) + + network_cfg = net.generate_fallback_config() + ns = network_state.parse_net_config_data(network_cfg, + skip_broken=False) + + render_dir = os.path.join(tmp_dir, "render") + os.makedirs(render_dir) + + render_target = 'etc/systemd/network/10-cloud-init-eth1000.network' + renderer = networkd.Renderer({}) + renderer.render_network_state(ns, target=render_dir) + + self.assertTrue(os.path.exists(os.path.join(render_dir, + render_target))) + with open(os.path.join(render_dir, render_target)) as fh: + contents = fh.readlines() + + actual = self.create_conf_dict(contents) + print(actual) + + expected = textwrap.dedent("""\ + [Match] + Name=eth1000 + MACAddress=07-1c-c6-75-a4-be + [Network] + DHCP=ipv4""").rstrip(' ') + + expected = self.create_conf_dict(expected.splitlines()) + + self.compare_dicts(actual, expected) + + +class TestNetworkdRoundTrip(CiTestCase): + + def create_conf_dict(self, contents): + content_dict = {} + for line in contents: + if line: + line = line.strip() + if line and re.search(r'^\[(.+)\]$', line): + content_dict[line] = [] + key = line + elif line: + content_dict[key].append(line) + + return content_dict + + def compare_dicts(self, actual, expected): + for k, v in actual.items(): + self.assertEqual(sorted(expected[k]), sorted(v)) + + def _render_and_read(self, network_config=None, state=None, nwkd_path=None, + dir=None): + if dir is None: + dir = self.tmp_dir() + + if network_config: + ns = network_state.parse_net_config_data(network_config) + elif state: + ns = state + else: + raise ValueError("Expected data or state, got neither") + + if not nwkd_path: + nwkd_path = '/etc/systemd/network/' + + renderer = 
networkd.Renderer(config={'network_conf_dir': nwkd_path}) + + renderer.render_network_state(ns, target=dir) + return dir2dict(dir) + + @mock.patch("cloudinit.net.util.chownbyname", return_value=True) + def testsimple_render_small_networkd(self, m_chown): + nwk_fn1 = '/etc/systemd/network/10-cloud-init-eth99.network' + nwk_fn2 = '/etc/systemd/network/10-cloud-init-eth1.network' + entry = NETWORK_CONFIGS['small'] + files = self._render_and_read(network_config=yaml.load(entry['yaml'])) + + actual = files[nwk_fn1].splitlines() + actual = self.create_conf_dict(actual) + + expected = entry['expected_networkd_eth99'].splitlines() + expected = self.create_conf_dict(expected) + + self.compare_dicts(actual, expected) + + actual = files[nwk_fn2].splitlines() + actual = self.create_conf_dict(actual) + + expected = entry['expected_networkd_eth1'].splitlines() + expected = self.create_conf_dict(expected) + + self.compare_dicts(actual, expected) + + @mock.patch("cloudinit.net.util.chownbyname", return_value=True) + def testsimple_render_v4_and_v6(self, m_chown): + nwk_fn = '/etc/systemd/network/10-cloud-init-iface0.network' + entry = NETWORK_CONFIGS['v4_and_v6'] + files = self._render_and_read(network_config=yaml.load(entry['yaml'])) + + actual = files[nwk_fn].splitlines() + actual = self.create_conf_dict(actual) + + expected = entry['expected_networkd'].splitlines() + expected = self.create_conf_dict(expected) + + self.compare_dicts(actual, expected) + + @mock.patch("cloudinit.net.util.chownbyname", return_value=True) + def testsimple_render_v4_and_v6_static(self, m_chown): + nwk_fn = '/etc/systemd/network/10-cloud-init-iface0.network' + entry = NETWORK_CONFIGS['v4_and_v6_static'] + files = self._render_and_read(network_config=yaml.load(entry['yaml'])) + + actual = files[nwk_fn].splitlines() + actual = self.create_conf_dict(actual) + + expected = entry['expected_networkd'].splitlines() + expected = self.create_conf_dict(expected) + + self.compare_dicts(actual, expected) + + @mock.patch("cloudinit.net.util.chownbyname", return_value=True) + def testsimple_render_dhcpv6_only(self, m_chown): + nwk_fn = '/etc/systemd/network/10-cloud-init-iface0.network' + entry = NETWORK_CONFIGS['dhcpv6_only'] + files = self._render_and_read(network_config=yaml.load(entry['yaml'])) + + actual = files[nwk_fn].splitlines() + actual = self.create_conf_dict(actual) + + expected = entry['expected_networkd'].splitlines() + expected = self.create_conf_dict(expected) + + self.compare_dicts(actual, expected) + + class TestRenderersSelect: @pytest.mark.parametrize( - 'renderer_selected,netplan,eni,nm,scfg,sys', ( + 'renderer_selected,netplan,eni,nm,scfg,sys,networkd', ( # -netplan -ifupdown -nm -scfg -sys raises error - (net.RendererNotFoundError, False, False, False, False, False), + (net.RendererNotFoundError, False, False, False, False, False, + False), # -netplan +ifupdown -nm -scfg -sys selects eni - ('eni', False, True, False, False, False), + ('eni', False, True, False, False, False, False), # +netplan +ifupdown -nm -scfg -sys selects eni - ('eni', True, True, False, False, False), + ('eni', True, True, False, False, False, False), # +netplan -ifupdown -nm -scfg -sys selects netplan - ('netplan', True, False, False, False, False), + ('netplan', True, False, False, False, False, False), # Ubuntu with Network-Manager installed # +netplan -ifupdown +nm -scfg -sys selects netplan - ('netplan', True, False, True, False, False), + ('netplan', True, False, True, False, False, False), # Centos/OpenSuse with Network-Manager installed 
selects sysconfig # -netplan -ifupdown +nm -scfg +sys selects sysconfig - ('sysconfig', False, False, True, False, True), + ('sysconfig', False, False, True, False, True, False), + # -netplan -ifupdown -nm -scfg -sys +networkd selects networkd + ('networkd', False, False, False, False, False, True), ), ) + @mock.patch("cloudinit.net.renderers.networkd.available") @mock.patch("cloudinit.net.renderers.netplan.available") @mock.patch("cloudinit.net.renderers.sysconfig.available") @mock.patch("cloudinit.net.renderers.sysconfig.available_sysconfig") @@ -5013,7 +5231,8 @@ class TestRenderersSelect: @mock.patch("cloudinit.net.renderers.eni.available") def test_valid_renderer_from_defaults_depending_on_availability( self, m_eni_avail, m_nm_avail, m_scfg_avail, m_sys_avail, - m_netplan_avail, renderer_selected, netplan, eni, nm, scfg, sys + m_netplan_avail, m_networkd_avail, renderer_selected, + netplan, eni, nm, scfg, sys, networkd ): """Assert proper renderer per DEFAULT_PRIORITY given availability.""" m_eni_avail.return_value = eni # ifupdown pkg presence @@ -5021,6 +5240,7 @@ class TestRenderersSelect: m_scfg_avail.return_value = scfg # sysconfig presence m_sys_avail.return_value = sys # sysconfig/ifup/down presence m_netplan_avail.return_value = netplan # netplan presence + m_networkd_avail.return_value = networkd # networkd presence if isinstance(renderer_selected, str): (renderer_name, _rnd_class) = renderers.select( priority=renderers.DEFAULT_PRIORITY ) @@ -5094,6 +5314,12 @@ class TestNetRenderers(CiTestCase): result = sysconfig.available() self.assertTrue(result) + @mock.patch("cloudinit.net.renderers.networkd.available") + def test_networkd_available(self, m_nwkd_avail): + m_nwkd_avail.return_value = True + found = renderers.search(priority=['networkd'], first=False) + self.assertEqual('networkd', found[0][0]) + @mock.patch( "cloudinit.net.is_openvswitch_internal_interface", diff --git a/tests/unittests/test_render_cloudcfg.py b/tests/unittests/test_render_cloudcfg.py index 495e2669..275879af 100644 --- a/tests/unittests/test_render_cloudcfg.py +++ b/tests/unittests/test_render_cloudcfg.py @@ -10,7 +10,8 @@ from cloudinit import util # TODO(Look to align with tools.render-cloudcfg or cloudinit.distos.OSFAMILIES) DISTRO_VARIANTS = ["amazon", "arch", "centos", "debian", "fedora", "freebsd", - "netbsd", "openbsd", "rhel", "suse", "ubuntu", "unknown"] + "netbsd", "openbsd", "photon", "rhel", "suse", "ubuntu", + "unknown"] @pytest.mark.allow_subp_for(sys.executable) diff --git a/tools/render-cloudcfg b/tools/render-cloudcfg index 9ec554bd..7e667de4 100755 --- a/tools/render-cloudcfg +++ b/tools/render-cloudcfg @@ -5,8 +5,8 @@ import os import sys VARIANTS = ["almalinux", "alpine", "amazon", "arch", "centos", "debian", - "fedora", "freebsd", "netbsd", "openbsd", "rhel", "suse", "rocky", - "ubuntu", "unknown"] + "fedora", "freebsd", "netbsd", "openbsd", "photon", "rhel", + "suse", "rocky", "ubuntu", "unknown"] if "avoid-pep8-E402-import-not-top-of-file": -- cgit v1.2.3 From 6e0aa175513d0d5f64a3684f6840621cb9759b27 Mon Sep 17 00:00:00 2001 From: Paride Legovini Date: Tue, 29 Jun 2021 18:01:28 +0200 Subject: cloud_tests: fix the Impish release name (#931) Commit f5a2449 introduced Impish but left the release name set to 'hirsute'.
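For reference, a minimal sketch of how the corrected ``impish`` entry in releases.yaml reads after this one-line change, reconstructed only from the hunk below (keys that do not appear in the hunk are omitted):

  impish:
    # EOL: July 2022
    default:
      enabled: true
      release: impish   # previously left as 'hirsute' by mistake
      version: "21.10"
      os: ubuntu
      feature_groups:
        # ... remaining keys unchanged from the original definition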
--- tests/cloud_tests/releases.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'tests/cloud_tests') diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml index b4915a25..c52b78f9 100644 --- a/tests/cloud_tests/releases.yaml +++ b/tests/cloud_tests/releases.yaml @@ -137,7 +137,7 @@ releases: # EOL: July 2022 default: enabled: true - release: hirsute + release: impish version: "21.10" os: ubuntu feature_groups: -- cgit v1.2.3 From 9893dfcd2f0be92197d707236cbd44cb7452364d Mon Sep 17 00:00:00 2001 From: Gabriel Nagy Date: Tue, 10 Aug 2021 18:14:23 +0300 Subject: cc_puppet: support AIO installations and more (#960) - update the puppet module to support AIO installations by setting `install_type` to `aio` - make the install collection configurable through the `collection` parameter; by default the rolling `puppet` collection will be used, which installs the latest version - when `install_type` is `aio`, puppetlabs repos will be purged after installation; set `cleanup` to `False` to prevent this - AIO installations are performed by downloading and executing a shell script; the URL for this script can be overridden using the `aio_install_url` parameter - make it possible to run puppet agent after installation/configuration via the `exec` key - by default, puppet agent will run with the `--test` argument; this can be overridden via the `exec_args` key --- cloudinit/config/cc_puppet.py | 159 +++++++++++-- doc/examples/cloud-config-puppet.txt | 60 ++++- .../testcases/examples/setup_run_puppet.yaml | 10 +- .../unittests/test_handler/test_handler_puppet.py | 261 +++++++++++++++++++-- 4 files changed, 426 insertions(+), 64 deletions(-) (limited to 'tests/cloud_tests') diff --git a/cloudinit/config/cc_puppet.py b/cloudinit/config/cc_puppet.py index bc981cf4..a0779eb0 100644 --- a/cloudinit/config/cc_puppet.py +++ b/cloudinit/config/cc_puppet.py @@ -29,22 +29,41 @@ The keys are ``package_name``, ``conf_file``, ``ssl_dir`` and ones that work with puppet 3.x and with distributions that ship modified puppet 4.x that uses the old paths. +Agent packages from the puppetlabs repositories can be installed by setting +``install_type`` to ``aio``. Based on this setting, the default config/SSL/CSR +paths will be adjusted accordingly. To maintain backwards compatibility this +setting defaults to ``packages`` which will install puppet from the distro +packages. + +If installing ``aio`` packages, ``collection`` can also be set to one of +``puppet`` (rolling release), ``puppet6``, ``puppet7`` (or their nightly +counterparts) in order to install specific release streams. By default, the +puppetlabs repository will be purged after installation finishes; set +``cleanup`` to ``false`` to prevent this. AIO packages are installed through a +shell script which is downloaded on the machine and then executed; the path to +this script can be overridden using the ``aio_install_url`` key. + Puppet configuration can be specified under the ``conf`` key. The configuration is specified as a dictionary containing high-level ``
`` keys and lists of ``=`` pairs within each section. Each section name and ``=`` pair is written directly to ``puppet.conf``. As -such, section names should be one of: ``main``, ``master``, ``agent`` or +such, section names should be one of: ``main``, ``server``, ``agent`` or ``user`` and keys should be valid puppet configuration options. The ``certname`` key supports string substitutions for ``%i`` and ``%f``, corresponding to the instance id and fqdn of the machine respectively. If ``ca_cert`` is present, it will not be written to ``puppet.conf``, but -instead will be used as the puppermaster certificate. It should be specified +instead will be used as the puppetserver certificate. It should be specified in pem format as a multi-line string (using the ``|`` yaml notation). -Additionally it's possible to create a csr_attributes.yaml for -CSR attributes and certificate extension requests. +Additionally it's possible to create a ``csr_attributes.yaml`` file for CSR +attributes and certificate extension requests. See https://puppet.com/docs/puppet/latest/config_file_csr_attributes.html +The puppet service will be automatically enabled after installation. A manual +run can also be triggered by setting ``exec`` to ``true``, and additional +arguments can be passed to ``puppet agent`` via the ``exec_args`` key (by +default the agent will execute with the ``--test`` flag). + **Internal name:** ``cc_puppet`` **Module frequency:** per instance @@ -56,13 +75,19 @@ See https://puppet.com/docs/puppet/latest/config_file_csr_attributes.html puppet: install: version: + collection: + install_type: + aio_install_url: 'https://git.io/JBhoQ' + cleanup: conf_file: '/etc/puppet/puppet.conf' ssl_dir: '/var/lib/puppet/ssl' csr_attributes_path: '/etc/puppet/csr_attributes.yaml' package_name: 'puppet' + exec: + exec_args: ['--test'] conf: agent: - server: "puppetmaster.example.org" + server: "puppetserver.example.org" certname: "%i.%f" ca_cert: | -------BEGIN CERTIFICATE------- @@ -84,12 +109,12 @@ from io import StringIO from cloudinit import helpers from cloudinit import subp +from cloudinit import temp_utils from cloudinit import util +from cloudinit import url_helper -PUPPET_CONF_PATH = '/etc/puppet/puppet.conf' -PUPPET_SSL_DIR = '/var/lib/puppet/ssl' -PUPPET_CSR_ATTRIBUTES_PATH = '/etc/puppet/csr_attributes.yaml' -PUPPET_PACKAGE_NAME = 'puppet' +AIO_INSTALL_URL = 'https://raw.githubusercontent.com/puppetlabs/install-puppet/main/install.sh' # noqa: E501 +PUPPET_AGENT_DEFAULT_ARGS = ['--test'] class PuppetConstants(object): @@ -119,6 +144,43 @@ def _autostart_puppet(log): " puppet services on this system")) +def get_config_value(puppet_bin, setting): + """Get the config value for a given setting using `puppet config print` + :param puppet_bin: path to puppet binary + :param setting: setting to query + """ + out, _ = subp.subp([puppet_bin, 'config', 'print', setting]) + return out.rstrip() + + +def install_puppet_aio(url=AIO_INSTALL_URL, version=None, + collection=None, cleanup=True): + """Install puppet-agent from the puppetlabs repositories using the one-shot + shell script + + :param url: URL from where to download the install script + :param version: version to install, blank defaults to latest + :param collection: collection to install, blank defaults to latest + :param cleanup: whether to purge the puppetlabs repo after installation + """ + args = [] + if version is not None: + args = ['-v', version] + if collection is not None: + args += ['-c', collection] + + # Purge puppetlabs repos after installation 
+ if cleanup: + args += ['--cleanup'] + content = url_helper.readurl(url=url, retries=5).contents + + # Use tmpdir over tmpfile to avoid 'text file busy' on execute + with temp_utils.tempdir(needs_exe=True) as tmpd: + tmpf = os.path.join(tmpd, 'puppet-install') + util.write_file(tmpf, content, mode=0o700) + return subp.subp([tmpf] + args, capture=False) + + def handle(name, cfg, cloud, log, _args): # If there isn't a puppet key in the configuration don't do anything if 'puppet' not in cfg: @@ -130,23 +192,50 @@ def handle(name, cfg, cloud, log, _args): # Start by installing the puppet package if necessary... install = util.get_cfg_option_bool(puppet_cfg, 'install', True) version = util.get_cfg_option_str(puppet_cfg, 'version', None) - package_name = util.get_cfg_option_str( - puppet_cfg, 'package_name', PUPPET_PACKAGE_NAME) - conf_file = util.get_cfg_option_str( - puppet_cfg, 'conf_file', PUPPET_CONF_PATH) - ssl_dir = util.get_cfg_option_str(puppet_cfg, 'ssl_dir', PUPPET_SSL_DIR) - csr_attributes_path = util.get_cfg_option_str( - puppet_cfg, 'csr_attributes_path', PUPPET_CSR_ATTRIBUTES_PATH) + collection = util.get_cfg_option_str(puppet_cfg, 'collection', None) + install_type = util.get_cfg_option_str( + puppet_cfg, 'install_type', 'packages') + cleanup = util.get_cfg_option_bool(puppet_cfg, 'cleanup', True) + run = util.get_cfg_option_bool(puppet_cfg, 'exec', default=False) + aio_install_url = util.get_cfg_option_str( + puppet_cfg, 'aio_install_url', default=AIO_INSTALL_URL) - p_constants = PuppetConstants(conf_file, ssl_dir, csr_attributes_path, log) + # AIO and distro packages use different paths + if install_type == 'aio': + puppet_user = 'root' + puppet_bin = '/opt/puppetlabs/bin/puppet' + puppet_package = 'puppet-agent' + else: # default to 'packages' + puppet_user = 'puppet' + puppet_bin = 'puppet' + puppet_package = 'puppet' + + package_name = util.get_cfg_option_str( + puppet_cfg, 'package_name', puppet_package) if not install and version: - log.warning(("Puppet install set false but version supplied," + log.warning(("Puppet install set to false but version supplied," " doing nothing.")) elif install: - log.debug(("Attempting to install puppet %s,"), - version if version else 'latest') + log.debug(("Attempting to install puppet %s from %s"), + version if version else 'latest', install_type) - cloud.distro.install_packages((package_name, version)) + if install_type == "packages": + cloud.distro.install_packages((package_name, version)) + elif install_type == "aio": + install_puppet_aio(aio_install_url, version, collection, cleanup) + else: + log.warning("Unknown puppet install type '%s'", install_type) + run = False + + conf_file = util.get_cfg_option_str( + puppet_cfg, 'conf_file', get_config_value(puppet_bin, 'config')) + ssl_dir = util.get_cfg_option_str( + puppet_cfg, 'ssl_dir', get_config_value(puppet_bin, 'ssldir')) + csr_attributes_path = util.get_cfg_option_str( + puppet_cfg, 'csr_attributes_path', + get_config_value(puppet_bin, 'csr_attributes')) + + p_constants = PuppetConstants(conf_file, ssl_dir, csr_attributes_path, log) # ... 
and then update the puppet configuration if 'conf' in puppet_cfg: @@ -165,17 +254,18 @@ def handle(name, cfg, cloud, log, _args): source=p_constants.conf_path) for (cfg_name, cfg) in puppet_cfg['conf'].items(): # Cert configuration is a special case - # Dump the puppet master ca certificate in the correct place + # Dump the puppetserver ca certificate in the correct place if cfg_name == 'ca_cert': # Puppet ssl sub-directory isn't created yet # Create it with the proper permissions and ownership util.ensure_dir(p_constants.ssl_dir, 0o771) - util.chownbyname(p_constants.ssl_dir, 'puppet', 'root') + util.chownbyname(p_constants.ssl_dir, puppet_user, 'root') util.ensure_dir(p_constants.ssl_cert_dir) - util.chownbyname(p_constants.ssl_cert_dir, 'puppet', 'root') + util.chownbyname(p_constants.ssl_cert_dir, puppet_user, 'root') util.write_file(p_constants.ssl_cert_path, cfg) - util.chownbyname(p_constants.ssl_cert_path, 'puppet', 'root') + util.chownbyname(p_constants.ssl_cert_path, + puppet_user, 'root') else: # Iterate through the config items, we'll use ConfigParser.set # to overwrite or create new items as needed @@ -203,6 +293,25 @@ def handle(name, cfg, cloud, log, _args): # Set it up so it autostarts _autostart_puppet(log) + # Run the agent if needed + if run: + log.debug('Running puppet-agent') + cmd = [puppet_bin, 'agent'] + if 'exec_args' in puppet_cfg: + cmd_args = puppet_cfg['exec_args'] + if isinstance(cmd_args, (list, tuple)): + cmd.extend(cmd_args) + elif isinstance(cmd_args, str): + cmd.extend(cmd_args.split()) + else: + log.warning("Unknown type %s provided for puppet" + " 'exec_args' expected list, tuple," + " or string", type(cmd_args)) + cmd.extend(PUPPET_AGENT_DEFAULT_ARGS) + else: + cmd.extend(PUPPET_AGENT_DEFAULT_ARGS) + subp.subp(cmd, capture=False) + # Start puppetd subp.subp(['service', 'puppet', 'start'], capture=False) diff --git a/doc/examples/cloud-config-puppet.txt b/doc/examples/cloud-config-puppet.txt index 3c7e2da7..c6bc15de 100644 --- a/doc/examples/cloud-config-puppet.txt +++ b/doc/examples/cloud-config-puppet.txt @@ -1,25 +1,65 @@ #cloud-config # -# This is an example file to automatically setup and run puppetd +# This is an example file to automatically setup and run puppet # when the instance boots for the first time. # Make sure that this file is valid yaml before starting instances. # It should be passed as user-data when starting the instance. 
puppet: + # Boolean: whether or not to install puppet (default: true) + install: true + + # A specific version to pass to the installer script or package manager + version: "7.7.0" + + # Valid values are 'packages' and 'aio' (default: 'packages') + install_type: "packages" + + # Puppet collection to install if 'install_type' is 'aio' + collection: "puppet7" + + # Boolean: whether or not to remove the puppetlabs repo after installation + # if 'install_type' is 'aio' (default: true) + cleanup: true + + # If 'install_type' is 'aio', change the url to the install script + aio_install_url: "https://raw.githubusercontent.com/puppetlabs/install-puppet/main/install.sh" + + # Path to the puppet config file (default: depends on 'install_type') + conf_file: "/etc/puppet/puppet.conf" + + # Path to the puppet SSL directory (default: depends on 'install_type') + ssl_dir: "/var/lib/puppet/ssl" + + # Path to the CSR attributes file (default: depends on 'install_type') + csr_attributes_path: "/etc/puppet/csr_attributes.yaml" + + # The name of the puppet package to install (no-op if 'install_type' is 'aio') + package_name: "puppet" + + # Boolean: whether or not to run puppet after configuration finishes + # (default: false) + exec: false + + # A list of arguments to pass to 'puppet agent' if 'exec' is true + # (default: ['--test']) + exec_args: ['--test'] + # Every key present in the conf object will be added to puppet.conf: # [name] # subkey=value # # For example the configuration below will have the following section # added to puppet.conf: - # [puppetd] - # server=puppetmaster.example.org + # [main] + # server=puppetserver.example.org # certname=i-0123456.ip-X-Y-Z.cloud.internal # - # The puppmaster ca certificate will be available in - # /var/lib/puppet/ssl/certs/ca.pem + # The puppetserver ca certificate will be available in + # /var/lib/puppet/ssl/certs/ca.pem if using distro packages + # or /etc/puppetlabs/puppet/ssl/certs/ca.pem if using AIO packages. conf: agent: - server: "puppetmaster.example.org" + server: "puppetserver.example.org" # certname supports substitutions at runtime: # %i: instanceid # Example: i-0123456 @@ -29,11 +69,13 @@ puppet: # NB: the certname will automatically be lowercased as required by puppet certname: "%i.%f" # ca_cert is a special case. It won't be added to puppet.conf. - # It holds the puppetmaster certificate in pem format. + # It holds the puppetserver certificate in pem format. # It should be a multi-line string (using the | yaml notation for # multi-line strings). - # The puppetmaster certificate is located in - # /var/lib/puppet/ssl/ca/ca_crt.pem on the puppetmaster host. + # The puppetserver certificate is located in + # /var/lib/puppet/ssl/ca/ca_crt.pem on the puppetserver host if using + # distro packages or /etc/puppetlabs/puppet/ssl/ca/ca_crt.pem if using AIO + # packages. 
# ca_cert: | -----BEGIN CERTIFICATE----- diff --git a/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml b/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml index e366c042..cdb1c28d 100644 --- a/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml +++ b/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml @@ -14,14 +14,14 @@ cloud_config: | # For example the configuration below will have the following section # added to puppet.conf: # [puppetd] - # server=puppetmaster.example.org + # server=puppetserver.example.org # certname=i-0123456.ip-X-Y-Z.cloud.internal # # The puppmaster ca certificate will be available in # /var/lib/puppet/ssl/certs/ca.pem conf: agent: - server: "puppetmaster.example.org" + server: "puppetserver.example.org" # certname supports substitutions at runtime: # %i: instanceid # Example: i-0123456 @@ -31,11 +31,11 @@ cloud_config: | # NB: the certname will automatically be lowercased as required by puppet certname: "%i.%f" # ca_cert is a special case. It won't be added to puppet.conf. - # It holds the puppetmaster certificate in pem format. + # It holds the puppetserver certificate in pem format. # It should be a multi-line string (using the | yaml notation for # multi-line strings). - # The puppetmaster certificate is located in - # /var/lib/puppet/ssl/ca/ca_crt.pem on the puppetmaster host. + # The puppetserver certificate is located in + # /var/lib/puppet/ssl/ca/ca_crt.pem on the puppetserver host. # ca_cert: | -----BEGIN CERTIFICATE----- diff --git a/tests/unittests/test_handler/test_handler_puppet.py b/tests/unittests/test_handler/test_handler_puppet.py index 62388ac6..c0ba2e3c 100644 --- a/tests/unittests/test_handler/test_handler_puppet.py +++ b/tests/unittests/test_handler/test_handler_puppet.py @@ -3,8 +3,9 @@ from cloudinit.config import cc_puppet from cloudinit.sources import DataSourceNone from cloudinit import (distros, helpers, cloud, util) -from cloudinit.tests.helpers import CiTestCase, mock +from cloudinit.tests.helpers import CiTestCase, HttprettyTestCase, mock +import httpretty import logging import textwrap @@ -63,7 +64,8 @@ class TestPuppetHandle(CiTestCase): super(TestPuppetHandle, self).setUp() self.new_root = self.tmp_dir() self.conf = self.tmp_path('puppet.conf') - self.csr_attributes_path = self.tmp_path('csr_attributes.yaml') + self.csr_attributes_path = self.tmp_path( + 'csr_attributes.yaml') def _get_cloud(self, distro): paths = helpers.Paths({'templates_dir': self.new_root}) @@ -72,7 +74,7 @@ class TestPuppetHandle(CiTestCase): myds = DataSourceNone.DataSourceNone({}, mydist, paths) return cloud.Cloud(myds, paths, {}, mydist, None) - def test_handler_skips_missing_puppet_key_in_cloudconfig(self, m_auto): + def test_skips_missing_puppet_key_in_cloudconfig(self, m_auto): """Cloud-config containing no 'puppet' key is skipped.""" mycloud = self._get_cloud('ubuntu') cfg = {} @@ -81,19 +83,19 @@ class TestPuppetHandle(CiTestCase): "no 'puppet' configuration found", self.logs.getvalue()) self.assertEqual(0, m_auto.call_count) - @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_puppet_config_starts_puppet_service(self, m_subp, m_auto): + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_starts_puppet_service(self, m_subp, m_auto): """Cloud-config 'puppet' configuration starts puppet.""" mycloud = self._get_cloud('ubuntu') cfg = {'puppet': {'install': False}} cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) self.assertEqual(1, 
m_auto.call_count) - self.assertEqual( + self.assertIn( [mock.call(['service', 'puppet', 'start'], capture=False)], m_subp.call_args_list) - @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_empty_puppet_config_installs_puppet(self, m_subp, m_auto): + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_empty_puppet_config_installs_puppet(self, m_subp, m_auto): """Cloud-config empty 'puppet' configuration installs latest puppet.""" mycloud = self._get_cloud('ubuntu') mycloud.distro = mock.MagicMock() @@ -103,8 +105,8 @@ class TestPuppetHandle(CiTestCase): [mock.call(('puppet', None))], mycloud.distro.install_packages.call_args_list) - @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_puppet_config_installs_puppet_on_true(self, m_subp, _): + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_on_true(self, m_subp, _): """Cloud-config with 'puppet' key installs when 'install' is True.""" mycloud = self._get_cloud('ubuntu') mycloud.distro = mock.MagicMock() @@ -114,8 +116,85 @@ class TestPuppetHandle(CiTestCase): [mock.call(('puppet', None))], mycloud.distro.install_packages.call_args_list) - @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_puppet_config_installs_puppet_version(self, m_subp, _): + @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True) + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_aio(self, m_subp, m_aio, _): + """Cloud-config with 'puppet' key installs + when 'install_type' is 'aio'.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'install': True, 'install_type': 'aio'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + m_aio.assert_called_with( + cc_puppet.AIO_INSTALL_URL, + None, None, True) + + @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True) + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_aio_with_version(self, + m_subp, m_aio, _): + """Cloud-config with 'puppet' key installs + when 'install_type' is 'aio' and 'version' is specified.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'install': True, + 'version': '6.24.0', 'install_type': 'aio'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + m_aio.assert_called_with( + cc_puppet.AIO_INSTALL_URL, + '6.24.0', None, True) + + @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True) + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_aio_with_collection(self, + m_subp, + m_aio, _): + """Cloud-config with 'puppet' key installs + when 'install_type' is 'aio' and 'collection' is specified.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'install': True, + 'collection': 'puppet6', 'install_type': 'aio'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + m_aio.assert_called_with( + cc_puppet.AIO_INSTALL_URL, + None, 'puppet6', True) + + @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True) + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_aio_with_custom_url(self, + m_subp, + m_aio, _): + """Cloud-config with 'puppet' key installs + 
when 'install_type' is 'aio' and 'aio_install_url' is specified.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': + {'install': True, + 'aio_install_url': 'http://test.url/path/to/script.sh', + 'install_type': 'aio'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + m_aio.assert_called_with( + 'http://test.url/path/to/script.sh', None, None, True) + + @mock.patch('cloudinit.config.cc_puppet.install_puppet_aio', autospec=True) + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_aio_without_cleanup(self, + m_subp, + m_aio, _): + """Cloud-config with 'puppet' key installs + when 'install_type' is 'aio' and no cleanup.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'install': True, + 'cleanup': False, 'install_type': 'aio'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + m_aio.assert_called_with( + cc_puppet.AIO_INSTALL_URL, + None, None, False) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_installs_puppet_version(self, m_subp, _): """Cloud-config 'puppet' configuration can specify a version.""" mycloud = self._get_cloud('ubuntu') mycloud.distro = mock.MagicMock() @@ -125,26 +204,39 @@ class TestPuppetHandle(CiTestCase): [mock.call(('puppet', '3.8'))], mycloud.distro.install_packages.call_args_list) - @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_puppet_config_updates_puppet_conf(self, m_subp, m_auto): + @mock.patch('cloudinit.config.cc_puppet.get_config_value') + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_config_updates_puppet_conf(self, + m_subp, m_default, m_auto): """When 'conf' is provided update values in PUPPET_CONF_PATH.""" + + def _fake_get_config_value(puppet_bin, setting): + return self.conf + + m_default.side_effect = _fake_get_config_value mycloud = self._get_cloud('ubuntu') cfg = { 'puppet': { - 'conf': {'agent': {'server': 'puppetmaster.example.org'}}}} - util.write_file(self.conf, '[agent]\nserver = origpuppet\nother = 3') - puppet_conf_path = 'cloudinit.config.cc_puppet.PUPPET_CONF_PATH' + 'conf': {'agent': {'server': 'puppetserver.example.org'}}}} + util.write_file( + self.conf, '[agent]\nserver = origpuppet\nother = 3') mycloud.distro = mock.MagicMock() - with mock.patch(puppet_conf_path, self.conf): - cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) content = util.load_file(self.conf) - expected = '[agent]\nserver = puppetmaster.example.org\nother = 3\n\n' + expected = '[agent]\nserver = puppetserver.example.org\nother = 3\n\n' self.assertEqual(expected, content) + @mock.patch('cloudinit.config.cc_puppet.get_config_value') @mock.patch('cloudinit.config.cc_puppet.subp.subp') - def test_handler_puppet_writes_csr_attributes_file(self, m_subp, m_auto): + def test_puppet_writes_csr_attributes_file(self, + m_subp, m_default, m_auto): """When csr_attributes is provided creates file in PUPPET_CSR_ATTRIBUTES_PATH.""" + + def _fake_get_config_value(puppet_bin, setting): + return self.csr_attributes_path + + m_default.side_effect = _fake_get_config_value mycloud = self._get_cloud('ubuntu') mycloud.distro = mock.MagicMock() cfg = { @@ -163,10 +255,7 @@ class TestPuppetHandle(CiTestCase): } } } - csr_attributes = 'cloudinit.config.cc_puppet.' 
\ - 'PUPPET_CSR_ATTRIBUTES_PATH' - with mock.patch(csr_attributes, self.csr_attributes_path): - cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) content = util.load_file(self.csr_attributes_path) expected = textwrap.dedent("""\ custom_attributes: @@ -177,3 +266,125 @@ class TestPuppetHandle(CiTestCase): pp_uuid: ED803750-E3C7-44F5-BB08-41A04433FE2E """) self.assertEqual(expected, content) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_runs_puppet_if_requested(self, m_subp, m_auto): + """Run puppet with default args if 'exec' is set to True.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'puppet': {'exec': True}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual(1, m_auto.call_count) + self.assertIn( + [mock.call(['puppet', 'agent', '--test'], capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_runs_puppet_with_args_list_if_requested(self, + m_subp, m_auto): + """Run puppet with 'exec_args' list if 'exec' is set to True.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'puppet': {'exec': True, 'exec_args': [ + '--onetime', '--detailed-exitcodes']}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual(1, m_auto.call_count) + self.assertIn( + [mock.call( + ['puppet', 'agent', '--onetime', '--detailed-exitcodes'], + capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', return_value=("", "")) + def test_puppet_runs_puppet_with_args_string_if_requested(self, + m_subp, m_auto): + """Run puppet with 'exec_args' string if 'exec' is set to True.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'puppet': {'exec': True, + 'exec_args': '--onetime --detailed-exitcodes'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual(1, m_auto.call_count) + self.assertIn( + [mock.call( + ['puppet', 'agent', '--onetime', '--detailed-exitcodes'], + capture=False)], + m_subp.call_args_list) + + +class TestInstallPuppetAio(HttprettyTestCase): + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', + return_value=(None, None)) + def test_install_with_default_arguments(self, m_subp): + """Install AIO with no arguments""" + response = b'#!/bin/bash\necho "Hi Mom"' + httpretty.register_uri( + httpretty.GET, cc_puppet.AIO_INSTALL_URL, + body=response, status=200) + + cc_puppet.install_puppet_aio() + + self.assertEqual( + [mock.call([mock.ANY, '--cleanup'], capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', + return_value=(None, None)) + def test_install_with_custom_url(self, m_subp): + """Install AIO from custom URL""" + response = b'#!/bin/bash\necho "Hi Mom"' + url = 'http://custom.url/path/to/script.sh' + httpretty.register_uri( + httpretty.GET, url, body=response, status=200) + + cc_puppet.install_puppet_aio('http://custom.url/path/to/script.sh') + + self.assertEqual( + [mock.call([mock.ANY, '--cleanup'], capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', + return_value=(None, None)) + def test_install_with_version(self, m_subp): + """Install AIO with specific version""" + response = b'#!/bin/bash\necho "Hi Mom"' + httpretty.register_uri( + httpretty.GET, cc_puppet.AIO_INSTALL_URL, + body=response, status=200) + + cc_puppet.install_puppet_aio(cc_puppet.AIO_INSTALL_URL, '7.6.0') + + 
self.assertEqual( + [mock.call([mock.ANY, '-v', '7.6.0', '--cleanup'], capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', + return_value=(None, None)) + def test_install_with_collection(self, m_subp): + """Install AIO with specific collection""" + response = b'#!/bin/bash\necho "Hi Mom"' + httpretty.register_uri( + httpretty.GET, cc_puppet.AIO_INSTALL_URL, + body=response, status=200) + + cc_puppet.install_puppet_aio( + cc_puppet.AIO_INSTALL_URL, None, 'puppet6-nightly') + + self.assertEqual( + [mock.call([mock.ANY, '-c', 'puppet6-nightly', '--cleanup'], + capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.subp.subp', + return_value=(None, None)) + def test_install_with_no_cleanup(self, m_subp): + """Install AIO with no cleanup""" + response = b'#!/bin/bash\necho "Hi Mom"' + httpretty.register_uri( + httpretty.GET, cc_puppet.AIO_INSTALL_URL, + body=response, status=200) + + cc_puppet.install_puppet_aio( + cc_puppet.AIO_INSTALL_URL, None, None, False) + + self.assertEqual( + [mock.call([mock.ANY], capture=False)], + m_subp.call_args_list) -- cgit v1.2.3 From ba083245537abd5bf5942fbe851e21eb8f245000 Mon Sep 17 00:00:00 2001 From: James Falcon Date: Wed, 29 Sep 2021 18:32:53 -0500 Subject: testing: remove cloud_tests (#1020) Cloud tests have been replaced with integration tests --- cloud-tests-requirements.txt | 28 - doc/rtd/index.rst | 1 - doc/rtd/topics/cloud_tests.rst | 764 --------------------- tests/cloud_tests/__init__.py | 39 -- tests/cloud_tests/__main__.py | 71 -- tests/cloud_tests/args.py | 304 -------- tests/cloud_tests/bddeb.py | 119 ---- tests/cloud_tests/collect.py | 219 ------ tests/cloud_tests/config.py | 165 ----- tests/cloud_tests/manage.py | 74 -- tests/cloud_tests/platforms.yaml | 77 --- tests/cloud_tests/platforms/__init__.py | 43 -- tests/cloud_tests/platforms/azurecloud/__init__.py | 0 tests/cloud_tests/platforms/azurecloud/image.py | 116 ---- tests/cloud_tests/platforms/azurecloud/instance.py | 247 ------- tests/cloud_tests/platforms/azurecloud/platform.py | 240 ------- .../cloud_tests/platforms/azurecloud/regions.json | 42 -- tests/cloud_tests/platforms/azurecloud/snapshot.py | 58 -- tests/cloud_tests/platforms/ec2/__init__.py | 0 tests/cloud_tests/platforms/ec2/image.py | 100 --- tests/cloud_tests/platforms/ec2/instance.py | 132 ---- tests/cloud_tests/platforms/ec2/platform.py | 263 ------- tests/cloud_tests/platforms/ec2/snapshot.py | 66 -- tests/cloud_tests/platforms/images.py | 56 -- tests/cloud_tests/platforms/instances.py | 165 ----- tests/cloud_tests/platforms/lxd/__init__.py | 0 tests/cloud_tests/platforms/lxd/image.py | 211 ------ tests/cloud_tests/platforms/lxd/instance.py | 278 -------- tests/cloud_tests/platforms/lxd/platform.py | 104 --- tests/cloud_tests/platforms/lxd/snapshot.py | 53 -- tests/cloud_tests/platforms/nocloudkvm/__init__.py | 0 tests/cloud_tests/platforms/nocloudkvm/image.py | 79 --- tests/cloud_tests/platforms/nocloudkvm/instance.py | 197 ------ tests/cloud_tests/platforms/nocloudkvm/platform.py | 94 --- tests/cloud_tests/platforms/nocloudkvm/snapshot.py | 59 -- tests/cloud_tests/platforms/platforms.py | 109 --- tests/cloud_tests/platforms/snapshots.py | 44 -- tests/cloud_tests/releases.yaml | 381 ---------- tests/cloud_tests/run_funcs.py | 75 -- tests/cloud_tests/setup_image.py | 237 ------- tests/cloud_tests/stage.py | 116 ---- tests/cloud_tests/testcases.yaml | 50 -- tests/cloud_tests/testcases/__init__.py | 73 -- tests/cloud_tests/testcases/base.py | 385 
----------- tests/cloud_tests/testcases/bugs/README.md | 13 - tests/cloud_tests/testcases/bugs/__init__.py | 8 - tests/cloud_tests/testcases/bugs/lp1511485.py | 15 - tests/cloud_tests/testcases/bugs/lp1511485.yaml | 11 - tests/cloud_tests/testcases/bugs/lp1611074.yaml | 8 - tests/cloud_tests/testcases/bugs/lp1628337.py | 23 - tests/cloud_tests/testcases/bugs/lp1628337.yaml | 23 - tests/cloud_tests/testcases/examples/README.md | 12 - tests/cloud_tests/testcases/examples/TODO.md | 15 - tests/cloud_tests/testcases/examples/__init__.py | 8 - .../testcases/examples/add_apt_repositories.py | 20 - .../testcases/examples/add_apt_repositories.yaml | 23 - .../testcases/examples/alter_completion_message.py | 40 -- .../examples/alter_completion_message.yaml | 16 - .../configure_instance_trusted_ca_certificates.py | 27 - ...configure_instance_trusted_ca_certificates.yaml | 41 -- .../examples/configure_instances_ssh_keys.py | 31 - .../examples/configure_instances_ssh_keys.yaml | 63 -- .../testcases/examples/including_user_groups.py | 49 -- .../testcases/examples/including_user_groups.yaml | 56 -- .../examples/install_arbitrary_packages.py | 20 - .../examples/install_arbitrary_packages.yaml | 20 - .../testcases/examples/install_run_chef_recipes.py | 17 - .../examples/install_run_chef_recipes.yaml | 104 --- .../testcases/examples/run_apt_upgrade.py | 19 - .../testcases/examples/run_apt_upgrade.yaml | 11 - .../cloud_tests/testcases/examples/run_commands.py | 15 - .../testcases/examples/run_commands.yaml | 16 - .../testcases/examples/run_commands_first_boot.py | 15 - .../examples/run_commands_first_boot.yaml | 16 - .../testcases/examples/setup_run_puppet.yaml | 55 -- .../examples/writing_out_arbitrary_files.py | 30 - .../examples/writing_out_arbitrary_files.yaml | 45 -- tests/cloud_tests/testcases/main/README.md | 11 - tests/cloud_tests/testcases/main/__init__.py | 8 - .../testcases/main/command_output_simple.py | 21 - .../testcases/main/command_output_simple.yaml | 13 - tests/cloud_tests/testcases/modules/README.md | 12 - tests/cloud_tests/testcases/modules/TODO.md | 95 --- tests/cloud_tests/testcases/modules/__init__.py | 8 - .../testcases/modules/apt_configure_conf.py | 20 - .../testcases/modules/apt_configure_conf.yaml | 21 - .../modules/apt_configure_disable_suites.py | 15 - .../modules/apt_configure_disable_suites.yaml | 20 - .../testcases/modules/apt_configure_primary.py | 24 - .../testcases/modules/apt_configure_primary.yaml | 19 - .../testcases/modules/apt_configure_proxy.py | 22 - .../testcases/modules/apt_configure_proxy.yaml | 18 - .../testcases/modules/apt_configure_security.py | 15 - .../testcases/modules/apt_configure_security.yaml | 18 - .../testcases/modules/apt_configure_sources_key.py | 23 - .../modules/apt_configure_sources_key.yaml | 50 -- .../modules/apt_configure_sources_keyserver.py | 23 - .../modules/apt_configure_sources_keyserver.yaml | 23 - .../modules/apt_configure_sources_list.py | 31 - .../modules/apt_configure_sources_list.yaml | 28 - .../testcases/modules/apt_configure_sources_ppa.py | 23 - .../modules/apt_configure_sources_ppa.yaml | 29 - .../testcases/modules/apt_pipelining_disable.py | 15 - .../testcases/modules/apt_pipelining_disable.yaml | 14 - .../testcases/modules/apt_pipelining_os.py | 15 - .../testcases/modules/apt_pipelining_os.yaml | 14 - tests/cloud_tests/testcases/modules/bootcmd.py | 15 - tests/cloud_tests/testcases/modules/bootcmd.yaml | 13 - tests/cloud_tests/testcases/modules/byobu.py | 24 - tests/cloud_tests/testcases/modules/byobu.yaml | 17 - 
tests/cloud_tests/testcases/modules/ca_certs.py | 33 - tests/cloud_tests/testcases/modules/ca_certs.yaml | 56 -- .../cloud_tests/testcases/modules/debug_disable.py | 16 - .../testcases/modules/debug_disable.yaml | 9 - .../cloud_tests/testcases/modules/debug_enable.py | 15 - .../testcases/modules/debug_enable.yaml | 9 - .../cloud_tests/testcases/modules/final_message.py | 40 -- .../testcases/modules/final_message.yaml | 13 - .../testcases/modules/keys_to_console.py | 22 - .../testcases/modules/keys_to_console.yaml | 15 - tests/cloud_tests/testcases/modules/landscape.yaml | 28 - tests/cloud_tests/testcases/modules/locale.py | 30 - tests/cloud_tests/testcases/modules/locale.yaml | 22 - tests/cloud_tests/testcases/modules/lxd_bridge.py | 36 - .../cloud_tests/testcases/modules/lxd_bridge.yaml | 32 - tests/cloud_tests/testcases/modules/lxd_dir.py | 30 - tests/cloud_tests/testcases/modules/lxd_dir.yaml | 19 - tests/cloud_tests/testcases/modules/ntp.py | 24 - tests/cloud_tests/testcases/modules/ntp.yaml | 22 - tests/cloud_tests/testcases/modules/ntp_chrony.py | 26 - .../cloud_tests/testcases/modules/ntp_chrony.yaml | 17 - tests/cloud_tests/testcases/modules/ntp_pools.py | 34 - tests/cloud_tests/testcases/modules/ntp_pools.yaml | 32 - tests/cloud_tests/testcases/modules/ntp_servers.py | 34 - .../cloud_tests/testcases/modules/ntp_servers.yaml | 28 - .../cloud_tests/testcases/modules/ntp_timesyncd.py | 15 - .../testcases/modules/ntp_timesyncd.yaml | 15 - .../modules/package_update_upgrade_install.py | 36 - .../modules/package_update_upgrade_install.yaml | 30 - tests/cloud_tests/testcases/modules/runcmd.py | 15 - tests/cloud_tests/testcases/modules/runcmd.yaml | 13 - .../testcases/modules/seed_random_command.yaml | 18 - .../testcases/modules/seed_random_data.py | 15 - .../testcases/modules/seed_random_data.yaml | 15 - .../cloud_tests/testcases/modules/set_hostname.py | 17 - .../testcases/modules/set_hostname.yaml | 21 - .../testcases/modules/set_hostname_fqdn.py | 31 - .../testcases/modules/set_hostname_fqdn.yaml | 23 - .../cloud_tests/testcases/modules/set_password.py | 22 - .../testcases/modules/set_password.yaml | 19 - .../testcases/modules/set_password_expire.py | 23 - .../testcases/modules/set_password_expire.yaml | 32 - .../testcases/modules/set_password_list.py | 12 - .../testcases/modules/set_password_list.yaml | 41 -- .../testcases/modules/set_password_list_string.py | 12 - .../modules/set_password_list_string.yaml | 41 -- tests/cloud_tests/testcases/modules/snap.py | 16 - tests/cloud_tests/testcases/modules/snap.yaml | 21 - .../modules/ssh_auth_key_fingerprints_disable.py | 16 - .../modules/ssh_auth_key_fingerprints_disable.yaml | 14 - .../modules/ssh_auth_key_fingerprints_enable.py | 18 - .../modules/ssh_auth_key_fingerprints_enable.yaml | 21 - .../cloud_tests/testcases/modules/ssh_import_id.py | 17 - .../testcases/modules/ssh_import_id.yaml | 17 - .../testcases/modules/ssh_keys_generate.py | 52 -- .../testcases/modules/ssh_keys_generate.yaml | 38 - .../testcases/modules/ssh_keys_provided.py | 58 -- .../testcases/modules/ssh_keys_provided.yaml | 99 --- tests/cloud_tests/testcases/modules/timezone.py | 15 - tests/cloud_tests/testcases/modules/timezone.yaml | 16 - tests/cloud_tests/testcases/modules/user_groups.py | 49 -- .../cloud_tests/testcases/modules/user_groups.yaml | 55 -- tests/cloud_tests/testcases/modules/write_files.py | 33 - .../cloud_tests/testcases/modules/write_files.yaml | 53 -- tests/cloud_tests/util.py | 532 -------------- tests/cloud_tests/verify.py | 149 ---- 
tests/configs/sample1.yaml | 49 -- tests/unittests/test_handler/test_schema.py | 41 -- tox.ini | 11 +- 179 files changed, 2 insertions(+), 10032 deletions(-) delete mode 100644 cloud-tests-requirements.txt delete mode 100644 doc/rtd/topics/cloud_tests.rst delete mode 100644 tests/cloud_tests/__init__.py delete mode 100644 tests/cloud_tests/__main__.py delete mode 100644 tests/cloud_tests/args.py delete mode 100644 tests/cloud_tests/bddeb.py delete mode 100644 tests/cloud_tests/collect.py delete mode 100644 tests/cloud_tests/config.py delete mode 100644 tests/cloud_tests/manage.py delete mode 100644 tests/cloud_tests/platforms.yaml delete mode 100644 tests/cloud_tests/platforms/__init__.py delete mode 100644 tests/cloud_tests/platforms/azurecloud/__init__.py delete mode 100644 tests/cloud_tests/platforms/azurecloud/image.py delete mode 100644 tests/cloud_tests/platforms/azurecloud/instance.py delete mode 100644 tests/cloud_tests/platforms/azurecloud/platform.py delete mode 100644 tests/cloud_tests/platforms/azurecloud/regions.json delete mode 100644 tests/cloud_tests/platforms/azurecloud/snapshot.py delete mode 100644 tests/cloud_tests/platforms/ec2/__init__.py delete mode 100644 tests/cloud_tests/platforms/ec2/image.py delete mode 100644 tests/cloud_tests/platforms/ec2/instance.py delete mode 100644 tests/cloud_tests/platforms/ec2/platform.py delete mode 100644 tests/cloud_tests/platforms/ec2/snapshot.py delete mode 100644 tests/cloud_tests/platforms/images.py delete mode 100644 tests/cloud_tests/platforms/instances.py delete mode 100644 tests/cloud_tests/platforms/lxd/__init__.py delete mode 100644 tests/cloud_tests/platforms/lxd/image.py delete mode 100644 tests/cloud_tests/platforms/lxd/instance.py delete mode 100644 tests/cloud_tests/platforms/lxd/platform.py delete mode 100644 tests/cloud_tests/platforms/lxd/snapshot.py delete mode 100644 tests/cloud_tests/platforms/nocloudkvm/__init__.py delete mode 100644 tests/cloud_tests/platforms/nocloudkvm/image.py delete mode 100644 tests/cloud_tests/platforms/nocloudkvm/instance.py delete mode 100644 tests/cloud_tests/platforms/nocloudkvm/platform.py delete mode 100644 tests/cloud_tests/platforms/nocloudkvm/snapshot.py delete mode 100644 tests/cloud_tests/platforms/platforms.py delete mode 100644 tests/cloud_tests/platforms/snapshots.py delete mode 100644 tests/cloud_tests/releases.yaml delete mode 100644 tests/cloud_tests/run_funcs.py delete mode 100644 tests/cloud_tests/setup_image.py delete mode 100644 tests/cloud_tests/stage.py delete mode 100644 tests/cloud_tests/testcases.yaml delete mode 100644 tests/cloud_tests/testcases/__init__.py delete mode 100644 tests/cloud_tests/testcases/base.py delete mode 100644 tests/cloud_tests/testcases/bugs/README.md delete mode 100644 tests/cloud_tests/testcases/bugs/__init__.py delete mode 100644 tests/cloud_tests/testcases/bugs/lp1511485.py delete mode 100644 tests/cloud_tests/testcases/bugs/lp1511485.yaml delete mode 100644 tests/cloud_tests/testcases/bugs/lp1611074.yaml delete mode 100644 tests/cloud_tests/testcases/bugs/lp1628337.py delete mode 100644 tests/cloud_tests/testcases/bugs/lp1628337.yaml delete mode 100644 tests/cloud_tests/testcases/examples/README.md delete mode 100644 tests/cloud_tests/testcases/examples/TODO.md delete mode 100644 tests/cloud_tests/testcases/examples/__init__.py delete mode 100644 tests/cloud_tests/testcases/examples/add_apt_repositories.py delete mode 100644 tests/cloud_tests/testcases/examples/add_apt_repositories.yaml delete mode 100644 
tests/cloud_tests/testcases/examples/alter_completion_message.py delete mode 100644 tests/cloud_tests/testcases/examples/alter_completion_message.yaml delete mode 100644 tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py delete mode 100644 tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml delete mode 100644 tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py delete mode 100644 tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml delete mode 100644 tests/cloud_tests/testcases/examples/including_user_groups.py delete mode 100644 tests/cloud_tests/testcases/examples/including_user_groups.yaml delete mode 100644 tests/cloud_tests/testcases/examples/install_arbitrary_packages.py delete mode 100644 tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml delete mode 100644 tests/cloud_tests/testcases/examples/install_run_chef_recipes.py delete mode 100644 tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml delete mode 100644 tests/cloud_tests/testcases/examples/run_apt_upgrade.py delete mode 100644 tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml delete mode 100644 tests/cloud_tests/testcases/examples/run_commands.py delete mode 100644 tests/cloud_tests/testcases/examples/run_commands.yaml delete mode 100644 tests/cloud_tests/testcases/examples/run_commands_first_boot.py delete mode 100644 tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml delete mode 100644 tests/cloud_tests/testcases/examples/setup_run_puppet.yaml delete mode 100644 tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py delete mode 100644 tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml delete mode 100644 tests/cloud_tests/testcases/main/README.md delete mode 100644 tests/cloud_tests/testcases/main/__init__.py delete mode 100644 tests/cloud_tests/testcases/main/command_output_simple.py delete mode 100644 tests/cloud_tests/testcases/main/command_output_simple.yaml delete mode 100644 tests/cloud_tests/testcases/modules/README.md delete mode 100644 tests/cloud_tests/testcases/modules/TODO.md delete mode 100644 tests/cloud_tests/testcases/modules/__init__.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_conf.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_conf.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_primary.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_primary.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_proxy.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_security.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_security.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_key.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_list.py delete mode 100644 
tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_pipelining_disable.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml delete mode 100644 tests/cloud_tests/testcases/modules/apt_pipelining_os.py delete mode 100644 tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml delete mode 100644 tests/cloud_tests/testcases/modules/bootcmd.py delete mode 100644 tests/cloud_tests/testcases/modules/bootcmd.yaml delete mode 100644 tests/cloud_tests/testcases/modules/byobu.py delete mode 100644 tests/cloud_tests/testcases/modules/byobu.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ca_certs.py delete mode 100644 tests/cloud_tests/testcases/modules/ca_certs.yaml delete mode 100644 tests/cloud_tests/testcases/modules/debug_disable.py delete mode 100644 tests/cloud_tests/testcases/modules/debug_disable.yaml delete mode 100644 tests/cloud_tests/testcases/modules/debug_enable.py delete mode 100644 tests/cloud_tests/testcases/modules/debug_enable.yaml delete mode 100644 tests/cloud_tests/testcases/modules/final_message.py delete mode 100644 tests/cloud_tests/testcases/modules/final_message.yaml delete mode 100644 tests/cloud_tests/testcases/modules/keys_to_console.py delete mode 100644 tests/cloud_tests/testcases/modules/keys_to_console.yaml delete mode 100644 tests/cloud_tests/testcases/modules/landscape.yaml delete mode 100644 tests/cloud_tests/testcases/modules/locale.py delete mode 100644 tests/cloud_tests/testcases/modules/locale.yaml delete mode 100644 tests/cloud_tests/testcases/modules/lxd_bridge.py delete mode 100644 tests/cloud_tests/testcases/modules/lxd_bridge.yaml delete mode 100644 tests/cloud_tests/testcases/modules/lxd_dir.py delete mode 100644 tests/cloud_tests/testcases/modules/lxd_dir.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ntp.py delete mode 100644 tests/cloud_tests/testcases/modules/ntp.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ntp_chrony.py delete mode 100644 tests/cloud_tests/testcases/modules/ntp_chrony.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ntp_pools.py delete mode 100644 tests/cloud_tests/testcases/modules/ntp_pools.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ntp_servers.py delete mode 100644 tests/cloud_tests/testcases/modules/ntp_servers.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ntp_timesyncd.py delete mode 100644 tests/cloud_tests/testcases/modules/ntp_timesyncd.yaml delete mode 100644 tests/cloud_tests/testcases/modules/package_update_upgrade_install.py delete mode 100644 tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml delete mode 100644 tests/cloud_tests/testcases/modules/runcmd.py delete mode 100644 tests/cloud_tests/testcases/modules/runcmd.yaml delete mode 100644 tests/cloud_tests/testcases/modules/seed_random_command.yaml delete mode 100644 tests/cloud_tests/testcases/modules/seed_random_data.py delete mode 100644 tests/cloud_tests/testcases/modules/seed_random_data.yaml delete mode 100644 tests/cloud_tests/testcases/modules/set_hostname.py delete mode 100644 tests/cloud_tests/testcases/modules/set_hostname.yaml delete mode 100644 tests/cloud_tests/testcases/modules/set_hostname_fqdn.py delete mode 100644 tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml 
delete mode 100644 tests/cloud_tests/testcases/modules/set_password.py delete mode 100644 tests/cloud_tests/testcases/modules/set_password.yaml delete mode 100644 tests/cloud_tests/testcases/modules/set_password_expire.py delete mode 100644 tests/cloud_tests/testcases/modules/set_password_expire.yaml delete mode 100644 tests/cloud_tests/testcases/modules/set_password_list.py delete mode 100644 tests/cloud_tests/testcases/modules/set_password_list.yaml delete mode 100644 tests/cloud_tests/testcases/modules/set_password_list_string.py delete mode 100644 tests/cloud_tests/testcases/modules/set_password_list_string.yaml delete mode 100644 tests/cloud_tests/testcases/modules/snap.py delete mode 100644 tests/cloud_tests/testcases/modules/snap.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py delete mode 100644 tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py delete mode 100644 tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ssh_import_id.py delete mode 100644 tests/cloud_tests/testcases/modules/ssh_import_id.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ssh_keys_generate.py delete mode 100644 tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml delete mode 100644 tests/cloud_tests/testcases/modules/ssh_keys_provided.py delete mode 100644 tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml delete mode 100644 tests/cloud_tests/testcases/modules/timezone.py delete mode 100644 tests/cloud_tests/testcases/modules/timezone.yaml delete mode 100644 tests/cloud_tests/testcases/modules/user_groups.py delete mode 100644 tests/cloud_tests/testcases/modules/user_groups.yaml delete mode 100644 tests/cloud_tests/testcases/modules/write_files.py delete mode 100644 tests/cloud_tests/testcases/modules/write_files.yaml delete mode 100644 tests/cloud_tests/util.py delete mode 100644 tests/cloud_tests/verify.py delete mode 100644 tests/configs/sample1.yaml (limited to 'tests/cloud_tests') diff --git a/cloud-tests-requirements.txt b/cloud-tests-requirements.txt deleted file mode 100644 index eecab63e..00000000 --- a/cloud-tests-requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -# PyPI requirements for cloud-init cloud tests -# https://cloudinit.readthedocs.io/en/latest/topics/cloud_tests.html -# -# Note: Changes to this requirements may require updates to -# the packages/pkg-deps.json file as well. -# - -# ec2 backend -boto3==1.14.53 - -# ssh communication -paramiko==2.7.2 -cryptography==3.2 - -# lxd backend -pylxd==2.2.11 - -# finds latest image information -git+https://git.launchpad.net/simplestreams - -# azure backend -azure-storage==0.36.0 -msrestazure==0.6.1 -azure-common==1.1.23 -azure-mgmt-compute==7.0.0 -azure-mgmt-network==5.0.0 -azure-mgmt-resource==4.0.0 -azure-mgmt-storage==6.0.0 diff --git a/doc/rtd/index.rst b/doc/rtd/index.rst index 67d6a9e3..69cf2068 100644 --- a/doc/rtd/index.rst +++ b/doc/rtd/index.rst @@ -78,6 +78,5 @@ Having trouble? We would like to help! topics/docs.rst topics/testing.rst topics/integration_tests.rst - topics/cloud_tests.rst .. 
vi: textwidth=79 diff --git a/doc/rtd/topics/cloud_tests.rst b/doc/rtd/topics/cloud_tests.rst deleted file mode 100644 index 0fbb1301..00000000 --- a/doc/rtd/topics/cloud_tests.rst +++ /dev/null @@ -1,764 +0,0 @@ -************************ -Cloud tests (Deprecated) -************************ - -Cloud tests are no longer maintained. For writing integration -tests, see the :ref:`integration_tests` page. - -Overview -======== - -This page describes the execution, development, and architecture of the -cloud-init integration tests: - -* Execution explains the options available and running of tests -* Development shows how to write test cases -* Architecture explains the internal processes - -Execution -========= - -Overview --------- - -In order to avoid the need for dependencies and ease the setup and -configuration, users can run the integration tests via tox: - -.. code-block:: shell-session - - $ git clone https://github.com/canonical/cloud-init - $ cd cloud-init - $ tox -e citest -- -h - -Everything after the double dash will be passed to the integration tests. -Executing tests has several options: - -* ``run`` an alias to run both ``collect`` and ``verify``. The ``tree_run`` - command does the same thing, except uses a deb built from the current - working tree. - -* ``collect`` deploys on the specified platform and distro, patches with the - requested deb or rpm, and finally collects output of the arbitrary - commands. Similarly, ``tree_collect`` will collect output using a deb - built from the current working tree. - -* ``verify`` given a directory of test data, runs the Python unit tests on - it to generate results. - -* ``bddeb`` will build a deb of the current working tree. - -Run --- - -The first example will provide a complete end-to-end run of data -collection and verification. There are additional examples below -explaining how to run one or the other independently. - -.. code-block:: shell-session - - $ git clone https://github.com/canonical/cloud-init - $ cd cloud-init - $ tox -e citest -- run --verbose \ - --os-name stretch --os-name xenial \ - --deb cloud-init_0.7.8~my_patch_all.deb \ - --preserve-data --data-dir ~/collection \ - --preserve-instance - -The above command will do the following: - -* ``run`` both collect output and run tests on the output - -* ``--verbose`` verbose output - -* ``--os-name stretch`` on the Debian Stretch release - -* ``--os-name xenial`` on the Ubuntu Xenial release - -* ``--deb cloud-init_0.7.8~my_patch_all.deb`` use this deb as the version of - cloud-init to run with - -* ``--preserve-data`` always preserve collected data, do not remove data - after a successful test run - -* ``--preserve-instance`` do not destroy the instance after the test to allow - for debugging the stopped instance during integration test development. By - default, test instances are destroyed after the test completes. - -* ``--data-dir ~/collection`` write collected data into `~/collection`, - rather than using a temporary directory - -For a more detailed explanation of each option, see below. - -.. note:: - By default, data collected by the run command will be written into a - temporary directory and deleted after a successful run. If you would - like to preserve this data, please use the option ``--preserve-data``. - -Collect ------- - -If developing tests, it may be necessary to see if cloud-config works as -expected and the correct files are pulled down. In this case only a -collect can be run by running: - ..
code-block:: shell-session - - $ tox -e citest -- collect -n xenial --data-dir /tmp/collection - -The above command will run the collection tests on xenial and place -all results into `/tmp/collection`. - -Verify ------- - -When developing tests it is much easier to simply rerun the verify scripts -without the more lengthy collect process. This can be done by running: - -.. code-block:: shell-session - - $ tox -e citest -- verify --data-dir /tmp/collection - -The above command will run the verify scripts on the data discovered in -`/tmp/collection`. - -TreeRun and TreeCollect ------------------------ - -If working on a cloud-init feature or resolving a bug, it may be useful to -run the current copy of cloud-init in the integration testing environment. -The integration testing suite can automatically build a deb based on the -current working tree of cloud-init and run the test suite using this deb. - -The ``tree_run`` and ``tree_collect`` commands take the same arguments as -the ``run`` and ``collect`` commands. These commands will build a deb and -write it into a temporary file, then start the test suite and pass that deb -in. To build a deb only, and not run the test suite, the ``bddeb`` command -can be used. - -Note that code in the cloud-init working tree that has not been committed -when the cloud-init deb is built will still be included. To build a -cloud-init deb from or use the ``tree_run`` command using a copy of -cloud-init located in a different directory, use the option ``--cloud-init -/path/to/cloud-init``. - -.. code-block:: shell-session - - $ tox -e citest -- tree_run --verbose \ - --os-name xenial --os-name stretch \ - --test modules/final_message --test modules/write_files \ - --result /tmp/result.yaml - -Bddeb ------ - -The ``bddeb`` command can be used to generate a deb file. This is used by the -tree_run and tree_collect commands to build a deb of the current working tree -using the packaging template contained in the ``packages/debian/`` directory. -It can also be used to generate a deb for use in other situations and avoid -needing to have all the build and test dependencies installed locally. - -* ``--bddeb-args``: arguments to pass through to bddeb -* ``--build-os``: distribution to use as build system (default is xenial) -* ``--build-platform``: platform to use for build system (default is lxd) -* ``--cloud-init``: path to base of cloud-init tree (default is '.') -* ``--deb``: path to write output deb to (default is '.') -* ``--packaging-branch``: import the ``debian/`` packaging directory - from the specified branch (default: ``ubuntu/devel``) instead of using - the packaging template. - -Setup Image ------------ - -By default an image that is used will remain unmodified, but certain -scenarios may require image modification. For example, many images may use -a much older cloud-init. As a result tests looking at newer functionality -will fail because a newer version of cloud-init may be required. The -following options can be used for further customization: - -* ``--deb``: install the specified deb into the image -* ``--rpm``: install the specified rpm into the image -* ``--repo``: enable a repository and upgrade cloud-init afterwards -* ``--ppa``: enable a ppa and upgrade cloud-init afterwards -* ``--upgrade``: upgrade cloud-init from repos -* ``--upgrade-full``: run a full system upgrade -* ``--script``: execute a script in the image. 
This can perform any setup - required that is not covered by the other options - -Test Case Development -===================== - -Overview --------- - -As a test writer you need to develop a test configuration and a -verification file: - - * The test configuration specifies a specific cloud-config to be used by - cloud-init and a list of arbitrary commands to capture the output of - (e.g my_test.yaml) - - * The verification file runs tests on the collected output to determine - the result of the test (e.g. my_test.py) - -The names must match, however the extensions will of course be different, -yaml vs py. - -Configuration -------------- - -The test configuration is a YAML file such as *ntp_server.yaml* below: - -.. code-block:: yaml - - # - # Empty NTP config to setup using defaults - # - # NOTE: this should not require apt feature, use 'which' rather than 'dpkg -l' - # NOTE: this should not require no_ntpdate feature, use 'which' to check for - # installation rather than 'dpkg -l', as 'grep ntp' matches 'ntpdate' - # NOTE: the verifier should check for any ntp server not 'ubuntu.pool.ntp.org' - cloud_config: | - #cloud-config - ntp: - servers: - - pool.ntp.org - required_features: - - apt - - no_ntpdate - - ubuntu_ntp - collect_scripts: - ntp_installed_servers: | - #!/bin/bash - dpkg -l | grep ntp | wc -l - ntp_conf_dist_servers: | - #!/bin/bash - ls /etc/ntp.conf.dist | wc -l - ntp_conf_servers: | - #!/bin/bash - cat /etc/ntp.conf | grep '^server' - -There are several keys, 1 required and some optional, in the YAML file: - -1. The required key is ``cloud_config``. This should be a string of valid - YAML that is exactly what would normally be placed in a cloud-config - file, including the cloud-config header. This essentially sets up the - scenario under test. - -2. One optional key is ``collect_scripts``. This key has one or more - sub-keys containing strings of arbitrary commands to execute (e.g. - ```cat /var/log/cloud-config-output.log```). In the example above the - output of dpkg is captured, grep for ntp, and the number of lines - reported. The name of the sub-key is important. The sub-key is used by - the verification script to recall the output of the commands ran. - -3. The optional ``enabled`` key enables or disables the test case. By - default the test case will be enabled. - -4. The optional ``required_features`` key may be used to specify a list - of features flags that an image must have to be able to run the test - case. For example, if a test case relies on an image supporting apt, - then the config for the test case should include ``required_features: - [ apt ]``. - - -Default Collect Scripts ------------------------ - -By default the following files will be collected for every test. There is -no need to specify these items: - -* ``/var/log/cloud-init.log`` -* ``/var/log/cloud-init-output.log`` -* ``/run/cloud-init/.instance-id`` -* ``/run/cloud-init/result.json`` -* ``/run/cloud-init/status.json`` -* ```dpkg-query -W -f='${Version}' cloud-init``` - -Verification ------------- - -The verification script is a Python file with unit tests like the one, -`ntp_server.py`, below: - -.. code-block:: python - - # This file is part of cloud-init. See LICENSE file for license information. 
- - """cloud-init Integration Test Verify Script""" - from tests.cloud_tests.testcases import base - - - class TestNtp(base.CloudTestCase): - """Test ntp module""" - - def test_ntp_installed(self): - """Test ntp installed""" - out = self.get_data_file('ntp_installed_empty') - self.assertEqual(1, int(out)) - - def test_ntp_dist_entries(self): - """Test dist config file has one entry""" - out = self.get_data_file('ntp_conf_dist_empty') - self.assertEqual(1, int(out)) - - def test_ntp_entires(self): - """Test config entries""" - out = self.get_data_file('ntp_conf_empty') - self.assertIn('pool 0.ubuntu.pool.ntp.org iburst', out) - self.assertIn('pool 1.ubuntu.pool.ntp.org iburst', out) - self.assertIn('pool 2.ubuntu.pool.ntp.org iburst', out) - self.assertIn('pool 3.ubuntu.pool.ntp.org iburst', out) - - # vi: ts=4 expandtab - - -Here is a breakdown of the unit test file: - -* The import statement allows access to the output files. - -* The class can be named anything, but must import the - ``base.CloudTestCase``, either directly or via another test class. - -* There can be 1 to N number of functions with any name, however only - functions starting with ``test_*`` will be executed. - -* There can be 1 to N number of classes in a test module, however only - classes inheriting from ``base.CloudTestCase`` will be loaded. - -* Output from the commands can be accessed via - ``self.get_data_file('key')`` where key is the sub-key of - ``collect_scripts`` above. - -* The cloud config that the test ran with can be accessed via - ``self.cloud_config``, or any entry from the cloud config can be accessed - via ``self.get_config_entry('key')``. - -* See the base ``CloudTestCase`` for additional helper functions. - -Layout ------- - -Integration tests are located under the `tests/cloud_tests` directory. -Test configurations are placed under `configs` and the test verification -scripts under `testcases`: - -.. code-block:: shell-session - - cloud-init$ tree -d tests/cloud_tests/ - tests/cloud_tests/ - ├── configs - │   ├── bugs - │   ├── examples - │   ├── main - │   └── modules - └── testcases - ├── bugs - ├── examples - ├── main - └── modules - -The sub-folders of bugs, examples, main, and modules help organize the -tests. View the README.md in each to understand in more detail each -directory. - -Test Creation Helper --------------------- - -The integration testing suite has a built in helper to aid in test -development. Help can be invoked via ``tox -e citest -- create --help``. It -can create a template test case config file with user data passed in from -the command line, as well as a template test case verifier module. - -The following would create a test case named ``example`` under the -``modules`` category with the given description, and cloud config data read -in from ``/tmp/user_data``. - -.. code-block:: shell-session - - $ tox -e citest -- create modules/example \ - -d "a simple example test case" -c "$(< /tmp/user_data)" - - -Development Checklist ---------------------- - -* Configuration File - * Named 'your_test.yaml' - * Contains at least a valid cloud-config - * Optionally, commands to capture additional output - * Valid YAML - * Placed in the appropriate sub-folder in the configs directory - * Any image features required for the test are specified -* Verification File - * Named 'your_test.py' - * Valid unit tests validating output collected - * Passes pylint & pep8 checks - * Placed in the appropriate sub-folder in the test cases directory -* Tested by running the test: - - .. 
code-block:: shell-session - - $ tox -e citest -- run -verbose \ - --os-name \ - --test modules/your_test.yaml \ - [--deb ] - - -Platforms -========= - -EC2 ---- -To run on the EC2 platform it is required that the user has an AWS credentials -configuration file specifying his or her access keys and a default region. -These configuration files are the standard that the AWS cli and other AWS -tools utilize for interacting directly with AWS itself and are normally -generated when running ``aws configure``: - -.. code-block:: shell-session - - $ cat $HOME/.aws/credentials - [default] - aws_access_key_id = - aws_secret_access_key = - -.. code-block:: shell-session - - $ cat $HOME/.aws/config - [default] - region = us-west-2 - - -Azure Cloud ------------ - -To run on Azure Cloud platform users login with Service Principal and export -credentials file. Region is defaulted and can be set in -``tests/cloud_tests/platforms.yaml``. The Service Principal credentials are -the standard authentication for Azure SDK to interact with Azure Services: - -Create Service Principal account or login - -.. code-block:: shell-session - - $ az ad sp create-for-rbac --name "APP_ID" --password "STRONG-SECRET-PASSWORD" - -.. code-block:: shell-session - - $ az login --service-principal --username "APP_ID" --password "STRONG-SECRET-PASSWORD" - -Export credentials - -.. code-block:: shell-session - - $ az ad sp create-for-rbac --sdk-auth > $HOME/.azure/credentials.json - -.. code-block:: json - - { - "clientId": "", - "clientSecret": "", - "subscriptionId": "", - "tenantId": "", - "activeDirectoryEndpointUrl": "https://login.microsoftonline.com", - "resourceManagerEndpointUrl": "https://management.azure.com/", - "activeDirectoryGraphResourceId": "https://graph.windows.net/", - "sqlManagementEndpointUrl": "https://management.core.windows.net:8443/", - "galleryEndpointUrl": "https://gallery.azure.com/", - "managementEndpointUrl": "https://management.core.windows.net/" - } - -Set region in platforms.yaml - -.. code-block:: yaml - - azurecloud: - enabled: true - region: West US 2 - vm_size: Standard_DS1_v2 - storage_sku: standard_lrs - tag: ci - - -Architecture -============ - -The following section outlines the high-level architecture of the -integration process. - -Overview --------- -The process flow during a complete end-to-end LXD-backed test. - -1. Configuration - * The back end and specific distro releases are verified as supported - * The test or tests that need to be run are determined either by - directory or by individual yaml - -2. Image Creation - * Acquire the request LXD image - * Install the specified cloud-init package - * Clean the image so that it does not appear to have been booted - * A snapshot of the image is created and reused by all tests - -3. Configuration - * For each test, the cloud-config is injected into a copy of the - snapshot and booted - * The framework waits for ``/var/lib/cloud/instance/boot-finished`` - (up to 120 seconds) - * All default commands are ran and output collected - * Any commands the user specified are executed and output collected - -4. Verification - * The default commands are checked for any failures, errors, and - warnings to validate basic functionality of cloud-init completed - successfully - * The user generated unit tests are then ran validating against the - collected output - -5. 
Results - * If any failures were detected the test suite returns a failure - * Results can be dumped in yaml format to a specified file using the - ``-r .yaml`` option - -Configuring the Test Suite --------------------------- - -Most of the behavior of the test suite is configurable through several yaml -files. These control the behavior of the test suite's platforms, images, and -tests. The main config files for platforms, images and test cases are -``platforms.yaml``, ``releases.yaml`` and ``testcases.yaml``. - -Config handling -^^^^^^^^^^^^^^^ - -All configurable parts of the test suite use a defaults + overrides system -for managing config entries. All base config items are dictionaries. - -Merging is done on a key-by-key basis, with all keys in the default and -override represented in the final result. If a key exists both in -the defaults and the overrides, then the behavior depends on the type of data -the key refers to. If it is atomic data or a list, then the overrides will -replace the default. If the data is a dictionary then the value will be the -result of merging that dictionary from the default config and that -dictionary from the overrides. - -Merging is done using the function -``tests.cloud_tests.config.merge_config``, which can be examined for more -detail on config merging behavior. - -The following demonstrates merge behavior: - -.. code-block:: yaml - - defaults: - list_item: - - list_entry_1 - - list_entry_2 - int_item_1: 123 - int_item_2: 234 - dict_item: - subkey_1: 1 - subkey_2: 2 - subkey_dict: - subsubkey_1: a - subsubkey_2: b - - overrides: - list_item: - - overridden_list_entry - int_item_1: 0 - dict_item: - subkey_2: false - subkey_dict: - subsubkey_2: 'new value' - - result: - list_item: - - overridden_list_entry - int_item_1: 0 - int_item_2: 234 - dict_item: - subkey_1: 1 - subkey_2: false - subkey_dict: - subsubkey_1: a - subsubkey_2: 'new value' - - -Image Config ------------- - -Image configuration is handled in ``releases.yaml``. The image configuration -controls how platforms locate and acquire images, how the platforms should -interact with the images, how platforms should detect when an image has -fully booted, any options that are required to set the image up, and -features that the image supports. - -Since settings for locating an image and interacting with it differ from -platform to platform, there are 4 levels of settings available for images on -top of the default image settings. The structure of the image config file -is: - -.. code-block:: yaml - - default_release_config: - default: - ... - : - ... - : - ... - - releases: - : - : - ... - : - ... - : - ... - - -The base config is created from the overall defaults and the overrides for -the platform. The overrides are created from the default config for the -image and the platform specific overrides for the image. - -System Boot -^^^^^^^^^^^ - -The test suite must be able to test if a system has fully booted and if -cloud-init has finished running, so that running collect scripts does not -race against the target image booting. This is done using the -``system_ready_script`` and ``cloud_init_ready_script`` image config keys. - -Each of these keys accepts a small bash test statement as a string that must -return 0 or 1. Since this test statement will be added into a larger bash -statement it must be a single statement using the ``[`` test syntax. - -The default image config provides a system ready script that works for any -systemd based image. 
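For illustration, an override for an image that is not systemd based might look roughly like the following ``releases.yaml`` fragment. The key names (``system_ready_script``, ``cloud_init_ready_script``) are the ones described above; the release name and the boot-complete marker file are hypothetical, and each value is a single ``[`` test statement as required.

.. code-block:: yaml

    releases:
      my-sysvinit-release:
        default:
          # hypothetical marker file written by the image's init system
          system_ready_script: |
            [ -f /run/my-distro/boot-complete ]
          # cloud-init is done once it has written result.json
          cloud_init_ready_script: |
            [ -f /run/cloud-init/result.json ]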
If the image is not systemd based, then a different -test statement must be provided, as in the hypothetical override sketched above. The default config also provides a test -for whether or not cloud-init has finished, which checks for the file -``/run/cloud-init/result.json``. This should be sufficient for most systems, -as writing this file is one of the last things cloud-init does. - -The setting ``boot_timeout`` controls how long, in seconds, the platform -should wait for an image to boot. If the system ready script has not -indicated that the system is fully booted within this time, an error will be -raised. - -Feature Flags -^^^^^^^^^^^^^ - -Not all test cases can work on all images due to features the test case -requires not being present on that image. If a test case requires features -in an image that are not likely to be present across all distros and -platforms that the test suite supports, then the test can be skipped -everywhere it is not supported. - -Feature flags are names for features that are supported on some images, but -not all, and that may be required by test cases. Configuration for feature -flags is provided in ``releases.yaml`` under the ``features`` top level key. The -features config includes a list of all currently defined feature flags, -their meanings, and a list of feature groups. - -Feature groups are groups of features that many images have in common. For -example, the ``ubuntu_specific`` feature group includes features that -should be present across most Ubuntu releases, but may or may not be for -other distros. Feature groups are specified for an image as a list under -the key ``feature_groups``. - -An image's feature flags are derived from the feature groups that the -image has and any feature overrides provided (a short ``releases.yaml`` sketch -follows below). Feature overrides can be -specified under the ``features`` key, which accepts a dictionary of -``{: true/false}`` mappings. If a feature is omitted from an -image's feature flags or set to false in the overrides, then the test suite -will skip any tests that require that feature when using that image. - -Feature flags may be overridden at run time using the ``--feature-override`` -command line argument. It accepts a feature flag and value to set in the -format ``=true/false``. Multiple ``--feature-override`` -flags can be used, and will all be applied to all feature flags for images -used during a test. - -Setup Overrides -^^^^^^^^^^^^^^^ - -If an image requires some of the options for image setup to be used, then it -may specify overrides for the command line arguments passed into setup -image. These may be specified as a dictionary under the ``setup_overrides`` -key. When an image is set up, the arguments that control how it is set up -will be the arguments from the command line, with any entries in -``setup_overrides`` used to override these arguments. - -For example, images that do not come with cloud-init already installed -should have ``setup_overrides: {upgrade: true}`` specified so that in the -event that no additional setup options are given, cloud-init will be -installed from the image's repos before running tests. Note that if other -options such as ``--deb`` are passed in on the command line, these will -still work as expected, since apt's policy for cloud-init would prefer the -locally installed deb over an older version from the repos. - -Platform Specific Options -^^^^^^^^^^^^^^^^^^^^^^^^^ - -There are many platform specific options in image configuration that allow -platforms to locate images and that control additional setup that the -platform may have to do to make the image usable.
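As a rough sketch of where the feature settings described above live, the fragment below shows a ``features`` section together with a release that joins two feature groups and then disables one flag via an override. The flag, group, and release names are illustrative only; the real definitions are in ``releases.yaml``.

.. code-block:: yaml

    features:
      all:
        - apt
        - snap
      groups:
        debian_base:
          apt: true
        ubuntu_specific:
          snap: true

    releases:
      my-release:
        default:
          feature_groups:        # flags inherited from these groups
            - debian_base
            - ubuntu_specific
          features:
            snap: false          # per-image override wins

With this configuration the image ends up with ``apt: true`` and ``snap: false``, so any test case requiring ``snap`` is skipped for it, as described above.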
For information on how -these work, please consult the documentation for that platform in the -integration testing suite and the ``releases.yaml`` file for examples. - -Error Handling --------------- - -The test suite makes an attempt to run as many tests as possible even in the -event of some failing so that automated runs collect as much data as -possible. In the event that something goes wrong while setting up for or -running a test, the test suite will attempt to continue running any tests -which have not been affected by the error. - -For example, if the test suite was told to run tests on one platform for two -releases and an error occurred setting up the first image, all tests for -that image would be skipped, and the test suite would continue to set up -the second image and run tests on it. Or, if the system does not start -properly for one test case out of many to run on that image, that test case -will be skipped and the next one will be run. - -Note that if any errors occur, the test suite will record the failure and -where it occurred in the result data and write it out to the specified -result file. - -Results -------- - -The test suite generates result data that includes how long each stage of -the test suite took and which parts were and were not successful. This data -is dumped to the log after the collect and verify stages, and may also be -written out in yaml format to a file. If part of the setup failed, the -traceback for the failure and the error message will be included in the -result file. If a test verifier finds a problem with the collected data -from a test run, the class, test function and test will be recorded in the -result data. - -Exit Codes -^^^^^^^^^^ - -The test suite counts how many errors occur throughout a run. The exit code -after a run is the number of errors that occurred. If the exit code is -non-zero then something is wrong either with the test suite, the -configuration for an image, a test case, or cloud-init itself. - -Note that the exit code does not always directly correspond to the number -of failed test cases, since in some cases, a single error during image setup -can mean that several test cases are not run. If run is used, then the exit -code will be the sum of the number of errors in the collect and verify -stages. - -Data Dir -^^^^^^^^ - -When using run, the collected data is written into a temporary directory. In -the event that all tests pass, this directory is deleted, but if a test -fails or an error occurs, this data will be left in place, and a message -will be written to the log giving the location of the data. diff --git a/tests/cloud_tests/__init__.py b/tests/cloud_tests/__init__.py deleted file mode 100644 index 6c632f99..00000000 --- a/tests/cloud_tests/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Main init.""" - -import logging -import os - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -TESTCASES_DIR = os.path.join(BASE_DIR, 'testcases') -TEST_CONF_DIR = os.path.join(BASE_DIR, 'testcases') -TREE_BASE = os.sep.join(BASE_DIR.split(os.sep)[:-2]) - -# This domain contains reverse lookups for hostnames that are used. -# The primary reason is so sudo will return quickly when it attempts -# to look up the hostname. i9n is just short for 'integration'. -# see also bug 1730744 for why we had to do this. 
-CI_DOMAIN = "i9n.cloud-init.io" - - -def _initialize_logging(): - """Configure logging for cloud_tests.""" - logger = logging.getLogger(__name__) - logger.setLevel(logging.DEBUG) - formatter = logging.Formatter( - '%(asctime)s - %(pathname)s:%(funcName)s:%(lineno)s ' - '[%(levelname)s]: %(message)s') - - console = logging.StreamHandler() - console.setLevel(logging.DEBUG) - console.setFormatter(formatter) - - logger.addHandler(console) - - return logger - - -LOG = _initialize_logging() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/__main__.py b/tests/cloud_tests/__main__.py deleted file mode 100644 index 7ee29cad..00000000 --- a/tests/cloud_tests/__main__.py +++ /dev/null @@ -1,71 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Main entry point.""" - -import argparse -import logging -import os -import sys - -from tests.cloud_tests import args, bddeb, collect, manage, run_funcs, verify -from tests.cloud_tests import LOG - - -def configure_log(args): - """Configure logging.""" - level = logging.INFO - if args.verbose: - level = logging.DEBUG - elif args.quiet: - level = logging.WARN - LOG.setLevel(level) - - -def main(): - """Entry point for cloud test suite.""" - # configure parser - parser = argparse.ArgumentParser(prog='cloud_tests') - subparsers = parser.add_subparsers(dest="subcmd") - subparsers.required = True - - def add_subparser(name, description, arg_sets): - """Add arguments to subparser.""" - subparser = subparsers.add_parser(name, help=description) - for (_args, _kwargs) in (a for arg_set in arg_sets for a in arg_set): - subparser.add_argument(*_args, **_kwargs) - - # configure subparsers - for (name, (description, arg_sets)) in args.SUBCMDS.items(): - add_subparser(name, description, - [args.ARG_SETS[arg_set] for arg_set in arg_sets]) - - # parse arguments - parsed = parser.parse_args() - - # process arguments - configure_log(parsed) - (_, arg_sets) = args.SUBCMDS[parsed.subcmd] - for normalizer in [args.NORMALIZERS[arg_set] for arg_set in arg_sets]: - parsed = normalizer(parsed) - if not parsed: - return -1 - - # run handler - LOG.debug('running with args: %s', parsed) - return { - 'bddeb': bddeb.bddeb, - 'collect': collect.collect, - 'create': manage.create, - 'run': run_funcs.run, - 'tree_collect': run_funcs.tree_collect, - 'tree_run': run_funcs.tree_run, - 'verify': verify.verify, - }[parsed.subcmd](parsed) - - -if __name__ == "__main__": - if os.geteuid() == 0: - sys.exit('Do not run as root') - sys.exit(main()) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/args.py b/tests/cloud_tests/args.py deleted file mode 100644 index ab345491..00000000 --- a/tests/cloud_tests/args.py +++ /dev/null @@ -1,304 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""Argparse argument setup and sanitization.""" - -import os - -from tests.cloud_tests import config, util -from tests.cloud_tests import LOG, TREE_BASE - -ARG_SETS = { - 'BDDEB': ( - (('--bddeb-args',), - {'help': 'args to pass through to bddeb', - 'action': 'store', 'default': None, 'required': False}), - (('--build-os',), - {'help': 'OS to use as build system (default is xenial)', - 'action': 'store', 'choices': config.ENABLED_DISTROS, - 'default': 'xenial', 'required': False}), - (('--build-platform',), - {'help': 'platform to use for build system (default is lxd)', - 'action': 'store', 'choices': config.ENABLED_PLATFORMS, - 'default': 'lxd', 'required': False}), - (('--cloud-init',), - {'help': 'path to base of cloud-init tree', 'metavar': 'DIR', - 'action': 'store', 'required': False, 'default': TREE_BASE}),), - 'COLLECT': ( - (('-p', '--platform'), - {'help': 'platform(s) to run tests on', 'metavar': 'PLATFORM', - 'action': 'append', 'choices': config.ENABLED_PLATFORMS, - 'default': []}), - (('-n', '--os-name'), - {'help': 'the name(s) of the OS(s) to test', 'metavar': 'NAME', - 'action': 'append', 'choices': config.ENABLED_DISTROS, - 'default': []}), - (('-t', '--test-config'), - {'help': 'test config file(s) to use', 'metavar': 'FILE', - 'action': 'append', 'default': []}), - (('--feature-override',), - {'help': 'feature flags override(s), =', - 'action': 'append', 'default': [], 'required': False}),), - 'CREATE': ( - (('-c', '--config'), - {'help': 'cloud-config yaml for testcase', 'metavar': 'DATA', - 'action': 'store', 'required': False, 'default': None}), - (('-e', '--enable'), - {'help': 'enable testcase', 'required': False, 'default': False, - 'action': 'store_true'}), - (('name',), - {'help': 'testcase name, in format "/"', - 'action': 'store'}), - (('-d', '--description'), - {'help': 'description of testcase', 'required': False}), - (('-f', '--force'), - {'help': 'overwrite already existing test', 'required': False, - 'action': 'store_true', 'default': False}),), - 'INTERFACE': ( - (('-v', '--verbose'), - {'help': 'verbose output', 'action': 'store_true', 'default': False}), - (('-q', '--quiet'), - {'help': 'quiet output', 'action': 'store_true', 'default': False}),), - 'OUTPUT': ( - (('-d', '--data-dir'), - {'help': 'directory to store test data in', - 'action': 'store', 'metavar': 'DIR', 'required': False}), - (('--preserve-instance',), - {'help': 'do not destroy the instance under test', - 'action': 'store_true', 'default': False, 'required': False}), - (('--preserve-data',), - {'help': 'do not remove collected data after successful run', - 'action': 'store_true', 'default': False, 'required': False}),), - 'OUTPUT_DEB': ( - (('--deb',), - {'help': 'path to write output deb to', 'metavar': 'FILE', - 'action': 'store', 'required': False, - 'default': 'cloud-init_all.deb'}),), - 'RESULT': ( - (('-r', '--result'), - {'help': 'file to write results to', - 'action': 'store', 'metavar': 'FILE'}),), - 'SETUP': ( - (('--deb',), - {'help': 'install deb', 'metavar': 'FILE', 'action': 'store'}), - (('--rpm',), - {'help': 'install rpm', 'metavar': 'FILE', 'action': 'store'}), - (('--script',), - {'help': 'script to set up image', 'metavar': 'DATA', - 'action': 'store'}), - (('--repo',), - {'help': 'repo to enable (implies -u)', 'metavar': 'NAME', - 'action': 'store'}), - (('--ppa',), - {'help': 'ppa to enable (implies -u)', 'metavar': 'NAME', - 'action': 'store'}), - (('-u', '--upgrade'), - {'help': 'upgrade or install cloud-init from repo', - 'action': 'store_true', 'default': 
False}), - (('--upgrade-full',), - {'help': 'do full system upgrade from repo (implies -u)', - 'action': 'store_true', 'default': False}),), - -} - -SUBCMDS = { - 'bddeb': ('build cloud-init deb from tree', - ('BDDEB', 'OUTPUT_DEB', 'INTERFACE')), - 'collect': ('collect test data', - ('COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT', 'SETUP')), - 'create': ('create new test case', ('CREATE', 'INTERFACE')), - 'run': ('run test suite', - ('COLLECT', 'INTERFACE', 'RESULT', 'OUTPUT', 'SETUP')), - 'tree_collect': ('collect using current working tree', - ('BDDEB', 'COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT')), - 'tree_run': ('run using current working tree', - ('BDDEB', 'COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT')), - 'verify': ('verify test data', ('INTERFACE', 'OUTPUT', 'RESULT')), -} - - -def _empty_normalizer(args): - """Do not normalize arguments.""" - return args - - -def normalize_bddeb_args(args): - """Normalize BDDEB arguments. - - @param args: parsed args - @return_value: updated args, or None if errors encountered - """ - # make sure cloud-init dir is accessible - if not (args.cloud_init and os.path.isdir(args.cloud_init)): - LOG.error('invalid cloud-init tree path') - return None - - return args - - -def normalize_create_args(args): - """Normalize CREATE arguments. - - @param args: parsed args - @return_value: updated args, or None if errors occurred - """ - # ensure valid name for new test - if len(args.name.split('/')) != 2: - LOG.error('invalid test name: %s', args.name) - return None - if os.path.exists(config.name_to_path(args.name)): - msg = 'test: {} already exists'.format(args.name) - if args.force: - LOG.warning('%s but ignoring due to --force', msg) - else: - LOG.error(msg) - return None - - # ensure test config valid if specified - if isinstance(args.config, str) and len(args.config) == 0: - LOG.error('test config cannot be empty if specified') - return None - - # ensure description valid if specified - if (isinstance(args.description, str) and - (len(args.description) > 70 or len(args.description) == 0)): - LOG.error('test description must be between 1 and 70 characters') - return None - - return args - - -def normalize_collect_args(args): - """Normalize COLLECT arguments. 
- - @param args: parsed args - @return_value: updated args, or None if errors occurred - """ - # platform should default to lxd - if len(args.platform) == 0: - args.platform = ['lxd'] - args.platform = util.sorted_unique(args.platform) - - # os name should default to all enabled - # if os name is provided ensure that all provided are supported - if len(args.os_name) == 0: - args.os_name = config.ENABLED_DISTROS - else: - supported = config.ENABLED_DISTROS - invalid = [os_name for os_name in args.os_name - if os_name not in supported] - if len(invalid) != 0: - LOG.error('invalid os name(s): %s', invalid) - return None - args.os_name = util.sorted_unique(args.os_name) - - # test configs should default to all enabled - # if test configs are provided, ensure that all provided are valid - if len(args.test_config) == 0: - args.test_config = config.list_test_configs() - else: - valid = [] - invalid = [] - for name in args.test_config: - if os.path.exists(name): - valid.append(name) - elif os.path.exists(config.name_to_path(name)): - valid.append(config.name_to_path(name)) - else: - invalid.append(name) - if len(invalid) != 0: - LOG.error('invalid test config(s): %s', invalid) - return None - else: - args.test_config = valid - args.test_config = util.sorted_unique(args.test_config) - - # parse feature flag overrides and ensure all are valid - if args.feature_override: - overrides = args.feature_override - args.feature_override = util.parse_conf_list( - overrides, boolean=True, valid=config.list_feature_flags()) - if not args.feature_override: - LOG.error('invalid feature flag override(s): %s', overrides) - return None - else: - args.feature_override = {} - - return args - - -def normalize_output_args(args): - """Normalize OUTPUT arguments. - - @param args: parsed args - @return_value: updated args, or None if errors occurred - """ - if args.data_dir: - args.data_dir = os.path.abspath(args.data_dir) - if not os.path.exists(args.data_dir): - os.mkdir(args.data_dir) - - if not args.data_dir: - args.data_dir = None - - # ensure clean output dir if collect - # ensure data exists if verify - if args.subcmd == 'collect': - if not util.is_clean_writable_dir(args.data_dir): - LOG.error('data_dir must be empty/new and must be writable') - return None - - return args - - -def normalize_output_deb_args(args): - """Normalize OUTPUT_DEB arguments. - - @param args: parsed args - @return_value: updated args, or None if erros occurred - """ - # make sure to use abspath for deb - args.deb = os.path.abspath(args.deb) - - if not args.deb.endswith('.deb'): - LOG.error('output filename does not end in ".deb"') - return None - - return args - - -def normalize_setup_args(args): - """Normalize SETUP arguments. 
- - @param args: parsed args - @return_value: updated_args, or None if errors occurred - """ - # ensure deb or rpm valid if specified - for pkg in (args.deb, args.rpm): - if pkg is not None and not os.path.exists(pkg): - LOG.error('cannot find package: %s', pkg) - return None - - # if repo or ppa to be enabled run upgrade - if args.repo or args.ppa: - args.upgrade = True - - # if ppa is specified, remove leading 'ppa:' if any - _ppa_header = 'ppa:' - if args.ppa and args.ppa.startswith(_ppa_header): - args.ppa = args.ppa[len(_ppa_header):] - - return args - - -NORMALIZERS = { - 'BDDEB': normalize_bddeb_args, - 'COLLECT': normalize_collect_args, - 'CREATE': normalize_create_args, - 'INTERFACE': _empty_normalizer, - 'OUTPUT': normalize_output_args, - 'OUTPUT_DEB': normalize_output_deb_args, - 'RESULT': _empty_normalizer, - 'SETUP': normalize_setup_args, -} - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/bddeb.py b/tests/cloud_tests/bddeb.py deleted file mode 100644 index e45ad947..00000000 --- a/tests/cloud_tests/bddeb.py +++ /dev/null @@ -1,119 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Used to build a deb.""" - -from functools import partial -import os -import tempfile - -from cloudinit import subp -from tests.cloud_tests import (config, LOG) -from tests.cloud_tests import platforms -from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single) - -pre_reqs = ['devscripts', 'equivs', 'git', 'tar'] - - -def _out(cmd_res): - """Get clean output from cmd result.""" - return cmd_res[0].decode("utf-8").strip() - - -def build_deb(args, instance): - """Build deb on system and copy out to location at args.deb. - - @param args: cmdline arguments - @return_value: tuple of results and fail count - """ - # update remote system package list and install build deps - LOG.debug('installing pre-reqs') - pkgs = ' '.join(pre_reqs) - instance.execute('apt-get update && apt-get install --yes {}'.format(pkgs)) - - # local tmpfile that must be deleted - local_tarball = tempfile.NamedTemporaryFile().name - - # paths to use in remote system - output_link = '/root/cloud-init_all.deb' - remote_tarball = _out(instance.execute(['mktemp'])) - extract_dir = '/root' - bddeb_path = os.path.join(extract_dir, 'packages', 'bddeb') - git_env = {'GIT_DIR': os.path.join(extract_dir, '.git'), - 'GIT_WORK_TREE': extract_dir} - - LOG.debug('creating tarball of cloud-init at: %s', local_tarball) - subp.subp(['tar', 'cf', local_tarball, '--owner', 'root', - '--group', 'root', '-C', args.cloud_init, '.']) - LOG.debug('copying to remote system at: %s', remote_tarball) - instance.push_file(local_tarball, remote_tarball) - - LOG.debug('extracting tarball in remote system at: %s', extract_dir) - instance.execute(['tar', 'xf', remote_tarball, '-C', extract_dir]) - instance.execute(['git', 'commit', '-a', '-m', 'tmp', '--allow-empty'], - env=git_env) - - LOG.debug('installing deps') - deps_path = os.path.join(extract_dir, 'tools', 'read-dependencies') - instance.execute([deps_path, '--install', '--test-distro', - '--distro', 'ubuntu']) - - LOG.debug('building deb in remote system at: %s', output_link) - bddeb_args = args.bddeb_args.split() if args.bddeb_args else [] - instance.execute([bddeb_path, '-d'] + bddeb_args, env=git_env) - - # copy the deb back to the host system - LOG.debug('copying built deb to host at: %s', args.deb) - instance.pull_file(output_link, args.deb) - - -def setup_build(args): - """Set build system up then run build. 
- - @param args: cmdline arguments - @return_value: tuple of results and fail count - """ - res = ({}, 1) - - # set up platform - LOG.info('setting up platform: %s', args.build_platform) - platform_config = config.load_platform_config(args.build_platform) - platform_call = partial(platforms.get_platform, args.build_platform, - platform_config) - with PlatformComponent(platform_call) as platform: - - # set up image - LOG.info('acquiring image for os: %s', args.build_os) - img_conf = config.load_os_config(platform.platform_name, args.build_os) - image_call = partial(platforms.get_image, platform, img_conf) - with PlatformComponent(image_call) as image: - - # set up snapshot - snapshot_call = partial(platforms.get_snapshot, image) - with PlatformComponent(snapshot_call) as snapshot: - - # create instance with cloud-config to set it up - LOG.info('creating instance to build deb in') - empty_cloud_config = "#cloud-config\n{}" - instance_call = partial( - platforms.get_instance, snapshot, empty_cloud_config, - use_desc='build cloud-init deb') - with PlatformComponent(instance_call) as instance: - - # build the deb - res = run_single('build deb on system', - partial(build_deb, args, instance)) - - return res - - -def bddeb(args): - """Entry point for build deb. - - @param args: cmdline arguments - @return_value: fail count - """ - LOG.info('preparing to build cloud-init deb') - _res, failed = run_stage('build deb', [partial(setup_build, args)]) - return failed - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/collect.py b/tests/cloud_tests/collect.py deleted file mode 100644 index 642745d8..00000000 --- a/tests/cloud_tests/collect.py +++ /dev/null @@ -1,219 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Used to collect data from platforms during tests.""" - -from functools import partial -import os - -from cloudinit import util as c_util -from tests.cloud_tests import (config, LOG, setup_image, util) -from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single) -from tests.cloud_tests import platforms -from tests.cloud_tests.testcases import base, get_test_class - - -def collect_script(instance, base_dir, script, script_name): - """Collect script data. - - @param instance: instance to run script on - @param base_dir: base directory for output data - @param script: script contents - @param script_name: name of script to run - @return_value: None, may raise errors - """ - LOG.debug('running collect script: %s', script_name) - (out, err, exit) = instance.run_script( - script.encode(), rcs=False, - description='collect: {}'.format(script_name)) - if err: - LOG.debug("collect script %s exited '%s' and had stderr: %s", - script_name, err, exit) - if not isinstance(out, bytes): - raise util.PlatformError( - "Collection of '%s' returned type %s, expected bytes: %s" % - (script_name, type(out), out)) - - c_util.write_file(os.path.join(base_dir, script_name), out) - - -def collect_console(instance, base_dir): - """Collect instance console log. - - @param instance: instance to get console log for - @param base_dir: directory to write console log to - """ - logfile = os.path.join(base_dir, 'console.log') - LOG.debug('getting console log for %s to %s', instance.name, logfile) - try: - data = instance.console_log() - except NotImplementedError as e: - # args[0] is hacky, but thats all I see to get at the message. 
- data = b'NotImplementedError:' + e.args[0].encode() - with open(logfile, "wb") as fp: - fp.write(data) - - -def collect_test_data(args, snapshot, os_name, test_name): - """Collect data for test case. - - @param args: cmdline arguments - @param snapshot: instantiated snapshot - @param test_name: name or path of test to run - @return_value: tuple of results and fail count - """ - res = ({}, 1) - - # load test config - test_name_in = test_name - test_name = config.path_to_name(test_name) - test_config = config.load_test_config(test_name) - user_data = test_config['cloud_config'] - test_scripts = test_config['collect_scripts'] - test_output_dir = os.sep.join( - (args.data_dir, snapshot.platform_name, os_name, test_name)) - - # if test is not enabled, skip and return 0 failures - if not test_config.get('enabled', False): - LOG.warning('test config %s is not enabled, skipping', test_name) - return ({}, 0) - - test_class = get_test_class( - config.name_to_module(test_name_in), - test_data={'platform': snapshot.platform_name, 'os_name': os_name}, - test_conf=test_config['cloud_config']) - try: - test_class.maybeSkipTest() - except base.SkipTest as s: - LOG.warning('skipping test config %s: %s', test_name, s) - return ({}, 0) - - # if testcase requires a feature flag that the image does not support, - # skip the testcase with a warning - req_features = test_config.get('required_features', []) - if any(feature not in snapshot.features for feature in req_features): - LOG.warning('test config %s requires features not supported by image, ' - 'skipping.\nrequired features: %s\nsupported features: %s', - test_name, req_features, snapshot.features) - return ({}, 0) - - # if there are user data overrides required for this test case, apply them - overrides = snapshot.config.get('user_data_overrides', {}) - if overrides: - LOG.debug('updating user data for collect with: %s', overrides) - user_data = util.update_user_data(user_data, overrides) - - # create test instance - component = PlatformComponent( - partial(platforms.get_instance, snapshot, user_data, - block=True, start=False, use_desc=test_name), - preserve_instance=args.preserve_instance) - - LOG.info('collecting test data for test: %s', test_name) - with component as instance: - start_call = partial(run_single, 'boot instance', partial( - instance.start, wait=True, wait_for_cloud_init=True)) - collect_calls = [partial(run_single, 'script {}'.format(script_name), - partial(collect_script, instance, - test_output_dir, script, script_name)) - for script_name, script in test_scripts.items()] - - res = run_stage('collect for test: {}'.format(test_name), - [start_call] + collect_calls) - - instance.shutdown() - collect_console(instance, test_output_dir) - - return res - - -def collect_snapshot(args, image, os_name): - """Collect data for snapshot of image. - - @param args: cmdline arguments - @param image: instantiated image with set up complete - @return_value tuple of results and fail count - """ - res = ({}, 1) - - component = PlatformComponent(partial(platforms.get_snapshot, image)) - - LOG.debug('creating snapshot for %s', os_name) - with component as snapshot: - LOG.info('collecting test data for os: %s', os_name) - res = run_stage( - 'collect test data for {}'.format(os_name), - [partial(collect_test_data, args, snapshot, os_name, test_name) - for test_name in args.test_config]) - - return res - - -def collect_image(args, platform, os_name): - """Collect data for image. 
- - @param args: cmdline arguments - @param platform: instantiated platform - @param os_name: name of distro to collect for - @return_value: tuple of results and fail count - """ - res = ({}, 1) - - os_config = config.load_os_config( - platform.platform_name, os_name, require_enabled=True, - feature_overrides=args.feature_override) - LOG.debug('os config: %s', os_config) - component = PlatformComponent( - partial(platforms.get_image, platform, os_config)) - - LOG.info('acquiring image for os: %s', os_name) - with component as image: - res = run_stage('set up and collect data for os: {}'.format(os_name), - [partial(setup_image.setup_image, args, image)] + - [partial(collect_snapshot, args, image, os_name)], - continue_after_error=False) - - return res - - -def collect_platform(args, platform_name): - """Collect data for platform. - - @param args: cmdline arguments - @param platform_name: platform to collect for - @return_value: tuple of results and fail count - """ - res = ({}, 1) - - platform_config = config.load_platform_config( - platform_name, require_enabled=True) - platform_config['data_dir'] = args.data_dir - LOG.debug('platform config: %s', platform_config) - component = PlatformComponent( - partial(platforms.get_platform, platform_name, platform_config)) - - LOG.info('setting up platform: %s', platform_name) - with component as platform: - res = run_stage('collect for platform: {}'.format(platform_name), - [partial(collect_image, args, platform, os_name) - for os_name in args.os_name]) - - return res - - -def collect(args): - """Entry point for collection. - - @param args: cmdline arguments - @return_value: fail count - """ - (res, failed) = run_stage( - 'collect data', [partial(collect_platform, args, platform_name) - for platform_name in args.platform]) - - LOG.debug('collect stages: %s', res) - if args.result: - util.merge_results({'collect_stages': res}, args.result) - - return failed - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/config.py b/tests/cloud_tests/config.py deleted file mode 100644 index 06536edc..00000000 --- a/tests/cloud_tests/config.py +++ /dev/null @@ -1,165 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""Used to setup test configuration.""" - -import glob -import os - -from cloudinit import util as c_util -from tests.cloud_tests import (BASE_DIR, TEST_CONF_DIR) - -# conf files -CONF_EXT = '.yaml' -VERIFY_EXT = '.py' -PLATFORM_CONF = os.path.join(BASE_DIR, 'platforms.yaml') -RELEASES_CONF = os.path.join(BASE_DIR, 'releases.yaml') -TESTCASE_CONF = os.path.join(BASE_DIR, 'testcases.yaml') - - -def get(base, key): - """Get config entry 'key' from base, ensuring is dictionary.""" - return base[key] if key in base and base[key] is not None else {} - - -def enabled(config): - """Test if config item is enabled.""" - return isinstance(config, dict) and config.get('enabled', False) - - -def path_to_name(path): - """Convert abs or rel path to test config to path under 'sconfigs/'.""" - dir_path, file_name = os.path.split(os.path.normpath(path)) - name = os.path.splitext(file_name)[0] - return os.sep.join((os.path.basename(dir_path), name)) - - -def name_to_path(name): - """Convert test config path under configs/ to full config path.""" - name = os.path.normpath(name) - if not name.endswith(CONF_EXT): - name = name + CONF_EXT - return name if os.path.isabs(name) else os.path.join(TEST_CONF_DIR, name) - - -def name_sanitize(name): - """Sanitize test name to be used as a module name.""" - return name.replace('-', '_') - - -def name_to_module(name): - """Convert test name to a loadable module name under 'testcases/'.""" - name = name_sanitize(path_to_name(name)) - return name.replace(os.path.sep, '.') - - -def merge_config(base, override): - """Merge config and base.""" - res = base.copy() - res.update(override) - res.update({k: merge_config(base.get(k, {}), v) - for k, v in override.items() if isinstance(v, dict)}) - return res - - -def merge_feature_groups(feature_conf, feature_groups, overrides): - """Combine feature groups and overrides to construct a supported list. - - @param feature_conf: feature config from releases.yaml - @param feature_groups: feature groups the release is a member of - @param overrides: overrides specified by the release's config - @return_value: dict of {feature: true/false} settings - """ - res = dict().fromkeys(feature_conf['all']) - for group in feature_groups: - res.update(feature_conf['groups'][group]) - res.update(overrides) - return res - - -def load_platform_config(platform_name, require_enabled=False): - """Load configuration for platform. - - @param platform_name: name of platform to retrieve config for - @param require_enabled: if true, raise error if 'enabled' not True - @return_value: config dict - """ - main_conf = c_util.read_conf(PLATFORM_CONF) - conf = merge_config(main_conf['default_platform_config'], - main_conf['platforms'][platform_name]) - if require_enabled and not enabled(conf): - raise ValueError('Platform is not enabled') - return conf - - -def load_os_config(platform_name, os_name, require_enabled=False, - feature_overrides=None): - """Load configuration for os. 
- - @param platform_name: platform name to load os config for - @param os_name: name of os to retrieve config for - @param require_enabled: if true, raise error if 'enabled' not True - @param feature_overrides: feature flag overrides to merge with features - @return_value: config dict - """ - if feature_overrides is None: - feature_overrides = {} - main_conf = c_util.read_conf(RELEASES_CONF) - default = main_conf['default_release_config'] - image = main_conf['releases'][os_name] - conf = merge_config(merge_config(get(default, 'default'), - get(default, platform_name)), - merge_config(get(image, 'default'), - get(image, platform_name))) - - feature_conf = main_conf['features'] - feature_groups = conf.get('feature_groups', []) - overrides = merge_config(get(conf, 'features'), feature_overrides) - conf['arch'] = c_util.get_dpkg_architecture() - conf['features'] = merge_feature_groups( - feature_conf, feature_groups, overrides) - - if require_enabled and not enabled(conf): - raise ValueError('OS is not enabled') - return conf - - -def load_test_config(path): - """Load a test config file by either abs path or rel path.""" - return merge_config(c_util.read_conf(TESTCASE_CONF)['base_test_data'], - c_util.read_conf(name_to_path(path))) - - -def list_feature_flags(): - """List all supported feature flags.""" - feature_conf = get(c_util.read_conf(RELEASES_CONF), 'features') - return feature_conf.get('all', []) - - -def list_enabled_platforms(): - """List all platforms enabled for testing.""" - platforms = get(c_util.read_conf(PLATFORM_CONF), 'platforms') - return [k for k, v in platforms.items() if enabled(v)] - - -def list_enabled_distros(platforms): - """List all distros enabled for testing on specified platforms.""" - def platform_has_enabled(config): - """List if platform is enabled.""" - return any(enabled(merge_config(get(config, 'default'), - get(config, platform))) - for platform in platforms) - - releases = get(c_util.read_conf(RELEASES_CONF), 'releases') - return [k for k, v in releases.items() if platform_has_enabled(v)] - - -def list_test_configs(): - """List all available test config files by abspath.""" - return [os.path.abspath(f) for f in - glob.glob(os.sep.join((TEST_CONF_DIR, '*', '*.yaml')))] - - -ENABLED_PLATFORMS = sorted(list_enabled_platforms()) -ENABLED_DISTROS = sorted(list_enabled_distros(ENABLED_PLATFORMS)) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/manage.py b/tests/cloud_tests/manage.py deleted file mode 100644 index 5f0cfd23..00000000 --- a/tests/cloud_tests/manage.py +++ /dev/null @@ -1,74 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""Create test cases automatically given a user_data script.""" - -import os -import textwrap - -from cloudinit import util as c_util -from tests.cloud_tests.config import VERIFY_EXT -from tests.cloud_tests import (config, util) -from tests.cloud_tests import TESTCASES_DIR - - -_verifier_fmt = textwrap.dedent( - """ - \"\"\"cloud-init Integration Test Verify Script\"\"\" - from tests.cloud_tests.testcases import base - - - class {test_class}(base.CloudTestCase): - \"\"\" - Name: {test_name} - Category: {test_category} - Description: {test_description} - \"\"\" - pass - """ -).lstrip() -_config_fmt = textwrap.dedent( - """ - # - # Name: {test_name} - # Category: {test_category} - # Description: {test_description} - # - {config} - """ -).strip() - - -def write_testcase_config(args, fmt_args, testcase_file): - """Write the testcase config file.""" - testcase_config = {'enabled': args.enable, 'collect_scripts': {}} - if args.config: - testcase_config['cloud_config'] = args.config - fmt_args['config'] = util.yaml_format(testcase_config) - c_util.write_file(testcase_file, _config_fmt.format(**fmt_args), omode='w') - - -def write_verifier(args, fmt_args, verifier_file): - """Write the verifier script.""" - fmt_args['test_class'] = 'Test{}'.format( - config.name_sanitize(fmt_args['test_name']).title()) - c_util.write_file(verifier_file, - _verifier_fmt.format(**fmt_args), omode='w') - - -def create(args): - """Create a new testcase.""" - (test_category, test_name) = args.name.split('/') - fmt_args = {'test_name': test_name, 'test_category': test_category, - 'test_description': str(args.description)} - - testcase_file = config.name_to_path(args.name) - verifier_file = os.path.join( - TESTCASES_DIR, test_category, - config.name_sanitize(test_name) + VERIFY_EXT) - - write_testcase_config(args, fmt_args, testcase_file) - write_verifier(args, fmt_args, verifier_file) - - return 0 - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms.yaml b/tests/cloud_tests/platforms.yaml deleted file mode 100644 index eaaa0a71..00000000 --- a/tests/cloud_tests/platforms.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# ============================= Platform Config =============================== -default_platform_config: - # all disabled by default - enabled: false - # maximum time to retrieve image - get_image_timeout: 300 - # maximum time to create instance (before waiting for cloud-init) - create_instance_timeout: 60 - private_key: cloud_init_rsa - public_key: cloud_init_rsa.pub -platforms: - ec2: - enabled: true - instance-type: t2.micro - tag: cii - lxd: - enabled: true - # overrides for image templates - template_overrides: - /var/lib/cloud/seed/nocloud-net/meta-data: - when: - - create - - copy - template: cloud-init-meta.tpl - /var/lib/cloud/seed/nocloud-net/network-config: - when: - - create - - copy - template: cloud-init-network.tpl - /var/lib/cloud/seed/nocloud-net/user-data: - when: - - create - - copy - template: cloud-init-user.tpl - properties: - default: | - #cloud-config - {} - /var/lib/cloud/seed/nocloud-net/vendor-data: - when: - - create - - copy - template: cloud-init-vendor.tpl - properties: - default: | - #cloud-config - {} - # overrides image template files - template_files: - cloud-init-meta.tpl: | - #cloud-config - instance-id: {{ container.name }} - local-hostname: {{ container.name }} - {{ config_get("user.meta-data", "") }} - cloud-init-network.tpl: | - {% if config_get("user.network-config", "") == "" %}version: 1 - config: - - type: physical - name: eth0 - subnets: - - type: {% if 
config_get("user.network_mode", "") == "link-local" %}manual{% else %}dhcp{% endif %} - control: auto{% else %}{{ config_get("user.network-config", "") }}{% endif %} - cloud-init-user.tpl: | - {{ config_get("user.user-data", properties.default) }} - cloud-init-vendor.tpl: | - {{ config_get("user.vendor-data", properties.default) }} - nocloud-kvm: - enabled: true - cache_mode: cache=none,aio=native - azurecloud: - enabled: true - region: West US 2 - vm_size: Standard_DS1_v2 - storage_sku: standard_lrs - tag: ci - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/__init__.py b/tests/cloud_tests/platforms/__init__.py deleted file mode 100644 index e506baa0..00000000 --- a/tests/cloud_tests/platforms/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Main init.""" - -from .ec2 import platform as ec2 -from .lxd import platform as lxd -from .nocloudkvm import platform as nocloudkvm -from .azurecloud import platform as azurecloud -from ..util import emit_dots_on_travis - -PLATFORMS = { - 'ec2': ec2.EC2Platform, - 'nocloud-kvm': nocloudkvm.NoCloudKVMPlatform, - 'lxd': lxd.LXDPlatform, - 'azurecloud': azurecloud.AzureCloudPlatform, -} - - -def get_image(platform, config): - """Get image from platform object using os_name.""" - with emit_dots_on_travis(): - return platform.get_image(config) - - -def get_instance(snapshot, *args, **kwargs): - """Get instance from snapshot.""" - return snapshot.launch(*args, **kwargs) - - -def get_platform(platform_name, config): - """Get the platform object for 'platform_name' and init.""" - platform_cls = PLATFORMS.get(platform_name) - if not platform_cls: - raise ValueError('invalid platform name: {}'.format(platform_name)) - return platform_cls(config) - - -def get_snapshot(image): - """Get snapshot from image.""" - return image.snapshot() - - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/azurecloud/__init__.py b/tests/cloud_tests/platforms/azurecloud/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/cloud_tests/platforms/azurecloud/image.py b/tests/cloud_tests/platforms/azurecloud/image.py deleted file mode 100644 index aad2bca1..00000000 --- a/tests/cloud_tests/platforms/azurecloud/image.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Azure Cloud image Base class.""" - -from tests.cloud_tests import LOG - -from ..images import Image -from .snapshot import AzureCloudSnapshot - - -class AzureCloudImage(Image): - """Azure Cloud backed image.""" - - platform_name = 'azurecloud' - - def __init__(self, platform, config, image_id): - """Set up image. 
- - @param platform: platform object - @param config: image configuration - @param image_id: image id used to boot instance - """ - super(AzureCloudImage, self).__init__(platform, config) - self._img_instance = None - self.image_id = image_id - - @property - def _instance(self): - """Internal use only, returns a running instance""" - if not self._img_instance: - self._img_instance = self.platform.create_instance( - self.properties, self.config, self.features, - self.image_id, user_data=None) - self._img_instance.start(wait=True, wait_for_cloud_init=True) - return self._img_instance - - def destroy(self): - """Delete the instance used to create a custom image.""" - if self._img_instance: - LOG.debug('Deleting backing instance %s', - self._img_instance.vm_name) - delete_vm = self.platform.compute_client.virtual_machines.delete( - self.platform.resource_group.name, self._img_instance.vm_name) - delete_vm.wait() - - super(AzureCloudImage, self).destroy() - - def _execute(self, *args, **kwargs): - """Execute command in image, modifying image.""" - LOG.debug('executing commands on image') - self._instance.start(wait=True) - return self._instance._execute(*args, **kwargs) - - def push_file(self, local_path, remote_path): - """Copy file at 'local_path' to instance at 'remote_path'.""" - LOG.debug('pushing file to image') - return self._instance.push_file(local_path, remote_path) - - def run_script(self, *args, **kwargs): - """Run script in image, modifying image. - - @return_value: script output - """ - LOG.debug('running script on image') - self._instance.start() - return self._instance.run_script(*args, **kwargs) - - def snapshot(self): - """ Create snapshot (image) of instance, wait until done. - - If no instance has been booted, base image is returned. - Otherwise runs the clean script, deallocates, generalizes - and creates custom image from instance. 
- """ - LOG.debug('creating snapshot of image') - if not self._img_instance: - LOG.debug('No existing image, snapshotting base image') - return AzureCloudSnapshot(self.platform, self.properties, - self.config, self.features, - self._instance.vm_name, - delete_on_destroy=False) - - LOG.debug('creating snapshot from instance: %s', self._img_instance) - if self.config.get('boot_clean_script'): - self._img_instance.run_script(self.config.get('boot_clean_script')) - - LOG.debug('deallocating instance %s', self._instance.vm_name) - deallocate = self.platform.compute_client.virtual_machines.deallocate( - self.platform.resource_group.name, self._instance.vm_name) - deallocate.wait() - - LOG.debug('generalizing instance %s', self._instance.vm_name) - self.platform.compute_client.virtual_machines.generalize( - self.platform.resource_group.name, self._instance.vm_name) - - image_params = { - "location": self.platform.location, - "properties": { - "sourceVirtualMachine": { - "id": self._img_instance.instance.id - } - } - } - LOG.debug('updating resource group image %s', self._instance.vm_name) - self.platform.compute_client.images.create_or_update( - self.platform.resource_group.name, self._instance.vm_name, - image_params) - - LOG.debug('destroying self') - self.destroy() - - LOG.debug('snapshot complete') - return AzureCloudSnapshot(self.platform, self.properties, self.config, - self.features, self._instance.vm_name) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/azurecloud/instance.py b/tests/cloud_tests/platforms/azurecloud/instance.py deleted file mode 100644 index eedbaae8..00000000 --- a/tests/cloud_tests/platforms/azurecloud/instance.py +++ /dev/null @@ -1,247 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base Azure Cloud instance.""" - -from datetime import datetime, timedelta -from urllib.parse import urlparse -from time import sleep -import traceback -import os - - -# pylint: disable=no-name-in-module -from azure.storage.blob import BlockBlobService, BlobPermissions -from msrestazure.azure_exceptions import CloudError - -from tests.cloud_tests import LOG - -from ..instances import Instance - - -class AzureCloudInstance(Instance): - """Azure Cloud backed instance.""" - - platform_name = 'azurecloud' - - def __init__(self, platform, properties, config, - features, image_id, user_data=None): - """Set up instance. 
- - @param platform: platform object - @param properties: dictionary of properties - @param config: dictionary of configuration values - @param features: dictionary of supported feature flags - @param image_id: image to find and/or use - @param user_data: test user-data to pass to instance - """ - super(AzureCloudInstance, self).__init__( - platform, image_id, properties, config, features) - - self.ssh_port = 22 - self.ssh_ip = None - self.instance = None - self.image_id = image_id - self.vm_name = 'ci-azure-i-%s' % self.platform.tag - self.user_data = user_data - self.ssh_key_file = os.path.join( - platform.config['data_dir'], platform.config['private_key']) - self.ssh_pubkey_file = os.path.join( - platform.config['data_dir'], platform.config['public_key']) - self.blob_client, self.container, self.blob = None, None, None - - def start(self, wait=True, wait_for_cloud_init=False): - """Start instance with the platforms NIC.""" - if self.instance: - return - data = self.image_id.split('-') - release, support = data[2].replace('_', '.'), data[3] - sku = '%s-%s' % (release, support) if support == 'LTS' else release - image_resource_id = '/subscriptions/%s' \ - '/resourceGroups/%s' \ - '/providers/Microsoft.Compute/images/%s' % ( - self.platform.subscription_id, - self.platform.resource_group.name, - self.image_id) - storage_uri = "http://%s.blob.core.windows.net" \ - % self.platform.storage.name - with open(self.ssh_pubkey_file, 'r') as key: - ssh_pub_keydata = key.read() - - image_exists = False - try: - LOG.debug('finding image in resource group using image_id') - self.platform.compute_client.images.get( - self.platform.resource_group.name, - self.image_id - ) - image_exists = True - LOG.debug('image found, launching instance, image_id=%s', - self.image_id) - except CloudError: - LOG.debug(('image not found, launching instance with base image, ' - 'image_id=%s'), self.image_id) - - vm_params = { - 'name': self.vm_name, - 'location': self.platform.location, - 'os_profile': { - 'computer_name': 'CI-%s' % self.platform.tag, - 'admin_username': self.ssh_username, - "customData": self.user_data, - "linuxConfiguration": { - "disable_password_authentication": True, - "ssh": { - "public_keys": [{ - "path": "/home/%s/.ssh/authorized_keys" % - self.ssh_username, - "keyData": ssh_pub_keydata - }] - } - } - }, - "diagnosticsProfile": { - "bootDiagnostics": { - "storageUri": storage_uri, - "enabled": True - } - }, - 'hardware_profile': { - 'vm_size': self.platform.vm_size - }, - 'storage_profile': { - 'image_reference': { - 'id': image_resource_id - } if image_exists else { - 'publisher': 'Canonical', - 'offer': 'UbuntuServer', - 'sku': sku, - 'version': 'latest' - } - }, - 'network_profile': { - 'network_interfaces': [{ - 'id': self.platform.nic.id - }] - }, - 'tags': { - 'Name': self.platform.tag, - } - } - - try: - self.instance = self.platform.compute_client.virtual_machines.\ - create_or_update(self.platform.resource_group.name, - self.vm_name, vm_params) - LOG.debug('creating instance %s from image_id=%s', self.vm_name, - self.image_id) - except CloudError as e: - raise RuntimeError( - 'failed creating instance:\n{}'.format(traceback.format_exc()) - ) from e - - if wait: - self.instance.wait() - self.ssh_ip = self.platform.network_client.\ - public_ip_addresses.get( - self.platform.resource_group.name, - self.platform.public_ip.name - ).ip_address - self._wait_for_system(wait_for_cloud_init) - - self.instance = self.instance.result() - self.blob_client, self.container, self.blob =\ - 
self._get_blob_client() - - def shutdown(self, wait=True): - """Finds console log then stopping/deallocates VM""" - LOG.debug('waiting on console log before stopping') - attempts, exists = 5, False - while not exists and attempts: - try: - attempts -= 1 - exists = self.blob_client.get_blob_to_bytes( - self.container, self.blob) - LOG.debug('found console log') - except Exception as e: - if attempts: - LOG.debug('Unable to find console log, ' - '%s attempts remaining', attempts) - sleep(15) - else: - LOG.warning('Could not find console log: %s', e) - - LOG.debug('stopping instance %s', self.image_id) - vm_deallocate = \ - self.platform.compute_client.virtual_machines.deallocate( - self.platform.resource_group.name, self.image_id) - if wait: - vm_deallocate.wait() - - def destroy(self): - """Delete VM and close all connections""" - if self.instance: - LOG.debug('destroying instance: %s', self.image_id) - vm_delete = self.platform.compute_client.virtual_machines.delete( - self.platform.resource_group.name, self.image_id) - vm_delete.wait() - - self._ssh_close() - - super(AzureCloudInstance, self).destroy() - - def _execute(self, command, stdin=None, env=None): - """Execute command on instance.""" - env_args = [] - if env: - env_args = ['env'] + ["%s=%s" for k, v in env.items()] - - return self._ssh(['sudo'] + env_args + list(command), stdin=stdin) - - def _get_blob_client(self): - """ - Use VM details to retrieve container and blob name. - Then Create blob service client for sas token to - retrieve console log. - - :return: blob service, container name, blob name - """ - LOG.debug('creating blob service for console log') - storage = self.platform.storage_client.storage_accounts.get_properties( - self.platform.resource_group.name, self.platform.storage.name) - - keys = self.platform.storage_client.storage_accounts.list_keys( - self.platform.resource_group.name, self.platform.storage.name - ).keys[0].value - - virtual_machine = self.platform.compute_client.virtual_machines.get( - self.platform.resource_group.name, self.instance.name, - expand='instanceView') - - blob_uri = virtual_machine.instance_view.boot_diagnostics.\ - serial_console_log_blob_uri - - container, blob = urlparse(blob_uri).path.split('/')[-2:] - - blob_client = BlockBlobService( - account_name=storage.name, - account_key=keys) - - sas = blob_client.generate_blob_shared_access_signature( - container_name=container, blob_name=blob, protocol='https', - expiry=datetime.utcnow() + timedelta(hours=1), - permission=BlobPermissions.READ) - - blob_client = BlockBlobService( - account_name=storage.name, - sas_token=sas) - - return blob_client, container, blob - - def console_log(self): - """Instance console. - - @return_value: bytes of this instance’s console - """ - boot_diagnostics = self.blob_client.get_blob_to_bytes( - self.container, self.blob) - return boot_diagnostics.content diff --git a/tests/cloud_tests/platforms/azurecloud/platform.py b/tests/cloud_tests/platforms/azurecloud/platform.py deleted file mode 100644 index a664f612..00000000 --- a/tests/cloud_tests/platforms/azurecloud/platform.py +++ /dev/null @@ -1,240 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""Base Azure Cloud class.""" - -import os -import base64 -import traceback -from datetime import datetime -from tests.cloud_tests import LOG - -# pylint: disable=no-name-in-module -from azure.common.credentials import ServicePrincipalCredentials -# pylint: disable=no-name-in-module -from azure.mgmt.resource import ResourceManagementClient -# pylint: disable=no-name-in-module -from azure.mgmt.network import NetworkManagementClient -# pylint: disable=no-name-in-module -from azure.mgmt.compute import ComputeManagementClient -# pylint: disable=no-name-in-module -from azure.mgmt.storage import StorageManagementClient -from msrestazure.azure_exceptions import CloudError - -from .image import AzureCloudImage -from .instance import AzureCloudInstance -from ..platforms import Platform - -from cloudinit import util as c_util - - -class AzureCloudPlatform(Platform): - """Azure Cloud test platforms.""" - - platform_name = 'azurecloud' - - def __init__(self, config): - """Set up platform.""" - super(AzureCloudPlatform, self).__init__(config) - self.tag = '%s-%s' % ( - config['tag'], datetime.now().strftime('%Y%m%d%H%M%S')) - self.storage_sku = config['storage_sku'] - self.vm_size = config['vm_size'] - self.location = config['region'] - - try: - self.credentials, self.subscription_id = self._get_credentials() - - self.resource_client = ResourceManagementClient( - self.credentials, self.subscription_id) - self.compute_client = ComputeManagementClient( - self.credentials, self.subscription_id) - self.network_client = NetworkManagementClient( - self.credentials, self.subscription_id) - self.storage_client = StorageManagementClient( - self.credentials, self.subscription_id) - - self.resource_group = self._create_resource_group() - self.public_ip = self._create_public_ip_address() - self.storage = self._create_storage_account(config) - self.vnet = self._create_vnet() - self.subnet = self._create_subnet() - self.nic = self._create_nic() - except CloudError as e: - raise RuntimeError( - 'failed creating a resource:\n{}'.format( - traceback.format_exc() - ) - ) from e - - def create_instance(self, properties, config, features, - image_id, user_data=None): - """Create an instance - - @param properties: image properties - @param config: image configuration - @param features: image features - @param image_id: string of image id - @param user_data: test user-data to pass to instance - @return_value: cloud_tests.instances instance - """ - if user_data is not None: - user_data = str(base64.b64encode( - user_data.encode('utf-8')), 'utf-8') - - return AzureCloudInstance(self, properties, config, features, - image_id, user_data) - - def get_image(self, img_conf): - """Get image using specified image configuration. 
- - @param img_conf: configuration for image - @return_value: cloud_tests.images instance - """ - ss_region = self.azure_location_to_simplestreams_region() - - filters = [ - 'arch=%s' % 'amd64', - 'endpoint=https://management.core.windows.net/', - 'region=%s' % ss_region, - 'release=%s' % img_conf['release'] - ] - - LOG.debug('finding image using streams') - image = self._query_streams(img_conf, filters) - - try: - image_id = image['id'] - LOG.debug('found image: %s', image_id) - if image_id.find('__') > 0: - image_id = image_id.split('__')[1] - LOG.debug('image_id shortened to %s', image_id) - except KeyError as e: - raise RuntimeError( - 'no images found for %s' % img_conf['release'] - ) from e - - return AzureCloudImage(self, img_conf, image_id) - - def destroy(self): - """Delete all resources in resource group.""" - LOG.debug("Deleting resource group: %s", self.resource_group.name) - delete = self.resource_client.resource_groups.delete( - self.resource_group.name) - delete.wait() - - def azure_location_to_simplestreams_region(self): - """Convert location to simplestreams region""" - location = self.location.lower().replace(' ', '') - LOG.debug('finding location %s using simple streams', location) - regions_file = os.path.join( - os.path.dirname(os.path.abspath(__file__)), 'regions.json') - region_simplestreams_map = c_util.load_json( - c_util.load_file(regions_file)) - return region_simplestreams_map.get(location, location) - - def _get_credentials(self): - """Get credentials from environment""" - LOG.debug('getting credentials from environment') - cred_file = os.path.expanduser('~/.azure/credentials.json') - try: - azure_creds = c_util.load_json( - c_util.load_file(cred_file)) - subscription_id = azure_creds['subscriptionId'] - credentials = ServicePrincipalCredentials( - client_id=azure_creds['clientId'], - secret=azure_creds['clientSecret'], - tenant=azure_creds['tenantId']) - return credentials, subscription_id - except KeyError as e: - raise RuntimeError( - 'Please configure Azure service principal' - ' credentials in %s' % cred_file - ) from e - - def _create_resource_group(self): - """Create resource group""" - LOG.debug('creating resource group') - resource_group_name = self.tag - resource_group_params = { - 'location': self.location - } - resource_group = self.resource_client.resource_groups.create_or_update( - resource_group_name, resource_group_params) - return resource_group - - def _create_storage_account(self, config): - LOG.debug('creating storage account') - storage_account_name = 'storage%s' % datetime.now().\ - strftime('%Y%m%d%H%M%S') - storage_params = { - 'sku': { - 'name': config['storage_sku'] - }, - 'kind': "Storage", - 'location': self.location - } - storage_account = self.storage_client.storage_accounts.create( - self.resource_group.name, storage_account_name, storage_params) - return storage_account.result() - - def _create_public_ip_address(self): - """Create public ip address""" - LOG.debug('creating public ip address') - public_ip_name = '%s-ip' % self.resource_group.name - public_ip_params = { - 'location': self.location, - 'public_ip_allocation_method': 'Dynamic' - } - ip = self.network_client.public_ip_addresses.create_or_update( - self.resource_group.name, public_ip_name, public_ip_params) - return ip.result() - - def _create_vnet(self): - """create virtual network""" - LOG.debug('creating vnet') - vnet_name = '%s-vnet' % self.resource_group.name - vnet_params = { - 'location': self.location, - 'address_space': { - 'address_prefixes': ['10.0.0.0/16'] - 
} - } - vnet = self.network_client.virtual_networks.create_or_update( - self.resource_group.name, vnet_name, vnet_params) - return vnet.result() - - def _create_subnet(self): - """create sub-network""" - LOG.debug('creating subnet') - subnet_name = '%s-subnet' % self.resource_group.name - subnet_params = { - 'address_prefix': '10.0.0.0/24' - } - subnet = self.network_client.subnets.create_or_update( - self.resource_group.name, self.vnet.name, - subnet_name, subnet_params) - return subnet.result() - - def _create_nic(self): - """Create network interface controller""" - LOG.debug('creating nic') - nic_name = '%s-nic' % self.resource_group.name - nic_params = { - 'location': self.location, - 'ip_configurations': [{ - 'name': 'ipconfig', - 'subnet': { - 'id': self.subnet.id - }, - 'publicIpAddress': { - 'id': "/subscriptions/%s" - "/resourceGroups/%s/providers/Microsoft.Network" - "/publicIPAddresses/%s" % ( - self.subscription_id, self.resource_group.name, - self.public_ip.name), - } - }] - } - nic = self.network_client.network_interfaces.create_or_update( - self.resource_group.name, nic_name, nic_params) - return nic.result() diff --git a/tests/cloud_tests/platforms/azurecloud/regions.json b/tests/cloud_tests/platforms/azurecloud/regions.json deleted file mode 100644 index c1b4da20..00000000 --- a/tests/cloud_tests/platforms/azurecloud/regions.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "eastasia": "East Asia", - "southeastasia": "Southeast Asia", - "centralus": "Central US", - "eastus": "East US", - "eastus2": "East US 2", - "westus": "West US", - "northcentralus": "North Central US", - "southcentralus": "South Central US", - "northeurope": "North Europe", - "westeurope": "West Europe", - "japanwest": "Japan West", - "japaneast": "Japan East", - "brazilsouth": "Brazil South", - "australiaeast": "Australia East", - "australiasoutheast": "Australia Southeast", - "southindia": "South India", - "centralindia": "Central India", - "westindia": "West India", - "canadacentral": "Canada Central", - "canadaeast": "Canada East", - "uksouth": "UK South", - "ukwest": "UK West", - "westcentralus": "West Central US", - "westus2": "West US 2", - "koreacentral": "Korea Central", - "koreasouth": "Korea South", - "francecentral": "France Central", - "francesouth": "France South", - "australiacentral": "Australia Central", - "australiacentral2": "Australia Central 2", - "uaecentral": "UAE Central", - "uaenorth": "UAE North", - "southafricanorth": "South Africa North", - "southafricawest": "South Africa West", - "switzerlandnorth": "Switzerland North", - "switzerlandwest": "Switzerland West", - "germanynorth": "Germany North", - "germanywestcentral": "Germany West Central", - "norwaywest": "Norway West", - "norwayeast": "Norway East" -} diff --git a/tests/cloud_tests/platforms/azurecloud/snapshot.py b/tests/cloud_tests/platforms/azurecloud/snapshot.py deleted file mode 100644 index 580cc596..00000000 --- a/tests/cloud_tests/platforms/azurecloud/snapshot.py +++ /dev/null @@ -1,58 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base Azure Cloud snapshot.""" - -from ..snapshots import Snapshot - -from tests.cloud_tests import LOG - - -class AzureCloudSnapshot(Snapshot): - """Azure Cloud image copy backed snapshot.""" - - platform_name = 'azurecloud' - - def __init__(self, platform, properties, config, features, image_id, - delete_on_destroy=True): - """Set up snapshot. 
- - @param platform: platform object - @param properties: image properties - @param config: image config - @param features: supported feature flags - """ - super(AzureCloudSnapshot, self).__init__( - platform, properties, config, features) - - self.image_id = image_id - self.delete_on_destroy = delete_on_destroy - - def launch(self, user_data, meta_data=None, block=True, start=True, - use_desc=None): - """Launch instance. - - @param user_data: user-data for the instance - @param meta_data: meta_data for the instance - @param block: wait until instance is created - @param start: start instance and wait until fully started - @param use_desc: description of snapshot instance use - @return_value: an Instance - """ - if meta_data is not None: - raise ValueError("metadata not supported on Azure Cloud tests") - - instance = self.platform.create_instance( - self.properties, self.config, self.features, - self.image_id, user_data) - - return instance - - def destroy(self): - """Clean up snapshot data.""" - LOG.debug('destroying image %s', self.image_id) - if self.delete_on_destroy: - self.platform.compute_client.images.delete( - self.platform.resource_group.name, - self.image_id) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/ec2/__init__.py b/tests/cloud_tests/platforms/ec2/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/cloud_tests/platforms/ec2/image.py b/tests/cloud_tests/platforms/ec2/image.py deleted file mode 100644 index d7b2c908..00000000 --- a/tests/cloud_tests/platforms/ec2/image.py +++ /dev/null @@ -1,100 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""EC2 Image Base Class.""" - -from ..images import Image -from .snapshot import EC2Snapshot - -from tests.cloud_tests import LOG - - -class EC2Image(Image): - """EC2 backed image.""" - - platform_name = 'ec2' - - def __init__(self, platform, config, image_ami): - """Set up image. - - @param platform: platform object - @param config: image configuration - @param image_ami: string of image ami ID - """ - super(EC2Image, self).__init__(platform, config) - self._img_instance = None - self.image_ami = image_ami - - @property - def _instance(self): - """Internal use only, returns a running instance""" - if not self._img_instance: - self._img_instance = self.platform.create_instance( - self.properties, self.config, self.features, - self.image_ami, user_data=None) - self._img_instance.start(wait=True, wait_for_cloud_init=True) - return self._img_instance - - def destroy(self): - """Delete the instance used to create a custom image.""" - if self._img_instance: - LOG.debug('terminating backing instance %s', - self._img_instance.instance.instance_id) - self._img_instance.instance.terminate() - self._img_instance.instance.wait_until_terminated() - - super(EC2Image, self).destroy() - - def _execute(self, *args, **kwargs): - """Execute command in image, modifying image.""" - self._instance.start(wait=True) - return self._instance._execute(*args, **kwargs) - - def push_file(self, local_path, remote_path): - """Copy file at 'local_path' to instance at 'remote_path'.""" - self._instance.start(wait=True) - return self._instance.push_file(local_path, remote_path) - - def run_script(self, *args, **kwargs): - """Run script in image, modifying image. - - @return_value: script output - """ - self._instance.start(wait=True) - return self._instance.run_script(*args, **kwargs) - - def snapshot(self): - """Create snapshot of image, block until done. 
- - Will return base image_ami if no instance has been booted, otherwise - will run the clean script, shutdown the instance, create a custom - AMI, and use that AMI once available. - """ - if not self._img_instance: - return EC2Snapshot(self.platform, self.properties, self.config, - self.features, self.image_ami, - delete_on_destroy=False) - - if self.config.get('boot_clean_script'): - self._img_instance.run_script(self.config.get('boot_clean_script')) - - self._img_instance.shutdown(wait=True) - - LOG.debug('creating custom ami from instance %s', - self._img_instance.instance.instance_id) - response = self.platform.ec2_client.create_image( - Name='%s-%s' % (self.platform.tag, self.image_ami), - InstanceId=self._img_instance.instance.instance_id - ) - image_ami_edited = response['ImageId'] - - # Create image and wait until it is in the 'available' state - image = self.platform.ec2_resource.Image(image_ami_edited) - image.wait_until_exists() - waiter = self.platform.ec2_client.get_waiter('image_available') - waiter.wait(ImageIds=[image.id]) - image.reload() - - return EC2Snapshot(self.platform, self.properties, self.config, - self.features, image_ami_edited) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/ec2/instance.py b/tests/cloud_tests/platforms/ec2/instance.py deleted file mode 100644 index d2e84047..00000000 --- a/tests/cloud_tests/platforms/ec2/instance.py +++ /dev/null @@ -1,132 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base EC2 instance.""" -import os - -import botocore - -from ..instances import Instance -from tests.cloud_tests import LOG, util - - -class EC2Instance(Instance): - """EC2 backed instance.""" - - platform_name = "ec2" - _ssh_client = None - - def __init__(self, platform, properties, config, features, - image_ami, user_data=None): - """Set up instance. - - @param platform: platform object - @param properties: dictionary of properties - @param config: dictionary of configuration values - @param features: dictionary of supported feature flags - @param image_ami: AWS AMI ID for image to use - @param user_data: test user-data to pass to instance - """ - super(EC2Instance, self).__init__( - platform, image_ami, properties, config, features) - - self.image_ami = image_ami - self.instance = None - self.user_data = user_data - self.ssh_ip = None - self.ssh_port = 22 - self.ssh_key_file = os.path.join( - platform.config['data_dir'], platform.config['private_key']) - self.ssh_pubkey_file = os.path.join( - platform.config['data_dir'], platform.config['public_key']) - - def console_log(self): - """Collect console log from instance. - - The console log is buffered and not always present, therefore - may return empty string. - """ - try: - # OutputBytes comes from platform._decode_console_output_as_bytes - response = self.instance.console_output() - return response['OutputBytes'] - except KeyError as e: - if 'Output' in response: - msg = ("'OutputBytes' did not exist in console_output() but " - "'Output' did: %s..." 
% response['Output'][0:128]) - raise util.PlatformError('console_log', msg) from e - return ('No Console Output [%s]' % self.instance).encode() - - def destroy(self): - """Clean up instance.""" - if self.instance: - LOG.debug('destroying instance %s', self.instance.id) - self.instance.terminate() - self.instance.wait_until_terminated() - - self._ssh_close() - - super(EC2Instance, self).destroy() - - def _execute(self, command, stdin=None, env=None): - """Execute command on instance.""" - env_args = [] - if env: - env_args = ['env'] + ["%s=%s" for k, v in env.items()] - - return self._ssh(['sudo'] + env_args + list(command), stdin=stdin) - - def start(self, wait=True, wait_for_cloud_init=False): - """Start instance on EC2 with the platfrom's VPC.""" - if self.instance: - if self.instance.state['Name'] == 'running': - return - - LOG.debug('starting instance %s', self.instance.id) - self.instance.start() - else: - LOG.debug('launching instance') - - args = { - 'ImageId': self.image_ami, - 'InstanceType': self.platform.instance_type, - 'KeyName': self.platform.key_name, - 'MaxCount': 1, - 'MinCount': 1, - 'SecurityGroupIds': [self.platform.security_group.id], - 'SubnetId': self.platform.subnet.id, - 'TagSpecifications': [{ - 'ResourceType': 'instance', - 'Tags': [{ - 'Key': 'Name', 'Value': self.platform.tag - }] - }], - } - - if self.user_data: - args['UserData'] = self.user_data - - try: - instances = self.platform.ec2_resource.create_instances(**args) - except botocore.exceptions.ClientError as error: - error_msg = error.response['Error']['Message'] - raise util.PlatformError('start', error_msg) - - self.instance = instances[0] - - LOG.debug('instance id: %s', self.instance.id) - if wait: - self.instance.wait_until_running() - self.instance.reload() - self.ssh_ip = self.instance.public_ip_address - self._wait_for_system(wait_for_cloud_init) - - def shutdown(self, wait=True): - """Shutdown instance.""" - LOG.debug('stopping instance %s', self.instance.id) - self.instance.stop() - - if wait: - self.instance.wait_until_stopped() - self.instance.reload() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/ec2/platform.py b/tests/cloud_tests/platforms/ec2/platform.py deleted file mode 100644 index b61a2ffb..00000000 --- a/tests/cloud_tests/platforms/ec2/platform.py +++ /dev/null @@ -1,263 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
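
One detail worth flagging in the removed _execute() helpers (the Azure and EC2 variants above, and the LXD variant further below): the comprehension ['env'] + ["%s=%s" for k, v in env.items()] never interpolates k and v, so it yields the literal string '%s=%s' once per entry rather than KEY=VALUE pairs. A hedged sketch of what the construction presumably intended, with invented example values:

    # Sketch only: interpolating k and v produces the KEY=VALUE arguments that
    # the 'env' command expects; the dict contents here are invented examples.
    env = {'DEBIAN_FRONTEND': 'noninteractive', 'LANG': 'C.UTF-8'}
    env_args = ['env'] + ['%s=%s' % (k, v) for k, v in env.items()]
    # env_args == ['env', 'DEBIAN_FRONTEND=noninteractive', 'LANG=C.UTF-8']
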
- -"""Base EC2 platform.""" -from datetime import datetime -import os - -import boto3 -import botocore -from botocore import session, handlers -import base64 - -from ..platforms import Platform -from .image import EC2Image -from .instance import EC2Instance -from tests.cloud_tests import LOG - - -class EC2Platform(Platform): - """EC2 test platform.""" - - platform_name = 'ec2' - ipv4_cidr = '192.168.1.0/20' - - def __init__(self, config): - """Set up platform.""" - super(EC2Platform, self).__init__(config) - # Used for unique VPC, SSH key, and custom AMI generation naming - self.tag = '%s-%s' % ( - config['tag'], datetime.now().strftime('%Y%m%d%H%M%S')) - self.instance_type = config['instance-type'] - - try: - b3session = get_session() - self.ec2_client = b3session.client('ec2') - self.ec2_resource = b3session.resource('ec2') - self.ec2_region = b3session.region_name - self.key_name = self._upload_public_key(config) - except botocore.exceptions.NoRegionError as e: - raise RuntimeError( - 'Please configure default region in $HOME/.aws/config' - ) from e - except botocore.exceptions.NoCredentialsError as e: - raise RuntimeError( - 'Please configure ec2 credentials in $HOME/.aws/credentials' - ) from e - - self.vpc = self._create_vpc() - self.internet_gateway = self._create_internet_gateway() - self.subnet = self._create_subnet() - self.routing_table = self._create_routing_table() - self.security_group = self._create_security_group() - - def create_instance(self, properties, config, features, - image_ami, user_data=None): - """Create an instance - - @param src_img_path: image path to launch from - @param properties: image properties - @param config: image configuration - @param features: image features - @param image_ami: string of image ami ID - @param user_data: test user-data to pass to instance - @return_value: cloud_tests.instances instance - """ - return EC2Instance(self, properties, config, features, - image_ami, user_data) - - def destroy(self): - """Delete SSH keys, terminate all instances, and delete VPC.""" - for instance in self.vpc.instances.all(): - LOG.debug('waiting for instance %s termination', instance.id) - instance.terminate() - instance.wait_until_terminated() - - if self.key_name: - LOG.debug('deleting SSH key %s', self.key_name) - self.ec2_client.delete_key_pair(KeyName=self.key_name) - - if self.security_group: - LOG.debug('deleting security group %s', self.security_group.id) - self.security_group.delete() - - if self.subnet: - LOG.debug('deleting subnet %s', self.subnet.id) - self.subnet.delete() - - if self.routing_table: - LOG.debug('deleting routing table %s', self.routing_table.id) - self.routing_table.delete() - - if self.internet_gateway: - LOG.debug('deleting internet gateway %s', self.internet_gateway.id) - self.internet_gateway.detach_from_vpc(VpcId=self.vpc.id) - self.internet_gateway.delete() - - if self.vpc: - LOG.debug('deleting vpc %s', self.vpc.id) - self.vpc.delete() - - def get_image(self, img_conf): - """Get image using specified image configuration. - - Hard coded for 'amd64' based images. 
- - @param img_conf: configuration for image - @return_value: cloud_tests.images instance - """ - if img_conf['root-store'] == 'ebs': - root_store = 'ssd' - elif img_conf['root-store'] == 'instance-store': - root_store = 'instance' - else: - raise RuntimeError('Unknown root-store type: %s' % - (img_conf['root-store'])) - - filters = [ - 'arch=%s' % 'amd64', - 'endpoint=https://ec2.%s.amazonaws.com' % self.ec2_region, - 'region=%s' % self.ec2_region, - 'release=%s' % img_conf['release'], - 'root_store=%s' % root_store, - 'virt=hvm', - ] - - LOG.debug('finding image using streams') - image = self._query_streams(img_conf, filters) - - try: - image_ami = image['id'] - except KeyError as e: - raise RuntimeError( - 'No images found for %s!' % img_conf['release'] - ) from e - - LOG.debug('found image: %s', image_ami) - image = EC2Image(self, img_conf, image_ami) - return image - - def _create_internet_gateway(self): - """Create Internet Gateway and assign to VPC.""" - LOG.debug('creating internet gateway') - # pylint: disable=no-member - internet_gateway = self.ec2_resource.create_internet_gateway() - internet_gateway.attach_to_vpc(VpcId=self.vpc.id) - self._tag_resource(internet_gateway) - - return internet_gateway - - def _create_routing_table(self): - """Update default routing table with internet gateway. - - This sets up internet access between the VPC via the internet gateway - by configuring routing tables for IPv4 and IPv6. - """ - LOG.debug('creating routing table') - route_table = self.vpc.create_route_table() - route_table.create_route(DestinationCidrBlock='0.0.0.0/0', - GatewayId=self.internet_gateway.id) - route_table.create_route(DestinationIpv6CidrBlock='::/0', - GatewayId=self.internet_gateway.id) - route_table.associate_with_subnet(SubnetId=self.subnet.id) - self._tag_resource(route_table) - - return route_table - - def _create_security_group(self): - """Enables ingress to default VPC security group.""" - LOG.debug('creating security group') - security_group = self.vpc.create_security_group( - GroupName=self.tag, Description='integration test security group') - security_group.authorize_ingress( - IpProtocol='-1', FromPort=-1, ToPort=-1, CidrIp='0.0.0.0/0') - self._tag_resource(security_group) - - return security_group - - def _create_subnet(self): - """Generate IPv4 and IPv6 subnets for use.""" - ipv6_cidr = self.vpc.ipv6_cidr_block_association_set[0][ - 'Ipv6CidrBlock'][:-2] + '64' - - LOG.debug('creating subnet with following ranges:') - LOG.debug('ipv4: %s', self.ipv4_cidr) - LOG.debug('ipv6: %s', ipv6_cidr) - subnet = self.vpc.create_subnet(CidrBlock=self.ipv4_cidr, - Ipv6CidrBlock=ipv6_cidr) - modify_subnet = subnet.meta.client.modify_subnet_attribute - modify_subnet(SubnetId=subnet.id, - MapPublicIpOnLaunch={'Value': True}) - self._tag_resource(subnet) - - return subnet - - def _create_vpc(self): - """Setup AWS EC2 VPC or return existing VPC.""" - LOG.debug('creating new vpc') - try: - vpc = self.ec2_resource.create_vpc( # pylint: disable=no-member - CidrBlock=self.ipv4_cidr, - AmazonProvidedIpv6CidrBlock=True) - except botocore.exceptions.ClientError as e: - raise RuntimeError(e) from e - - vpc.wait_until_available() - self._tag_resource(vpc) - - return vpc - - def _tag_resource(self, resource): - """Tag a resource with the specified tag. - - This makes finding and deleting resources specific to this testing - much easier to find. 
- - @param resource: resource to tag - """ - tag = { - 'Key': 'Name', - 'Value': self.tag - } - resource.create_tags(Tags=[tag]) - - def _upload_public_key(self, config): - """Generate random name and upload SSH key with that name. - - @param config: platform config - @return: string of ssh key name - """ - key_file = os.path.join(config['data_dir'], config['public_key']) - with open(key_file, 'r') as file: - public_key = file.read().strip('\n') - - LOG.debug('uploading SSH key %s', self.tag) - self.ec2_client.import_key_pair(KeyName=self.tag, - PublicKeyMaterial=public_key) - - return self.tag - - -def _decode_console_output_as_bytes(parsed, **kwargs): - """Provide console output as bytes in OutputBytes. - - For this to be useful, the session has to have had the - decode_console_output handler unregistered already. - - https://github.com/boto/botocore/issues/1351 .""" - if 'Output' not in parsed: - return - orig = parsed['Output'] - handlers.decode_console_output(parsed, **kwargs) - parsed['OutputBytes'] = base64.b64decode(orig) - - -def get_session(): - mysess = session.get_session() - mysess.unregister('after-call.ec2.GetConsoleOutput', - handlers.decode_console_output) - mysess.register('after-call.ec2.GetConsoleOutput', - _decode_console_output_as_bytes) - return boto3.Session(botocore_session=mysess) - - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/ec2/snapshot.py b/tests/cloud_tests/platforms/ec2/snapshot.py deleted file mode 100644 index 2c48cb54..00000000 --- a/tests/cloud_tests/platforms/ec2/snapshot.py +++ /dev/null @@ -1,66 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base EC2 snapshot.""" - -from ..snapshots import Snapshot -from tests.cloud_tests import LOG - - -class EC2Snapshot(Snapshot): - """EC2 image copy backed snapshot.""" - - platform_name = 'ec2' - - def __init__(self, platform, properties, config, features, image_ami, - delete_on_destroy=True): - """Set up snapshot. - - @param platform: platform object - @param properties: image properties - @param config: image config - @param features: supported feature flags - @param image_ami: string of image ami ID - @param delete_on_destroy: boolean to delete on destroy - """ - super(EC2Snapshot, self).__init__( - platform, properties, config, features) - - self.image_ami = image_ami - self.delete_on_destroy = delete_on_destroy - - def destroy(self): - """Deregister the backing AMI.""" - if self.delete_on_destroy: - image = self.platform.ec2_resource.Image(self.image_ami) - snapshot_id = image.block_device_mappings[0]['Ebs']['SnapshotId'] - - LOG.debug('removing custom ami %s', self.image_ami) - self.platform.ec2_client.deregister_image(ImageId=self.image_ami) - - LOG.debug('removing custom snapshot %s', snapshot_id) - self.platform.ec2_client.delete_snapshot(SnapshotId=snapshot_id) - - def launch(self, user_data, meta_data=None, block=True, start=True, - use_desc=None): - """Launch instance. 
- - @param user_data: user-data for the instance - @param meta_data: meta_data for the instance - @param block: wait until instance is created - @param start: start instance and wait until fully started - @param use_desc: string of test name - @return_value: an Instance - """ - if meta_data is not None: - raise ValueError("metadata not supported on Ec2") - - instance = self.platform.create_instance( - self.properties, self.config, self.features, - self.image_ami, user_data) - - if start: - instance.start() - - return instance - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/images.py b/tests/cloud_tests/platforms/images.py deleted file mode 100644 index f047de2e..00000000 --- a/tests/cloud_tests/platforms/images.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base class for images.""" - -from ..util import TargetBase - - -class Image(TargetBase): - """Base class for images.""" - - platform_name = None - - def __init__(self, platform, config): - """Set up image. - - @param platform: platform object - @param config: image configuration - """ - self.platform = platform - self.config = config - - def __str__(self): - """A brief description of the image.""" - return '-'.join((self.properties['os'], self.properties['release'])) - - @property - def properties(self): - """{} containing: 'arch', 'os', 'version', 'release'.""" - return {k: self.config[k] - for k in ('arch', 'os', 'release', 'version')} - - @property - def features(self): - """Feature flags supported by this image. - - @return_value: list of feature names - """ - return [k for k, v in self.config.get('features', {}).items() if v] - - @property - def setup_overrides(self): - """Setup options that need to be overridden for the image. - - @return_value: dictionary to update args with - """ - # NOTE: more sophisticated options may be requied at some point - return self.config.get('setup_overrides', {}) - - def snapshot(self): - """Create snapshot of image, block until done.""" - raise NotImplementedError - - def destroy(self): - """Clean up data associated with image.""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/instances.py b/tests/cloud_tests/platforms/instances.py deleted file mode 100644 index efc35c7f..00000000 --- a/tests/cloud_tests/platforms/instances.py +++ /dev/null @@ -1,165 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base instance.""" -import time - -import paramiko -from paramiko.ssh_exception import ( - BadHostKeyException, AuthenticationException, SSHException) - -from ..util import TargetBase -from tests.cloud_tests import LOG, util - - -class Instance(TargetBase): - """Base instance object.""" - - platform_name = None - _ssh_client = None - - def __init__(self, platform, name, properties, config, features): - """Set up instance. - - @param platform: platform object - @param name: hostname of instance - @param properties: image properties - @param config: image config - @param features: supported feature flags - """ - self.platform = platform - self.name = name - self.properties = properties - self.config = config - self.features = features - self._tmp_count = 0 - - self.ssh_ip = None - self.ssh_port = None - self.ssh_key_file = None - self.ssh_username = 'ubuntu' - - def console_log(self): - """Instance console. 
- - @return_value: bytes of this instance’s console - """ - raise NotImplementedError - - def reboot(self, wait=True): - """Reboot instance.""" - raise NotImplementedError - - def shutdown(self, wait=True): - """Shutdown instance.""" - raise NotImplementedError - - def start(self, wait=True, wait_for_cloud_init=False): - """Start instance.""" - raise NotImplementedError - - def destroy(self): - """Clean up instance.""" - self._ssh_close() - - def _ssh(self, command, stdin=None): - """Run a command via SSH.""" - client = self._ssh_connect() - - cmd = util.shell_pack(command) - fp_in, fp_out, fp_err = client.exec_command(cmd) - channel = fp_in.channel - - if stdin is not None: - fp_in.write(stdin) - fp_in.close() - - channel.shutdown_write() - rc = channel.recv_exit_status() - - return (fp_out.read(), fp_err.read(), rc) - - def _ssh_close(self): - if self._ssh_client: - try: - self._ssh_client.close() - except SSHException: - LOG.warning('Failed to close SSH connection.') - self._ssh_client = None - - def _ssh_connect(self): - """Connect via SSH. - - Attempt to SSH to the client on the specific IP and port. If it - fails in some manner, then retry 2 more times for a total of 3 - attempts; sleeping a few seconds between attempts. - """ - if self._ssh_client: - return self._ssh_client - - if not self.ssh_ip or not self.ssh_port: - raise ValueError("Cannot ssh_connect, ssh_ip=%s ssh_port=%s" % - (self.ssh_ip, self.ssh_port)) - - client = paramiko.SSHClient() - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - private_key = paramiko.RSAKey.from_private_key_file(self.ssh_key_file) - - retries = 3 - while retries: - try: - client.connect(username=self.ssh_username, - hostname=self.ssh_ip, port=self.ssh_port, - pkey=private_key) - self._ssh_client = client - return client - except (ConnectionRefusedError, AuthenticationException, - BadHostKeyException, ConnectionResetError, SSHException, - OSError): - retries -= 1 - LOG.debug('Retrying ssh connection on connect failure') - time.sleep(3) - - ssh_cmd = 'Failed ssh connection to %s@%s:%s after 3 retries' % ( - self.ssh_username, self.ssh_ip, self.ssh_port - ) - raise util.InTargetExecuteError(b'', b'', 1, ssh_cmd, 'ssh') - - def _wait_for_system(self, wait_for_cloud_init): - """Wait until system has fully booted and cloud-init has finished. 
- - @param wait_time: maximum time to wait - @return_value: None, may raise OSError if wait_time exceeded - """ - def clean_test(test): - """Clean formatting for system ready test testcase.""" - return ' '.join(line for line in test.strip().splitlines() - if not line.lstrip().startswith('#')) - - boot_timeout = self.config['boot_timeout'] - tests = [self.config['system_ready_script']] - if wait_for_cloud_init: - tests.append(self.config['cloud_init_ready_script']) - - formatted_tests = ' && '.join(clean_test(t) for t in tests) - cmd = ('i=0; while [ $i -lt {time} ] && i=$(($i+1)); do {test} && ' - 'exit 0; sleep 1; done; exit 1').format(time=boot_timeout, - test=formatted_tests) - - end_time = time.time() + boot_timeout - while True: - try: - return_code = self.execute( - cmd, rcs=(0, 1), description='wait for instance start' - )[-1] - if return_code == 0: - break - except util.InTargetExecuteError: - LOG.warning("failed to connect via SSH") - - if time.time() < end_time: - time.sleep(3) - else: - raise util.PlatformError('ssh', 'after %ss instance is not ' - 'reachable' % boot_timeout) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/lxd/__init__.py b/tests/cloud_tests/platforms/lxd/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/cloud_tests/platforms/lxd/image.py b/tests/cloud_tests/platforms/lxd/image.py deleted file mode 100644 index a88b47f3..00000000 --- a/tests/cloud_tests/platforms/lxd/image.py +++ /dev/null @@ -1,211 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""LXD Image Base Class.""" - -import os -import shutil -import tempfile - -from ..images import Image -from .snapshot import LXDSnapshot -from cloudinit import subp -from cloudinit import util as c_util -from tests.cloud_tests import util - - -class LXDImage(Image): - """LXD backed image.""" - - platform_name = "lxd" - - def __init__(self, platform, config, pylxd_image): - """Set up image. - - @param platform: platform object - @param config: image configuration - """ - self.modified = False - self._img_instance = None - self._pylxd_image = None - self.pylxd_image = pylxd_image - super(LXDImage, self).__init__(platform, config) - - @property - def pylxd_image(self): - """Property function.""" - if self._pylxd_image: - self._pylxd_image.sync() - return self._pylxd_image - - @pylxd_image.setter - def pylxd_image(self, pylxd_image): - if self._img_instance: - self._instance.destroy() - self._img_instance = None - if (self._pylxd_image and - (self._pylxd_image is not pylxd_image) and - (not self.config.get('cache_base_image') or self.modified)): - self._pylxd_image.delete(wait=True) - self.modified = False - self._pylxd_image = pylxd_image - - @property - def _instance(self): - """Internal use only, returns a instance - - This starts an lxc instance from the image, so it is "dirty". - Better would be some way to modify this "at rest". 
- lxc-pstart would be an option.""" - if not self._img_instance: - self._img_instance = self.platform.launch_container( - self.properties, self.config, self.features, - use_desc='image-modification', image_desc=str(self), - image=self.pylxd_image.fingerprint) - self._img_instance.start() - return self._img_instance - - @property - def properties(self): - """{} containing: 'arch', 'os', 'version', 'release'.""" - properties = self.pylxd_image.properties - return { - 'arch': properties.get('architecture'), - 'os': properties.get('os'), - 'version': properties.get('version'), - 'release': properties.get('release'), - } - - def export_image(self, output_dir): - """Export image from lxd image store to disk. - - @param output_dir: dir to store the exported image in - @return_value: tuple of path to metadata tarball and rootfs - - Only the "split" image format with separate rootfs and metadata - files is supported, e.g: - - 71f171df[...]cd31.squashfs (could also be: .tar.xz or .tar.gz) - meta-71f171df[...]cd31.tar.xz - - Combined images made by a single tarball are not supported. - """ - # pylxd's image export feature doesn't do split exports, so use cmdline - fp = self.pylxd_image.fingerprint - subp.subp(['lxc', 'image', 'export', fp, output_dir], capture=True) - image_files = [p for p in os.listdir(output_dir) if fp in p] - - if len(image_files) != 2: - raise NotImplementedError( - "Image %s has unsupported format. " - "Expected 2 files, found %d: %s." - % (fp, len(image_files), ', '.join(image_files))) - - metadata = os.path.join( - output_dir, - next(p for p in image_files if p.startswith('meta-'))) - rootfs = os.path.join( - output_dir, - next(p for p in image_files if not p.startswith('meta-'))) - return (metadata, rootfs) - - def import_image(self, metadata, rootfs): - """Import image to lxd image store from (split) tarball on disk. - - Note, this will replace and delete the current pylxd_image - - @param metadata: metadata tarball - @param rootfs: rootfs tarball - @return_value: imported image fingerprint - """ - alias = util.gen_instance_name( - image_desc=str(self), use_desc='update-metadata') - subp.subp(['lxc', 'image', 'import', metadata, rootfs, - '--alias', alias], capture=True) - self.pylxd_image = self.platform.query_image_by_alias(alias) - return self.pylxd_image.fingerprint - - def update_templates(self, template_config, template_data): - """Update the image's template configuration. 
- - Note, this will replace and delete the current pylxd_image - - @param template_config: config overrides for template metadata - @param template_data: template data to place into templates/ - """ - # set up tmp files - export_dir = tempfile.mkdtemp(prefix='cloud_test_util_') - extract_dir = tempfile.mkdtemp(prefix='cloud_test_util_') - new_metadata = os.path.join(export_dir, 'new-meta.tar.xz') - metadata_yaml = os.path.join(extract_dir, 'metadata.yaml') - template_dir = os.path.join(extract_dir, 'templates') - - try: - # extract old data - (metadata, rootfs) = self.export_image(export_dir) - shutil.unpack_archive(metadata, extract_dir) - - # update metadata - metadata = c_util.read_conf(metadata_yaml) - templates = metadata.get('templates', {}) - templates.update(template_config) - metadata['templates'] = templates - util.yaml_dump(metadata, metadata_yaml) - - # write out template files - for name, content in template_data.items(): - path = os.path.join(template_dir, name) - c_util.write_file(path, content) - - # store new data, mark new image as modified - util.flat_tar(new_metadata, extract_dir) - self.import_image(new_metadata, rootfs) - self.modified = True - - finally: - # remove tmpfiles - shutil.rmtree(export_dir) - shutil.rmtree(extract_dir) - - def _execute(self, *args, **kwargs): - """Execute command in image, modifying image.""" - return self._instance._execute(*args, **kwargs) - - def push_file(self, local_path, remote_path): - """Copy file at 'local_path' to instance at 'remote_path'.""" - return self._instance.push_file(local_path, remote_path) - - def run_script(self, *args, **kwargs): - """Run script in image, modifying image. - - @return_value: script output - """ - return self._instance.run_script(*args, **kwargs) - - def snapshot(self): - """Create snapshot of image, block until done.""" - # get empty user data to pass in to instance - # if overrides for user data provided, use them - empty_userdata = util.update_user_data( - {}, self.config.get('user_data_overrides', {})) - conf = {'user.user-data': empty_userdata} - # clone current instance - instance = self.platform.launch_container( - self.properties, self.config, self.features, - container=self._instance.name, image_desc=str(self), - use_desc='snapshot', container_config=conf) - # wait for cloud-init before boot_clean_script is run to ensure - # /var/lib/cloud is removed cleanly - instance.start(wait=True, wait_for_cloud_init=True) - if self.config.get('boot_clean_script'): - instance.run_script(self.config.get('boot_clean_script')) - # freeze current instance and return snapshot - instance.freeze() - return LXDSnapshot(self.platform, self.properties, self.config, - self.features, instance) - - def destroy(self): - """Clean up data associated with image.""" - self.pylxd_image = None - super(LXDImage, self).destroy() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/lxd/instance.py b/tests/cloud_tests/platforms/lxd/instance.py deleted file mode 100644 index 2b973a08..00000000 --- a/tests/cloud_tests/platforms/lxd/instance.py +++ /dev/null @@ -1,278 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
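
The removed LXDImage.export_image()/import_image() pair above round-trips an image through the lxc command line and relies on the "split" image format: a meta-<fingerprint>.tar.xz metadata tarball plus a rootfs file, both carrying the fingerprint in their names. A short sketch of locating those two files after an export; the helper name and error text are invented, while the lxc invocation mirrors the removed code.

    # Sketch (invented helper name): export a split LXD image and locate its
    # metadata and rootfs tarballs, as the removed LXDImage.export_image() did.
    import os
    import subprocess

    def export_split_image(fingerprint, output_dir):
        subprocess.run(['lxc', 'image', 'export', fingerprint, output_dir],
                       check=True, capture_output=True)
        files = [p for p in os.listdir(output_dir) if fingerprint in p]
        if len(files) != 2:
            raise RuntimeError('expected a split image (metadata + rootfs), '
                               'found: %s' % ', '.join(files))
        metadata = next(p for p in files if p.startswith('meta-'))
        rootfs = next(p for p in files if not p.startswith('meta-'))
        return (os.path.join(output_dir, metadata),
                os.path.join(output_dir, rootfs))
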
- -"""Base LXD instance.""" - -import os -import shutil -import time -from tempfile import mkdtemp - -from cloudinit.subp import subp, ProcessExecutionError, which -from cloudinit.util import load_yaml -from tests.cloud_tests import LOG -from tests.cloud_tests.util import PlatformError - -from ..instances import Instance - -from pylxd import exceptions as pylxd_exc - - -class LXDInstance(Instance): - """LXD container backed instance.""" - - platform_name = "lxd" - _console_log_method = None - _console_log_file = None - - def __init__(self, platform, name, properties, config, features, - pylxd_container): - """Set up instance. - - @param platform: platform object - @param name: hostname of instance - @param properties: image properties - @param config: image config - @param features: supported feature flags - """ - if not pylxd_container: - raise ValueError("Invalid value pylxd_container: %s" % - pylxd_container) - self._pylxd_container = pylxd_container - super(LXDInstance, self).__init__( - platform, name, properties, config, features) - self.tmpd = mkdtemp(prefix="%s-%s" % (type(self).__name__, name)) - self.name = name - self._setup_console_log() - - @property - def pylxd_container(self): - """Property function.""" - if self._pylxd_container is None: - raise RuntimeError( - "%s: Attempted use of pylxd_container after deletion." % self) - self._pylxd_container.sync() - return self._pylxd_container - - def __str__(self): - return ( - '%s(name=%s) status=%s' % - (self.__class__.__name__, self.name, - ("deleted" if self._pylxd_container is None else - self.pylxd_container.status))) - - def _execute(self, command, stdin=None, env=None): - if env is None: - env = {} - - env_args = [] - if env: - env_args = ['env'] + ["%s=%s" for k, v in env.items()] - - # ensure instance is running and execute the command - self.start() - - # Use cmdline client due to https://github.com/lxc/pylxd/issues/268 - exit_code = 0 - try: - stdout, stderr = subp( - ['lxc', 'exec', self.name, '--'] + env_args + list(command), - data=stdin, decode=False) - except ProcessExecutionError as e: - exit_code = e.exit_code - stdout = e.stdout - stderr = e.stderr - - return stdout, stderr, exit_code - - def read_data(self, remote_path, decode=False): - """Read data from instance filesystem. - - @param remote_path: path in instance - @param decode: decode data before returning. - @return_value: content of remote_path as bytes if 'decode' is False, - and as string if 'decode' is True. - """ - data = self.pylxd_container.files.get(remote_path) - return data.decode() if decode else data - - def write_data(self, remote_path, data): - """Write data to instance filesystem. 
- - @param remote_path: path in instance - @param data: data to write in bytes - """ - self.pylxd_container.files.put(remote_path, data) - - @property - def console_log_method(self): - if self._console_log_method is not None: - return self._console_log_method - - client = which('lxc') - if not client: - raise PlatformError("No 'lxc' client.") - - elif _has_proper_console_support(): - self._console_log_method = 'show-log' - elif client.startswith("/snap"): - self._console_log_method = 'logfile-snap' - else: - self._console_log_method = 'logfile-tmp' - - LOG.debug("Set console log method to %s", self._console_log_method) - return self._console_log_method - - def _setup_console_log(self): - method = self.console_log_method - if not method.startswith("logfile-"): - return - - if method == "logfile-snap": - log_dir = "/var/snap/lxd/common/consoles" - if not os.path.exists(log_dir): - raise PlatformError( - "Unable to log with snap lxc. Please run:\n" - " sudo mkdir --mode=1777 -p %s" % log_dir) - elif method == "logfile-tmp": - log_dir = "/tmp" - else: - raise PlatformError( - "Unexpected value for console method: %s" % method) - - # doing this ensures we can read it. Otherwise it ends up root:root. - log_file = os.path.join(log_dir, self.name) - with open(log_file, "w") as fp: - fp.write("# %s\n" % self.name) - - cfg = "lxc.console.logfile=%s" % log_file - orig = self._pylxd_container.config.get('raw.lxc', "") - if orig: - orig += "\n" - self._pylxd_container.config['raw.lxc'] = orig + cfg - self._pylxd_container.save() - self._console_log_file = log_file - - def console_log(self): - """Console log. - - @return_value: bytes of this instance's console - """ - - if self._console_log_file: - if not os.path.exists(self._console_log_file): - raise NotImplementedError( - "Console log '%s' does not exist. If this is a remote " - "lxc, then this is really NotImplementedError. If it is " - "A local lxc, then this is a RuntimeError." - "https://github.com/lxc/lxd/issues/1129") - with open(self._console_log_file, "rb") as fp: - return fp.read() - - try: - return subp(['lxc', 'console', '--show-log', self.name], - decode=False)[0] - except ProcessExecutionError as e: - raise PlatformError( - "console log", - "Console log failed [%d]: stdout=%s stderr=%s" % ( - e.exit_code, e.stdout, e.stderr) - ) from e - - def reboot(self, wait=True): - """Reboot instance.""" - self.shutdown(wait=wait) - self.start(wait=wait) - - def shutdown(self, wait=True, retry=1): - """Shutdown instance.""" - if self.pylxd_container.status == 'Stopped': - return - - try: - LOG.debug("%s: shutting down (wait=%s)", self, wait) - self.pylxd_container.stop(wait=wait) - except (pylxd_exc.LXDAPIException, pylxd_exc.NotFound) as e: - # An exception happens here sometimes (LP: #1783198) - # LOG it, and try again. 
- LOG.warning( - ("%s: shutdown(retry=%d) caught %s in shutdown " - "(response=%s): %s"), - self, retry, e.__class__.__name__, e.response, e) - if isinstance(e, pylxd_exc.NotFound): - LOG.debug("container_exists(%s) == %s", - self.name, self.platform.container_exists(self.name)) - if retry == 0: - raise e - return self.shutdown(wait=wait, retry=retry - 1) - - def start(self, wait=True, wait_for_cloud_init=False): - """Start instance.""" - if self.pylxd_container.status != 'Running': - self.pylxd_container.start(wait=wait) - if wait: - self._wait_for_system(wait_for_cloud_init) - - def freeze(self): - """Freeze instance.""" - if self.pylxd_container.status != 'Frozen': - self.pylxd_container.freeze(wait=True) - - def unfreeze(self): - """Unfreeze instance.""" - if self.pylxd_container.status == 'Frozen': - self.pylxd_container.unfreeze(wait=True) - - def destroy(self): - """Clean up instance.""" - LOG.debug("%s: deleting container.", self) - self.unfreeze() - self.shutdown() - retries = [1] * 5 - for attempt, wait in enumerate(retries): - try: - self.pylxd_container.delete(wait=True) - break - except Exception: - if attempt + 1 >= len(retries): - raise - LOG.debug('Failed to delete container %s (%s/%s) retrying...', - self, attempt + 1, len(retries)) - time.sleep(wait) - - self._pylxd_container = None - - if self.platform.container_exists(self.name): - raise OSError('%s: container was not properly removed' % self) - if self._console_log_file and os.path.exists(self._console_log_file): - os.unlink(self._console_log_file) - shutil.rmtree(self.tmpd) - super(LXDInstance, self).destroy() - - -def _has_proper_console_support(): - stdout, _ = subp(['lxc', 'info']) - info = load_yaml(stdout) - reason = None - if 'console' not in info.get('api_extensions', []): - reason = "LXD server does not support console api extension" - else: - dver = str(info.get('environment', {}).get('driver_version', "")) - if dver.startswith("2.") or dver.startswith("1."): - reason = "LXD Driver version not 3.x+ (%s)" % dver - else: - try: - stdout = subp(['lxc', 'console', '--help'], decode=False)[0] - if not (b'console' in stdout and b'log' in stdout): - reason = "no '--log' in lxc console --help" - except ProcessExecutionError: - reason = "no 'console' command in lxc client" - - if reason: - LOG.debug("no console-support: %s", reason) - return False - else: - LOG.debug("console-support looks good") - return True - - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/lxd/platform.py b/tests/cloud_tests/platforms/lxd/platform.py deleted file mode 100644 index f7251a07..00000000 --- a/tests/cloud_tests/platforms/lxd/platform.py +++ /dev/null @@ -1,104 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base LXD platform.""" - -from pylxd import (Client, exceptions) - -from ..platforms import Platform -from .image import LXDImage -from .instance import LXDInstance -from tests.cloud_tests import util - -DEFAULT_SSTREAMS_SERVER = "https://images.linuxcontainers.org:8443" - - -class LXDPlatform(Platform): - """LXD test platform.""" - - platform_name = 'lxd' - - def __init__(self, config): - """Set up platform.""" - super(LXDPlatform, self).__init__(config) - # TODO: allow configuration of remote lxd host via env variables - # set up lxd connection - self.client = Client() - - def get_image(self, img_conf): - """Get image using specified image configuration. 
- - @param img_conf: configuration for image - @return_value: cloud_tests.images instance - """ - pylxd_image = self.client.images.create_from_simplestreams( - img_conf.get('sstreams_server', DEFAULT_SSTREAMS_SERVER), - img_conf['alias']) - image = LXDImage(self, img_conf, pylxd_image) - if img_conf.get('override_templates', False): - image.update_templates(self.config.get('template_overrides', {}), - self.config.get('template_files', {})) - return image - - def launch_container(self, properties, config, features, - image=None, container=None, ephemeral=False, - container_config=None, block=True, image_desc=None, - use_desc=None): - """Launch a container. - - @param properties: image properties - @param config: image configuration - @param features: image features - @param image: image fingerprint to launch from - @param container: container to copy - @param ephemeral: delete image after first shutdown - @param container_config: config options for instance as dict - @param block: wait until container created - @param image_desc: description of image being launched - @param use_desc: description of container's use - @return_value: cloud_tests.instances instance - """ - if not (image or container): - raise ValueError("either image or container must be specified") - container = self.client.containers.create({ - 'name': util.gen_instance_name(image_desc=image_desc, - use_desc=use_desc, - used_list=self.list_containers()), - 'ephemeral': bool(ephemeral), - 'config': (container_config - if isinstance(container_config, dict) else {}), - 'source': ({'type': 'image', 'fingerprint': image} if image else - {'type': 'copy', 'source': container}) - }, wait=block) - return LXDInstance(self, container.name, properties, config, features, - container) - - def container_exists(self, container_name): - """Check if container with name 'container_name' exists. - - @return_value: True if exists else False - """ - res = True - try: - self.client.containers.get(container_name) - except exceptions.LXDAPIException as e: - res = False - if e.response.status_code != 404: - raise - return res - - def list_containers(self): - """List names of all containers. - - @return_value: list of names - """ - return [container.name for container in self.client.containers.all()] - - def query_image_by_alias(self, alias): - """Get image by alias in local image store. - - @param alias: alias of image - @return_value: pylxd image (not cloud_tests.images instance) - """ - return self.client.images.get_by_alias(alias) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/lxd/snapshot.py b/tests/cloud_tests/platforms/lxd/snapshot.py deleted file mode 100644 index b524644f..00000000 --- a/tests/cloud_tests/platforms/lxd/snapshot.py +++ /dev/null @@ -1,53 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base LXD snapshot.""" - -from ..snapshots import Snapshot - - -class LXDSnapshot(Snapshot): - """LXD image copy backed snapshot.""" - - platform_name = "lxd" - - def __init__(self, platform, properties, config, features, - pylxd_frozen_instance): - """Set up snapshot. - - @param platform: platform object - @param properties: image properties - @param config: image config - @param features: supported feature flags - """ - self.pylxd_frozen_instance = pylxd_frozen_instance - super(LXDSnapshot, self).__init__( - platform, properties, config, features) - - def launch(self, user_data, meta_data=None, block=True, start=True, - use_desc=None): - """Launch instance. 
- - @param user_data: user-data for the instance - @param instance_id: instance-id for the instance - @param block: wait until instance is created - @param start: start instance and wait until fully started - @param use_desc: description of snapshot instance use - @return_value: an Instance - """ - inst_config = {'user.user-data': user_data} - if meta_data: - inst_config['user.meta-data'] = meta_data - instance = self.platform.launch_container( - self.properties, self.config, self.features, block=block, - image_desc=str(self), container=self.pylxd_frozen_instance.name, - use_desc=use_desc, container_config=inst_config) - if start: - instance.start() - return instance - - def destroy(self): - """Clean up snapshot data.""" - self.pylxd_frozen_instance.destroy() - super(LXDSnapshot, self).destroy() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/nocloudkvm/__init__.py b/tests/cloud_tests/platforms/nocloudkvm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/cloud_tests/platforms/nocloudkvm/image.py b/tests/cloud_tests/platforms/nocloudkvm/image.py deleted file mode 100644 index ff5b6ad7..00000000 --- a/tests/cloud_tests/platforms/nocloudkvm/image.py +++ /dev/null @@ -1,79 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""NoCloud KVM Image Base Class.""" - -from cloudinit import subp - -import os -import shutil -import tempfile - -from ..images import Image -from .snapshot import NoCloudKVMSnapshot - - -class NoCloudKVMImage(Image): - """NoCloud KVM backed image.""" - - platform_name = "nocloud-kvm" - - def __init__(self, platform, config, orig_img_path): - """Set up image. - - @param platform: platform object - @param config: image configuration - @param img_path: path to the image - """ - self.modified = False - self._workd = tempfile.mkdtemp(prefix='NoCloudKVMImage') - self._orig_img_path = orig_img_path - self._img_path = os.path.join(self._workd, - os.path.basename(self._orig_img_path)) - - subp.subp(['qemu-img', 'create', '-f', 'qcow2', - '-b', orig_img_path, self._img_path]) - - super(NoCloudKVMImage, self).__init__(platform, config) - - def _execute(self, command, stdin=None, env=None): - """Execute command in image, modifying image.""" - return self.mount_image_callback(command, stdin=stdin, env=env) - - def mount_image_callback(self, command, stdin=None, env=None): - """Run mount-image-callback.""" - - env_args = [] - if env: - env_args = ['env'] + ["%s=%s" for k, v in env.items()] - - mic_chroot = ['sudo', 'mount-image-callback', '--system-mounts', - '--system-resolvconf', self._img_path, - '--', 'chroot', '_MOUNTPOINT_'] - try: - out, err = subp.subp(mic_chroot + env_args + list(command), - data=stdin, decode=False) - return (out, err, 0) - except subp.ProcessExecutionError as e: - return (e.stdout, e.stderr, e.exit_code) - - def snapshot(self): - """Create snapshot of image, block until done.""" - if not self._img_path: - raise RuntimeError() - - return NoCloudKVMSnapshot(self.platform, self.properties, self.config, - self.features, self._img_path) - - def destroy(self): - """Unset path to signal image is no longer used. - - The removal of the images and all other items is handled by the - framework. In some cases we want to keep the images, so let the - framework decide whether to keep or destroy everything. 
- """ - self._img_path = None - shutil.rmtree(self._workd) - - super(NoCloudKVMImage, self).destroy() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/nocloudkvm/instance.py b/tests/cloud_tests/platforms/nocloudkvm/instance.py deleted file mode 100644 index 5140a11c..00000000 --- a/tests/cloud_tests/platforms/nocloudkvm/instance.py +++ /dev/null @@ -1,197 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base NoCloud KVM instance.""" - -import copy -import os -import socket -import subprocess -import time -import uuid - -from ..instances import Instance -from cloudinit.atomic_helper import write_json -from cloudinit import subp -from tests.cloud_tests import LOG, util - -# This domain contains reverse lookups for hostnames that are used. -# The primary reason is so sudo will return quickly when it attempts -# to look up the hostname. i9n is just short for 'integration'. -# see also bug 1730744 for why we had to do this. -CI_DOMAIN = "i9n.cloud-init.io" - - -class NoCloudKVMInstance(Instance): - """NoCloud KVM backed instance.""" - - platform_name = "nocloud-kvm" - - def __init__(self, platform, name, image_path, properties, config, - features, user_data, meta_data): - """Set up instance. - - @param platform: platform object - @param name: image path - @param image_path: path to disk image to boot. - @param properties: dictionary of properties - @param config: dictionary of configuration values - @param features: dictionary of supported feature flags - """ - super(NoCloudKVMInstance, self).__init__( - platform, name, properties, config, features - ) - - self.user_data = user_data - if meta_data: - meta_data = copy.deepcopy(meta_data) - else: - meta_data = {} - - if 'instance-id' in meta_data: - iid = meta_data['instance-id'] - else: - iid = str(uuid.uuid1()) - meta_data['instance-id'] = iid - - self.instance_id = iid - self.ssh_key_file = os.path.join( - platform.config['data_dir'], platform.config['private_key']) - self.ssh_pubkey_file = os.path.join( - platform.config['data_dir'], platform.config['public_key']) - - self.ssh_pubkey = None - if self.ssh_pubkey_file: - with open(self.ssh_pubkey_file, "r") as fp: - self.ssh_pubkey = fp.read().rstrip('\n') - - if not meta_data.get('public-keys'): - meta_data['public-keys'] = [] - meta_data['public-keys'].append(self.ssh_pubkey) - - self.ssh_ip = '127.0.0.1' - self.ssh_port = None - self.pid = None - self.pid_file = None - self.console_file = None - self.disk = image_path - self.cache_mode = platform.config.get('cache_mode', - 'cache=none,aio=native') - self.meta_data = meta_data - - def shutdown(self, wait=True): - """Shutdown instance.""" - - if self.pid: - # This relies on _execute which uses sudo over ssh. The ssh - # connection would get killed before sudo exited, so ignore errors. - cmd = ['shutdown', 'now'] - try: - self._execute(cmd) - except util.InTargetExecuteError: - pass - self._ssh_close() - - if wait: - LOG.debug("Executed shutdown. waiting on pid %s to end", - self.pid) - time_for_shutdown = 120 - give_up_at = time.time() + time_for_shutdown - pid_file_path = '/proc/%s' % self.pid - msg = ("pid %s did not exit in %s seconds after shutdown." 
% - (self.pid, time_for_shutdown)) - while True: - if not os.path.exists(pid_file_path): - break - if time.time() > give_up_at: - raise util.PlatformError("shutdown", msg) - self.pid = None - - def destroy(self): - """Clean up instance.""" - if self.pid: - try: - subp.subp(['kill', '-9', self.pid]) - except subp.ProcessExecutionError: - pass - - if self.pid_file: - try: - os.remove(self.pid_file) - except Exception: - pass - - self.pid = None - self._ssh_close() - - super(NoCloudKVMInstance, self).destroy() - - def _execute(self, command, stdin=None, env=None): - env_args = [] - if env: - env_args = ['env'] + ["%s=%s" for k, v in env.items()] - - return self._ssh(['sudo'] + env_args + list(command), stdin=stdin) - - def generate_seed(self, tmpdir): - """Generate nocloud seed from user-data""" - seed_file = os.path.join(tmpdir, '%s_seed.img' % self.name) - user_data_file = os.path.join(tmpdir, '%s_user_data' % self.name) - meta_data_file = os.path.join(tmpdir, '%s_meta_data' % self.name) - - with open(user_data_file, "w") as ud_file: - ud_file.write(self.user_data) - - # meta-data can be yaml, but more easily pretty printed with json - write_json(meta_data_file, self.meta_data) - subp.subp(['cloud-localds', seed_file, user_data_file, - meta_data_file]) - - return seed_file - - def get_free_port(self): - """Get a free port assigned by the kernel.""" - s = socket.socket() - s.bind(('', 0)) - num = s.getsockname()[1] - s.close() - return num - - def start(self, wait=True, wait_for_cloud_init=False): - """Start instance.""" - tmpdir = self.platform.config['data_dir'] - seed = self.generate_seed(tmpdir) - self.pid_file = os.path.join(tmpdir, '%s.pid' % self.name) - self.console_file = os.path.join(tmpdir, '%s-console.log' % self.name) - self.ssh_port = self.get_free_port() - - cmd = ['./tools/xkvm', - '--disk', '%s,%s' % (self.disk, self.cache_mode), - '--disk', '%s' % seed, - '--netdev', ','.join(['user', - 'hostfwd=tcp::%s-:22' % self.ssh_port, - 'dnssearch=%s' % CI_DOMAIN]), - '--', '-pidfile', self.pid_file, '-vnc', 'none', - '-m', '2G', '-smp', '2', '-nographic', '-name', self.name, - '-serial', 'file:' + self.console_file] - subprocess.Popen(cmd, - close_fds=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - while not os.path.exists(self.pid_file): - time.sleep(1) - - with open(self.pid_file, 'r') as pid_f: - self.pid = pid_f.readlines()[0].strip() - - if wait: - self._wait_for_system(wait_for_cloud_init) - - def console_log(self): - if not self.console_file: - return b'' - with open(self.console_file, "rb") as fp: - return fp.read() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/nocloudkvm/platform.py b/tests/cloud_tests/platforms/nocloudkvm/platform.py deleted file mode 100644 index 53c8ebf2..00000000 --- a/tests/cloud_tests/platforms/nocloudkvm/platform.py +++ /dev/null @@ -1,94 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""Base NoCloud KVM platform.""" -import glob -import os - -from simplestreams import filters -from simplestreams import mirrors -from simplestreams import objectstores -from simplestreams import util as s_util - -from ..platforms import Platform -from .image import NoCloudKVMImage -from .instance import NoCloudKVMInstance -from cloudinit import subp -from cloudinit import util as c_util -from tests.cloud_tests import util - - -class NoCloudKVMPlatform(Platform): - """NoCloud KVM test platform.""" - - platform_name = 'nocloud-kvm' - - def get_image(self, img_conf): - """Get image using specified image configuration. - - @param img_conf: configuration for image - @return_value: cloud_tests.images instance - """ - (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None) - - filter = filters.get_filters( - [ - 'arch=%s' % c_util.get_dpkg_architecture(), - 'release=%s' % img_conf['release'], - 'ftype=disk1.img', - ] - ) - mirror_config = {'filters': filter, - 'keep_items': False, - 'max_items': 1, - 'checksumming_reader': True, - 'item_download': True - } - - def policy(content, path): - return s_util.read_signed(content, keyring=img_conf['keyring']) - - smirror = mirrors.UrlMirrorReader(url, policy=policy) - tstore = objectstores.FileStore(img_conf['mirror_dir']) - tmirror = mirrors.ObjectFilterMirror(config=mirror_config, - objectstore=tstore) - tmirror.sync(smirror, path) - - search_d = os.path.join(img_conf['mirror_dir'], '**', - img_conf['release'], '**', '*.img') - - images = [] - for fname in glob.iglob(search_d, recursive=True): - images.append(fname) - - if len(images) < 1: - raise RuntimeError("No images found under '%s'" % search_d) - if len(images) > 1: - raise RuntimeError( - "Multiple images found in '%s': %s" % (search_d, - ' '.join(images))) - - image = NoCloudKVMImage(self, img_conf, images[0]) - return image - - def create_instance(self, properties, config, features, - src_img_path, image_desc=None, use_desc=None, - user_data=None, meta_data=None): - """Create an instance - - @param src_img_path: image path to launch from - @param properties: image properties - @param config: image configuration - @param features: image features - @param image_desc: description of image being launched - @param use_desc: description of container's use - @return_value: cloud_tests.instances instance - """ - name = util.gen_instance_name(image_desc=image_desc, use_desc=use_desc) - img_path = os.path.join(self.config['data_dir'], name + '.qcow2') - subp.subp(['qemu-img', 'create', '-f', 'qcow2', - '-b', src_img_path, img_path]) - - return NoCloudKVMInstance(self, name, img_path, properties, config, - features, user_data, meta_data) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/nocloudkvm/snapshot.py b/tests/cloud_tests/platforms/nocloudkvm/snapshot.py deleted file mode 100644 index 2dae3590..00000000 --- a/tests/cloud_tests/platforms/nocloudkvm/snapshot.py +++ /dev/null @@ -1,59 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base NoCloud KVM snapshot.""" -import os -import shutil -import tempfile - -from ..snapshots import Snapshot - - -class NoCloudKVMSnapshot(Snapshot): - """NoCloud KVM image copy backed snapshot.""" - - platform_name = "nocloud-kvm" - - def __init__(self, platform, properties, config, features, image_path): - """Set up snapshot. 
- - @param platform: platform object - @param properties: image properties - @param config: image config - @param features: supported feature flags - @param image_path: image file to snapshot. - """ - self._workd = tempfile.mkdtemp(prefix='NoCloudKVMSnapshot') - snapshot = os.path.join(self._workd, 'snapshot') - shutil.copyfile(image_path, snapshot) - self._image_path = snapshot - - super(NoCloudKVMSnapshot, self).__init__( - platform, properties, config, features) - - def launch(self, user_data, meta_data=None, block=True, start=True, - use_desc=None): - """Launch instance. - - @param user_data: user-data for the instance - @param instance_id: instance-id for the instance - @param block: wait until instance is created - @param start: start instance and wait until fully started - @param use_desc: description of snapshot instance use - @return_value: an Instance - """ - instance = self.platform.create_instance( - self.properties, self.config, self.features, - self._image_path, image_desc=str(self), use_desc=use_desc, - user_data=user_data, meta_data=meta_data) - - if start: - instance.start() - - return instance - - def destroy(self): - """Clean up snapshot data.""" - shutil.rmtree(self._workd) - super(NoCloudKVMSnapshot, self).destroy() - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/platforms.py b/tests/cloud_tests/platforms/platforms.py deleted file mode 100644 index ac3b6563..00000000 --- a/tests/cloud_tests/platforms/platforms.py +++ /dev/null @@ -1,109 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base platform class.""" -import os -import shutil - -from simplestreams import filters, mirrors -from simplestreams import util as s_util - -from cloudinit import subp -from cloudinit import util as c_util - -from tests.cloud_tests import util - - -class Platform(object): - """Base class for platforms.""" - - platform_name = None - - def __init__(self, config): - """Set up platform.""" - self.config = config - self.tmpdir = util.mkdtemp() - if 'data_dir' in config: - self.data_dir = config['data_dir'] - else: - self.data_dir = os.path.join(self.tmpdir, "data_dir") - os.mkdir(self.data_dir) - - self._generate_ssh_keys(self.data_dir) - - def get_image(self, img_conf): - """Get image using specified image configuration. - - @param img_conf: configuration for image - @return_value: cloud_tests.images instance - """ - raise NotImplementedError - - def destroy(self): - """Clean up platform data.""" - shutil.rmtree(self.tmpdir) - - def _generate_ssh_keys(self, data_dir): - """Generate SSH keys to be used with image.""" - filename = os.path.join(data_dir, self.config['private_key']) - - if os.path.exists(filename): - c_util.del_file(filename) - - subp.subp(['ssh-keygen', '-m', 'PEM', '-t', 'rsa', '-b', '4096', - '-f', filename, '-P', '', - '-C', 'ubuntu@cloud_test'], - capture=True) - - @staticmethod - def _query_streams(img_conf, img_filter): - """Query streams for latest image given a specific filter. 
- - @param img_conf: configuration for image - @param filters: array of filters as strings format 'key=value' - @return: dictionary with latest image information or empty - """ - def policy(content, path): - return s_util.read_signed(content, keyring=img_conf['keyring']) - - (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None) - smirror = mirrors.UrlMirrorReader(url, policy=policy) - - config = {'max_items': 1, 'filters': filters.get_filters(img_filter)} - tmirror = FilterMirror(config) - tmirror.sync(smirror, path) - - try: - return tmirror.json_entries[0] - except IndexError as e: - raise RuntimeError( - 'no images found with filter: %s' % img_filter - ) from e - - -class FilterMirror(mirrors.BasicMirrorWriter): - """Taken from sstream-query to return query result as json array.""" - - def __init__(self, config=None): - super(FilterMirror, self).__init__(config=config) - if config is None: - config = {} - self.config = config - self.filters = config.get('filters', []) - self.json_entries = [] - - def load_products(self, path=None, content_id=None): - return {'content_id': content_id, 'products': {}} - - def filter_item(self, data, src, target, pedigree): - return filters.filter_item(self.filters, data, src, pedigree) - - def insert_item(self, data, src, target, pedigree, contentsource): - # src and target are top level products:1.0 - # data is src['products'][ped[0]]['versions'][ped[1]]['items'][ped[2]] - # contentsource is a ContentSource if 'path' exists in data or None - data = s_util.products_exdata(src, pedigree) - if 'path' in data: - data.update({'item_url': contentsource.url}) - self.json_entries.append(data) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms/snapshots.py b/tests/cloud_tests/platforms/snapshots.py deleted file mode 100644 index 0f5f8bb6..00000000 --- a/tests/cloud_tests/platforms/snapshots.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base snapshot.""" - - -class Snapshot(object): - """Base class for snapshots.""" - - platform_name = None - - def __init__(self, platform, properties, config, features): - """Set up snapshot. - - @param platform: platform object - @param properties: image properties - @param config: image config - @param features: supported feature flags - """ - self.platform = platform - self.properties = properties - self.config = config - self.features = features - - def __str__(self): - """A brief description of the snapshot.""" - return '-'.join((self.properties['os'], self.properties['release'])) - - def launch(self, user_data, meta_data=None, block=True, start=True, - use_desc=None): - """Launch instance. 
- - @param user_data: user-data for the instance - @param instance_id: instance-id for the instance - @param block: wait until instance is created - @param start: start instance and wait until fully started - @param use_desc: description of snapshot instance use - @return_value: an Instance - """ - raise NotImplementedError - - def destroy(self): - """Clean up snapshot data.""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml deleted file mode 100644 index c52b78f9..00000000 --- a/tests/cloud_tests/releases.yaml +++ /dev/null @@ -1,381 +0,0 @@ -# ============================= Release Config ================================ -default_release_config: - # global default configuration options - default: - # all are disabled by default - enabled: false - # timeout for booting image and running cloud init - boot_timeout: 120 - # a script to run after a boot that is used to modify an image, before - # making a snapshot of the image. may be useful for removing data left - # behind from cloud-init booting, such as logs, to ensure that data - # from snapshot.launch() will not include a cloud-init.log from a boot - # used to create the snapshot, if cloud-init has not run - boot_clean_script: | - #!/bin/bash - rm -rf /var/log/cloud-init.log /var/log/cloud-init-output.log \ - /var/lib/cloud/ /run/cloud-init/ /var/log/syslog - # test script to determine if system is booted fully - system_ready_script: | - # permit running or degraded state as both indicate complete boot - [ $(systemctl is-system-running) = 'running' -o - $(systemctl is-system-running) = 'degraded' ] - # test script to determine if cloud-init has finished - cloud_init_ready_script: | - [ -f '/run/cloud-init/result.json' ] - # currently used features and their uses are: - # features groups and additional feature settings - feature_groups: [] - features: {} - mirror_url: https://cloud-images.ubuntu.com/daily - mirror_dir: '/srv/citest/images' - keyring: /usr/share/keyrings/ubuntu-cloudimage-keyring.gpg - # The OS version formatted as Major.Minor is used to compare releases. - # Each release needs to define this, for example "16.04". Quoting is - # necessary to ensure the version is treated as a string. 
- version: null - - ec2: - # Choose from: [ebs, instance-store] - root-store: ebs - boot_timeout: 300 - nocloud-kvm: - setup_overrides: null - override_templates: false - # lxd specific default configuration options - lxd: - # default sstreams server to use for lxd image retrieval - sstreams_server: https://us.images.linuxcontainers.org:8443 - # keep base image, avoids downloading again next run - cache_base_image: true - # lxd images from linuxcontainers.org do not have the nocloud seed - # templates in place, so the image metadata must be modified - override_templates: true - # arg overrides to set image up - setup_overrides: - # lxd images from linuxcontainers.org do not come with - # cloud-init, so must pull cloud-init in from repo using - # setup_image.upgrade - upgrade: true - azurecloud: - boot_timeout: 300 - -features: - # all currently supported feature flags - all: - - apt # image supports apt package manager - - byobu # byobu is available in repositories - - landscape # landscape-client available in repos - - lxd # lxd is available in the image - - ppa # image supports ppas - - rpm # image supports rpms - - snap # supports snapd - # NOTE: the following feature flags are to work around bugs in the - # images, and can be removed when no longer needed - - hostname # setting system hostname works - # NOTE: the following feature flags are to work around issues in the - # testcases, and can be removed when no longer needed - - apt_src_cont # default contents and format of sources.list matches - # ubuntu sources.list - - apt_hist_fmt # apt command history entries use full paths to apt - # executable rather than relative paths - - daylight_time # timezones are daylight not standard time - - apt_up_out # 'Calculating upgrade..' present in log output from - # apt-get dist-upgrade output - - engb_locale # locale en_GB.UTF-8 is available - - locale_gen # the /etc/locale.gen file exists - - no_ntpdate # 'ntpdate' is not installed by default - - no_file_fmt_e # the 'file' utility does not have a formatting error - - ppa_file_name # the name of the source file added to sources.list.d has - # the expected format for newer ubuntu releases - - sshd # requires ssh server to be installed by default - - ssh_key_fmt # ssh auth keys printed to console have expected format - - syslog # test case requires syslog to be written by default - - ubuntu_ntp # expect ubuntu.pool.ntp.org to be used as ntp server - - ubuntu_repos # test case requres ubuntu repositories to be used - - ubuntu_user # test case needs user with the name 'ubuntu' to exist - # NOTE: the following feature flags are to work around issues that may - # be considered bugs in cloud-init - - lsb_release # image has lsb_release installed, maybe should install - # if missing by default - - sudo # image has sudo installed, should not be required - # feature flag groups - groups: - base: - hostname: true - no_file_fmt_e: true - ubuntu_specific: - apt_src_cont: true - apt_hist_fmt: true - byobu: true - daylight_time: true - engb_locale: true - landscape: true - locale_gen: true - lsb_release: true - lxd: true - ppa: true - ppa_file_name: true - snap: true - sshd: true - ssh_key_fmt: true - sudo: true - syslog: true - ubuntu_ntp: true - ubuntu_repos: true - ubuntu_user: true - debian_base: - apt: true - apt_up_out: true - no_ntpdate: true - rhel_base: - rpm: true - -releases: - # UBUNTU ================================================================= - impish: - # EOL: July 2022 - default: - enabled: true - release: impish - version: "21.10" - os: 
ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: impish - setup_overrides: null - override_templates: false - - hirsute: - # EOL: Jan 2022 - default: - enabled: true - release: hirsute - version: "21.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: hirsute - setup_overrides: null - override_templates: false - groovy: - # EOL: Jul 2021 - default: - enabled: true - release: groovy - version: "20.10" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: groovy - setup_overrides: null - override_templates: false - focal: - # EOL: Apr 2025 - default: - enabled: true - release: focal - version: "20.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: focal - setup_overrides: null - override_templates: false - eoan: - # EOL: Jul 2020 - default: - enabled: true - release: eoan - version: "19.10" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: eoan - setup_overrides: null - override_templates: false - disco: - # EOL: Jan 2020 - default: - enabled: true - release: disco - version: "19.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: disco - setup_overrides: null - override_templates: false - cosmic: - # EOL: Jul 2019 - default: - enabled: true - release: cosmic - version: "18.10" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: cosmic - setup_overrides: null - override_templates: false - bionic: - # EOL: Apr 2023 - default: - enabled: true - release: bionic - version: "18.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: bionic - setup_overrides: null - override_templates: false - artful: - # EOL: Jul 2018 - default: - enabled: true - release: artful - version: "17.10" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: artful - setup_overrides: null - override_templates: false - xenial: - # EOL: Apr 2021 - default: - enabled: true - release: xenial - version: "16.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: xenial - setup_overrides: null - override_templates: false - trusty: - # EOL: Apr 2019 - default: - enabled: true - release: trusty - version: "14.04" - os: ubuntu - feature_groups: - - base - - debian_base - - ubuntu_specific - features: - apt_up_out: false - locale_gen: false - lxd: false - ppa_file_name: false - snap: false - ssh_key_fmt: false - no_ntpdate: false - no_file_fmt_e: false - system_ready_script: | - #!/bin/bash - # upstart based, so use old style runlevels - [ $(runlevel | awk '{print $2}') = '2' ] - lxd: - sstreams_server: https://cloud-images.ubuntu.com/daily - alias: trusty - setup_overrides: null - override_templates: false - # DEBIAN 
================================================================= - stretch: - # EOL: Not yet released - default: - enabled: true - feature_groups: - - base - - debian_base - lxd: - alias: debian/stretch/default - jessie: - # EOL: Jun 2020 - # NOTE: the cloud-init version shipped with jessie is out of date - # tests work if an up to date deb is used - default: - enabled: true - feature_groups: - - base - - debian_base - lxd: - alias: debian/jessie/default - # CENTOS ================================================================= - centos70: - # EOL: Jun 2024 (2020 - end of full updates) - default: - enabled: true - feature_groups: - - base - - rhel_base - user_data_overrides: - preserve_hostname: true - lxd: - features: - # NOTE: (LP: #1575779) - hostname: false - alias: centos/7/default - centos66: - # EOL: Nov 2020 - default: - enabled: true - feature_groups: - - base - - rhel_base - # still supported, but only bugfixes after may 2017 - system_ready_script: | - #!/bin/bash - [ $(runlevel | awk '{print $2}') = '3' ] - user_data_overrides: - preserve_hostname: true - lxd: - features: - # NOTE: (LP: #1575779) - hostname: false - alias: centos/6/default - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/run_funcs.py b/tests/cloud_tests/run_funcs.py deleted file mode 100644 index 8ae91120..00000000 --- a/tests/cloud_tests/run_funcs.py +++ /dev/null @@ -1,75 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Run functions.""" - -import os - -from tests.cloud_tests import bddeb, collect, util, verify - - -def tree_collect(args): - """Collect data using deb build from current tree. - - @param args: cmdline args - @return_value: fail count - """ - failed = 0 - tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data) - - with tmpdir as data_dir: - args.data_dir = data_dir - args.deb = os.path.join(tmpdir.tmpdir, 'cloud-init_all.deb') - try: - failed += bddeb.bddeb(args) - failed += collect.collect(args) - except Exception: - failed += 1 - raise - - return failed - - -def tree_run(args): - """Run test suite using deb build from current tree. - - @param args: cmdline args - @return_value: fail count - """ - failed = 0 - tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data) - - with tmpdir as data_dir: - args.data_dir = data_dir - args.deb = os.path.join(tmpdir.tmpdir, 'cloud-init_all.deb') - try: - failed += bddeb.bddeb(args) - failed += collect.collect(args) - failed += verify.verify(args) - except Exception: - failed += 1 - raise - - return failed - - -def run(args): - """Run test suite. - - @param args: cmdline args - @return_value: fail count - """ - failed = 0 - tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data) - - with tmpdir as data_dir: - args.data_dir = data_dir - try: - failed += collect.collect(args) - failed += verify.verify(args) - except Exception: - failed += 1 - raise - - return failed - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/setup_image.py b/tests/cloud_tests/setup_image.py deleted file mode 100644 index 69e66e3f..00000000 --- a/tests/cloud_tests/setup_image.py +++ /dev/null @@ -1,237 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Setup image for testing.""" - -from functools import partial -import os -import yaml - -from tests.cloud_tests import LOG -from tests.cloud_tests import stage, util - - -def installed_package_version(image, package, ensure_installed=True): - """Get installed version of package. 
- - @param image: cloud_tests.images instance to operate on - @param package: name of package - @param ensure_installed: raise error if not installed - @return_value: cloud-init version string - """ - os_family = util.get_os_family(image.properties['os']) - if os_family == 'debian': - cmd = ['dpkg-query', '-W', "--showformat=${Version}", package] - elif os_family == 'redhat': - cmd = ['rpm', '-q', '--queryformat', "'%{VERSION}'", package] - else: - raise NotImplementedError - - return image.execute( - cmd, description='query version for package: {}'.format(package), - rcs=(0,) if ensure_installed else range(0, 256))[0].strip() - - -def install_deb(args, image): - """Install deb into image. - - @param args: cmdline arguments, must contain --deb - @param image: cloud_tests.images instance to operate on - @return_value: None, may raise errors - """ - # ensure system is compatible with package format - os_family = util.get_os_family(image.properties['os']) - if os_family != 'debian': - raise NotImplementedError('install deb: {} not supported on os ' - 'family: {}'.format(args.deb, os_family)) - - # install deb - msg = 'install deb: "{}" into target'.format(args.deb) - LOG.debug(msg) - remote_path = os.path.join('/tmp', os.path.basename(args.deb)) - image.push_file(args.deb, remote_path) - image.execute( - ['apt-get', 'install', '--allow-downgrades', '--assume-yes', - remote_path], description=msg) - # check installed deb version matches package - fmt = ['-W', "--showformat=${Version}"] - out = image.execute(['dpkg-deb'] + fmt + [remote_path])[0] - expected_version = out.strip() - found_version = installed_package_version(image, 'cloud-init') - if expected_version != found_version: - raise OSError('install deb version "{}" does not match expected "{}"' - .format(found_version, expected_version)) - - LOG.debug('successfully installed: %s, version: %s', args.deb, - found_version) - - -def install_rpm(args, image): - """Install rpm into image. - - @param args: cmdline arguments, must contain --rpm - @param image: cloud_tests.images instance to operate on - @return_value: None, may raise errors - """ - os_family = util.get_os_family(image.properties['os']) - if os_family != 'redhat': - raise NotImplementedError('install rpm: {} not supported on os ' - 'family: {}'.format(args.rpm, os_family)) - - # install rpm - msg = 'install rpm: "{}" into target'.format(args.rpm) - LOG.debug(msg) - remote_path = os.path.join('/tmp', os.path.basename(args.rpm)) - image.push_file(args.rpm, remote_path) - image.execute(['rpm', '-U', remote_path], description=msg) - - fmt = ['--queryformat', '"%{VERSION}"'] - (out, _err, _exit) = image.execute(['rpm', '-q'] + fmt + [remote_path]) - expected_version = out.strip() - found_version = installed_package_version(image, 'cloud-init') - if expected_version != found_version: - raise OSError('install rpm version "{}" does not match expected "{}"' - .format(found_version, expected_version)) - - LOG.debug('successfully installed: %s, version %s', args.rpm, - found_version) - - -def upgrade(args, image): - """Upgrade or install cloud-init from repo. 
- - @param args: cmdline arguments - @param image: cloud_tests.images instance to operate on - @return_value: None, may raise errors - """ - os_family = util.get_os_family(image.properties['os']) - if os_family == 'debian': - cmd = 'apt-get update && apt-get install cloud-init --yes' - elif os_family == 'redhat': - cmd = 'sleep 10 && yum install cloud-init --assumeyes' - else: - raise NotImplementedError - - msg = 'upgrading cloud-init' - LOG.debug(msg) - image.execute(cmd, description=msg) - - -def upgrade_full(args, image): - """Run the system's full upgrade command. - - @param args: cmdline arguments - @param image: cloud_tests.images instance to operate on - @return_value: None, may raise errors - """ - os_family = util.get_os_family(image.properties['os']) - if os_family == 'debian': - cmd = 'apt-get update && apt-get upgrade --yes' - elif os_family == 'redhat': - cmd = 'yum upgrade --assumeyes' - else: - raise NotImplementedError('upgrade command not configured for distro ' - 'from family: {}'.format(os_family)) - - msg = 'full system upgrade' - LOG.debug(msg) - image.execute(cmd, description=msg) - - -def run_script(args, image): - """Run a script in the target image. - - @param args: cmdline arguments, must contain --script - @param image: cloud_tests.images instance to operate on - @return_value: None, may raise errors - """ - msg = 'run setup image script in target image' - LOG.debug(msg) - image.run_script(args.script, description=msg) - - -def enable_ppa(args, image): - """Enable a ppa in the target image. - - @param args: cmdline arguments, must contain --ppa - @param image: cloud_tests.image instance to operate on - @return_value: None, may raise errors - """ - # ppa only supported on ubuntu (maybe debian?) - if image.properties['os'].lower() != 'ubuntu': - raise NotImplementedError('enabling a ppa is only available on ubuntu') - - # add ppa with add-apt-repository and update - ppa = 'ppa:{}'.format(args.ppa) - msg = 'enable ppa: "{}" in target'.format(ppa) - LOG.debug(msg) - cmd = 'add-apt-repository --yes {} && apt-get update'.format(ppa) - image.execute(cmd, description=msg) - - -def enable_repo(args, image): - """Enable a repository in the target image. - - @param args: cmdline arguments, must contain --repo - @param image: cloud_tests.image instance to operate on - @return_value: None, may raise errors - """ - # find enable repo command for the distro - os_family = util.get_os_family(image.properties['os']) - if os_family == 'debian': - cmd = ('echo "{}" >> "/etc/apt/sources.list" '.format(args.repo) + - '&& apt-get update') - elif os_family == 'centos': - cmd = 'yum-config-manager --add-repo="{}"'.format(args.repo) - else: - raise NotImplementedError('enable repo command not configured for ' - 'distro from family: {}'.format(os_family)) - - msg = 'enable repo: "{}" in target'.format(args.repo) - LOG.debug(msg) - image.execute(cmd, description=msg) - - -def setup_image(args, image): - """Set up image as specified in args. 
- - @param args: cmdline arguments - @param image: cloud_tests.image instance to operate on - @return_value: tuple of results and fail count - """ - # update the args if necessary for this image - overrides = image.setup_overrides - LOG.debug('updating args for setup with: %s', overrides) - args = util.update_args(args, overrides, preserve_old=True) - - # mapping of setup cmdline arg name to setup function - # represented as a tuple rather than a dict or odict as lookup by name not - # needed, and order is important as --script and --upgrade go at the end - handlers = ( - # arg handler description - ('deb', install_deb, 'setup func for --deb, install deb'), - ('rpm', install_rpm, 'setup func for --rpm, install rpm'), - ('repo', enable_repo, 'setup func for --repo, enable repo'), - ('ppa', enable_ppa, 'setup func for --ppa, enable ppa'), - ('script', run_script, 'setup func for --script, run script'), - ('upgrade', upgrade, 'setup func for --upgrade, upgrade cloud-init'), - ('upgrade-full', upgrade_full, 'setup func for --upgrade-full'), - ) - - # determine which setup functions needed - calls = [partial(stage.run_single, desc, partial(func, args, image)) - for name, func, desc in handlers if getattr(args, name, None)] - - try: - data = yaml.safe_load( - image.read_data("/etc/cloud/build.info", decode=True)) - info = ' '.join(["%s=%s" % (k, data.get(k)) - for k in ("build_name", "serial") if k in data]) - except Exception as e: - info = "N/A (%s)" % e - - LOG.info('setting up image %s (info %s)', image, info) - res = stage.run_stage( - 'set up for {}'.format(image), calls, continue_after_error=False) - return res - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/stage.py b/tests/cloud_tests/stage.py deleted file mode 100644 index d64a1dcc..00000000 --- a/tests/cloud_tests/stage.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Stage a run.""" - -import sys -import time -import traceback - -from tests.cloud_tests import LOG - - -class PlatformComponent(object): - """Context manager to safely handle platform components.""" - - def __init__(self, get_func, preserve_instance=False): - """Store get_ function as partial with no args. - - @param get_func: Callable returning an instance from the platform. - @param preserve_instance: Boolean, when True, do not destroy instance - after test. Used for test development. - """ - self.get_func = get_func - self.preserve_instance = preserve_instance - - def __enter__(self): - """Create instance of platform component.""" - self.instance = self.get_func() - return self.instance - - def __exit__(self, etype, value, trace): - """Destroy instance.""" - if self.instance is not None: - if self.preserve_instance: - LOG.info('Preserving test instance %s', self.instance.name) - else: - self.instance.destroy() - - -def run_single(name, call): - """Run a single function, keeping track of results and time. 
- - @param name: name of part - @param call: call to make - @return_value: a tuple of result and fail count - """ - res = { - 'name': name, - 'time': 0, - 'errors': [], - 'success': False - } - failed = 0 - start_time = time.time() - - try: - call() - except Exception as e: - failed += 1 - res['errors'].append(str(e)) - LOG.error('stage part: %s encountered error: %s', name, str(e)) - trace = traceback.extract_tb(sys.exc_info()[-1]) - LOG.error('traceback:\n%s', ''.join(traceback.format_list(trace))) - - res['time'] = time.time() - start_time - if failed == 0: - res['success'] = True - - return res, failed - - -def run_stage(parent_name, calls, continue_after_error=True): - """Run a stage of collection, keeping track of results and failures. - - @param parent_name: name of stage calls are under - @param calls: list of function call taking no params. must return a tuple - of results and failures. may raise exceptions - @param continue_after_error: whether or not to proceed to the next call - after catching an exception or recording a - failure - @return_value: a tuple of results and failures, with result containing - results from the function call under 'stages', and a list - of errors (if any on this level), and elapsed time - running stage, and the name - """ - res = { - 'name': parent_name, - 'time': 0, - 'errors': [], - 'stages': [], - 'success': False, - } - failed = 0 - start_time = time.time() - - for call in calls: - try: - (call_res, call_failed) = call() - res['stages'].append(call_res) - except Exception as e: - call_failed = 1 - res['errors'].append(str(e)) - LOG.error('stage: %s encountered error: %s', parent_name, str(e)) - trace = traceback.extract_tb(sys.exc_info()[-1]) - LOG.error('traceback:\n%s', ''.join(traceback.format_list(trace))) - - failed += call_failed - if call_failed and not continue_after_error: - break - - res['time'] = time.time() - start_time - if not failed: - res['success'] = True - - return (res, failed) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases.yaml b/tests/cloud_tests/testcases.yaml deleted file mode 100644 index fb9a5d27..00000000 --- a/tests/cloud_tests/testcases.yaml +++ /dev/null @@ -1,50 +0,0 @@ -# ============================= Base Test Config ============================== -base_test_data: - script_timeout: 20 - enabled: True - required_features: [] - cloud_config: | - #cloud-config - collect_scripts: - cloud-init.log: | - #!/bin/sh - cat /var/log/cloud-init.log - cloud-init-output.log: | - #!/bin/sh - cat /var/log/cloud-init-output.log - instance-id: | - #!/bin/sh - cat /run/cloud-init/.instance-id - instance-data.json: | - #!/bin/sh - cat /run/cloud-init/instance-data.json - result.json: | - #!/bin/sh - cat /run/cloud-init/result.json - status.json: | - #!/bin/sh - cat /run/cloud-init/status.json - package-versions: | - #!/bin/sh - dpkg-query --show - build.info: | - #!/bin/sh - binfo=/etc/cloud/build.info - [ -f "$binfo" ] && cat "$binfo" || echo "N/A" - system.journal.gz: | - #!/bin/sh - [ -d /run/systemd ] || { echo "not systemd."; exit 0; } - fail() { echo "ERROR:" "$@" 1>&2; exit 1; } - journal="" - for d in /run/log/journal /var/log/journal; do - for f in $d/*/system.journal; do - [ -f "$f" ] || continue - [ -z "$journal" ] || - fail "multiple journal found: $f $journal." - journal="$f" - done - done - [ -f "$journal" ] || fail "no journal file found." 
- gzip --to-stdout "$journal" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/__init__.py b/tests/cloud_tests/testcases/__init__.py deleted file mode 100644 index bb9785d3..00000000 --- a/tests/cloud_tests/testcases/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Main init.""" - -import importlib -import inspect -import unittest - -from cloudinit.util import read_conf - -from tests.cloud_tests import config -from tests.cloud_tests.testcases.base import CloudTestCase as base_test - - -def discover_test(test_name): - """Discover tests in test file for 'testname'. - - @return_value: list of test classes - """ - testmod_name = 'tests.cloud_tests.testcases.{}'.format( - config.name_sanitize(test_name)) - try: - testmod = importlib.import_module(testmod_name) - except NameError as e: - raise ValueError( - 'no test verifier found at: {}'.format(testmod_name) - ) from e - - found = [mod for name, mod in inspect.getmembers(testmod) - if (inspect.isclass(mod) - and base_test in inspect.getmro(mod) - and getattr(mod, '__test__', True))] - if len(found) != 1: - raise RuntimeError( - "Unexpected situation, multiple tests for %s: %s" % ( - test_name, found)) - - return found - - -def get_test_class(test_name, test_data, test_conf): - test_class = discover_test(test_name)[0] - - class DynamicTestSubclass(test_class): - - _realclass = test_class - data = test_data - conf = test_conf - release_conf = read_conf(config.RELEASES_CONF)['releases'] - - def __str__(self): - return "%s (%s)" % (self._testMethodName, - unittest.util.strclass(self._realclass)) - - @classmethod - def setUpClass(cls): - cls.maybeSkipTest() - - return DynamicTestSubclass - - -def get_suite(test_name, data, conf): - """Get test suite with all tests for 'testname'. - - @return_value: a test suite - """ - suite = unittest.TestSuite() - suite.addTest( - unittest.defaultTestLoader.loadTestsFromTestCase( - get_test_class(test_name, data, conf))) - return suite - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py deleted file mode 100644 index 4448e0b5..00000000 --- a/tests/cloud_tests/testcases/base.py +++ /dev/null @@ -1,385 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Base test case module.""" - -import crypt -import json -import re -import unittest - - -from cloudinit import util as c_util - -SkipTest = unittest.SkipTest - - -class CloudTestCase(unittest.TestCase): - """Base test class for verifiers.""" - - # data gets populated in get_suite.setUpClass - data = {} - conf = None - _cloud_config = None - release_conf = {} # The platform's os release configuration - - expected_warnings = () # Subclasses set to ignore expected WARN logs - - @property - def os_cfg(self): - return self.release_conf[self.os_name]['default'] - - def is_distro(self, distro_name): - return self.os_cfg['os'] == distro_name - - @classmethod - def maybeSkipTest(cls): - """Present to allow subclasses to override and raise a skipTest.""" - - def assertPackageInstalled(self, name, version=None): - """Check dpkg-query --show output for matching package name. - - @param name: package base name - @param version: string representing a package version or part of a - version. 
- """ - pkg_out = self.get_data_file('package-versions') - pkg_match = re.search( - '^%s\t(?P.*)$' % name, pkg_out, re.MULTILINE) - if pkg_match: - installed_version = pkg_match.group('version') - if not version: - return # Success - if installed_version.startswith(version): - return # Success - raise AssertionError( - 'Expected package version %s-%s not found. Found %s' % - name, version, installed_version) - raise AssertionError('Package not installed: %s' % name) - - def os_version_cmp(self, cmp_version): - """Compare the version of the test to comparison_version. - - @param: cmp_version: Either a float or a string representing - a release os from releases.yaml (e.g. centos66) - - @return: -1 when version < cmp_version, 0 when version=cmp_version and - 1 when version > cmp_version. - """ - version = self.release_conf[self.os_name]['default']['version'] - if isinstance(cmp_version, str): - cmp_version = self.release_conf[cmp_version]['default']['version'] - if version < cmp_version: - return -1 - elif version == cmp_version: - return 0 - else: - return 1 - - @property - def os_name(self): - return self.data.get('os_name', 'UNKNOWN') - - @property - def platform(self): - return self.data.get('platform', 'UNKNOWN') - - @property - def cloud_config(self): - """Get the cloud-config used by the test.""" - if not self._cloud_config: - self._cloud_config = c_util.load_yaml(self.conf) - return self._cloud_config - - def get_config_entry(self, name): - """Get a config entry from cloud-config ensuring that it is present.""" - if name not in self.cloud_config: - raise AssertionError('Key "{}" not in cloud config'.format(name)) - return self.cloud_config[name] - - def get_data_file(self, name, decode=True): - """Get data file failing test if it is not present.""" - if name not in self.data: - raise AssertionError('File "{}" missing from collect data' - .format(name)) - if not decode: - return self.data[name] - return self.data[name].decode('utf-8') - - def get_instance_id(self): - """Get recorded instance id.""" - return self.get_data_file('instance-id').strip() - - def get_status_data(self, data, version=None): - """Parse result.json and status.json like data files. 
- - @param data: data to load - @param version: cloud-init output version, defaults to 'v1' - @return_value: dict of data or None if missing - """ - if not version: - version = 'v1' - data = json.loads(data) - return data.get(version) - - def get_datasource(self): - """Get datasource name.""" - data = self.get_status_data(self.get_data_file('result.json')) - return data.get('datasource') - - def test_no_stages_errors(self): - """Ensure that there were no errors in any stage.""" - status = self.get_status_data(self.get_data_file('status.json')) - for stage in ('init', 'init-local', 'modules-config', 'modules-final'): - self.assertIn(stage, status) - self.assertEqual(len(status[stage]['errors']), 0, - 'errors {} were encountered in stage {}' - .format(status[stage]['errors'], stage)) - result = self.get_status_data(self.get_data_file('result.json')) - self.assertEqual(len(result['errors']), 0) - - def test_no_warnings_in_log(self): - """Unexpected warnings should not be found in the log.""" - warnings = [ - line for line in self.get_data_file('cloud-init.log').splitlines() - if 'WARN' in line] - joined_warnings = '\n'.join(warnings) - for expected_warning in self.expected_warnings: - self.assertIn( - expected_warning, joined_warnings, - msg="Did not find %s in cloud-init.log" % expected_warning) - # Prune expected from discovered warnings - warnings = [w for w in warnings if expected_warning not in w] - self.assertEqual( - [], warnings, msg="'WARN' found inside cloud-init.log") - - def test_instance_data_json_ec2(self): - """Validate instance-data.json content by ec2 platform. - - This content is sourced by snapd when determining snapstore endpoints. - We validate expected values per cloud type to ensure we don't break - snapd. - """ - if self.platform != 'ec2': - raise SkipTest( - 'Skipping ec2 instance-data.json on %s' % self.platform) - out = self.get_data_file('instance-data.json') - if not out: - if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0: - raise AssertionError( - 'No instance-data.json found on %s' % self.os_name) - raise SkipTest( - 'Skipping instance-data.json test.' 
- ' OS: %s not bionic or newer' % self.os_name) - instance_data = json.loads(out) - self.assertCountEqual(['merged_cfg'], instance_data['sensitive_keys']) - ds = instance_data.get('ds', {}) - v1_data = instance_data.get('v1', {}) - metadata = ds.get('meta-data', {}) - macs = metadata.get( - 'network', {}).get('interfaces', {}).get('macs', {}) - if not macs: - raise AssertionError('No network data from EC2 meta-data') - # Check meta-data items we depend on - expected_net_keys = [ - 'public-ipv4s', 'ipv4-associations', 'local-hostname', - 'public-hostname'] - for mac_data in macs.values(): - for key in expected_net_keys: - self.assertIn(key, mac_data) - self.assertIsNotNone( - metadata.get('placement', {}).get('availability-zone'), - 'Could not determine EC2 Availability zone placement') - self.assertIsNotNone( - v1_data['availability_zone'], 'expected ec2 availability_zone') - self.assertEqual('aws', v1_data['cloud_name']) - self.assertEqual('ec2', v1_data['platform']) - self.assertEqual( - 'metadata (http://169.254.169.254)', v1_data['subplatform']) - self.assertIn('i-', v1_data['instance_id']) - self.assertIn('ip-', v1_data['local_hostname']) - self.assertIsNotNone(v1_data['region'], 'expected ec2 region') - self.assertIsNotNone( - re.match(r'\d\.\d+\.\d+-\d+-aws', v1_data['kernel_release'])) - self.assertEqual( - 'redacted for non-root user', instance_data['merged_cfg']) - self.assertEqual(self.os_cfg['os'], v1_data['variant']) - self.assertEqual(self.os_cfg['os'], v1_data['distro']) - self.assertEqual( - self.os_cfg['os'], instance_data["sys_info"]['dist'][0], - "Unexpected sys_info dist value") - self.assertEqual(self.os_name, v1_data['distro_release']) - self.assertEqual( - str(self.os_cfg['version']), v1_data['distro_version']) - self.assertEqual('x86_64', v1_data['machine']) - self.assertIsNotNone( - re.match(r'3.\d\.\d', v1_data['python_version']), - "unexpected python version: {ver}".format( - ver=v1_data["python_version"])) - - def test_instance_data_json_lxd(self): - """Validate instance-data.json content by lxd platform. - - This content is sourced by snapd when determining snapstore endpoints. - We validate expected values per cloud type to ensure we don't break - snapd. - """ - if self.platform != 'lxd': - raise SkipTest( - 'Skipping lxd instance-data.json on %s' % self.platform) - out = self.get_data_file('instance-data.json') - if not out: - if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0: - raise AssertionError( - 'No instance-data.json found on %s' % self.os_name) - raise SkipTest( - 'Skipping instance-data.json test.' 
- ' OS: %s not bionic or newer' % self.os_name) - instance_data = json.loads(out) - v1_data = instance_data.get('v1', {}) - self.assertCountEqual([], sorted(instance_data['base64_encoded_keys'])) - self.assertEqual('unknown', v1_data['cloud_name']) - self.assertEqual('lxd', v1_data['platform']) - self.assertEqual( - 'seed-dir (/var/lib/cloud/seed/nocloud-net)', - v1_data['subplatform']) - self.assertIsNone( - v1_data['availability_zone'], - 'found unexpected lxd availability_zone %s' % - v1_data['availability_zone']) - self.assertIn('cloud-test', v1_data['instance_id']) - self.assertIn('cloud-test', v1_data['local_hostname']) - self.assertIsNone( - v1_data['region'], - 'found unexpected lxd region %s' % v1_data['region']) - self.assertIsNotNone( - re.match(r'\d\.\d+\.\d+-\d+', v1_data['kernel_release'])) - self.assertEqual( - 'redacted for non-root user', instance_data['merged_cfg']) - self.assertEqual(self.os_cfg['os'], v1_data['variant']) - self.assertEqual(self.os_cfg['os'], v1_data['distro']) - self.assertEqual( - self.os_cfg['os'], instance_data["sys_info"]['dist'][0], - "Unexpected sys_info dist value") - self.assertEqual(self.os_name, v1_data['distro_release']) - self.assertEqual( - str(self.os_cfg['version']), v1_data['distro_version']) - self.assertEqual('x86_64', v1_data['machine']) - self.assertIsNotNone( - re.match(r'3.\d\.\d', v1_data['python_version']), - "unexpected python version: {ver}".format( - ver=v1_data["python_version"])) - - def test_instance_data_json_kvm(self): - """Validate instance-data.json content by nocloud-kvm platform. - - This content is sourced by snapd when determining snapstore endpoints. - We validate expected values per cloud type to ensure we don't break - snapd. - """ - if self.platform != 'nocloud-kvm': - raise SkipTest( - 'Skipping nocloud-kvm instance-data.json on %s' % - self.platform) - out = self.get_data_file('instance-data.json') - if not out: - if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0: - raise AssertionError( - 'No instance-data.json found on %s' % self.os_name) - raise SkipTest( - 'Skipping instance-data.json test.' 
- ' OS: %s not bionic or newer' % self.os_name) - instance_data = json.loads(out) - v1_data = instance_data.get('v1', {}) - self.assertCountEqual([], instance_data['base64_encoded_keys']) - self.assertEqual('unknown', v1_data['cloud_name']) - self.assertEqual('nocloud', v1_data['platform']) - subplatform = v1_data['subplatform'] - self.assertIsNotNone( - re.match(r'config-disk \(\/dev\/[a-z]{3}\)', subplatform), - 'kvm subplatform "%s" != "config-disk (/dev/...)"' % subplatform) - self.assertIsNone( - v1_data['availability_zone'], - 'found unexpected kvm availability_zone %s' % - v1_data['availability_zone']) - self.assertIsNotNone( - re.match(r'[\da-f]{8}(-[\da-f]{4}){3}-[\da-f]{12}', - v1_data['instance_id']), - 'kvm instance_id is not a UUID: %s' % v1_data['instance_id']) - self.assertIn('ubuntu', v1_data['local_hostname']) - self.assertIsNone( - v1_data['region'], - 'found unexpected lxd region %s' % v1_data['region']) - self.assertIsNotNone( - re.match(r'\d\.\d+\.\d+-\d+', v1_data['kernel_release'])) - self.assertEqual( - 'redacted for non-root user', instance_data['merged_cfg']) - self.assertEqual(self.os_cfg['os'], v1_data['variant']) - self.assertEqual(self.os_cfg['os'], v1_data['distro']) - self.assertEqual( - self.os_cfg['os'], instance_data["sys_info"]['dist'][0], - "Unexpected sys_info dist value") - self.assertEqual(self.os_name, v1_data['distro_release']) - self.assertEqual( - str(self.os_cfg['version']), v1_data['distro_version']) - self.assertEqual('x86_64', v1_data['machine']) - self.assertIsNotNone( - re.match(r'3.\d\.\d', v1_data['python_version']), - "unexpected python version: {ver}".format( - ver=v1_data["python_version"])) - - -class PasswordListTest(CloudTestCase): - """Base password test case class.""" - - def test_shadow_passwords(self): - """Test shadow passwords.""" - shadow = self.get_data_file('shadow') - users = {} - dupes = [] - for line in shadow.splitlines(): - user, encpw = line.split(":")[0:2] - if user in users: - dupes.append(user) - users[user] = encpw - - jane_enc = "$5$iW$XsxmWCdpwIW8Yhv.Jn/R3uk6A4UaicfW5Xp7C9p9pg." - self.assertEqual([], dupes) - self.assertEqual(jane_enc, users['jane']) - - mikey_enc = "$5$xZ$B2YGGEx2AOf4PeW48KC6.QyT1W2B4rZ9Qbltudtha89" - self.assertEqual(mikey_enc, users['mikey']) - - # shadow entry is $N$salt$, so we encrypt with the same format - # and salt and expect the result. - tom = "mypassword123!" - fmtsalt = users['tom'][0:users['tom'].rfind("$") + 1] - tom_enc = crypt.crypt(tom, fmtsalt) - self.assertEqual(tom_enc, users['tom']) - - harry_enc = ("$6$LF$9Z2p6rWK6TNC1DC6393ec0As.18KRAvKDbfsG" - "JEdWN3sRQRwpdfoh37EQ3yUh69tP4GSrGW5XKHxMLiKowJgm/") - dick_enc = "$1$ssisyfpf$YqvuJLfrrW6Cg/l53Pi1n1" - - # these should have been changed to random values. 
- self.assertNotEqual(harry_enc, users['harry']) - self.assertTrue(users['harry'].startswith("$")) - self.assertNotEqual(dick_enc, users['dick']) - self.assertTrue(users['dick'].startswith("$")) - - self.assertNotEqual(users['harry'], users['dick']) - - def test_shadow_expected_users(self): - """Test every tom, dick, and harry user in shadow.""" - out = self.get_data_file('shadow') - self.assertIn('tom:', out) - self.assertIn('dick:', out) - self.assertIn('harry:', out) - self.assertIn('jane:', out) - self.assertIn('mikey:', out) - - def test_sshd_config(self): - """Test sshd config allows passwords.""" - out = self.get_data_file('sshd_config') - self.assertIn('PasswordAuthentication yes', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/README.md b/tests/cloud_tests/testcases/bugs/README.md deleted file mode 100644 index 09ce0765..00000000 --- a/tests/cloud_tests/testcases/bugs/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Bug Test Configs - -## purpose -Configs that reproduce bugs filed against cloud-init. Having test configs for -cloud-init bugs ensures that the fixes do not break in the future, and makes it -easy to see how many systems and platforms are effected by a new bug. - -## structure -Should have one test config for most bugs filed. The name of the test should -contain ``lp`` followed by the bug number. It may also be useful to add a -comment to each bug config with a summary copied from the bug report. - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/__init__.py b/tests/cloud_tests/testcases/bugs/__init__.py deleted file mode 100644 index c6452f9c..00000000 --- a/tests/cloud_tests/testcases/bugs/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Test verifiers for cloud-init bugs. - -See configs/bugs/README.md for more information -""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/lp1511485.py b/tests/cloud_tests/testcases/bugs/lp1511485.py deleted file mode 100644 index 670d3aff..00000000 --- a/tests/cloud_tests/testcases/bugs/lp1511485.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
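
The shadow checks in PasswordListTest above verify a known plaintext by re-encrypting it with the `$id$salt$` prefix taken from the stored hash. A brief sketch of that technique with the same `crypt` module (Unix-only, deprecated since Python 3.11 and removed in 3.13; passlib is the usual replacement):

    import crypt

    def password_matches(plaintext, shadow_hash):
        """Re-hash plaintext with the $id$salt$ prefix of shadow_hash and compare."""
        salt_prefix = shadow_hash[:shadow_hash.rfind("$") + 1]
        return crypt.crypt(plaintext, salt_prefix) == shadow_hash

    # Build a sample sha512-crypt entry the way /etc/shadow would store it.
    sample_hash = crypt.crypt("mypassword123!", crypt.mksalt(crypt.METHOD_SHA512))

    print(password_matches("mypassword123!", sample_hash))    # True
    print(password_matches("not-the-password", sample_hash))  # False
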
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestLP1511485(base.CloudTestCase): - """Test LP# 1511485.""" - - def test_final_message(self): - """Test final message exists.""" - out = self.get_data_file('cloud-init-output.log') - self.assertIn('Final message from cloud-config', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/lp1511485.yaml b/tests/cloud_tests/testcases/bugs/lp1511485.yaml deleted file mode 100644 index ebf9763f..00000000 --- a/tests/cloud_tests/testcases/bugs/lp1511485.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# -# LP Bug 1511485: final_message is silent on ubuntu-12.04.5 / cloud-init 0.6.3 -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - final_message: "Final message from cloud-config" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/lp1611074.yaml b/tests/cloud_tests/testcases/bugs/lp1611074.yaml deleted file mode 100644 index 960679d5..00000000 --- a/tests/cloud_tests/testcases/bugs/lp1611074.yaml +++ /dev/null @@ -1,8 +0,0 @@ -# -# LP Bug 1611074: Reformatting of ephemeral drive fails on resize of Azure VM -# -# 2016-11-18: Disabled until test written -# -enabled: False - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/lp1628337.py b/tests/cloud_tests/testcases/bugs/lp1628337.py deleted file mode 100644 index a2c90481..00000000 --- a/tests/cloud_tests/testcases/bugs/lp1628337.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestLP1628337(base.CloudTestCase): - """Test LP# 1511485.""" - - def test_fetch_indices(self): - """Verify no apt errors.""" - out = self.get_data_file('cloud-init-output.log') - self.assertNotIn('W: Failed to fetch', out) - self.assertNotIn('W: Some index files failed to download. ' - 'They have been ignored, or old ones used instead.', - out) - - def test_ntp(self): - """Verify can find ntp and install it.""" - out = self.get_data_file('cloud-init-output.log') - self.assertNotIn('E: Unable to locate package ntp', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/bugs/lp1628337.yaml b/tests/cloud_tests/testcases/bugs/lp1628337.yaml deleted file mode 100644 index e39b3cd8..00000000 --- a/tests/cloud_tests/testcases/bugs/lp1628337.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# -# LP Bug 1628337: cloud-init tries to install NTP before even configuring the archives -# -required_features: - - apt - - lsb_release -cloud_config: | - #cloud-config - ntp: - servers: ['ntp.ubuntu.com'] - apt: - primary: - - arches: [default] - uri: http://us.archive.ubuntu.com/ubuntu/ -collect_sciprts: - ntp.conf: | - #!/bin/bash - cat /etc/ntp.conf - sources.list: | - #!/bin/bash - cat /etc/apt/sources.list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/README.md b/tests/cloud_tests/testcases/examples/README.md deleted file mode 100644 index 110a223b..00000000 --- a/tests/cloud_tests/testcases/examples/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Example Test Configs - -## Purpose -This folder contains example cloud configs found on -[cloudinit.readthedocs.io](https://cloudinit.readthedocs.io/en/latest/topics/examples.html). -Examples covered by other tests, like modules, are excluded from tests here -to prevent duplication and reduce test time. 
- -## Structure -One test per example test config on cloudinit.readthedocs.io - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/TODO.md b/tests/cloud_tests/testcases/examples/TODO.md deleted file mode 100644 index cde699a7..00000000 --- a/tests/cloud_tests/testcases/examples/TODO.md +++ /dev/null @@ -1,15 +0,0 @@ -# Missing Examples - -Below lists each of the issing examples and why it is not currently added. - - - Chef (takes > 60 seconds to run) - - Puppet (takes > 60 seconds to run) - - Manage resolve.conf (lxd backend overrides changes) - - Adding a yum repository (need centos system) - - Register Red Hat Subscription (need centos system + subscription) - - Adjust mount points mounted (need multiple disks) - - Call a url when finished (need end point) - - Reboot/poweroff when finished (how to test) - - Disk setup (need multiple disks) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/__init__.py b/tests/cloud_tests/testcases/examples/__init__.py deleted file mode 100644 index 39af88c2..00000000 --- a/tests/cloud_tests/testcases/examples/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Test verifiers for cloud-init examples. - -See configs/examples/README.md for more information -""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/add_apt_repositories.py b/tests/cloud_tests/testcases/examples/add_apt_repositories.py deleted file mode 100644 index 71eede97..00000000 --- a/tests/cloud_tests/testcases/examples/add_apt_repositories.py +++ /dev/null @@ -1,20 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigurePrimary(base.CloudTestCase): - """Example cloud-config test.""" - - def test_ubuntu_sources(self): - """Test no default Ubuntu entries exist.""" - out = self.get_data_file('ubuntu.sources.list') - self.assertEqual(0, int(out)) - - def test_gatech_sources(self): - """Test GaTech entires exist.""" - out = self.get_data_file('gatech.sources.list') - self.assertEqual(20, int(out)) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/add_apt_repositories.yaml b/tests/cloud_tests/testcases/examples/add_apt_repositories.yaml deleted file mode 100644 index 4b8575f7..00000000 --- a/tests/cloud_tests/testcases/examples/add_apt_repositories.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -required_features: - - apt -cloud_config: | - #cloud-config - apt: - primary: - - arches: [default] - uri: "http://www.gtlib.gatech.edu/pub/ubuntu-releases/" -collect_scripts: - ubuntu.sources.list: | - #!/bin/bash - cat /etc/apt/sources.list | grep -v '^#' | sed '/^\s*$/d' | grep archive.ubuntu.com | wc -l - gatech.sources.list: | - #!/bin/bash - cat /etc/apt/sources.list | grep -v '^#' | sed '/^\s*$/d' | grep gtlib.gatech.edu | wc -l - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/alter_completion_message.py b/tests/cloud_tests/testcases/examples/alter_completion_message.py deleted file mode 100644 index b7b5d5e0..00000000 --- a/tests/cloud_tests/testcases/examples/alter_completion_message.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
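
The two collect scripts above reduce to "count the active (non-comment, non-blank) sources.list lines that mention a given mirror". A rough Python equivalent of that grep/sed/wc pipeline, with a made-up sources.list body:

    SAMPLE_LINES = [
        "# default entry, commented out",
        "deb http://www.gtlib.gatech.edu/pub/ubuntu-releases/ focal main",
        "",
        "deb http://www.gtlib.gatech.edu/pub/ubuntu-releases/ focal-updates main",
    ]

    def count_mirror_entries(lines, mirror):
        """Count non-comment, non-blank lines that mention `mirror`."""
        return sum(
            1 for line in lines
            if line.strip() and not line.lstrip().startswith('#') and mirror in line
        )

    print(count_mirror_entries(SAMPLE_LINES, 'gtlib.gatech.edu'))    # 2
    print(count_mirror_entries(SAMPLE_LINES, 'archive.ubuntu.com'))  # 0
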
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestFinalMessage(base.CloudTestCase): - """Test cloud init module `cc_final_message`.""" - - subs_char = '$' - - def get_final_message_config(self): - """Get config for final message.""" - self.assertIn('final_message', self.cloud_config) - return self.cloud_config['final_message'] - - def get_final_message(self): - """Get final message from log.""" - out = self.get_data_file('cloud-init-output.log') - lines = len(self.get_final_message_config().splitlines()) - return '\n'.join(out.splitlines()[-1 * lines:]) - - def test_final_message_string(self): - """Ensure final handles regular strings.""" - for actual, config in zip( - self.get_final_message().splitlines(), - self.get_final_message_config().splitlines()): - if self.subs_char not in config: - self.assertEqual(actual, config) - - def test_final_message_subs(self): - """Test variable substitution in final message.""" - # TODO: add verification of other substitutions - patterns = {'$datasource': self.get_datasource()} - for key, expected in patterns.items(): - index = self.get_final_message_config().splitlines().index(key) - actual = self.get_final_message().splitlines()[index] - self.assertEqual(actual, expected) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/alter_completion_message.yaml b/tests/cloud_tests/testcases/examples/alter_completion_message.yaml deleted file mode 100644 index 9e154f80..00000000 --- a/tests/cloud_tests/testcases/examples/alter_completion_message.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - final_message: | - This is my final message! - $version - $timestamp - $datasource - $uptime - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py deleted file mode 100644 index 38540eb8..00000000 --- a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestTrustedCA(base.CloudTestCase): - """Example cloud-config test.""" - - def test_cert_count_ca(self): - """Test correct count of CAs in .crt.""" - out = self.get_data_file('cert_count_ca') - self.assertIn('7 /etc/ssl/certs/ca-certificates.crt', out) - - def test_cert_count_cloudinit(self): - """Test correct count of CAs in .pem.""" - out = self.get_data_file('cert_count_cloudinit') - self.assertIn('7 /etc/ssl/certs/cloud-init-ca-certs.pem', out) - - def test_cloudinit_certs(self): - """Test text of cert.""" - out = self.get_data_file('cloudinit_certs') - self.assertIn('-----BEGIN CERTIFICATE-----', out) - self.assertIn('YOUR-ORGS-TRUSTED-CA-CERT-HERE', out) - self.assertIn('-----END CERTIFICATE-----', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml deleted file mode 100644 index ad32b088..00000000 --- a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - ca-certs: - # If present and set to True, the 'remove-defaults' parameter will remove - # all the default trusted CA certificates that are normally shipped with - # Ubuntu. - # This is mainly for paranoid admins - most users will not need this - # functionality. - remove-defaults: true - - # If present, the 'trusted' parameter should contain a certificate (or list - # of certificates) to add to the system as trusted CA certificates. - # Pay close attention to the YAML multiline list syntax. The example shown - # here is for a list of multiline certificates. - trusted: - - | - -----BEGIN CERTIFICATE----- - YOUR-ORGS-TRUSTED-CA-CERT-HERE - -----END CERTIFICATE----- - - | - -----BEGIN CERTIFICATE----- - YOUR-ORGS-TRUSTED-CA-CERT-HERE - -----END CERTIFICATE----- -collect_scripts: - cloudinit_certs: | - #!/bin/bash - cat /etc/ssl/certs/cloud-init-ca-certs.pem - cert_count_ca: | - #!/bin/bash - wc -l /etc/ssl/certs/ca-certificates.crt - cert_count_cloudinit: | - #!/bin/bash - wc -l /etc/ssl/certs/cloud-init-ca-certs.pem - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py deleted file mode 100644 index 691a316b..00000000 --- a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSSHKeys(base.CloudTestCase): - """Example cloud-config test.""" - - def test_cert_count(self): - """Test cert count.""" - out = self.get_data_file('cert_count') - self.assertEqual(20, int(out)) - - def test_dsa_public(self): - """Test DSA key has ending.""" - out = self.get_data_file('dsa_public') - self.assertIn('ZN4XnifuO5krqAybngIy66PMEoQ= smoser@localhost', out) - - def test_rsa_public(self): - """Test RSA key has specific ending.""" - out = self.get_data_file('rsa_public') - self.assertIn('PemAWthxHO18QJvWPocKJtlsDNi3 smoser@localhost', out) - - def test_auth_keys(self): - """Test authorized keys has specific ending.""" - out = self.get_data_file('auth_keys') - self.assertIn('QPOt5Q8zWd9qG7PBl9+eiH5qV7NZ mykey@host', out) - self.assertIn('Hj29SCmXp5Kt5/82cD/VN3NtHw== smoser@brickies', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml deleted file mode 100644 index f3eaf3ce..00000000 --- a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - ssh_authorized_keys: - - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEA3FSyQwBI6Z+nCSjUUk8EEAnnkhXlukKoUPND/RRClWz2s5TCzIkd3Ou5+Cyz71X0XmazM3l5WgeErvtIwQMyT1KjNoMhoJMrJnWqQPOt5Q8zWd9qG7PBl9+eiH5qV7NZ mykey@host - - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZdQueUq5ozemNSj8T7enqKHOEaFoU2VoPgGEWC9RyzSQVeyD6s7APMcE82EtmW4skVEgEGSbDc1pvxzxtchBj78hJP6Cf5TCMFSXw+Fz5rF1dR23QDbN1mkHs7adr8GW4kSWqU7Q7NDwfIrJJtO7Hi42GyXtvEONHbiRPOe8stqUly7MvUoN+5kfjBM8Qqpfl2+FNhTYWpMfYdPUnE7u536WqzFmsaqJctz3gBxH9Ex7dFtrxR4qiqEr9Qtlu3xGn7Bw07/+i1D+ey3ONkZLN+LQ714cgj8fRS4Hj29SCmXp5Kt5/82cD/VN3NtHw== smoser@brickies - - # Send pre-generated ssh private keys to the server - # If these are present, they will be written to /etc/ssh and - # new random keys will not be generated - # in addition to 'rsa' and 'dsa' as shown below, 'ecdsa' is also supported - ssh_keys: - rsa_private: | - -----BEGIN RSA PRIVATE KEY----- - MIIBxwIBAAJhAKD0YSHy73nUgysO13XsJmd4fHiFyQ+00R7VVu2iV9Qcon2LZS/x - 1cydPZ4pQpfjEha6WxZ6o8ci/Ea/w0n+0HGPwaxlEG2Z9inNtj3pgFrYcRztfECb - 1j6HCibZbAzYtwIBIwJgO8h72WjcmvcpZ8OvHSvTwAguO2TkR6mPgHsgSaKy6GJo - PUJnaZRWuba/HX0KGyhz19nPzLpzG5f0fYahlMJAyc13FV7K6kMBPXTRR6FxgHEg - L0MPC7cdqAwOVNcPY6A7AjEA1bNaIjOzFN2sfZX0j7OMhQuc4zP7r80zaGc5oy6W - p58hRAncFKEvnEq2CeL3vtuZAjEAwNBHpbNsBYTRPCHM7rZuG/iBtwp8Rxhc9I5w - ixvzMgi+HpGLWzUIBS+P/XhekIjPAjA285rVmEP+DR255Ls65QbgYhJmTzIXQ2T9 - luLvcmFBC6l35Uc4gTgg4ALsmXLn71MCMGMpSWspEvuGInayTCL+vEjmNBT+FAdO - W7D4zCpI43jRS9U06JVOeSc9CDk2lwiA3wIwCTB/6uc8Cq85D9YqpM10FuHjKpnP - REPPOyrAspdeOAV+6VKRavstea7+2DZmSUgE - -----END RSA PRIVATE KEY----- - - rsa_public: ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEAoPRhIfLvedSDKw7XdewmZ3h8eIXJD7TRHtVW7aJX1ByifYtlL/HVzJ09nilCl+MSFrpbFnqjxyL8Rr/DSf7QcY/BrGUQbZn2Kc22PemAWthxHO18QJvWPocKJtlsDNi3 smoser@localhost - - dsa_private: | - -----BEGIN DSA PRIVATE KEY----- - MIIBuwIBAAKBgQDP2HLu7pTExL89USyM0264RCyWX/CMLmukxX0Jdbm29ax8FBJT - pLrO8TIXVY5rPAJm1dTHnpuyJhOvU9G7M8tPUABtzSJh4GVSHlwaCfycwcpLv9TX - DgWIpSj+6EiHCyaRlB1/CBp9RiaB+10QcFbm+lapuET+/Au6vSDp9IRtlQIVAIMR - 8KucvUYbOEI+yv+5LW9u3z/BAoGBAI0q6JP+JvJmwZFaeCMMVxXUbqiSko/P1lsa - 
LNNBHZ5/8MOUIm8rB2FC6ziidfueJpqTMqeQmSAlEBCwnwreUnGfRrKoJpyPNENY - d15MG6N5J+z81sEcHFeprryZ+D3Ge9VjPq3Tf3NhKKwCDQ0240aPezbnjPeFm4mH - bYxxcZ9GAoGAXmLIFSQgiAPu459rCKxT46tHJtM0QfnNiEnQLbFluefZ/yiI4DI3 - 8UzTCOXLhUA7ybmZha+D/csj15Y9/BNFuO7unzVhikCQV9DTeXX46pG4s1o23JKC - /QaYWNMZ7kTRv+wWow9MhGiVdML4ZN4XnifuO5krqAybngIy66PMEoQCFEIsKKWv - 99iziAH0KBMVbxy03Trz - -----END DSA PRIVATE KEY----- - - dsa_public: ssh-dsa AAAAB3NzaC1kc3MAAACBAM/Ycu7ulMTEvz1RLIzTbrhELJZf8Iwua6TFfQl1ubb1rHwUElOkus7xMhdVjms8AmbV1Meem7ImE69T0bszy09QAG3NImHgZVIeXBoJ/JzByku/1NcOBYilKP7oSIcLJpGUHX8IGn1GJoH7XRBwVub6Vqm4RP78C7q9IOn0hG2VAAAAFQCDEfCrnL1GGzhCPsr/uS1vbt8/wQAAAIEAjSrok/4m8mbBkVp4IwxXFdRuqJKSj8/WWxos00Ednn/ww5QibysHYULrOKJ1+54mmpMyp5CZICUQELCfCt5ScZ9GsqgmnI80Q1h3Xkwbo3kn7PzWwRwcV6muvJn4PcZ71WM+rdN/c2EorAINDTbjRo97NueM94WbiYdtjHFxn0YAAACAXmLIFSQgiAPu459rCKxT46tHJtM0QfnNiEnQLbFluefZ/yiI4DI38UzTCOXLhUA7ybmZha+D/csj15Y9/BNFuO7unzVhikCQV9DTeXX46pG4s1o23JKC/QaYWNMZ7kTRv+wWow9MhGiVdML4ZN4XnifuO5krqAybngIy66PMEoQ= smoser@localhost -collect_scripts: - cert_count: | - #!/bin/bash - ls | wc -l - dsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_dsa_key.pub - rsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_rsa_key.pub - auth_keys: | - #!/bin/bash - cat /home/ubuntu/.ssh/authorized_keys - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/including_user_groups.py b/tests/cloud_tests/testcases/examples/including_user_groups.py deleted file mode 100644 index 4067348d..00000000 --- a/tests/cloud_tests/testcases/examples/including_user_groups.py +++ /dev/null @@ -1,49 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestUserGroups(base.CloudTestCase): - """Example cloud-config test.""" - - def test_group_ubuntu(self): - """Test ubuntu group exists.""" - out = self.get_data_file('group_ubuntu') - self.assertRegex(out, r'ubuntu:x:[0-9]{4}:') - - def test_group_cloud_users(self): - """Test cloud users group exists.""" - out = self.get_data_file('group_cloud_users') - self.assertRegex(out, r'cloud-users:x:[0-9]{4}:barfoo') - - def test_user_ubuntu(self): - """Test ubuntu user exists.""" - out = self.get_data_file('user_ubuntu') - self.assertRegex( - out, r'ubuntu:x:[0-9]{4}:[0-9]{4}:Ubuntu:/home/ubuntu:/bin/bash') - - def test_user_foobar(self): - """Test foobar user exists.""" - out = self.get_data_file('user_foobar') - self.assertRegex( - out, r'foobar:x:[0-9]{4}:[0-9]{4}:Foo B. Bar:/home/foobar:') - - def test_user_barfoo(self): - """Test barfoo user exists.""" - out = self.get_data_file('user_barfoo') - self.assertRegex( - out, r'barfoo:x:[0-9]{4}:[0-9]{4}:Bar B. 
Foo:/home/barfoo:') - - def test_user_cloudy(self): - """Test cloudy user exists.""" - out = self.get_data_file('user_cloudy') - self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') - - def test_user_root_in_secret(self): - """Test root user is in 'secret' group.""" - _user, _, groups = self.get_data_file('root_groups').partition(":") - self.assertIn("secret", groups.split(), - msg="User root is not in group 'secret'") - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/including_user_groups.yaml b/tests/cloud_tests/testcases/examples/including_user_groups.yaml deleted file mode 100644 index 86e392dd..00000000 --- a/tests/cloud_tests/testcases/examples/including_user_groups.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - # Add groups to the system - groups: - - secret: [root] - - cloud-users - - # Add users to the system. Users are added after groups are added. - users: - - default - - name: foobar - gecos: Foo B. Bar - primary_group: foobar - groups: users - expiredate: '2038-01-19' - lock_passwd: false - passwd: $6$j212wezy$7H/1LT4f9/N3wpgNunhsIqtMj62OKiS3nyNwuizouQc3u7MbYCarYeAHWYPYb2FT.lbioDm2RrkJPb9BZMN1O/ - - name: barfoo - gecos: Bar B. Foo - sudo: ALL=(ALL) NOPASSWD:ALL - groups: [cloud-users, secret] - lock_passwd: true - - name: cloudy - gecos: Magic Cloud App Daemon User - inactive: '5' - system: true -collect_scripts: - group_ubuntu: | - #!/bin/bash - getent group ubuntu - group_cloud_users: | - #!/bin/bash - getent group cloud-users - user_ubuntu: | - #!/bin/bash - getent passwd ubuntu - user_foobar: | - #!/bin/bash - getent passwd foobar - user_barfoo: | - #!/bin/bash - getent passwd barfoo - user_cloudy: | - #!/bin/bash - getent passwd cloudy - root_groups: | - #!/bin/bash - groups root - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py deleted file mode 100644 index df133844..00000000 --- a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py +++ /dev/null @@ -1,20 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
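
The user and group assertions above regex the collected `getent` output; when running on the instance itself the same facts are available through Python's `pwd` and `grp` modules (Unix-only). A brief sketch with names taken from the example config:

    import grp
    import pwd

    def user_exists(name):
        """True if `name` resolves to a passwd entry."""
        try:
            pwd.getpwnam(name)
            return True
        except KeyError:
            return False

    def group_members(name):
        """Member list of group `name`, or None if the group is missing."""
        try:
            return grp.getgrnam(name).gr_mem
        except KeyError:
            return None

    # These names only exist on a system that applied the config above.
    print(user_exists("foobar"))
    print(group_members("cloud-users"))
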
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestInstall(base.CloudTestCase): - """Example cloud-config test.""" - - def test_htop(self): - """Verify htop installed.""" - out = self.get_data_file('htop') - self.assertEqual(1, int(out)) - - def test_tree(self): - """Verify tree installed.""" - out = self.get_data_file('treeutils') - self.assertEqual(1, int(out)) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml deleted file mode 100644 index d3980228..00000000 --- a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - packages: - - htop - - tree -collect_scripts: - htop: | - #!/bin/bash - dpkg -l | grep htop | wc -l - tree: | - #!/bin/bash - dpkg -l | grep tree | wc -l - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py deleted file mode 100644 index 4ec26b8f..00000000 --- a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestChefExample(base.CloudTestCase): - """Test chef module.""" - - def test_chef_basic(self): - """Test chef installed.""" - out = self.get_data_file('chef_installed') - self.assertIn('install ok', out) - - # FIXME: Add more tests, and/or replace with comprehensive module tests - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml deleted file mode 100644 index 68ca95b5..00000000 --- a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml +++ /dev/null @@ -1,104 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2017-03-31: Disabled as depends on third party apt repository -# -enabled: False -cloud_config: | - #cloud-config - # Key from https://packages.chef.io/chef.asc - apt: - sources: - source1: - source: "deb http://packages.chef.io/repos/apt/stable $RELEASE main" - key: | - -----BEGIN PGP PUBLIC KEY BLOCK----- - Version: GnuPG v1.4.12 (Darwin) - Comment: GPGTools - http://gpgtools.org - - mQGiBEppC7QRBADfsOkZU6KZK+YmKw4wev5mjKJEkVGlus+NxW8wItX5sGa6kdUu - twAyj7Yr92rF+ICFEP3gGU6+lGo0Nve7KxkN/1W7/m3G4zuk+ccIKmjp8KS3qn99 - dxy64vcji9jIllVa+XXOGIp0G8GEaj7mbkixL/bMeGfdMlv8Gf2XPpp9vwCgn/GC - JKacfnw7MpLKUHOYSlb//JsEAJqao3ViNfav83jJKEkD8cf59Y8xKia5OpZqTK5W - ShVnNWS3U5IVQk10ZDH97Qn/YrK387H4CyhLE9mxPXs/ul18ioiaars/q2MEKU2I - XKfV21eMLO9LYd6Ny/Kqj8o5WQK2J6+NAhSwvthZcIEphcFignIuobP+B5wNFQpe - DbKfA/0WvN2OwFeWRcmmd3Hz7nHTpcnSF+4QX6yHRF/5BgxkG6IqBIACQbzPn6Hm - sMtm/SVf11izmDqSsQptCrOZILfLX/mE+YOl+CwWSHhl+YsFts1WOuh1EhQD26aO - Z84HuHV5HFRWjDLw9LriltBVQcXbpfSrRP5bdr7Wh8vhqJTPjrQnT3BzY29kZSBQ - YWNrYWdlcyA8cGFja2FnZXNAb3BzY29kZS5jb20+iGAEExECACAFAkppC7QCGwMG - CwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRApQKupg++Caj8sAKCOXmdG36gWji/K - +o+XtBfvdMnFYQCfTCEWxRy2BnzLoBBFCjDSK6sJqCu0IENIRUYgUGFja2FnZXMg - PHBhY2thZ2VzQGNoZWYuaW8+iGIEExECACIFAlQwYFECGwMGCwkIBwMCBhUIAgkK - 
CwQWAgMBAh4BAheAAAoJEClAq6mD74JqX94An26z99XOHWpLN8ahzm7cp13t4Xid - AJ9wVcgoUBzvgg91lKfv/34cmemZn7kCDQRKaQu0EAgAg7ZLCVGVTmLqBM6njZEd - Zbv+mZbvwLBSomdiqddE6u3eH0X3GuwaQfQWHUVG2yedyDMiG+EMtCdEeeRebTCz - SNXQ8Xvi22hRPoEsBSwWLZI8/XNg0n0f1+GEr+mOKO0BxDB2DG7DA0nnEISxwFkK - OFJFebR3fRsrWjj0KjDxkhse2ddU/jVz1BY7Nf8toZmwpBmdozETMOTx3LJy1HZ/ - Te9FJXJMUaB2lRyluv15MVWCKQJro4MQG/7QGcIfrIZNfAGJ32DDSjV7/YO+IpRY - IL4CUBQ65suY4gYUG4jhRH6u7H1p99sdwsg5OIpBe/v2Vbc/tbwAB+eJJAp89Zeu - twADBQf/ZcGoPhTGFuzbkcNRSIz+boaeWPoSxK2DyfScyCAuG41CY9+g0HIw9Sq8 - DuxQvJ+vrEJjNvNE3EAEdKl/zkXMZDb1EXjGwDi845TxEMhhD1dDw2qpHqnJ2mtE - WpZ7juGwA3sGhi6FapO04tIGacCfNNHmlRGipyq5ZiKIRq9mLEndlECr8cwaKgkS - 0wWu+xmMZe7N5/t/TK19HXNh4tVacv0F3fYK54GUjt2FjCQV75USnmNY4KPTYLXA - dzC364hEMlXpN21siIFgB04w+TXn5UF3B4FfAy5hevvr4DtV4MvMiGLu0oWjpaLC - MpmrR3Ny2wkmO0h+vgri9uIP06ODWIhJBBgRAgAJBQJKaQu0AhsMAAoJEClAq6mD - 74Jq4hIAoJ5KrYS8kCwj26SAGzglwggpvt3CAJ0bekyky56vNqoegB+y4PQVDv4K - zA== - =IxPr - -----END PGP PUBLIC KEY BLOCK----- - - chef: - - # Valid values are 'gems' and 'packages' and 'omnibus' - install_type: "packages" - - # Boolean: run 'install_type' code even if chef-client - # appears already installed. - force_install: false - - # Chef settings - server_url: "https://chef.yourorg.com:4000" - - # Node Name - # Defaults to the instance-id if not present - node_name: "your-node-name" - - # Environment - # Defaults to '_default' if not present - environment: "production" - - # Default validation name is chef-validator - validation_name: "yourorg-validator" - # if validation_cert's value is "system" then it is expected - # that the file already exists on the system. - validation_cert: | - -----BEGIN RSA PRIVATE KEY----- - YOUR-ORGS-VALIDATION-KEY-HERE - -----END RSA PRIVATE KEY----- - - # A run list for a first boot json - run_list: - - "recipe[apache2]" - - "role[db]" - - # Specify a list of initial attributes used by the cookbooks - initial_attributes: - apache: - prefork: - maxclients: 100 - keepalive: "off" - - # if install_type is 'omnibus', change the url to download - omnibus_url: "https://www.opscode.com/chef/install.sh" - - - # Capture all subprocess output into a logfile - # Useful for troubleshooting cloud-init issues - output: {all: '| tee -a /var/log/cloud-init-output.log'} - -collect_scripts: - chef_installed: | - #!/bin/sh - dpkg-query -W -f '${Status}\n' chef - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_apt_upgrade.py b/tests/cloud_tests/testcases/examples/run_apt_upgrade.py deleted file mode 100644 index 744e49cb..00000000 --- a/tests/cloud_tests/testcases/examples/run_apt_upgrade.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestUpgrade(base.CloudTestCase): - """Example cloud-config test.""" - - def test_upgrade(self): - """Test upgrade exists in apt history.""" - out = self.get_data_file('cloud-init.log') - self.assertIn( - '[CLOUDINIT] util.py[DEBUG]: apt-upgrade ' - '[eatmydata apt-get --option=Dpkg::Options::=--force-confold ' - '--option=Dpkg::options::=--force-unsafe-io --assume-yes --quiet ' - 'dist-upgrade] took', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml b/tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml deleted file mode 100644 index 2b7eae4c..00000000 --- a/tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - package_upgrade: true - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_commands.py b/tests/cloud_tests/testcases/examples/run_commands.py deleted file mode 100644 index 01d5d4fc..00000000 --- a/tests/cloud_tests/testcases/examples/run_commands.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestRunCmd(base.CloudTestCase): - """Example cloud-config test.""" - - def test_run_cmd(self): - """Test run command worked.""" - out = self.get_data_file('run_cmd') - self.assertIn('cloud-init run cmd test', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_commands.yaml b/tests/cloud_tests/testcases/examples/run_commands.yaml deleted file mode 100644 index f80eb8ce..00000000 --- a/tests/cloud_tests/testcases/examples/run_commands.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - runcmd: - - echo cloud-init run cmd test > /var/tmp/run_cmd -collect_scripts: - run_cmd: | - #!/bin/bash - cat /var/tmp/run_cmd - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_commands_first_boot.py b/tests/cloud_tests/testcases/examples/run_commands_first_boot.py deleted file mode 100644 index 3f3d8f84..00000000 --- a/tests/cloud_tests/testcases/examples/run_commands_first_boot.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestBootCmd(base.CloudTestCase): - """Example cloud-config test.""" - - def test_bootcmd_host(self): - """Test boot command worked.""" - out = self.get_data_file('hosts') - self.assertIn('192.168.1.130 us.archive.ubuntu.com', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml b/tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml deleted file mode 100644 index 7bd803db..00000000 --- a/tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - bootcmd: - - echo 192.168.1.130 us.archive.ubuntu.com > /etc/hosts -collect_scripts: - hosts: | - #!/bin/bash - cat /etc/hosts - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml b/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml deleted file mode 100644 index cdb1c28d..00000000 --- a/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as test suite fails this long running test currently -# -enabled: False -cloud_config: | - #cloud-config - puppet: - # Every key present in the conf object will be added to puppet.conf: - # [name] - # subkey=value - # - # For example the configuration below will have the following section - # added to puppet.conf: - # [puppetd] - # server=puppetserver.example.org - # certname=i-0123456.ip-X-Y-Z.cloud.internal - # - # The puppmaster ca certificate will be available in - # /var/lib/puppet/ssl/certs/ca.pem - conf: - agent: - server: "puppetserver.example.org" - # certname supports substitutions at runtime: - # %i: instanceid - # Example: i-0123456 - # %f: fqdn of the machine - # Example: ip-X-Y-Z.cloud.internal - # - # NB: the certname will automatically be lowercased as required by puppet - certname: "%i.%f" - # ca_cert is a special case. It won't be added to puppet.conf. - # It holds the puppetserver certificate in pem format. - # It should be a multi-line string (using the | yaml notation for - # multi-line strings). - # The puppetserver certificate is located in - # /var/lib/puppet/ssl/ca/ca_crt.pem on the puppetserver host. 
- # - ca_cert: | - -----BEGIN CERTIFICATE----- - MIICCTCCAXKgAwIBAgIBATANBgkqhkiG9w0BAQUFADANMQswCQYDVQQDDAJjYTAe - Fw0xMDAyMTUxNzI5MjFaFw0xNTAyMTQxNzI5MjFaMA0xCzAJBgNVBAMMAmNhMIGf - MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCu7Q40sm47/E1Pf+r8AYb/V/FWGPgc - b014OmNoX7dgCxTDvps/h8Vw555PdAFsW5+QhsGr31IJNI3kSYprFQcYf7A8tNWu - 1MASW2CfaEiOEi9F1R3R4Qlz4ix+iNoHiUDTjazw/tZwEdxaQXQVLwgTGRwVa+aA - qbutJKi93MILLwIDAQABo3kwdzA4BglghkgBhvhCAQ0EKxYpUHVwcGV0IFJ1Ynkv - T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwDwYDVR0TAQH/BAUwAwEB/zAd - BgNVHQ4EFgQUu4+jHB+GYE5Vxo+ol1OAhevspjAwCwYDVR0PBAQDAgEGMA0GCSqG - SIb3DQEBBQUAA4GBAH/rxlUIjwNb3n7TXJcDJ6MMHUlwjr03BDJXKb34Ulndkpaf - +GAlzPXWa7bO908M9I8RnPfvtKnteLbvgTK+h+zX1XCty+S2EQWk29i2AdoqOTxb - hppiGMp0tT5Havu4aceCXiy2crVcudj3NFciy8X66SoECemW9UYDCb9T5D0d - -----END CERTIFICATE----- - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py deleted file mode 100644 index 7bd520f6..00000000 --- a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestWriteFiles(base.CloudTestCase): - """Example cloud-config test.""" - - def test_b64(self): - """Test b64 encoded file reads as ascii.""" - out = self.get_data_file('file_b64') - self.assertIn('ASCII text', out) - - def test_binary(self): - """Test binary file reads as executable.""" - out = self.get_data_file('file_binary') - self.assertIn('ELF 64-bit LSB executable, x86-64, version 1', out) - - def test_gzip(self): - """Test gzip file shows up as a shell script.""" - out = self.get_data_file('file_gzip') - self.assertIn('POSIX shell script, ASCII text executable', out) - - def test_text(self): - """Test text shows up as ASCII text.""" - out = self.get_data_file('file_text') - self.assertIn('ASCII text', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml deleted file mode 100644 index 6f78f994..00000000 --- a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# -# From cloud config examples on cloudinit.readthedocs.io -# -# 2016-11-17: Disabled as covered by module based tests -# -enabled: False -cloud_config: | - #cloud-config - write_files: - - encoding: b64 - content: CiMgVGhpcyBmaWxlIGNvbnRyb2xzIHRoZSBzdGF0ZSBvZiBTRUxpbnV4 - owner: root:root - path: /root/file_b64 - permissions: '0644' - - content: | - # My new /root/file_text - - SMBDOPTIONS="-D" - path: /root/file_text - - content: !!binary | - f0VMRgIBAQAAAAAAAAAAAAIAPgABAAAAwARAAAAAAABAAAAAAAAAAJAVAAAAAAAAAAAAAEAAOAAI - AEAAHgAdAAYAAAAFAAAAQAAAAAAAAABAAEAAAAAAAEAAQAAAAAAAwAEAAAAAAADAAQAAAAAAAAgA - AAAAAAAAAwAAAAQAAAAAAgAAAAAAAAACQAAAAAAAAAJAAAAAAAAcAAAAAAAAABwAAAAAAAAAAQAA - path: /root/file_binary - permissions: '0555' - - encoding: gzip - content: !!binary | - H4sIAIDb/U8C/1NW1E/KzNMvzuBKTc7IV8hIzcnJVyjPL8pJ4QIA6N+MVxsAAAA= - path: /root/file_gzip - permissions: '0755' -collect_scripts: - file_b64: | - #!/bin/bash - file /root/file_b64 - file_text: | - #!/bin/bash - file /root/file_text - file_binary: | - #!/bin/bash - file /root/file_binary - file_gzip: | - #!/bin/bash - file /root/file_gzip - -# vi: ts=4 expandtab diff --git 
a/tests/cloud_tests/testcases/main/README.md b/tests/cloud_tests/testcases/main/README.md deleted file mode 100644 index 60346063..00000000 --- a/tests/cloud_tests/testcases/main/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Main Functionality Test Configs - -## purpose -Test main features and config options of cloud-init such as logging, output -redirection, early init and integration with init system - -## structure -Should have one or more test configs for all main cloud-init output and logging -options, and basic functionality test cases - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/main/__init__.py b/tests/cloud_tests/testcases/main/__init__.py deleted file mode 100644 index 0a592637..00000000 --- a/tests/cloud_tests/testcases/main/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Test verifiers for cloud-init main features. - -See configs/main/README.md for more information -""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/main/command_output_simple.py b/tests/cloud_tests/testcases/main/command_output_simple.py deleted file mode 100644 index 80a2c8d7..00000000 --- a/tests/cloud_tests/testcases/main/command_output_simple.py +++ /dev/null @@ -1,21 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestCommandOutputSimple(base.CloudTestCase): - """Test functionality of simple output redirection.""" - - expected_warnings = ('Stdout, stderr changing to',) - - def test_output_file(self): - """Ensure that the output file is not empty and has all stages.""" - data = self.get_data_file('cloud-init-test-output') - self.assertNotEqual(len(data), 0, "specified log empty") - self.assertEqual(self.get_config_entry('final_message'), - data.splitlines()[-1].strip()) - # TODO: need to test that all stages redirected here - - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/main/command_output_simple.yaml b/tests/cloud_tests/testcases/main/command_output_simple.yaml deleted file mode 100644 index 08ca8940..00000000 --- a/tests/cloud_tests/testcases/main/command_output_simple.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# -# Test functionality of simple output redirection -# -cloud_config: | - #cloud-config - output: { all: "| tee -a /var/log/cloud-init-test-output" } - final_message: "should be last line in cloud-init-test-output file" -collect_scripts: - cloud-init-test-output: | - #!/bin/bash - cat /var/log/cloud-init-test-output - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/README.md b/tests/cloud_tests/testcases/modules/README.md deleted file mode 100644 index d66101f2..00000000 --- a/tests/cloud_tests/testcases/modules/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Module Test Configs - -## Purpose -Test functionality of cloud config modules. See -[here](https://cloudinit.readthedocs.io/en/latest/topics/modules.html) for -a full list. - -## Structure -Should have one or more test configs for each module in cloudinit/config/. The -name of the test should indicate which module the config is verifying. 
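
The writing_out_arbitrary_files example a little further up carries its payloads as base64 and gzip'd blobs. The snippet below shows one way such payloads can be generated before pasting them into user-data; it is a sketch, not part of cloud-init itself:

    import base64
    import gzip

    text = '# My new /root/file_text\n\nSMBDOPTIONS="-D"\n'

    # 'encoding: b64' payload: plain base64 of the file content.
    b64_payload = base64.b64encode(text.encode()).decode()

    # 'encoding: gzip' payload: gzip the content; in YAML the bytes are
    # then written with the !!binary tag, which is itself base64.
    gzip_payload = base64.b64encode(gzip.compress(text.encode())).decode()

    print(b64_payload)
    print(gzip_payload)
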
- -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/TODO.md b/tests/cloud_tests/testcases/modules/TODO.md deleted file mode 100644 index 9513cb2d..00000000 --- a/tests/cloud_tests/testcases/modules/TODO.md +++ /dev/null @@ -1,95 +0,0 @@ -# TODO - -The following lists complete or partially misisng modules. If a module is -listed with nothing below it indicates that no work is completed on that -module. If there is a list below the module name that is the remainig -identified work. - -## apt_configure - - * apt_get_wrapper - * What does this do? How to use it? - * apt_get_command - * To specify a different 'apt-get' command, set 'apt_get_command'. - This must be a list, and the subcommand (update, upgrade) is appended to it. - * Modify default and verify the options got passed correctly. - * preserve sources - * TBD - -## chef -2016-11-17: Tests took > 60 seconds and test framework times out currently. - -## disable EC2 metadata - -## disk setup - -## emit upstart - -## fan - -## growpart - -## grub dpkg - -## landscape -2016-11-17: Module is not working - -## lxd -2016-11-17: Need a zfs backed test written - -## mcollective - -## migrator - -## mounts - -## phone home - -## power state change - -## puppet -2016-11-17: Tests took > 60 seconds and test framework times out currently. - -## resizefs - -## resolv conf -2016-11-17: Issues with changing resolv.conf and lxc backend. - -## redhat subscription -2016-11-17: Need RH support in test framework. - -## rightscale userdata -2016-11-17: Specific to RightScale cloud enviornment. - -## rsyslog - -## scripts per boot -Not applicable to write a test for this as it specifies when something should be run. - -## scripts per instance -Not applicable to write a test for this as it specifies when something should be run. - -## scripts per once -Not applicable to write a test for this as it specifies when something should be run. - -## scripts user -Not applicable to write a test for this as it specifies when something should be run. - -## scripts vendor -Not applicable to write a test for this as it specifies when something should be run. - -## snap -2019-12-19: Need to investigate - -## spacewalk - -## ssh authkey fingerprints -The authkey_hash key does not appear to work. In fact the default claims to be md5, however syslog only shows sha256 - -## update etc hosts -2016-11-17: Issues with changing /etc/hosts and lxc backend. - -## yum add repo -2016-11-17: Need RH support in test framework. - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/__init__.py b/tests/cloud_tests/testcases/modules/__init__.py deleted file mode 100644 index 6ab8114d..00000000 --- a/tests/cloud_tests/testcases/modules/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Test verifiers for cloud-init cc modules. - -See configs/modules/README.md for more information -""" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_conf.py b/tests/cloud_tests/testcases/modules/apt_configure_conf.py deleted file mode 100644 index 3bf93447..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_conf.py +++ /dev/null @@ -1,20 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureConf(base.CloudTestCase): - """Test apt-configure module.""" - - def test_apt_conf_assumeyes(self): - """Test config assumes true.""" - out = self.get_data_file('94cloud-init-config') - self.assertIn('Assume-Yes "true";', out) - - def test_apt_conf_fixbroken(self): - """Test config fixes broken.""" - out = self.get_data_file('94cloud-init-config') - self.assertIn('Fix-Broken "true";', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_conf.yaml b/tests/cloud_tests/testcases/modules/apt_configure_conf.yaml deleted file mode 100644 index de453000..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_conf.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# -# Provide a configuration for APT -# -required_features: - - apt -cloud_config: | - #cloud-config - apt: - conf: | - APT { - Get { - Assume-Yes "true"; - Fix-Broken "true"; - } - } -collect_scripts: - 94cloud-init-config: | - #!/bin/bash - cat /etc/apt/apt.conf.d/94cloud-init-config - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py deleted file mode 100644 index eabe4607..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureDisableSuites(base.CloudTestCase): - """Test apt-configure module.""" - - def test_empty_sourcelist(self): - """Test source list is empty.""" - out = self.get_data_file('sources.list') - self.assertEqual('', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml deleted file mode 100644 index 98800673..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# -# Disables everything in sources.list -# -required_features: - - apt - - lsb_release -cloud_config: | - #cloud-config - apt: - disable_suites: - - $RELEASE - - $RELEASE-updates - - $RELEASE-backports - - $RELEASE-security -collect_scripts: - sources.list: | - #!/bin/bash - grep -v '^#' /etc/apt/sources.list | sed '/^\s*$/d' - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_primary.py b/tests/cloud_tests/testcases/modules/apt_configure_primary.py deleted file mode 100644 index 4950a2ef..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_primary.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigurePrimary(base.CloudTestCase): - """Test apt-configure module.""" - - def test_ubuntu_sources(self): - """Test no default Ubuntu entries exist.""" - out = self.get_data_file('sources.list') - ubuntu_source_count = len( - [line for line in out.split('\n') if 'archive.ubuntu.com' in line]) - self.assertEqual(0, ubuntu_source_count) - - def test_gatech_sources(self): - """Test GaTech entries exist.""" - out = self.get_data_file('sources.list') - gatech_source_count = len( - [line for line in out.split('\n') if 'gtlib.gatech.edu' in line]) - self.assertGreater(gatech_source_count, 0) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_primary.yaml b/tests/cloud_tests/testcases/modules/apt_configure_primary.yaml deleted file mode 100644 index cc067d4f..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_primary.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# -# Setup a custome primary sources.list -# -required_features: - - apt - - apt_src_cont -cloud_config: | - #cloud-config - apt: - primary: - - arches: - - default - uri: "http://www.gtlib.gatech.edu/pub/ubuntu-releases/" -collect_scripts: - sources.list: | - #!/bin/bash - cat /etc/apt/sources.list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_proxy.py b/tests/cloud_tests/testcases/modules/apt_configure_proxy.py deleted file mode 100644 index 0c61b6cc..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_proxy.py +++ /dev/null @@ -1,22 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureProxy(base.CloudTestCase): - """Test apt-configure module.""" - - def test_proxy_config(self): - """Test proxy options added to apt config.""" - out = self.get_data_file('90cloud-init-aptproxy') - self.assertIn( - 'Acquire::http::Proxy "http://squid.internal:3128";', out) - self.assertIn( - 'Acquire::http::Proxy "http://squid.internal:3128";', out) - self.assertIn( - 'Acquire::ftp::Proxy "ftp://squid.internal:3128";', out) - self.assertIn( - 'Acquire::https::Proxy "https://squid.internal:3128";', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml b/tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml deleted file mode 100644 index be6c6f81..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# -# Set apt proxy -# -required_features: - - apt -cloud_config: | - #cloud-config - apt: - proxy: "http://squid.internal:3128" - http_proxy: "http://squid.internal:3128" - ftp_proxy: "ftp://squid.internal:3128" - https_proxy: "https://squid.internal:3128" -collect_scripts: - 90cloud-init-aptproxy: | - #!/bin/bash - cat /etc/apt/apt.conf.d/90cloud-init-aptproxy - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_security.py b/tests/cloud_tests/testcases/modules/apt_configure_security.py deleted file mode 100644 index 7d7e2585..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_security.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureSecurity(base.CloudTestCase): - """Test apt-configure module.""" - - def test_security_mirror(self): - """Test security lines added and uncommented in source.list.""" - out = self.get_data_file('sources.list') - self.assertEqual(6, int(out)) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_security.yaml b/tests/cloud_tests/testcases/modules/apt_configure_security.yaml deleted file mode 100644 index 83dd51df..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_security.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# -# Add security to sources.list -# -required_features: - - apt - - ubuntu_repos -cloud_config: | - #cloud-config - apt: - security: - - arches: - - default -collect_scripts: - sources.list: | - #!/bin/bash - grep -c security.ubuntu.com /etc/apt/sources.list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py deleted file mode 100644 index d9061f3c..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureSourcesKey(base.CloudTestCase): - """Test apt-configure module.""" - - def test_apt_key_list(self): - """Test key list updated.""" - out = self.get_data_file('apt_key_list') - self.assertIn( - '1FF0 D853 5EF7 E719 E5C8 1B9C 083D 06FB E4D3 04DF', out) - self.assertIn('Launchpad PPA for cloud init development team', out) - - def test_source_list(self): - """Test source.list updated.""" - out = self.get_data_file('sources.list') - self.assertIn( - 'http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml deleted file mode 100644 index bde9398a..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml +++ /dev/null @@ -1,50 +0,0 @@ -# -# Add a sources.list entry with a given key (Debian Jessie) -# -required_features: - - apt - - lsb_release -cloud_config: | - #cloud-config - apt: - sources: - source1: - source: "deb http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu $RELEASE main" - key: | - -----BEGIN PGP PUBLIC KEY BLOCK----- - Version: SKS 1.1.6 - Comment: Hostname: keyserver.ubuntu.com - - mQINBFbZRUIBEAC+A0PIKYBP9kLC4hQtRrffRS11uLo8/BdtmOdrlW0hpPHzCfKnjR3tvSEI - lqPHG1QrrjAXKZDnZMRz+h/px7lUztvytGzHPSJd5ARUzAyjyRezUhoJ3VSCxrPqx62avuWf - RfoJaIeHfDehL5/dTVkyiWxfVZ369ZX6JN2AgLsQTeybTQ75+2z0xPrrhnGmgh6g0qTYcAaq - M5ONOGiqeSBX/Smjh6ALy5XkhUiFGLsI7Yluf6XSICY/x7gd6RAfgSIQrUTNMoS1sqhT4aot - +xvOfQy8ySkfAK4NddXql6E/+ZqTmBY/Lr0YklFBy8jGT+UysfiIznPMIwbmgq5Li7BtDDtX - b8Uyi4edPpjtextezfXYn4NVIpPL5dPZS/FXh4HpzyH0pYCfrH4QDGA7i52AGmhpiOFjJMo6 - N33sdjZHOH/2Vyp+QZaQnsdUAi1N4M6c33tQbpIScn1SY+El8z5JDA4PBzkw8HpLCi1gGoa6 - V4kfbWqXXbGAJFkLkP/vc4+pY9axOlmCkJg7xCPwhI75y1cONgovhz+BEXOzolh5KZuGbGbj - xe0wva5DLBeIg7EQFf+99pOS7Syby3Xpm6ZbswEFV0cllK4jf/QMjtfInxobuMoI0GV0bE5l - WlRtPCK5FnbHwxi0wPNzB/5fwzJ77r6HgPrR0OkT0lWmbUyoOQARAQABtC1MYXVuY2hwYWQg - UFBBIGZvciBjbG91ZCBpbml0IGRldmVsb3BtZW50IHRlYW2JAjgEEwECACIFAlbZRUICGwMG - 
CwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEAg9Bvvk0wTfHfcP/REK5N2s1JYc69qEa9ZN - o6oi+A7l6AYw+ZY88O5TJe7F9otv5VXCIKSUT0Vsepjgf0mtXAgf/sb2lsJn/jp7tzgov3YH - vSrkTkRydz8xcA87gwQKePuvTLxQpftF4flrBxgSueIn5O/tPrBOxLz7EVYBc78SKg9aj9L2 - yUp+YuNevlwfZCTYeBb9r3FHaab2HcgkwqYch66+nKYfwiLuQ9NzXXm0Wn0JcEQ6pWvJscbj - C9BdawWovfvMK5/YLfI6Btm7F4mIpQBdhSOUp/YXKmdvHpmwxMCN2QhqYK49SM7qE9aUDbJL - arppSEBtlCLWhRBZYLTUna+BkuQ1bHz4St++XTR49Qd7vDERALpApDjB2dxPfMiBzCMwQQyq - uy13exU8o2ETLg+dZSLfDTzrBNsBFmXlw8WW17nTISYdKeGKL+QdlUjpzdwUMMzHhAO8SmMH - zjeSlDSRMXBJFAFSbCl7EwmMKa3yVX0zInT91fNllZ3iatAmtVdqVH/BFQfTIMH2ET7A8WzJ - ZzVSuMRhqoKdr5AMcHuJGPUoVkVJHQA+NNvEiXSysF3faL7jmKapmUwrhpYYX2H8pf+VMu2e - cLflKTI28dl+ZQ4Pl/aVsxrti/pzhdYy05Sn5ddtySyIkvo8L1cU5MWpbvSlFPkTstBUDLBf - pb0uBy+g0oxJQg15 - =uy53 - -----END PGP PUBLIC KEY BLOCK----- -collect_scripts: - sources.list: | - #!/bin/bash - cat /etc/apt/sources.list.d/source1.list - apt_key_list: | - #!/bin/bash - apt-key finger - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py deleted file mode 100644 index ddc86174..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureSourcesKeyserver(base.CloudTestCase): - """Test apt-configure module.""" - - def test_apt_key_list(self): - """Test specific key added.""" - out = self.get_data_file('apt_key_list') - self.assertIn( - '1FF0 D853 5EF7 E719 E5C8 1B9C 083D 06FB E4D3 04DF', out) - self.assertIn('Launchpad PPA for cloud init development team', out) - - def test_source_list(self): - """Test source.list updated.""" - out = self.get_data_file('sources.list') - self.assertIn( - 'http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml deleted file mode 100644 index 25088135..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# -# Add a sources.list entry with a key from a keyserver -# -required_features: - - apt - - lsb_release -cloud_config: | - #cloud-config - apt: - sources: - source1: - keyid: 1FF0D8535EF7E719E5C81B9C083D06FBE4D304DF - keyserver: keyserver.ubuntu.com - source: "deb http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu $RELEASE main" -collect_scripts: - sources.list: | - #!/bin/bash - cat /etc/apt/sources.list.d/source1.list - apt_key_list: | - #!/bin/bash - apt-key finger - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py deleted file mode 100644 index cf84e056..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureSourcesList(base.CloudTestCase): - """Test apt-configure module.""" - - def test_sources_list(self): - """Test sources.list includes sources.""" - out = self.get_data_file('sources.list') - - # Verify we have 6 entires - self.assertEqual(6, len(out.rstrip().split('\n'))) - - # Verify the keys generated the list correctly - self.assertRegex(out, r'deb http:\/\/archive.ubuntu.com\/ubuntu ' - '[a-z].* main restricted') - self.assertRegex(out, r'deb-src http:\/\/archive.ubuntu.com\/ubuntu ' - '[a-z].* main restricted') - self.assertRegex(out, r'deb http:\/\/archive.ubuntu.com\/ubuntu ' - '[a-z].* universe restricted') - self.assertRegex(out, r'deb-src http:\/\/archive.ubuntu.com\/ubuntu ' - '[a-z].* universe restricted') - self.assertRegex(out, r'deb http:\/\/security.ubuntu.com\/ubuntu ' - '[a-z].*security multiverse') - self.assertRegex(out, r'deb-src http:\/\/security.ubuntu.com\/ubuntu ' - '[a-z].*security multiverse') - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml deleted file mode 100644 index 87e470c1..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# -# Generate a sources.list -# -required_features: - - apt - - lsb_release -cloud_config: | - #cloud-config - apt: - primary: - - arches: [default] - uri: http://archive.ubuntu.com/ubuntu - security: - - arches: [default] - uri: http://security.ubuntu.com/ubuntu - sources_list: | - deb $MIRROR $RELEASE main restricted - deb-src $MIRROR $RELEASE main restricted - deb $PRIMARY $RELEASE universe restricted - deb-src $PRIMARY $RELEASE universe restricted - deb $SECURITY $RELEASE-security multiverse - deb-src $SECURITY $RELEASE-security multiverse -collect_scripts: - sources.list: | - #/bin/bash - cat /etc/apt/sources.list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py deleted file mode 100644 index dfbdeadf..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptconfigureSourcesPPA(base.CloudTestCase): - """Test apt-configure module.""" - - def test_ppa(self): - """Test specific ppa added.""" - out = self.get_data_file('sources.list') - self.assertIn( - 'http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu', out) - - def test_ppa_key(self): - """Test ppa key added.""" - out = self.get_data_file('apt-key') - self.assertIn( - '1FF0 D853 5EF7 E719 E5C8 1B9C 083D 06FB E4D3 04DF', out) - self.assertIn('Launchpad PPA for cloud init development team', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml deleted file mode 100644 index b997bcfb..00000000 --- a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# -# Add a PPA to source.list -# -# NOTE: on older ubuntu releases the sources file added is named -# 'cloud-init-dev-test-archive-trusty', without 'ubuntu' in the middle -required_features: - - apt - - ppa - - ppa_file_name -cloud_config: | - #cloud-config - apt: - sources: - source1: - keyid: 0165013E - keyserver: keyserver.ubuntu.com - source: "ppa:cloud-init-dev/test-archive" -collect_scripts: - sources.list: | - #!/bin/bash - cat /etc/apt/sources.list.d/cloud-init-dev-ubuntu-test-archive-*.list - apt-key: | - #!/bin/bash - apt-key finger - sources_full: | - #!/bin/bash - cat /etc/apt/sources.list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py deleted file mode 100644 index c98eedef..00000000 --- a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptPipeliningDisable(base.CloudTestCase): - """Test apt-pipelining module.""" - - def test_disable_pipelining(self): - """Test pipelining disabled.""" - out = self.get_data_file('90cloud-init-pipelining') - self.assertIn('Acquire::http::Pipeline-Depth "0";', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml deleted file mode 100644 index 22a31dc4..00000000 --- a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Disable apt pipelining value -# -required_features: - - apt -cloud_config: | - #cloud-config - apt_pipelining: false -collect_scripts: - 90cloud-init-pipelining: | - #!/bin/bash - cat /etc/apt/apt.conf.d/90cloud-init-pipelining - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_os.py b/tests/cloud_tests/testcases/modules/apt_pipelining_os.py deleted file mode 100644 index 2b940a66..00000000 --- a/tests/cloud_tests/testcases/modules/apt_pipelining_os.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestAptPipeliningOS(base.CloudTestCase): - """Test apt-pipelining module.""" - - def test_os_pipelining(self): - """test 'os' settings does not write apt config file.""" - out = self.get_data_file('90cloud-init-pipelining_not_written') - self.assertEqual(0, int(out)) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml b/tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml deleted file mode 100644 index 86d5220b..00000000 --- a/tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Set apt pipelining value to OS, no conf written -# -required_features: - - apt -cloud_config: | - #cloud-config - apt_pipelining: os -collect_scripts: - 90cloud-init-pipelining_not_written: | - #!/bin/bash - ls /etc/apt/apt.conf.d/90cloud-init-pipelining | wc -l - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/bootcmd.py b/tests/cloud_tests/testcases/modules/bootcmd.py deleted file mode 100644 index f5b86b03..00000000 --- a/tests/cloud_tests/testcases/modules/bootcmd.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestBootCmd(base.CloudTestCase): - """Test bootcmd module.""" - - def test_bootcmd_host(self): - """Test boot cmd worked.""" - out = self.get_data_file('hosts') - self.assertIn('192.168.1.130 us.archive.ubuntu.com', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/bootcmd.yaml b/tests/cloud_tests/testcases/modules/bootcmd.yaml deleted file mode 100644 index 3a73994e..00000000 --- a/tests/cloud_tests/testcases/modules/bootcmd.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# -# Early boot command -# -cloud_config: | - #cloud-config - bootcmd: - - echo 192.168.1.130 us.archive.ubuntu.com > /etc/hosts -collect_scripts: - hosts: | - #!/bin/bash - cat /etc/hosts - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/byobu.py b/tests/cloud_tests/testcases/modules/byobu.py deleted file mode 100644 index 74d0529a..00000000 --- a/tests/cloud_tests/testcases/modules/byobu.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestByobu(base.CloudTestCase): - """Test Byobu module.""" - - def test_byobu_installed(self): - """Test byobu installed.""" - self.assertPackageInstalled('byobu') - - def test_byobu_profile_enabled(self): - """Test byobu profile.d file exists.""" - out = self.get_data_file('byobu_profile_enabled') - self.assertIn('/etc/profile.d/Z97-byobu.sh', out) - - def test_byobu_launch_exists(self): - """Test byobu-launch exists.""" - out = self.get_data_file('byobu_launch_exists') - self.assertIn('/usr/bin/byobu-launch', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/byobu.yaml b/tests/cloud_tests/testcases/modules/byobu.yaml deleted file mode 100644 index d002a611..00000000 --- a/tests/cloud_tests/testcases/modules/byobu.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# -# Install and enable byobu system wide and default user -# -required_features: - - byobu -cloud_config: | - #cloud-config - byobu_by_default: enable -collect_scripts: - byobu_profile_enabled: | - #!/bin/bash - ls /etc/profile.d/Z97-byobu.sh - byobu_launch_exists: | - #!/bin/bash - which /usr/bin/byobu-launch - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ca_certs.py b/tests/cloud_tests/testcases/modules/ca_certs.py deleted file mode 100644 index 6b56f639..00000000 --- a/tests/cloud_tests/testcases/modules/ca_certs.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestCaCerts(base.CloudTestCase): - """Test ca certs module.""" - - def test_certs_updated(self): - """Test certs have been updated in /etc/ssl/certs.""" - out = self.get_data_file('cert_links') - # Bionic update-ca-certificates creates less links debian #895075 - unlinked_files = [] - links = {} - for cert_line in out.splitlines(): - if '->' in cert_line: - fname, _sep, link = cert_line.split() - links[fname] = link - else: - unlinked_files.append(cert_line) - self.assertEqual(['ca-certificates.crt'], unlinked_files) - self.assertEqual('cloud-init-ca-certs.pem', links['a535c1f3.0']) - self.assertEqual( - '/usr/share/ca-certificates/cloud-init-ca-certs.crt', - links['cloud-init-ca-certs.pem']) - - def test_cert_installed(self): - """Test line from our cert exists.""" - out = self.get_data_file('cert') - self.assertIn('a36c744454555024e7f82edc420fd2c8', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ca_certs.yaml b/tests/cloud_tests/testcases/modules/ca_certs.yaml deleted file mode 100644 index 2cd91551..00000000 --- a/tests/cloud_tests/testcases/modules/ca_certs.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# -# Remove existing ca_certs and install custom ca-cert -# -cloud_config: | - #cloud-config - ca-certs: - remove-defaults: true - trusted: - - | - -----BEGIN CERTIFICATE----- - MIIGJzCCBA+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBsjELMAkGA1UEBhMCRlIx - DzANBgNVBAgMBkFsc2FjZTETMBEGA1UEBwwKU3RyYXNib3VyZzEYMBYGA1UECgwP - d3d3LmZyZWVsYW4ub3JnMRAwDgYDVQQLDAdmcmVlbGFuMS0wKwYDVQQDDCRGcmVl - bGFuIFNhbXBsZSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxIjAgBgkqhkiG9w0BCQEW - E2NvbnRhY3RAZnJlZWxhbi5vcmcwHhcNMTIwNDI3MTAzMTE4WhcNMjIwNDI1MTAz - MTE4WjB+MQswCQYDVQQGEwJGUjEPMA0GA1UECAwGQWxzYWNlMRgwFgYDVQQKDA93 - d3cuZnJlZWxhbi5vcmcxEDAOBgNVBAsMB2ZyZWVsYW4xDjAMBgNVBAMMBWFsaWNl - MSIwIAYJKoZIhvcNAQkBFhNjb250YWN0QGZyZWVsYW4ub3JnMIICIjANBgkqhkiG - 
9w0BAQEFAAOCAg8AMIICCgKCAgEA3W29+ID6194bH6ejLrIC4hb2Ugo8v6ZC+Mrc - k2dNYMNPjcOKABvxxEtBamnSaeU/IY7FC/giN622LEtV/3oDcrua0+yWuVafyxmZ - yTKUb4/GUgafRQPf/eiX9urWurtIK7XgNGFNUjYPq4dSJQPPhwCHE/LKAykWnZBX - RrX0Dq4XyApNku0IpjIjEXH+8ixE12wH8wt7DEvdO7T3N3CfUbaITl1qBX+Nm2Z6 - q4Ag/u5rl8NJfXg71ZmXA3XOj7zFvpyapRIZcPmkvZYn7SMCp8dXyXHPdpSiIWL2 - uB3KiO4JrUYvt2GzLBUThp+lNSZaZ/Q3yOaAAUkOx+1h08285Pi+P8lO+H2Xic4S - vMq1xtLg2bNoPC5KnbRfuFPuUD2/3dSiiragJ6uYDLOyWJDivKGt/72OVTEPAL9o - 6T2pGZrwbQuiFGrGTMZOvWMSpQtNl+tCCXlT4mWqJDRwuMGrI4DnnGzt3IKqNwS4 - Qyo9KqjMIPwnXZAmWPm3FOKe4sFwc5fpawKO01JZewDsYTDxVj+cwXwFxbE2yBiF - z2FAHwfopwaH35p3C6lkcgP2k/zgAlnBluzACUI+MKJ/G0gv/uAhj1OHJQ3L6kn1 - SpvQ41/ueBjlunExqQSYD7GtZ1Kg8uOcq2r+WISE3Qc9MpQFFkUVllmgWGwYDuN3 - Zsez95kCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT - TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFFlfyRO6G8y5qEFKikl5 - ajb2fT7XMB8GA1UdIwQYMBaAFCNsLT0+KV14uGw+quK7Lh5sh/JTMA0GCSqGSIb3 - DQEBBQUAA4ICAQAT5wJFPqervbja5+90iKxi1d0QVtVGB+z6aoAMuWK+qgi0vgvr - mu9ot2lvTSCSnRhjeiP0SIdqFMORmBtOCFk/kYDp9M/91b+vS+S9eAlxrNCB5VOf - PqxEPp/wv1rBcE4GBO/c6HcFon3F+oBYCsUQbZDKSSZxhDm3mj7pb67FNbZbJIzJ - 70HDsRe2O04oiTx+h6g6pW3cOQMgIAvFgKN5Ex727K4230B0NIdGkzuj4KSML0NM - slSAcXZ41OoSKNjy44BVEZv0ZdxTDrRM4EwJtNyggFzmtTuV02nkUj1bYYYC5f0L - ADr6s0XMyaNk8twlWYlYDZ5uKDpVRVBfiGcq0uJIzIvemhuTrofh8pBQQNkPRDFT - Rq1iTo1Ihhl3/Fl1kXk1WR3jTjNb4jHX7lIoXwpwp767HAPKGhjQ9cFbnHMEtkro - RlJYdtRq5mccDtwT0GFyoJLLBZdHHMHJz0F9H7FNk2tTQQMhK5MVYwg+LIaee586 - CQVqfbscp7evlgjLW98H+5zylRHAgoH2G79aHljNKMp9BOuq6SnEglEsiWGVtu2l - hnx8SB3sVJZHeer8f/UQQwqbAO+Kdy70NmbSaqaVtp8jOxLiidWkwSyRTsuU6D8i - DiH5uEqBXExjrj0FslxcVKdVj5glVcSmkLwZKbEU1OKwleT/iXFhvooWhQ== - -----END CERTIFICATE----- -collect_scripts: - cert_links: | - #!/bin/bash - # links printed -> - # non-links printed - for file in `ls /etc/ssl/certs`; do - [ -h /etc/ssl/certs/$file ] && echo -n $file ' -> ' && readlink /etc/ssl/certs/$file || echo $file; - done - cert: | - #!/bin/bash - md5sum /etc/ssl/certs/ca-certificates.crt -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/debug_disable.py b/tests/cloud_tests/testcases/modules/debug_disable.py deleted file mode 100644 index e40e4b89..00000000 --- a/tests/cloud_tests/testcases/modules/debug_disable.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestDebugDisable(base.CloudTestCase): - """Disable debug messages.""" - - def test_debug_disable(self): - """Test verbose output missing from logs.""" - out = self.get_data_file('cloud-init.log') - self.assertNotIn( - out, r'Skipping module named [a-z].* verbose printing disabled') - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/debug_disable.yaml b/tests/cloud_tests/testcases/modules/debug_disable.yaml deleted file mode 100644 index 63218b18..00000000 --- a/tests/cloud_tests/testcases/modules/debug_disable.yaml +++ /dev/null @@ -1,9 +0,0 @@ -# -# Do not run in debug mode -# -cloud_config: | - #cloud-config - debug: - verbose: False - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/debug_enable.py b/tests/cloud_tests/testcases/modules/debug_enable.py deleted file mode 100644 index 28d26062..00000000 --- a/tests/cloud_tests/testcases/modules/debug_enable.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestDebugEnable(base.CloudTestCase): - """Test debug messages.""" - - def test_debug_enable(self): - """Test debug messages in cloud-init log.""" - out = self.get_data_file('cloud-init.log') - self.assertIn('[DEBUG]', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/debug_enable.yaml b/tests/cloud_tests/testcases/modules/debug_enable.yaml deleted file mode 100644 index d44147db..00000000 --- a/tests/cloud_tests/testcases/modules/debug_enable.yaml +++ /dev/null @@ -1,9 +0,0 @@ -# -# Run in debug mode -# -cloud_config: | - #cloud-config - debug: - verbose: True - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/final_message.py b/tests/cloud_tests/testcases/modules/final_message.py deleted file mode 100644 index b7b5d5e0..00000000 --- a/tests/cloud_tests/testcases/modules/final_message.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestFinalMessage(base.CloudTestCase): - """Test cloud init module `cc_final_message`.""" - - subs_char = '$' - - def get_final_message_config(self): - """Get config for final message.""" - self.assertIn('final_message', self.cloud_config) - return self.cloud_config['final_message'] - - def get_final_message(self): - """Get final message from log.""" - out = self.get_data_file('cloud-init-output.log') - lines = len(self.get_final_message_config().splitlines()) - return '\n'.join(out.splitlines()[-1 * lines:]) - - def test_final_message_string(self): - """Ensure final handles regular strings.""" - for actual, config in zip( - self.get_final_message().splitlines(), - self.get_final_message_config().splitlines()): - if self.subs_char not in config: - self.assertEqual(actual, config) - - def test_final_message_subs(self): - """Test variable substitution in final message.""" - # TODO: add verification of other substitutions - patterns = {'$datasource': self.get_datasource()} - for key, expected in patterns.items(): - index = self.get_final_message_config().splitlines().index(key) - actual = self.get_final_message().splitlines()[index] - self.assertEqual(actual, expected) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/final_message.yaml b/tests/cloud_tests/testcases/modules/final_message.yaml deleted file mode 100644 index c9ed6118..00000000 --- a/tests/cloud_tests/testcases/modules/final_message.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# -# Print a final message with various predefined variables -# -cloud_config: | - #cloud-config - final_message: | - This is my final message! - $version - $timestamp - $datasource - $uptime - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/keys_to_console.py b/tests/cloud_tests/testcases/modules/keys_to_console.py deleted file mode 100644 index 07f38112..00000000 --- a/tests/cloud_tests/testcases/modules/keys_to_console.py +++ /dev/null @@ -1,22 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestKeysToConsole(base.CloudTestCase): - """Test proper keys are included and excluded to console.""" - - def test_excluded_keys(self): - """Test excluded keys missing.""" - out = self.get_data_file('syslog') - self.assertNotIn('(DSA)', out) - self.assertNotIn('(ECDSA)', out) - - def test_expected_keys(self): - """Test expected keys exist.""" - out = self.get_data_file('syslog') - self.assertIn('(ED25519)', out) - self.assertIn('(RSA)', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/keys_to_console.yaml b/tests/cloud_tests/testcases/modules/keys_to_console.yaml deleted file mode 100644 index 5d86e739..00000000 --- a/tests/cloud_tests/testcases/modules/keys_to_console.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# -# Hide printing of ssh key and fingerprints for specific keys -# -required_features: - - syslog -cloud_config: | - #cloud-config - ssh_fp_console_blacklist: [ssh-dss, ssh-dsa, ecdsa-sha2-nistp256] - ssh_key_console_blacklist: [ssh-dss, ssh-dsa, ecdsa-sha2-nistp256] -collect_scripts: - syslog: | - #!/bin/bash - cat /var/log/syslog - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/landscape.yaml b/tests/cloud_tests/testcases/modules/landscape.yaml deleted file mode 100644 index ed2c37c4..00000000 --- a/tests/cloud_tests/testcases/modules/landscape.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# -# Setup landscape client settings -# -# 2016-11-17: Disabled due to this not working -# -enabled: false -required_features: - - landscape -cloud_config: | - #cloud-conifg - landscape: - client: - log_level: "info" - url: "https://landscape.canonical.com/message-system" - ping_url: "http://landscape.canonical.com/ping" - data_path: "/var/lib/landscape/client" - http_proxy: "http://my.proxy.com/foobar" - https_proxy: "https://my.proxy.com/foobar" - tags: "server,cloud" - computer_title: "footitle" - registration_key: "fookey" - account_name: "fooaccount" -collect_scripts: - client.conf: | - #!/bin/bash - cat /etc/landscape/client.conf - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/locale.py b/tests/cloud_tests/testcases/modules/locale.py deleted file mode 100644 index cb9e1dce..00000000 --- a/tests/cloud_tests/testcases/modules/locale.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - -from cloudinit import util - - -class TestLocale(base.CloudTestCase): - """Test locale is set properly.""" - - def test_locale(self): - """Test locale is set properly.""" - data = util.load_shell_content(self.get_data_file('locale_default')) - self.assertIn("LANG", data) - self.assertEqual('en_GB.UTF-8', data['LANG']) - - def test_locale_a(self): - """Test locale -a has both options.""" - out = self.get_data_file('locale_a') - self.assertIn('en_GB.utf8', out) - self.assertIn('en_US.utf8', out) - - def test_locale_gen(self): - """Test local.gen file has all entries.""" - out = self.get_data_file('locale_gen') - self.assertIn('en_GB.UTF-8', out) - self.assertIn('en_US.UTF-8', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/locale.yaml b/tests/cloud_tests/testcases/modules/locale.yaml deleted file mode 100644 index e01518a1..00000000 --- a/tests/cloud_tests/testcases/modules/locale.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# -# Set locale to non-default option and verify -# -required_features: - - engb_locale - - locale_gen -cloud_config: | - #cloud-config - locale: en_GB.UTF-8 - locale_configfile: /etc/default/locale -collect_scripts: - locale_default: | - #!/bin/bash - cat /etc/default/locale - locale_a: | - #!/bin/bash - locale -a - locale_gen: | - #!/bin/bash - cat /etc/locale.gen | grep -v '^#' | uniq - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/lxd_bridge.py b/tests/cloud_tests/testcases/modules/lxd_bridge.py deleted file mode 100644 index ea545e0a..00000000 --- a/tests/cloud_tests/testcases/modules/lxd_bridge.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestLxdBridge(base.CloudTestCase): - """Test LXD module.""" - - @classmethod - def maybeSkipTest(cls): - """Skip on cosmic for two reasons: - a.) LP: #1795036 - 'lxd init' fails on cosmic kernel. - b.) 
apt install lxd installs via snap which can be slow - as that will download core snap and lxd.""" - os_name = cls.data.get('os_name', 'UNKNOWN') - if os_name == "cosmic": - raise base.SkipTest('Skipping test on cosmic (LP: #1795036).') - - def test_lxd(self): - """Test lxd installed.""" - out = self.get_data_file('lxd') - self.assertIn('/lxd', out) - - def test_lxc(self): - """Test lxc installed.""" - out = self.get_data_file('lxc') - self.assertIn('/lxc', out) - - def test_bridge(self): - """Test bridge config.""" - out = self.get_data_file('lxc-bridge') - self.assertIn('lxdbr0', out) - self.assertIn('10.100.100.1/24', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/lxd_bridge.yaml b/tests/cloud_tests/testcases/modules/lxd_bridge.yaml deleted file mode 100644 index e6b7e76a..00000000 --- a/tests/cloud_tests/testcases/modules/lxd_bridge.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# -# LXD configured with directory backend and IPv4 bridge -# -required_features: - - lxd -cloud_config: | - #cloud-config - lxd: - init: - storage_backend: dir - bridge: - mode: new - name: lxdbr0 - ipv4_address: 10.100.100.1 - ipv4_netmask: 24 - ipv4_dhcp_first: 10.100.100.100 - ipv4_dhcp_last: 10.100.100.200 - ipv4_nat: true - domain: lxd -collect_scripts: - lxc: | - #!/bin/bash - which lxc - lxd: | - #!/bin/bash - which lxd - lxc-bridge: | - #!/bin/bash - ip addr show lxdbr0 - cat /etc/default/lxd-bridge 2>/dev/null | grep -v ^# | sort -u - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/lxd_dir.py b/tests/cloud_tests/testcases/modules/lxd_dir.py deleted file mode 100644 index 797bafed..00000000 --- a/tests/cloud_tests/testcases/modules/lxd_dir.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestLxdDir(base.CloudTestCase): - """Test LXD module.""" - - @classmethod - def maybeSkipTest(cls): - """Skip on cosmic for two reasons: - a.) LP: #1795036 - 'lxd init' fails on cosmic kernel. - b.) apt install lxd installs via snap which can be slow - as that will download core snap and lxd.""" - os_name = cls.data.get('os_name', 'UNKNOWN') - if os_name == "cosmic": - raise base.SkipTest('Skipping test on cosmic (LP: #1795036).') - - def test_lxd(self): - """Test lxd installed.""" - out = self.get_data_file('lxd') - self.assertIn('/lxd', out) - - def test_lxc(self): - """Test lxc installed.""" - out = self.get_data_file('lxc') - self.assertIn('/lxc', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/lxd_dir.yaml b/tests/cloud_tests/testcases/modules/lxd_dir.yaml deleted file mode 100644 index f93a3fa7..00000000 --- a/tests/cloud_tests/testcases/modules/lxd_dir.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# -# LXD configured with directory backend -# -required_features: - - lxd -cloud_config: | - #cloud-config - lxd: - init: - storage_backend: dir -collect_scripts: - lxc: | - #!/bin/bash - which lxc - lxd: | - #!/bin/bash - which lxd - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp.py b/tests/cloud_tests/testcases/modules/ntp.py deleted file mode 100644 index c63cc15e..00000000 --- a/tests/cloud_tests/testcases/modules/ntp.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestNtp(base.CloudTestCase): - """Test ntp module""" - - def test_ntp_installed(self): - """Test ntp installed""" - self.assertPackageInstalled('ntp') - - def test_ntp_dist_entries(self): - """Test dist config file is empty""" - out = self.get_data_file('ntp_conf_dist_empty') - self.assertEqual(0, int(out)) - - def test_ntp_entries(self): - """Test config entries""" - out = self.get_data_file('ntp_conf_pool_list') - self.assertIn('pool.ntp.org iburst', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp.yaml b/tests/cloud_tests/testcases/modules/ntp.yaml deleted file mode 100644 index 7ea0707d..00000000 --- a/tests/cloud_tests/testcases/modules/ntp.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# -# Emtpy NTP config to setup using defaults -# -cloud_config: | - #cloud-config - ntp: - ntp_client: ntp - pools: [] - servers: [] -collect_scripts: - ntp_installed: | - #!/bin/bash - ntpd --version > /dev/null 2>&1 - echo $? - ntp_conf_dist_empty: | - #!/bin/bash - ls /etc/ntp.conf.dist | wc -l - ntp_conf_pool_list: | - #!/bin/bash - grep 'pool.ntp.org' /etc/ntp.conf | grep -v ^# - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_chrony.py b/tests/cloud_tests/testcases/modules/ntp_chrony.py deleted file mode 100644 index 7d341773..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_chrony.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -import unittest - -from tests.cloud_tests.testcases import base - - -class TestNtpChrony(base.CloudTestCase): - """Test ntp module with chrony client""" - - def setUp(self): - """Skip this suite of tests on lxd and artful or older.""" - if self.platform == 'lxd': - if self.is_distro('ubuntu') and self.os_version_cmp('artful') <= 0: - raise unittest.SkipTest( - 'No support for chrony on containers <= artful.' - ' LP: #1589780') - return super(TestNtpChrony, self).setUp() - - def test_chrony_entries(self): - """Test chrony config entries""" - out = self.get_data_file('chrony_conf') - self.assertIn('.pool.ntp.org', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_chrony.yaml b/tests/cloud_tests/testcases/modules/ntp_chrony.yaml deleted file mode 100644 index 120735e2..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_chrony.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# -# ntp enabled, chrony selected, check conf file -# as chrony won't start in a container -# -cloud_config: | - #cloud-config - ntp: - enabled: true - ntp_client: chrony -collect_scripts: - chrony_conf: | - #!/bin/sh - set -- /etc/chrony.conf /etc/chrony/chrony.conf - for p in "$@"; do - [ -e "$p" ] && { cat "$p"; exit; } - done -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_pools.py b/tests/cloud_tests/testcases/modules/ntp_pools.py deleted file mode 100644 index 152fd3f1..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_pools.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestNtpPools(base.CloudTestCase): - """Test ntp module.""" - - def test_ntp_installed(self): - """Test ntp installed""" - out = self.get_data_file('ntp_installed_pools') - self.assertEqual(0, int(out)) - - def test_ntp_dist_entries(self): - """Test dist config file is empty""" - out = self.get_data_file('ntp_conf_dist_pools') - self.assertEqual(0, int(out)) - - def test_ntp_entires(self): - """Test config entries""" - out = self.get_data_file('ntp_conf_pools') - pools = self.cloud_config.get('ntp').get('pools') - for pool in pools: - self.assertIn('pool %s iburst' % pool, out) - - def test_ntpq_servers(self): - """Test ntpq output has configured servers""" - out = self.get_data_file('ntpq_servers') - pools = self.cloud_config.get('ntp').get('pools') - for pool in pools: - self.assertIn(pool, out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_pools.yaml b/tests/cloud_tests/testcases/modules/ntp_pools.yaml deleted file mode 100644 index 60fa0fd1..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_pools.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# -# NTP config using specific pools -# -# NOTE: lsb_release listed here because with recent cloud-init deb with -# (LP: 1628337) resolved, cloud-init will attempt to configure archives. -# this fails without lsb_release as UNAVAILABLE is used for $RELEASE -required_features: - - lsb_release -cloud_config: | - #cloud-config - ntp: - ntp_client: ntp - pools: - - 0.cloud-init.mypool - - 1.cloud-init.mypool - - 172.16.15.14 -collect_scripts: - ntp_installed_pools: | - #!/bin/bash - ntpd --version > /dev/null 2>&1 - echo $? - ntp_conf_dist_pools: | - #!/bin/bash - ls /etc/ntp.conf.dist | wc -l - ntp_conf_pools: | - #!/bin/bash - grep '^pool' /etc/ntp.conf - ntpq_servers: | - #!/bin/sh - ntpq -p -w -n - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_servers.py b/tests/cloud_tests/testcases/modules/ntp_servers.py deleted file mode 100644 index 8d2a68b3..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_servers.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script""" -from tests.cloud_tests.testcases import base - - -class TestNtpServers(base.CloudTestCase): - """Test ntp module""" - - def test_ntp_installed(self): - """Test ntp installed""" - out = self.get_data_file('ntp_installed_servers') - self.assertEqual(0, int(out)) - - def test_ntp_dist_entries(self): - """Test dist config file is empty""" - out = self.get_data_file('ntp_conf_dist_servers') - self.assertEqual(0, int(out)) - - def test_ntp_entries(self): - """Test config server entries""" - out = self.get_data_file('ntp_conf_servers') - servers = self.cloud_config.get('ntp').get('servers') - for server in servers: - self.assertIn('server %s iburst' % server, out) - - def test_ntpq_servers(self): - """Test ntpq output has configured servers""" - out = self.get_data_file('ntpq_servers') - servers = self.cloud_config.get('ntp').get('servers') - for server in servers: - self.assertIn(server, out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_servers.yaml b/tests/cloud_tests/testcases/modules/ntp_servers.yaml deleted file mode 100644 index ee636679..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_servers.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# -# NTP config using specific servers -# -required_features: - - lsb_release -cloud_config: | - #cloud-config - ntp: - ntp_client: ntp - servers: - - 172.16.15.14 - - 172.16.17.18 -collect_scripts: - ntp_installed_servers: | - #!/bin/sh - ntpd --version > /dev/null 2>&1 - echo $? - ntp_conf_dist_servers: | - #!/bin/sh - cat /etc/ntp.conf.dist | wc -l - ntp_conf_servers: | - #!/bin/sh - grep '^server' /etc/ntp.conf - ntpq_servers: | - #!/bin/sh - ntpq -p -w -n - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_timesyncd.py b/tests/cloud_tests/testcases/modules/ntp_timesyncd.py deleted file mode 100644 index eca750bc..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_timesyncd.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestNtpTimesyncd(base.CloudTestCase): - """Test ntp module with systemd-timesyncd client""" - - def test_timesyncd_entries(self): - """Test timesyncd config entries""" - out = self.get_data_file('timesyncd_conf') - self.assertIn('.pool.ntp.org', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ntp_timesyncd.yaml b/tests/cloud_tests/testcases/modules/ntp_timesyncd.yaml deleted file mode 100644 index ee47a741..00000000 --- a/tests/cloud_tests/testcases/modules/ntp_timesyncd.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# -# ntp enabled, systemd-timesyncd selected, check conf file -# as systemd-timesyncd won't start in a container -# -cloud_config: | - #cloud-config - ntp: - enabled: true - ntp_client: systemd-timesyncd -collect_scripts: - timesyncd_conf: | - #!/bin/sh - cat /etc/systemd/timesyncd.conf.d/cloud-init.conf - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py deleted file mode 100644 index fecad768..00000000 --- a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestPackageInstallUpdateUpgrade(base.CloudTestCase): - """Test package install update upgrade module.""" - - def test_installed_sl(self): - """Test sl got installed.""" - self.assertPackageInstalled('sl') - - def test_installed_tree(self): - """Test tree got installed.""" - self.assertPackageInstalled('tree') - - def test_apt_history(self): - """Test apt history for update command.""" - out = self.get_data_file('apt_history_cmdline') - self.assertIn( - 'Commandline: /usr/bin/apt-get --option=Dpkg::Options' - '::=--force-confold --option=Dpkg::options::=--force-unsafe-io ' - '--assume-yes --quiet install sl tree', out) - - def test_cloud_init_output(self): - """Test cloud-init-output for install & upgrade stuff.""" - out = self.get_data_file('cloud-init-output.log') - self.assertIn('Setting up tree (', out) - self.assertIn('Setting up sl (', out) - self.assertIn('Reading package lists...', out) - self.assertIn('Building dependency tree...', out) - self.assertIn('Reading state information...', out) - self.assertIn('Calculating upgrade...', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml deleted file mode 100644 index dd79e438..00000000 --- a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# -# Update/upgrade via apt and then install a pair of packages -# -# NOTE: this should not require apt feature, use 'which' rather than 'dpkg -l' -# NOTE: the testcase for this looks for the command in history.log as -# /usr/bin/apt-get..., which is not how it always appears. it should -# instead look for just apt-get... -# NOTE: this testcase should not require 'apt_up_out', and should look for a -# call to 'apt-get upgrade' or 'apt-get dist-upgrade' in cloud-init.log -# rather than 'Calculating upgrade...' in output -required_features: - - apt - - apt_hist_fmt - - apt_up_out -cloud_config: | - #cloud-config - packages: - - sl - - tree - package_update: true - package_upgrade: true -collect_scripts: - apt_history_cmdline: | - #!/bin/bash - grep ^Commandline: /var/log/apt/history.log - dpkg_show: | - #!/bin/bash - dpkg-query --show - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/runcmd.py b/tests/cloud_tests/testcases/modules/runcmd.py deleted file mode 100644 index 9fce3062..00000000 --- a/tests/cloud_tests/testcases/modules/runcmd.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestRunCmd(base.CloudTestCase): - """Test runcmd module.""" - - def test_run_cmd(self): - """Test run command worked.""" - out = self.get_data_file('run_cmd') - self.assertIn('cloud-init run cmd test', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/runcmd.yaml b/tests/cloud_tests/testcases/modules/runcmd.yaml deleted file mode 100644 index 8309a883..00000000 --- a/tests/cloud_tests/testcases/modules/runcmd.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# -# Run a simple command -# -cloud_config: | - #cloud-config - runcmd: - - echo cloud-init run cmd test > /var/tmp/run_cmd -collect_scripts: - run_cmd: | - #!/bin/bash - cat /var/tmp/run_cmd - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/seed_random_command.yaml b/tests/cloud_tests/testcases/modules/seed_random_command.yaml deleted file mode 100644 index 6a9157eb..00000000 --- a/tests/cloud_tests/testcases/modules/seed_random_command.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# -# Use uuid to create a random string -# -# 2016-11-15 Disabled as this is not working currently -# -enabled: False -cloud_config: | - #cloud-config - random_seed: - command: ["cat", "/proc/sys/kernel/random/uuid"] - command_required: true - file: /root/seed -collect_scripts: - seed_data: | - #!/bin/bash - cat /root/seed - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/seed_random_data.py b/tests/cloud_tests/testcases/modules/seed_random_data.py deleted file mode 100644 index db433d26..00000000 --- a/tests/cloud_tests/testcases/modules/seed_random_data.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSeedRandom(base.CloudTestCase): - """Test seed random module.""" - - def test_random_seed_data(self): - """Test random data passed in exists.""" - out = self.get_data_file('seed_data') - self.assertIn('MYUb34023nD:LFDK10913jk;dfnk:Df', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/seed_random_data.yaml b/tests/cloud_tests/testcases/modules/seed_random_data.yaml deleted file mode 100644 index a9b2c885..00000000 --- a/tests/cloud_tests/testcases/modules/seed_random_data.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# -# Push in random raw string to set as seed -# -cloud_config: | - #cloud-config - random_seed: - data: 'MYUb34023nD:LFDK10913jk;dfnk:Df' - encoding: raw - file: /root/seed -collect_scripts: - seed_data: | - #!/bin/bash - cat /root/seed - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_hostname.py b/tests/cloud_tests/testcases/modules/set_hostname.py deleted file mode 100644 index 1dbe64c2..00000000 --- a/tests/cloud_tests/testcases/modules/set_hostname.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestHostname(base.CloudTestCase): - """Test hostname module.""" - - ex_hostname = "cloudinit2" - - def test_hostname(self): - """Test hostname command shows correct output.""" - out = self.get_data_file('hostname') - self.assertIn(self.ex_hostname, out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_hostname.yaml b/tests/cloud_tests/testcases/modules/set_hostname.yaml deleted file mode 100644 index 071fb220..00000000 --- a/tests/cloud_tests/testcases/modules/set_hostname.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# -# Set the hostname and update /etc/hosts -# -required_features: - - hostname -cloud_config: | - #cloud-config - hostname: cloudinit2 - -collect_scripts: - hosts: | - #!/bin/bash - grep ^127 /etc/hosts - hostname: | - #!/bin/bash - hostname - fqdn: | - #!/bin/bash - hostname --fqdn - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py deleted file mode 100644 index a405b30b..00000000 --- a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests import CI_DOMAIN -from tests.cloud_tests.testcases import base - - -class TestHostnameFqdn(base.CloudTestCase): - """Test Hostname module.""" - - ex_hostname = "cloudinit1" - ex_fqdn = "cloudinit2." + CI_DOMAIN - - def test_hostname(self): - """Test hostname output.""" - out = self.get_data_file('hostname') - self.assertIn(self.ex_hostname, out) - - def test_hostname_fqdn(self): - """Test hostname fqdn output.""" - out = self.get_data_file('fqdn') - self.assertIn(self.ex_fqdn, out) - - def test_hosts(self): - """Test /etc/hosts file.""" - out = self.get_data_file('hosts') - self.assertIn('127.0.1.1 %s %s' % (self.ex_fqdn, self.ex_hostname), - out) - self.assertIn('127.0.0.1 localhost', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml deleted file mode 100644 index a85ee79e..00000000 --- a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# -# Set the hostname and update /etc/hosts -# -required_features: - - hostname -cloud_config: | - #cloud-config - manage_etc_hosts: true - hostname: cloudinit1 - # this needs changing if CI_DOMAIN were updated. - fqdn: cloudinit2.i9n.cloud-init.io -collect_scripts: - hosts: | - #!/bin/bash - grep ^127 /etc/hosts - hostname: | - #!/bin/bash - hostname - fqdn: | - #!/bin/bash - hostname --fqdn - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password.py b/tests/cloud_tests/testcases/modules/set_password.py deleted file mode 100644 index a29b2261..00000000 --- a/tests/cloud_tests/testcases/modules/set_password.py +++ /dev/null @@ -1,22 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestPassword(base.CloudTestCase): - """Test password module.""" - - # TODO add test to make sure password is actually "password" - - def test_shadow(self): - """Test ubuntu user in shadow.""" - out = self.get_data_file('shadow') - self.assertIn('ubuntu:', out) - - def test_sshd_config(self): - """Test sshd config allows passwords.""" - out = self.get_data_file('sshd_config') - self.assertIn('PasswordAuthentication yes', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password.yaml b/tests/cloud_tests/testcases/modules/set_password.yaml deleted file mode 100644 index 04d7c58a..00000000 --- a/tests/cloud_tests/testcases/modules/set_password.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# -# Set password of default user -# -required_features: - - ubuntu_user -cloud_config: | - #cloud-config - password: password - chpasswd: { expire: False } - ssh_pwauth: True -collect_scripts: - shadow: | - #!/bin/bash - cat /etc/shadow - sshd_config: | - #!/bin/bash - grep '^PasswordAuth' /etc/ssh/sshd_config - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_expire.py b/tests/cloud_tests/testcases/modules/set_password_expire.py deleted file mode 100644 index 967aca7b..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_expire.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestPasswordExpire(base.CloudTestCase): - """Test password module.""" - - def test_shadow(self): - """Test user frozen in shadow.""" - out = self.get_data_file('shadow') - self.assertIn('harry:!:', out) - self.assertIn('dick:!:', out) - self.assertIn('tom:!:', out) - self.assertIn('harry:!:', out) - - def test_sshd_config(self): - """Test sshd config allows passwords.""" - out = self.get_data_file('sshd_config') - self.assertIn('PasswordAuthentication yes', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_expire.yaml b/tests/cloud_tests/testcases/modules/set_password_expire.yaml deleted file mode 100644 index ba6344b9..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_expire.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# -# Expire password for all users -# -required_features: - - sshd -cloud_config: | - #cloud-config - chpasswd: { expire: True } - ssh_pwauth: yes - users: - - default - - name: tom - password: $1$xyz$sPMsLNmf66Ohl.ol6JvzE. - lock_passwd: false - - name: dick - password: $1$xyz$sPMsLNmf66Ohl.ol6JvzE. - lock_passwd: false - - name: harry - password: $1$xyz$sPMsLNmf66Ohl.ol6JvzE. - lock_passwd: false - - name: jane - password: $1$xyz$sPMsLNmf66Ohl.ol6JvzE. - lock_passwd: false -collect_scripts: - shadow: | - #!/bin/bash - cat /etc/shadow - sshd_config: | - #!/bin/bash - grep '^PasswordAuth' /etc/ssh/sshd_config - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_list.py b/tests/cloud_tests/testcases/modules/set_password_list.py deleted file mode 100644 index 375cd27d..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_list.py +++ /dev/null @@ -1,12 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestPasswordList(base.PasswordListTest, base.CloudTestCase): - """Test password setting via list in chpasswd/list.""" - - __test__ = True - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_list.yaml b/tests/cloud_tests/testcases/modules/set_password_list.yaml deleted file mode 100644 index fd3e1e44..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_list.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# -# Set password of list of users -# -cloud_config: | - #cloud-config - ssh_pwauth: yes - users: - - default - - name: tom - # md5 gotomgo - passwd: "$1$S7$tT1BEDIYrczeryDQJfdPe0" - lock_passwd: false - - name: dick - # md5 gocubsgo - passwd: "$1$ssisyfpf$YqvuJLfrrW6Cg/l53Pi1n1" - lock_passwd: false - - name: harry - # sha512 goharrygo - passwd: "$6$LF$9Z2p6rWK6TNC1DC6393ec0As.18KRAvKDbfsGJEdWN3sRQRwpdfoh37EQ3yUh69tP4GSrGW5XKHxMLiKowJgm/" - lock_passwd: false - - name: jane - # sha256 gojanego - passwd: "$5$iW$XsxmWCdpwIW8Yhv.Jn/R3uk6A4UaicfW5Xp7C9p9pg." - lock_passwd: false - - name: "mikey" - lock_passwd: false - chpasswd: - list: - - tom:mypassword123! - - dick:RANDOM - - harry:RANDOM - - mikey:$5$xZ$B2YGGEx2AOf4PeW48KC6.QyT1W2B4rZ9Qbltudtha89 -collect_scripts: - shadow: | - #!/bin/bash - cat /etc/shadow - sshd_config: | - #!/bin/bash - grep '^PasswordAuth' /etc/ssh/sshd_config - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_list_string.py b/tests/cloud_tests/testcases/modules/set_password_list_string.py deleted file mode 100644 index 8c2634c5..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_list_string.py +++ /dev/null @@ -1,12 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestPasswordListString(base.PasswordListTest, base.CloudTestCase): - """Test password setting via string in chpasswd/list.""" - - __test__ = True - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/set_password_list_string.yaml b/tests/cloud_tests/testcases/modules/set_password_list_string.yaml deleted file mode 100644 index e9fe54b0..00000000 --- a/tests/cloud_tests/testcases/modules/set_password_list_string.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# -# Set password of list of users as a string -# -cloud_config: | - #cloud-config - ssh_pwauth: yes - users: - - default - - name: tom - # md5 gotomgo - passwd: "$1$S7$tT1BEDIYrczeryDQJfdPe0" - lock_passwd: false - - name: dick - # md5 gocubsgo - passwd: "$1$ssisyfpf$YqvuJLfrrW6Cg/l53Pi1n1" - lock_passwd: false - - name: harry - # sha512 goharrygo - passwd: "$6$LF$9Z2p6rWK6TNC1DC6393ec0As.18KRAvKDbfsGJEdWN3sRQRwpdfoh37EQ3yUh69tP4GSrGW5XKHxMLiKowJgm/" - lock_passwd: false - - name: jane - # sha256 gojanego - passwd: "$5$iW$XsxmWCdpwIW8Yhv.Jn/R3uk6A4UaicfW5Xp7C9p9pg." - lock_passwd: false - - name: "mikey" - lock_passwd: false - chpasswd: - list: | - tom:mypassword123! 
- dick:RANDOM - harry:RANDOM - mikey:$5$xZ$B2YGGEx2AOf4PeW48KC6.QyT1W2B4rZ9Qbltudtha89 -collect_scripts: - shadow: | - #!/bin/bash - cat /etc/shadow - sshd_config: | - #!/bin/bash - grep '^PasswordAuth' /etc/ssh/sshd_config - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/snap.py b/tests/cloud_tests/testcases/modules/snap.py deleted file mode 100644 index ff68abbe..00000000 --- a/tests/cloud_tests/testcases/modules/snap.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script""" -from tests.cloud_tests.testcases import base - - -class TestSnap(base.CloudTestCase): - """Test snap module""" - - def test_snappy_version(self): - """Expect hello-world and core snaps are installed.""" - out = self.get_data_file('snaplist') - self.assertIn('core', out) - self.assertIn('hello-world', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/snap.yaml b/tests/cloud_tests/testcases/modules/snap.yaml deleted file mode 100644 index 322199c3..00000000 --- a/tests/cloud_tests/testcases/modules/snap.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# -# Install snappy -# -# Aug 23, 2018: Disabled due to requiring a proxy for testing -# tests do not handle the proxy well at this time. -enabled: False -required_features: - - snap -cloud_config: | - #cloud-config - package_update: true - snap: - squashfuse_in_container: true - commands: - - snap install hello-world -collect_scripts: - snaplist: | - #!/bin/bash - snap list - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py deleted file mode 100644 index 02935447..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSshKeyFingerprintsDisable(base.CloudTestCase): - """Test ssh key fingerprints module.""" - - def test_cloud_init_log(self): - """Verify disabled.""" - out = self.get_data_file('cloud-init.log') - self.assertIn('Skipping module named ssh-authkey-fingerprints, ' - 'logging of SSH fingerprints disabled', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml deleted file mode 100644 index d93893e2..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Disable fingerprint printing -# -required_features: - - syslog -cloud_config: | - #cloud-config - no_ssh_fingerprints: true -collect_scripts: - syslog: | - #!/bin/bash - cat /var/log/syslog - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py deleted file mode 100644 index 3510e75a..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py +++ /dev/null @@ -1,18 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
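[Editorial aside: the two set_password_list variants above exercise the fact that chpasswd's 'list' key may be given either as a YAML list or as a newline-separated block string, and both reduce to the same user:password pairs. The sketch below only demonstrates that equivalence at the YAML level; PyYAML is assumed to be available, as it is in the test harness itself, and the pairs() helper is invented for the demo rather than taken from cloud-init's handler.]

import yaml

list_form = yaml.safe_load("""
chpasswd:
  list:
    - tom:mypassword123!
    - dick:RANDOM
""")

string_form = yaml.safe_load("""
chpasswd:
  list: |
    tom:mypassword123!
    dick:RANDOM
""")

def pairs(config):
    entries = config["chpasswd"]["list"]
    if isinstance(entries, str):
        entries = entries.splitlines()
    return [tuple(entry.split(":", 1)) for entry in entries if entry]

assert pairs(list_form) == pairs(string_form)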
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSshKeyFingerprintsEnable(base.CloudTestCase): - """Test ssh key fingerprints module.""" - - def test_syslog(self): - """Verify output of syslog.""" - out = self.get_data_file('syslog') - self.assertRegex(out, r'256 SHA256:.*(ECDSA)') - self.assertRegex(out, r'256 SHA256:.*(ED25519)') - self.assertNotRegex(out, r'1024 SHA256:.*(DSA)') - self.assertNotRegex(out, r'2048 SHA256:.*(RSA)') - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml deleted file mode 100644 index 9f5dc34a..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# -# Print auth keys with different hash than md5 -# -# NOTE: testcase checks for '256 SHA256:.*(ECDSA)' on output line on trusty -# this fails as line in output reads '256:.*(ECDSA)' -required_features: - - syslog - - ssh_key_fmt -cloud_config: | - #cloud-config - ssh_genkeytypes: - - ecdsa - - ed25519 - ssh_authorized_keys: - - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXW9Gg5H7ehjdSc6qDzwNtgCy94XYHhEYlXZMO2+FJrH3wfHGiMfCwOHxcOMt2QiXItULthdeQWS9QjBSSjVRXf6731igFrqPFyS9qBlOQ5D29C4HBXFnQggGVpBNJ82IRJv7szbbe/vpgLBP4kttUza9Dr4e1YM1ln4PRnjfXea6T0m+m1ixNb5432pTXlqYOnNOxSIm1gHgMLxPuDrJvQERDKrSiKSjIdyC9Jd8t2e1tkNLY0stmckVRbhShmcJvlyofHWbc2Ca1mmtP7MlS1VQnfLkvU1IrFwkmaQmaggX6WR6coRJ6XFXdWcq/AI2K6GjSnl1dnnCxE8VCEXBlXgFzad+PMSG4yiL5j8Oo1ZVpkTdgBnw4okGqTYCXyZg6X00As9IBNQfZMFlQXlIo4FiWgj3CO5QHQOyOX6FuEumaU13GnERrSSdp9tCs1Qm3/DG2RSCQBWTfcgMcStIvKqvJ3IjFn0vGLvI3Ampnq9q1SHwmmzAPSdzcMA76HyMUA5VWaBvWHlUxzIM6unxZASnwvuCzpywSEB5J2OF+p6H+cStJwQ32XwmOG8pLp1srlVWpqZI58Du/lzrkPqONphoZx0LDV86w7RUz1ksDzAdcm0tvmNRFMN1a0frDs506oA3aWK0oDk4Nmvk8sXGTYYw3iQSkOvDUUlIsqdaO+w== -collect_scripts: - syslog: | - #!/bin/bash - cat /var/log/syslog - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_import_id.py b/tests/cloud_tests/testcases/modules/ssh_import_id.py deleted file mode 100644 index ef156f47..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_import_id.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
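[Editorial aside, illustrative only: the fingerprint test above greps syslog for sshd host-key lines and asserts which key types appear. The sketch below shows the same style of matching; the sample lines are invented stand-ins for the real syslog format, and the parentheses are escaped here, whereas the removed test's patterns leave them as regex groups.]

import re

SAMPLE = """\
ec2: 256 SHA256:abc123 root@host (ECDSA)
ec2: 256 SHA256:def456 root@host (ED25519)
"""

def fingerprinted_key_types(syslog_text):
    return set(re.findall(r"256 SHA256:\S+ \S+ \((ECDSA|ED25519)\)", syslog_text))

assert fingerprinted_key_types(SAMPLE) == {"ECDSA", "ED25519"}
assert not re.search(r"\((DSA|RSA)\)", SAMPLE)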
- -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSshImportId(base.CloudTestCase): - """Test ssh import id module.""" - - def test_authorized_keys(self): - """Test that ssh keys were imported.""" - out = self.get_data_file('auth_keys_ubuntu') - - self.assertIn('# ssh-import-id gh:powersj', out) - self.assertIn('# ssh-import-id lp:smoser', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_import_id.yaml b/tests/cloud_tests/testcases/modules/ssh_import_id.yaml deleted file mode 100644 index b62d3f69..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_import_id.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# -# Import a user's ssh key via gh or lp -# -required_features: - - ubuntu_user - - sudo -cloud_config: | - #cloud-config - ssh_import_id: - - gh:powersj - - lp:smoser -collect_scripts: - auth_keys_ubuntu: | - #!/bin/bash - cat /home/ubuntu/.ssh/authorized_keys - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_generate.py b/tests/cloud_tests/testcases/modules/ssh_keys_generate.py deleted file mode 100644 index b68f5565..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_keys_generate.py +++ /dev/null @@ -1,52 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSshKeysGenerate(base.CloudTestCase): - """Test ssh keys module.""" - - # TODO: Check cloud-init-output for the correct keys being generated - - def test_dsa_public(self): - """Test dsa public key not generated.""" - out = self.get_data_file('dsa_public') - self.assertEqual('', out) - - def test_dsa_private(self): - """Test dsa private key not generated.""" - out = self.get_data_file('dsa_private') - self.assertEqual('', out) - - def test_rsa_public(self): - """Test rsa public key not generated.""" - out = self.get_data_file('rsa_public') - self.assertEqual('', out) - - def test_rsa_private(self): - """Test rsa public key not generated.""" - out = self.get_data_file('rsa_private') - self.assertEqual('', out) - - def test_ecdsa_public(self): - """Test ecdsa public key generated.""" - out = self.get_data_file('ecdsa_public') - self.assertIsNotNone(out) - - def test_ecdsa_private(self): - """Test ecdsa public key generated.""" - out = self.get_data_file('ecdsa_private') - self.assertIsNotNone(out) - - def test_ed25519_public(self): - """Test ed25519 public key generated.""" - out = self.get_data_file('ed25519_public') - self.assertIsNotNone(out) - - def test_ed25519_private(self): - """Test ed25519 public key generated.""" - out = self.get_data_file('ed25519_private') - self.assertIsNotNone(out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml b/tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml deleted file mode 100644 index 0a7adf62..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# -# SSH keys generated using cloud-init -# -required_features: - - ubuntu_user -cloud_config: | - #cloud-config - ssh_genkeytypes: - - ecdsa - - ed25519 - authkey_hash: sha512 -collect_scripts: - dsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_dsa_key.pub - dsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_dsa_key - rsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_rsa_key.pub - rsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_rsa_key - ecdsa_public: | - #!/bin/bash 
- cat /etc/ssh/ssh_host_ecdsa_key.pub - ecdsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_ecdsa_key - ed25519_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_ed25519_key.pub - ed25519_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_ed25519_key - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_provided.py b/tests/cloud_tests/testcases/modules/ssh_keys_provided.py deleted file mode 100644 index add3f469..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_keys_provided.py +++ /dev/null @@ -1,58 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestSshKeysProvided(base.CloudTestCase): - """Test ssh keys module.""" - - def test_dsa_public(self): - """Test dsa public key passed in.""" - out = self.get_data_file('dsa_public') - self.assertIn('AAAAB3NzaC1kc3MAAACBAPkWy1zbchVIN7qTgM0/yyY8q4RZS8c' - 'NM4ZpeuE5UB/Nnr6OSU/nmbO8LuM', out) - - def test_dsa_private(self): - """Test dsa private key passed in.""" - out = self.get_data_file('dsa_private') - self.assertIn('MIIBuwIBAAKBgQD5Fstc23IVSDe6k4DNP8smPKuEWUvHDTOGaXr' - 'hOVAfzZ6+jklP', out) - - def test_rsa_public(self): - """Test rsa public key passed in.""" - out = self.get_data_file('rsa_public') - self.assertIn('AAAAB3NzaC1yc2EAAAADAQABAAABAQC0/Ho+o3eJISydO2JvIgT' - 'LnZOtrxPl+fSvJfKDjoOLY0HB2eOjy2s2/2N6d9X9SGZ4', out) - - def test_rsa_private(self): - """Test rsa public key passed in.""" - out = self.get_data_file('rsa_private') - self.assertIn('4DOkqNiUGl80Zp1RgZNohHUXlJMtAbrIlAVEk+mTmg7vjfyp2un' - 'RQvLZpMRdywBm', out) - - def test_ecdsa_public(self): - """Test ecdsa public key passed in.""" - out = self.get_data_file('ecdsa_public') - self.assertIn('AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAAB' - 'BBFsS5Tvky/IC/dXhE/afxxU', out) - - def test_ecdsa_private(self): - """Test ecdsa public key passed in.""" - out = self.get_data_file('ecdsa_private') - self.assertIn('AwEHoUQDQgAEWxLlO+TL8gL91eET9p/HFQbqR1A691AkJgZk3jY' - '5mpZqxgX4vcgb', out) - - def test_ed25519_public(self): - """Test ed25519 public key passed in.""" - out = self.get_data_file('ed25519_public') - self.assertIn('AAAAC3NzaC1lZDI1NTE5AAAAINudAZSu4vjZpVWzId5pXmZg1M6' - 'G15dqjQ2XkNVOEnb5', out) - - def test_ed25519_private(self): - """Test ed25519 public key passed in.""" - out = self.get_data_file('ed25519_private') - self.assertIn('XAAAAAtzc2gtZWQyNTUxOQAAACDbnQGUruL42aVVsyHeaV5mYNT' - 'OhteXao0Nl5DVThJ2+Q', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml b/tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml deleted file mode 100644 index 41f63550..00000000 --- a/tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml +++ /dev/null @@ -1,99 +0,0 @@ -# -# SSH keys provided via cloud config -# -enabled: False -required_features: - - ubuntu_user - - sudo -cloud_config: | - #cloud-config - disable_root: false - ssh_authorized_keys: - - ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQDXW9Gg5H7ehjdSc6qDzwNtgCy94XYHhEYlXZMO2+FJrH3wfHGiMfCwOHxcOMt2QiXItULthdeQWS9QjBSSjVRXf6731igFrqPFyS9qBlOQ5D29C4HBXFnQggGVpBNJ82IRJv7szbbe/vpgLBP4kttUza9Dr4e1YM1ln4PRnjfXea6T0m+m1ixNb5432pTXlqYOnNOxSIm1gHgMLxPuDrJvQERDKrSiKSjIdyC9Jd8t2e1tkNLY0stmckVRbhShmcJvlyofHWbc2Ca1mmtP7MlS1VQnfLkvU1IrFwkmaQmaggX6WR6coRJ6XFXdWcq/AI2K6GjSnl1dnnCxE8VCEXBlXgFzad+PMSG4yiL5j8Oo1ZVpkTdgBnw4okGqTYCXyZg6X00As9IBNQfZMFlQXlIo4FiWgj3CO5QHQOyOX6FuEumaU13GnERrSSdp9tCs1Qm3/DG2RSCQBWTfcgMcStIvKqvJ3IjFn0vGLvI3Ampnq9q1SHwmmzAPSdzcMA76HyMUA5VWaBvWHlUxzIM6unxZASnwvuCzpywSEB5J2OF+p6H+cStJwQ32XwmOG8pLp1srlVWpqZI58Du/lzrkPqONphoZx0LDV86w7RUz1ksDzAdcm0tvmNRFMN1a0frDs506oA3aWK0oDk4Nmvk8sXGTYYw3iQSkOvDUUlIsqdaO+w== - ssh_keys: - rsa_private: | - -----BEGIN RSA PRIVATE KEY----- - MIIEowIBAAKCAQEAtPx6PqN3iSEsnTtibyIEy52Tra8T5fn0ryXyg46Di2NBwdnj - o8trNv9jenfV/UhmePl58lXjT43wV8OCMl6KsYXyBdegM35NNtono4I4mLLKFMR9 - 9TOtDn6iYcaNenVhF3ZCj9Z2nNOlTrdc0uchHqKMrxLjCRCUrL91Uf+xioTF901Y - RM+ZqC5lT92yAL76F4qPF+Lq1QtUfNfUIwwvOp5ccDZLPxij0YvyBzubYye9hJHu - yjbJv78R4JHV+L2WhzSoX3W/6WrxVzeXqFGqH894ccOaC/7tnqSP6V8lIQ6fE2+c - DurJcpM3CJRgkndGHjtU55Y71YkcdLksSMvezQIDAQABAoIBAQCrU4IJP8dNeaj5 - IpkY6NQvR/jfZqfogYi+MKb1IHin/4rlDfUvPcY9pt8ttLlObjYK+OcWn3Vx/sRw - 4DOkqNiUGl80Zp1RgZNohHUXlJMtAbrIlAVEk+mTmg7vjfyp2unRQvLZpMRdywBm - lq95OrCghnG03aUsFJUZPpi5ydnwbA12ma+KHkG0EzaVlhA7X9N6z0K6U+zue2gl - goMLt/MH0rsYawkHrwiwXaIFQeyV4MJP0vmrZLbFk1bycu9X/xPtTYotWyWo4eKA - cb05uu04qwexkKHDM0KXtT0JecbTo2rOefFo8Uuab6uJY+fEHNocZ+v1vLA4aOxJ - ovp1JuXlAoGBAOWYNgKrlTfy5n0sKsNk+1RuL2jHJZJ3HMd0EIt7/fFQN3Fi08Hu - jtntqD30Wj+DJK8b8Lrt66FruxyEJm5VhVmwkukrLR5ige2f6ftZnoFCmdyy+0zP - dnPZSUe2H5ZPHa+qthJgHLn+al2P04tGh+1fGHC2PbP+e0Co+/ZRIOxrAoGBAMnN - IEen9/FRsqvnDd36I8XnJGskVRTZNjylxBmbKcuMWm+gNhOI7gsCAcqzD4BYZjjW - pLhrt/u9p+l4MOJy6OUUdM/okg12SnJEGryysOcVBcXyrvOfklWnANG4EAH5jt1N - ftTb1XTxzvWVuR/WJK0B5MZNYM71cumBdUDtPi+nAoGAYmoIXMSnxb+8xNL10aOr - h9ljQQp8NHgSQfyiSufvRk0YNuYh1vMnEIsqnsPrG2Zfhx/25GmvoxXGssaCorDN - 5FAn6QK06F1ZTD5L0Y3sv4OI6G1gAuC66ZWuL6sFhyyKkQ4f1WiVZ7SCa3CHQSAO - i9VDaKz1bf4bXvAQcNj9v9kCgYACSOZCqW4vN0OUmqsXhkt9ZB6Pb/veno70pNPR - jmYsvcwQU3oJQpWfXkhy6RAV3epaXmPDCsUsfns2M3wqNC7a2R5xdCqjKGGzZX4A - AO3rz9se4J6Gd5oKijeCKFlWDGNHsibrdgm2pz42nZlY+O21X74dWKbt8O16I1MW - hxkbJQKBgAXfuen/srVkJgPuqywUYag90VWCpHsuxdn+fZJa50SyZADr+RbiDfH2 - vek8Uo8ap8AEsv4Rfs9opUcUZevLp3g2741eOaidHVLm0l4iLIVl03otGOqvSzs+ - A3tFPEOxauXpzCt8f8eXsz0WQXAgIKW2h8zu5QHjomioU3i27mtE - -----END RSA PRIVATE KEY----- - rsa_public: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0/Ho+o3eJISydO2JvIgTLnZOtrxPl+fSvJfKDjoOLY0HB2eOjy2s2/2N6d9X9SGZ4+XnyVeNPjfBXw4IyXoqxhfIF16Azfk022iejgjiYssoUxH31M60OfqJhxo16dWEXdkKP1nac06VOt1zS5yEeooyvEuMJEJSsv3VR/7GKhMX3TVhEz5moLmVP3bIAvvoXio8X4urVC1R819QjDC86nlxwNks/GKPRi/IHO5tjJ72Eke7KNsm/vxHgkdX4vZaHNKhfdb/pavFXN5eoUaofz3hxw5oL/u2epI/pXyUhDp8Tb5wO6slykzcIlGCSd0YeO1TnljvViRx0uSxIy97N root@xenial-lxd - dsa_private: | - -----BEGIN DSA PRIVATE KEY----- - MIIBuwIBAAKBgQD5Fstc23IVSDe6k4DNP8smPKuEWUvHDTOGaXrhOVAfzZ6+jklP - 55mzvC7jO53PWWC31hq10xBoWdev0WtcNF9Tv+4bAa1263y51Rqo4GI7xx+xic1d - mLqqfYijBT9k48J/1tV0cs1Wjs6FP/IJTD/kYVC930JjYQMi722lBnUxsQIVAL7i - z3fTGKTvSzvW0wQlwnYpS2QFAoGANp+KdyS9V93HgxGQEN1rlj/TSv/a3EVdCKtE - nQf55aPHxDAVDVw5JtRh4pZbbRV4oGRPc9KOdjo5BU28vSM3Lmhkb+UaaDXwHkgI - nK193o74DKjADWZxuLyyiKHiMOhxozoxDfjWxs8nz6uqvSW0pr521EwIY6RajbED - nZ2a3GkCgYEAyoUomNRB6bmpsIfzt8zdtqLP5umIj2uhr9MVPL8/QdbxmJ72Z7pf - Q2z1B7QAdIBGOlqJXtlau7ABhWK29Efe+99ObyTSSdDc6RCDeAwUmBAiPRQhDH2E - wExw3doDSCUb28L1B50wBzQ8mC3KXp6C7IkBXWspb16DLHUHFSI8bkICFA5kVUcW - 
nCPOXEQsayANi8+Cb7BH - -----END DSA PRIVATE KEY----- - dsa_public: ssh-dss AAAAB3NzaC1kc3MAAACBAPkWy1zbchVIN7qTgM0/yyY8q4RZS8cNM4ZpeuE5UB/Nnr6OSU/nmbO8LuM7nc9ZYLfWGrXTEGhZ16/Ra1w0X1O/7hsBrXbrfLnVGqjgYjvHH7GJzV2Yuqp9iKMFP2Tjwn/W1XRyzVaOzoU/8glMP+RhUL3fQmNhAyLvbaUGdTGxAAAAFQC+4s930xik70s71tMEJcJ2KUtkBQAAAIA2n4p3JL1X3ceDEZAQ3WuWP9NK/9rcRV0Iq0SdB/nlo8fEMBUNXDkm1GHillttFXigZE9z0o52OjkFTby9IzcuaGRv5RpoNfAeSAicrX3ejvgMqMANZnG4vLKIoeIw6HGjOjEN+NbGzyfPq6q9JbSmvnbUTAhjpFqNsQOdnZrcaQAAAIEAyoUomNRB6bmpsIfzt8zdtqLP5umIj2uhr9MVPL8/QdbxmJ72Z7pfQ2z1B7QAdIBGOlqJXtlau7ABhWK29Efe+99ObyTSSdDc6RCDeAwUmBAiPRQhDH2EwExw3doDSCUb28L1B50wBzQ8mC3KXp6C7IkBXWspb16DLHUHFSI8bkI= root@xenial-lxd - ed25519_private: | - -----BEGIN OPENSSH PRIVATE KEY----- - b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW - QyNTUxOQAAACDbnQGUruL42aVVsyHeaV5mYNTOhteXao0Nl5DVThJ2+QAAAJgwt+lcMLfp - XAAAAAtzc2gtZWQyNTUxOQAAACDbnQGUruL42aVVsyHeaV5mYNTOhteXao0Nl5DVThJ2+Q - AAAEDQlFZpz9q8+/YJHS9+jPAqy2ZT6cGEv8HTB6RZtTjd/dudAZSu4vjZpVWzId5pXmZg - 1M6G15dqjQ2XkNVOEnb5AAAAD3Jvb3RAeGVuaWFsLWx4ZAECAwQFBg== - -----END OPENSSH PRIVATE KEY----- - ed25519_public: ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINudAZSu4vjZpVWzId5pXmZg1M6G15dqjQ2XkNVOEnb5 root@xenial-lxd - ecdsa_private: | - -----BEGIN EC PRIVATE KEY----- - MHcCAQEEIDuK+QFc1wmyJY8uDqQVa1qHte30Rk/fdLxGIBkwJAyOoAoGCCqGSM49 - AwEHoUQDQgAEWxLlO+TL8gL91eET9p/HFQbqR1A691AkJgZk3jY5mpZqxgX4vcgb - 7f/CtXuM6s2svcDJqAeXr6Wk8OJJcMxylA== - -----END EC PRIVATE KEY----- - ecdsa_public: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFsS5Tvky/IC/dXhE/afxxUG6kdQOvdQJCYGZN42OZqWasYF+L3IG+3/wrV7jOrNrL3AyagHl6+lpPDiSXDMcpQ= root@xenial-lxd -collect_scripts: - dsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_dsa_key.pub - dsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_dsa_key - rsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_rsa_key.pub - rsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_rsa_key - ecdsa_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_ecdsa_key.pub - ecdsa_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_ecdsa_key - ed25519_public: | - #!/bin/bash - cat /etc/ssh/ssh_host_ed25519_key.pub - ed25519_private: | - #!/bin/bash - cat /etc/ssh/ssh_host_ed25519_key - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/timezone.py b/tests/cloud_tests/testcases/modules/timezone.py deleted file mode 100644 index 654fa53d..00000000 --- a/tests/cloud_tests/testcases/modules/timezone.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestTimezone(base.CloudTestCase): - """Test timezone module.""" - - def test_timezone(self): - """Test date prints correct timezone.""" - out = self.get_data_file('timezone') - self.assertEqual('HDT', out.rstrip()) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/timezone.yaml b/tests/cloud_tests/testcases/modules/timezone.yaml deleted file mode 100644 index 5112aa9f..00000000 --- a/tests/cloud_tests/testcases/modules/timezone.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# -# Set system timezone -# -required_features: - - daylight_time -cloud_config: | - #cloud-config - timezone: US/Aleutian -collect_scripts: - timezone: | - #!/bin/bash - # date will convert this to system's configured time zone. - # use a static date to avoid dealing with daylight savings. 
- date "+%Z" --date="Thu, 03 Nov 2016 00:47:00 -0400" - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/user_groups.py b/tests/cloud_tests/testcases/modules/user_groups.py deleted file mode 100644 index 4067348d..00000000 --- a/tests/cloud_tests/testcases/modules/user_groups.py +++ /dev/null @@ -1,49 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestUserGroups(base.CloudTestCase): - """Example cloud-config test.""" - - def test_group_ubuntu(self): - """Test ubuntu group exists.""" - out = self.get_data_file('group_ubuntu') - self.assertRegex(out, r'ubuntu:x:[0-9]{4}:') - - def test_group_cloud_users(self): - """Test cloud users group exists.""" - out = self.get_data_file('group_cloud_users') - self.assertRegex(out, r'cloud-users:x:[0-9]{4}:barfoo') - - def test_user_ubuntu(self): - """Test ubuntu user exists.""" - out = self.get_data_file('user_ubuntu') - self.assertRegex( - out, r'ubuntu:x:[0-9]{4}:[0-9]{4}:Ubuntu:/home/ubuntu:/bin/bash') - - def test_user_foobar(self): - """Test foobar user exists.""" - out = self.get_data_file('user_foobar') - self.assertRegex( - out, r'foobar:x:[0-9]{4}:[0-9]{4}:Foo B. Bar:/home/foobar:') - - def test_user_barfoo(self): - """Test barfoo user exists.""" - out = self.get_data_file('user_barfoo') - self.assertRegex( - out, r'barfoo:x:[0-9]{4}:[0-9]{4}:Bar B. Foo:/home/barfoo:') - - def test_user_cloudy(self): - """Test cloudy user exists.""" - out = self.get_data_file('user_cloudy') - self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') - - def test_user_root_in_secret(self): - """Test root user is in 'secret' group.""" - _user, _, groups = self.get_data_file('root_groups').partition(":") - self.assertIn("secret", groups.split(), - msg="User root is not in group 'secret'") - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/user_groups.yaml b/tests/cloud_tests/testcases/modules/user_groups.yaml deleted file mode 100644 index 91b0e281..00000000 --- a/tests/cloud_tests/testcases/modules/user_groups.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# -# Create groups and users with various options -# -required_features: - - ubuntu_user -cloud_config: | - #cloud-config - # Add groups to the system - groups: - - secret: [root] - - cloud-users - - # Add users to the system. Users are added after groups are added. - users: - - default - - name: foobar - gecos: Foo B. Bar - primary_group: foobar - groups: users - expiredate: '2038-01-19' - lock_passwd: false - passwd: $6$j212wezy$7H/1LT4f9/N3wpgNunhsIqtMj62OKiS3nyNwuizouQc3u7MbYCarYeAHWYPYb2FT.lbioDm2RrkJPb9BZMN1O/ - - name: barfoo - gecos: Bar B. 
Foo - sudo: ALL=(ALL) NOPASSWD:ALL - groups: [cloud-users, secret] - lock_passwd: true - - name: cloudy - gecos: Magic Cloud App Daemon User - inactive: '5' - system: true -collect_scripts: - group_ubuntu: | - #!/bin/bash - getent group ubuntu - group_cloud_users: | - #!/bin/bash - getent group cloud-users - user_ubuntu: | - #!/bin/bash - getent passwd ubuntu - user_foobar: | - #!/bin/bash - getent passwd foobar - user_barfoo: | - #!/bin/bash - getent passwd barfoo - user_cloudy: | - #!/bin/bash - getent passwd cloudy - root_groups: | - #!/bin/bash - groups root - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/write_files.py b/tests/cloud_tests/testcases/modules/write_files.py deleted file mode 100644 index 526a2ebd..00000000 --- a/tests/cloud_tests/testcases/modules/write_files.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""cloud-init Integration Test Verify Script.""" -from tests.cloud_tests.testcases import base - - -class TestWriteFiles(base.CloudTestCase): - """Example cloud-config test.""" - - def test_b64(self): - """Test b64 encoded file reads as ascii.""" - out = self.get_data_file('file_b64') - self.assertIn('ASCII text', out) - - def test_binary(self): - """Test binary file reads as executable.""" - out = self.get_data_file('file_binary').strip() - md5 = "3801184b97bb8c6e63fa0e1eae2920d7" - sha256 = ("2c791c4037ea5bd7e928d6a87380f8ba7a803cd83d" - "5e4f269e28f5090f0f2c9a") - self.assertIn(out, (md5 + " -", sha256 + " -")) - - def test_gzip(self): - """Test gzip file shows up as a shell script.""" - out = self.get_data_file('file_gzip') - self.assertIn('POSIX shell script, ASCII text executable', out) - - def test_text(self): - """Test text shows up as ASCII text.""" - out = self.get_data_file('file_text') - self.assertIn('ASCII text', out) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/testcases/modules/write_files.yaml b/tests/cloud_tests/testcases/modules/write_files.yaml deleted file mode 100644 index cc7ea4bd..00000000 --- a/tests/cloud_tests/testcases/modules/write_files.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# -# Write various file types -# -# NOTE: on trusty 'file' has an output formatting error for binary files and -# has 2 spaces in 'LSB executable', which causes a failure here -# -# NOTE: the binary data can be any binary data, not only executables -# and can be generated via the base 64 command as such: -# $ base64 < hello > hello.txt -# the opposite is running: -# $ base64 -d < hello.txt > hello -# -required_features: - - no_file_fmt_e -cloud_config: | - #cloud-config - write_files: - - encoding: b64 - content: CiMgVGhpcyBmaWxlIGNvbnRyb2xzIHRoZSBzdGF0ZSBvZiBTRUxpbnV4 - owner: root:root - path: /root/file_b64 - permissions: '0644' - - content: | - # My new /root/file_text - - SMBDOPTIONS="-D" - path: /root/file_text - - content: !!binary | - /Z/xrHR4WINT0UNoKPQKbuovp6+Js+JK - path: /root/file_binary - permissions: '0555' - - encoding: gzip - content: !!binary | - H4sIAIDb/U8C/1NW1E/KzNMvzuBKTc7IV8hIzcnJVyjPL8pJ4QIA6N+MVxsAAAA= - path: /root/file_gzip - permissions: '0755' -collect_scripts: - file_b64: | - #!/bin/bash - file /root/file_b64 - file_text: | - #!/bin/bash - file /root/file_text - file_binary: | - #!/bin/bash - for hasher in md5sum sha256sum; do - $hasher /// - @return_value: {: {: []}} - """ - if not os.path.isdir(data_dir): - raise ValueError("bad data dir") - - res = {} - for platform in os.listdir(data_dir): - if not os.path.isdir(os.path.join(data_dir, 
platform)): - continue - - res[platform] = {} - for os_name in os.listdir(os.path.join(data_dir, platform)): - res[platform][os_name] = [ - os.path.sep.join(f.split(os.path.sep)[-2:]) for f in - glob.glob(os.sep.join((data_dir, platform, os_name, '*/*')))] - - LOG.debug('found test data: %s\n', res) - return res - - -def gen_instance_name(prefix='cloud-test', image_desc=None, use_desc=None, - max_len=63, delim='-', max_tries=16, used_list=None, - valid=string.ascii_lowercase + string.digits): - """Generate an unique name for a test instance. - - @param prefix: name prefix, defaults to cloud-test, default should be left - @param image_desc: short string (len <= 16) with image desc - @param use_desc: short string (len <= 30) with usage desc - @param max_len: maximum name length, defaults to 64 chars - @param delim: delimiter to use between tokens - @param max_tries: maximum tries to find a unique name before giving up - @param used_list: already used names, or none to not check - @param valid: string of valid characters for name - @return_value: valid, unused name, may raise StopIteration - """ - unknown = 'unknown' - - def join(*args): - """Join args with delim.""" - return delim.join(args) - - def fill(*args): - """Join name elems and fill rest with random data.""" - name = join(*args) - num = max_len - len(name) - len(delim) - return join(name, ''.join(random.choice(valid) for _ in range(num))) - - def clean(elem, max_len): - """Filter bad characters out of elem and trim to length.""" - elem = elem.lower()[:max_len] if elem else unknown - return ''.join(c if c in valid else delim for c in elem) - - return next(name for name in - (fill(prefix, clean(image_desc, 16), clean(use_desc, 30)) - for _ in range(max_tries)) - if not used_list or name not in used_list) - - -def sorted_unique(iterable, key=None, reverse=False): - """Create unique sorted list. - - @param iterable: the data structure to sort - @param key: if you have a specific key - @param reverse: to reverse or not - @return_value: a sorted list of unique items in iterable - """ - return sorted(set(iterable), key=key, reverse=reverse) - - -def get_os_family(os_name): - """Get os family type for os_name. - - @param os_name: name of os - @return_value: family name for os_name - """ - return next((k for k, v in OS_FAMILY_MAPPING.items() - if os_name.lower() in v), None) - - -def current_verbosity(): - """Get verbosity currently in effect from log level. - - @return_value: verbosity, 0-2, 2=verbose, 0=quiet - """ - return max(min(3 - int(LOG.level / 10), 2), 0) - - -@contextmanager -def emit_dots_on_travis(): - """ - A context manager that emits a dot every 10 seconds if running on Travis. - - Travis will kill jobs that don't emit output for a certain amount of time. - This context manager spins up a background process which will emit a dot to - stdout every 10 seconds to avoid being killed. - - It should be wrapped selectively around operations that are known to take a - long time. - """ - if os.environ.get('TRAVIS') != "true": - # If we aren't on Travis, don't do anything. - yield - return - - def emit_dots(): - while True: - print(".") - time.sleep(10) - - dot_process = multiprocessing.Process(target=emit_dots) - dot_process.start() - try: - yield - finally: - dot_process.terminate() - - -def is_writable_dir(path): - """Make sure dir is writable. 
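[Editorial aside: a worked example of the mapping current_verbosity() above performs. The expression is the same max(min(3 - level/10, 2), 0) formula from the helper; the assertions just spell out what it yields for the standard logging levels.]

import logging

def verbosity_for(level):
    return max(min(3 - int(level / 10), 2), 0)

assert verbosity_for(logging.DEBUG) == 2     # 10 -> most verbose
assert verbosity_for(logging.INFO) == 1      # 20
assert verbosity_for(logging.WARNING) == 0   # 30 -> quiet
assert verbosity_for(logging.CRITICAL) == 0  # 50, clamped at 0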
- - @param path: path to determine if writable - @return_value: boolean with result - """ - try: - c_util.ensure_dir(path) - os.remove(tempfile.mkstemp(dir=os.path.abspath(path))[1]) - except (IOError, OSError): - return False - return True - - -def is_clean_writable_dir(path): - """Make sure dir is empty and writable, creating it if it does not exist. - - @param path: path to check - @return_value: True/False if successful - """ - path = os.path.abspath(path) - if not (is_writable_dir(path) and len(os.listdir(path)) == 0): - return False - return True - - -def configure_yaml(): - """Clean yaml.""" - yaml.add_representer(str, (lambda dumper, data: dumper.represent_scalar( - 'tag:yaml.org,2002:str', data, style='|' if '\n' in data else ''))) - - -def yaml_format(data, content_type=None): - """Format data as yaml. - - @param data: data to dump - @param header: if specified, add a header to the dumped data - @return_value: yaml string - """ - configure_yaml() - content_type = ( - '#{}\n'.format(content_type.strip('#\n')) if content_type else '') - return content_type + yaml.dump(data, indent=2, default_flow_style=False) - - -def yaml_dump(data, path): - """Dump data to path in yaml format.""" - c_util.write_file(os.path.abspath(path), yaml_format(data), omode='w') - - -def merge_results(data, path): - """Handle merging results from collect phase and verify phase.""" - current = {} - if os.path.exists(path): - with open(path, 'r') as fp: - current = c_util.load_yaml(fp.read()) - current.update(data) - yaml_dump(current, path) - - -def rel_files(basedir): - """List of files under directory by relative path, not including dirs. - - @param basedir: directory to search - @return_value: list or relative paths - """ - basedir = os.path.normpath(basedir) - return [path[len(basedir) + 1:] for path in - glob.glob(os.path.join(basedir, '**'), recursive=True) - if not os.path.isdir(path)] - - -def flat_tar(output, basedir, owner='root', group='root'): - """Create a flat tar archive (no leading ./) from basedir. - - @param output: output tar file to write - @param basedir: base directory for archive - @param owner: owner of archive files - @param group: group archive files belong to - @return_value: none - """ - subp.subp(['tar', 'cf', output, '--owner', owner, '--group', group, - '-C', basedir] + rel_files(basedir), capture=True) - - -def parse_conf_list(entries, valid=None, boolean=False): - """Parse config in a list of strings in key=value format. - - @param entries: list of key=value strings - @param valid: list of valid keys in result, return None if invalid input - @param boolean: if true, then interpret all values as booleans - @return_value: dict of configuration or None if invalid - """ - res = {key: value.lower() == 'true' if boolean else value - for key, value in (i.split('=') for i in entries)} - return res if not valid or all(k in valid for k in res.keys()) else None - - -def update_args(args, updates, preserve_old=True): - """Update cmdline arguments from a dictionary. - - @param args: cmdline arguments - @param updates: dictionary of {arg_name: new_value} mappings - @param preserve_old: if true, create a deep copy of args before updating - @return_value: updated cmdline arguments - """ - args = copy.deepcopy(args) if preserve_old else args - if updates: - vars(args).update(updates) - return args - - -def update_user_data(user_data, updates, dump_to_yaml=True): - """Update user_data from dictionary. 
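[Editorial aside: illustrative usage of parse_conf_list() above. The function body is repeated only so the example runs on its own; it shows the plain key=value case, the boolean mode, and the None return when an unexpected key is present.]

def parse_conf_list(entries, valid=None, boolean=False):
    res = {key: value.lower() == 'true' if boolean else value
           for key, value in (i.split('=') for i in entries)}
    return res if not valid or all(k in valid for k in res.keys()) else None

assert parse_conf_list(['a=1', 'b=2']) == {'a': '1', 'b': '2'}
assert parse_conf_list(['force=True'], boolean=True) == {'force': True}
assert parse_conf_list(['bogus=1'], valid=['known']) is None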
- - @param user_data: user data as yaml string or dict - @param updates: dictionary to merge with user data - @param dump_to_yaml: return as yaml dumped string if true - @return_value: updated user data, as yaml string if dump_to_yaml is true - """ - user_data = (c_util.load_yaml(user_data) - if isinstance(user_data, str) else copy.deepcopy(user_data)) - user_data.update(updates) - return (yaml_format(user_data, content_type='cloud-config') - if dump_to_yaml else user_data) - - -def shell_safe(cmd): - """Produce string safe shell string. - - Create a string that can be passed to: - set -- - to produce the same array that cmd represents. - - Internally we utilize 'getopt's ability/knowledge on how to quote - strings to be safe for shell. This implementation could be changed - to be pure python. It is just a matter of correctly escaping - or quoting characters like: ' " ^ & $ ; ( ) ... - - @param cmd: command as a list - """ - out = subprocess.check_output( - ["getopt", "--shell", "sh", "--options", "", "--", "--"] + list(cmd)) - # out contains ' -- \n'. drop the ' -- ' and the '\n' - return out.decode()[4:-1] - - -def shell_pack(cmd): - """Return a string that can shuffled through 'sh' and execute cmd. - - In Python subprocess terms: - check_output(cmd) == check_output(shell_pack(cmd), shell=True) - - @param cmd: list or string of command to pack up - """ - - if isinstance(cmd, str): - cmd = [cmd] - else: - cmd = list(cmd) - - stuffed = shell_safe(cmd) - # for whatever reason b64encode returns bytes when it is clearly - # representable as a string by nature of being base64 encoded. - b64 = base64.b64encode(stuffed.encode()).decode() - return 'eval set -- "$(echo %s | base64 --decode)" && exec "$@"' % b64 - - -def shell_quote(cmd): - if isinstance(cmd, (tuple, list)): - return ' '.join([shlex.quote(x) for x in cmd]) - return shlex.quote(cmd) - - -class TargetBase(object): - _tmp_count = 0 - - def execute(self, command, stdin=None, env=None, - rcs=None, description=None): - """Execute command in instance, recording output, error and exit code. - - Assumes functional networking and execution as root with the - target filesystem being available at /. - - @param command: the command to execute as root inside the image - if command is a string, then it will be executed as: - ['sh', '-c', command] - @param stdin: bytes content for standard in - @param env: environment variables - @param rcs: return codes. - None (default): non-zero exit code will raise exception. - False: any is allowed (No execption raised). - list of int: any rc not in the list will raise exception. - @param description: purpose of command - @return_value: tuple containing stdout data, stderr data, exit code - """ - if isinstance(command, str): - command = ['sh', '-c', command] - - if rcs is None: - rcs = (0,) - - if description: - LOG.debug('executing "%s"', description) - else: - LOG.debug("executing command: %s", shell_quote(command)) - - out, err, rc = self._execute(command=command, stdin=stdin, env=env) - - # False means accept anything. - if (rcs is False or rc in rcs): - return out, err, rc - - raise InTargetExecuteError(out, err, rc, command, description) - - def _execute(self, command, stdin=None, env=None): - """Execute command in inside, return stdout, stderr and exit code. - - Assumes functional networking and execution as root with the - target filesystem being available at /. 
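[Editorial aside: a self-contained sketch of the round trip the shell_pack() docstring above promises, namely that running the packed form through sh gives the same output as running the command directly. The two helpers are copied from the module so the snippet runs alone; like the originals, it assumes util-linux getopt, coreutils base64 and a POSIX sh on PATH.]

import base64
import subprocess

def shell_safe(cmd):
    # let getopt(1) do the quoting, as the helper above does
    out = subprocess.check_output(
        ["getopt", "--shell", "sh", "--options", "", "--", "--"] + list(cmd))
    return out.decode()[4:-1]    # drop the leading ' -- ' and trailing newline

def shell_pack(cmd):
    stuffed = shell_safe([cmd] if isinstance(cmd, str) else list(cmd))
    b64 = base64.b64encode(stuffed.encode()).decode()
    return 'eval set -- "$(echo %s | base64 --decode)" && exec "$@"' % b64

direct = subprocess.check_output(["echo", "hello world"])
via_sh = subprocess.check_output(["sh", "-c", shell_pack(["echo", "hello world"])])
assert direct == via_sh == b"hello world\n"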
- - @param stdin: bytes content for standard in - @param env: environment variables - @return_value: tuple containing stdout data, stderr data, exit code - - This is intended to be implemented by the Image or Instance. - Many callers will use the higher level 'execute'.""" - raise NotImplementedError("_execute must be implemented by subclass.") - - def read_data(self, remote_path, decode=False): - """Read data from instance filesystem. - - @param remote_path: path in instance - @param decode: decode data before returning. - @return_value: content of remote_path as bytes if 'decode' is False, - and as string if 'decode' is True. - """ - # when sh is invoked with '-c', then the first argument is "$0" - # which is commonly understood as the "program name". - # 'read_data' is the program name, and 'remote_path' is '$1' - stdout, _stderr, rc = self._execute( - ["sh", "-c", 'exec cat "$1"', 'read_data', remote_path]) - if rc != 0: - raise RuntimeError("Failed to read file '%s'" % remote_path) - - if decode: - return stdout.decode() - return stdout - - def write_data(self, remote_path, data): - """Write data to instance filesystem. - - @param remote_path: path in instance - @param data: data to write in bytes - """ - # when sh is invoked with '-c', then the first argument is "$0" - # which is commonly understood as the "program name". - # 'write_data' is the program name, and 'remote_path' is '$1' - _, _, rc = self._execute( - ["sh", "-c", 'exec cat >"$1"', 'write_data', remote_path], - stdin=data) - - if rc != 0: - raise RuntimeError("Failed to write to '%s'" % remote_path) - return - - def pull_file(self, remote_path, local_path): - """Copy file at 'remote_path', from instance to 'local_path'. - - @param remote_path: path on remote instance - @param local_path: path on local instance - """ - with open(local_path, 'wb') as fp: - fp.write(self.read_data(remote_path)) - - def push_file(self, local_path, remote_path): - """Copy file at 'local_path' to instance at 'remote_path'. - - @param local_path: path on local instance - @param remote_path: path on remote instance""" - with open(local_path, "rb") as fp: - self.write_data(remote_path, data=fp.read()) - - def run_script(self, script, rcs=None, description=None): - """Run script in target and return stdout. - - @param script: script contents - @param rcs: allowed return codes from script - @param description: purpose of script - @return_value: stdout from script - """ - # Just write to a file, add execute, run it, then remove it. - shblob = '; '.join(( - 'set -e', - 's="$1"', - 'shift', - 'cat > "$s"', - 'trap "rm -f $s" EXIT', - 'chmod +x "$s"', - '"$s" "$@"')) - return self.execute( - ['sh', '-c', shblob, 'runscript', self.tmpfile()], - stdin=script, description=description, rcs=rcs) - - def tmpfile(self): - """Get a tmp file in the target. - - @return_value: path to new file in target - """ - path = "/tmp/%s-%04d" % (type(self).__name__, self._tmp_count) - self._tmp_count += 1 - return path - - -class InTargetExecuteError(subp.ProcessExecutionError): - """Error type for in target commands that fail.""" - - default_desc = 'Unexpected error while running command.' 
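[Editorial aside: an illustrative, standalone version of the trick run_script() above relies on. The script body arrives on stdin, sh writes it to the path passed as "$1", marks it executable, runs it, and removes it on exit. The temporary path below is made up for the demo, and a POSIX sh is assumed.]

import subprocess

SHBLOB = '; '.join((
    'set -e',
    's="$1"',
    'shift',
    'cat > "$s"',
    'trap "rm -f $s" EXIT',
    'chmod +x "$s"',
    '"$s" "$@"'))

result = subprocess.run(
    ['sh', '-c', SHBLOB, 'runscript', '/tmp/demo-script-0000'],
    input=b'#!/bin/sh\necho packed-and-ran\n',
    stdout=subprocess.PIPE, check=True)
assert result.stdout == b'packed-and-ran\n'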
- - def __init__(self, stdout, stderr, exit_code, cmd, description=None, - reason=None): - """Init error and parent error class.""" - super(InTargetExecuteError, self).__init__( - stdout=stdout, stderr=stderr, exit_code=exit_code, - cmd=shell_quote(cmd), - description=description if description else self.default_desc, - reason=reason) - - -class PlatformError(IOError): - """Error type for platform errors.""" - - default_desc = 'unexpected error in platform.' - - def __init__(self, operation, description=None): - """Init error and parent error class.""" - description = description if description else self.default_desc - - message = '%s: %s' % (operation, description) - IOError.__init__(self, message) - - -def mkdtemp(prefix='cloud_test_data'): - return tempfile.mkdtemp(prefix=prefix) - - -class TempDir(object): - """Configurable temporary directory like tempfile.TemporaryDirectory.""" - - def __init__(self, tmpdir=None, preserve=False, prefix='cloud_test_data_'): - """Initialize. - - @param tmpdir: directory to use as tempdir - @param preserve: if true, always preserve data on exit - @param prefix: prefix to use for tempfile name - """ - self.tmpdir = tmpdir - self.preserve = preserve - self.prefix = prefix - - def __enter__(self): - """Create tempdir. - - @return_value: tempdir path - """ - if not self.tmpdir: - self.tmpdir = mkdtemp(prefix=self.prefix) - LOG.debug('using tmpdir: %s', self.tmpdir) - return self.tmpdir - - def __exit__(self, etype, value, trace): - """Destroy tempdir if no errors occurred.""" - if etype or self.preserve: - LOG.info('leaving data in %s', self.tmpdir) - else: - shutil.rmtree(self.tmpdir) - -# vi: ts=4 expandtab diff --git a/tests/cloud_tests/verify.py b/tests/cloud_tests/verify.py deleted file mode 100644 index 0295af40..00000000 --- a/tests/cloud_tests/verify.py +++ /dev/null @@ -1,149 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. - -"""Verify test results.""" - -import os -import unittest - -from tests.cloud_tests import (config, LOG, util, testcases) - - -def verify_data(data_dir, platform, os_name, tests): - """Verify test data is correct. - - @param data_dir: top level directory for all tests - @param platform: The platform name we for this test data (e.g. lxd) - @param os_name: The operating system under test (xenial, artful, etc.). 
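[Editorial aside: a condensed, runnable sketch of the behaviour TempDir documents above, using a throwaway class name. The directory is kept when preserve=True or when the with-block raises, and removed on a clean exit.]

import os
import shutil
import tempfile

class DemoTempDir:
    """Same contract as TempDir above, stripped to the essentials."""

    def __init__(self, preserve=False, prefix='cloud_test_data_'):
        self.preserve = preserve
        self.prefix = prefix
        self.tmpdir = None

    def __enter__(self):
        self.tmpdir = tempfile.mkdtemp(prefix=self.prefix)
        return self.tmpdir

    def __exit__(self, etype, value, trace):
        if etype or self.preserve:
            print('leaving data in %s' % self.tmpdir)
        else:
            shutil.rmtree(self.tmpdir)

with DemoTempDir() as tmpdir:
    open(os.path.join(tmpdir, 'collect.log'), 'w').close()
assert not os.path.isdir(tmpdir)  # clean exit and preserve=False: removed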
- @param tests: list of test names - @return_value: {: {passed: True/False, failures: []}} - """ - base_dir = os.sep.join((data_dir, platform, os_name)) - runner = unittest.TextTestRunner(verbosity=util.current_verbosity()) - res = {} - for test_name in tests: - LOG.debug('verifying test data for %s', test_name) - - # get cloudconfig for test - test_conf = config.load_test_config(test_name) - test_module = config.name_to_module(test_name) - cloud_conf = test_conf['cloud_config'] - - # load script outputs - data = {'platform': platform, 'os_name': os_name} - test_dir = os.path.join(base_dir, test_name) - for script_name in os.listdir(test_dir): - with open(os.path.join(test_dir, script_name), 'rb') as fp: - data[script_name] = fp.read() - - # get test suite and launch tests - suite = testcases.get_suite(test_module, data, cloud_conf) - suite_results = runner.run(suite) - res[test_name] = { - 'passed': suite_results.wasSuccessful(), - 'failures': [{'module': type(test_class).__base__.__module__, - 'class': type(test_class).__base__.__name__, - 'function': str(test_class).split()[0], - 'error': trace.splitlines()[-1], - 'traceback': trace, } - for test_class, trace in suite_results.failures] - } - - for failure in res[test_name]['failures']: - LOG.warning('test case: %s failed %s.%s with: %s', - test_name, failure['class'], failure['function'], - failure['error']) - - return res - - -def format_test_failures(test_result): - """Return a human-readable printable format of test failures.""" - if not test_result['failures']: - return '' - failure_hdr = ' test failures:' - failure_fmt = ' * {module}.{class}.{function}\n ' - output = [] - for failure in test_result['failures']: - if not output: - output = [failure_hdr] - msg = failure_fmt.format(**failure) - if failure.get('error'): - msg += failure['error'] - else: - msg += failure.get('traceback', '') - output.append(msg) - return '\n'.join(output) - - -def format_results(res): - """Return human-readable results as a string""" - platform_hdr = 'Platform: {platform}' - distro_hdr = ' Distro: {distro}' - distro_summary_fmt = ( - ' test modules passed:{passed} tests failed:{failed}') - output = [''] - counts = {} - for platform, platform_data in res.items(): - output.append(platform_hdr.format(platform=platform)) - counts[platform] = {} - for distro, distro_data in platform_data.items(): - distro_failure_output = [] - output.append(distro_hdr.format(distro=distro)) - counts[platform][distro] = {'passed': 0, 'failed': 0} - for _, test_result in distro_data.items(): - if test_result['passed']: - counts[platform][distro]['passed'] += 1 - else: - counts[platform][distro]['failed'] += len( - test_result['failures']) - failure_output = format_test_failures(test_result) - if failure_output: - distro_failure_output.append(failure_output) - output.append( - distro_summary_fmt.format(**counts[platform][distro])) - if distro_failure_output: - output.extend(distro_failure_output) - return '\n'.join(output) - - -def verify(args): - """Verify test data. 
- - @param args: directory of test data - @return_value: 0 for success, or number of failed tests - """ - failed = 0 - res = {} - - # find test data - tests = util.list_test_data(args.data_dir) - - for platform in tests.keys(): - res[platform] = {} - for os_name in tests[platform].keys(): - test_name = "platform='{}', os='{}'".format(platform, os_name) - LOG.info('test: %s verifying test data', test_name) - - # run test - res[platform][os_name] = verify_data( - args.data_dir, platform, os_name, - tests[platform][os_name]) - - # handle results - fail_list = [k for k, v in res[platform][os_name].items() - if not v.get('passed')] - if len(fail_list) == 0: - LOG.info('test: %s passed all tests', test_name) - else: - LOG.warning('test: %s failed %s tests', test_name, - len(fail_list)) - failed += len(fail_list) - - # dump results - LOG.debug('\n---- Verify summarized results:\n%s', format_results(res)) - if args.result: - util.merge_results({'verify': res}, args.result) - - return failed - -# vi: ts=4 expandtab diff --git a/tests/configs/sample1.yaml b/tests/configs/sample1.yaml deleted file mode 100644 index ae935cc0..00000000 --- a/tests/configs/sample1.yaml +++ /dev/null @@ -1,49 +0,0 @@ -#cloud-config -#apt_update: false -#apt_upgrade: true -packages: [ bzr, pastebinit, ubuntu-dev-tools, ccache, bzr-builddeb, vim-nox, git-core, lftp ] - -#disable_root: False - -# mounts: -# - [ ephemeral0, /mnt ] -# - [ swap, none, swap, sw, 0, 0 ] - -ssh_import_id: [smoser ] - -#!/bin/sh - -output: {all: '| tee -a /var/log/cloud-init-output.log'} - -sm_misc: - - &user_setup | - set -x; exec > ~/user_setup.log 2>&1 - echo "starting at $(date -R)" - echo "set -o vi" >> ~/.bashrc - cat >> ~/.profile <<"EOF" - export EDITOR=vi - export DEB_BUILD_OPTIONS=parallel=4 - export PATH=/usr/lib/ccache:$PATH - EOF - - mkdir ~/bin - chmod 755 ~/bin - cat > ~/bin/mdebuild <<"EOF" - #!/bin/sh - exec debuild --prepend-path /usr/lib/ccache "$@" - EOF - chmod 755 ~/bin/* - - #byobu-launcher-install - byobu-ctrl-a screen 2>&1 || : - - echo "pinging 8.8.8.8" - ping -c 4 8.8.8.8 - -runcmd: - - [ sudo, -Hu, ubuntu, sh, -c, '[ -e /var/log/cloud-init.log ] || exit 0; grep "cloud-init.*running" /var/log/cloud-init.log > ~/runcmd.log' ] - - [ sudo, -Hu, ubuntu, sh, -c, 'read up sleep < /proc/uptime; echo $(date): runcmd up at $up | tee -a ~/runcmd.log' ] - - [ sudo, -Hu, ubuntu, sh, -c, *user_setup ] - - -byobu_by_default: user diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py index 15aa77bb..6b0b1f74 100644 --- a/tests/unittests/test_handler/test_schema.py +++ b/tests/unittests/test_handler/test_schema.py @@ -10,7 +10,6 @@ from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema from copy import copy import itertools -import os import pytest from pathlib import Path from textwrap import dedent @@ -493,46 +492,6 @@ class TestMain: assert expected == err -class CloudTestsIntegrationTest(CiTestCase): - """Validate all cloud-config yaml schema provided in integration tests. - - It is less expensive to have unittests validate schema of all cloud-config - yaml provided to integration tests, than to run an integration test which - raises Warnings or errors on invalid cloud-config schema. 
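[Editorial aside: an illustrative walk over the nested result structure that verify_data() and verify() above build, i.e. {platform: {os_name: {test_name: {'passed': ..., 'failures': [...]}}}}. The sample data is invented; the counting mirrors the fail_list logic in verify().]

res = {
    'lxd': {
        'bionic': {
            'set_hostname': {'passed': True, 'failures': []},
            'set_password': {
                'passed': False,
                'failures': [{'module': 'm', 'class': 'C',
                              'function': 'test_shadow',
                              'error': 'AssertionError'}],
            },
        },
    },
}

failed = 0
for platform, os_results in res.items():
    for os_name, test_results in os_results.items():
        fail_list = [name for name, result in test_results.items()
                     if not result.get('passed')]
        failed += len(fail_list)
assert failed == 1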
- """ - - @skipUnlessJsonSchema() - def test_all_integration_test_cloud_config_schema(self): - """Validate schema of cloud_tests yaml files looking for warnings.""" - schema = get_schema() - testsdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) - integration_testdir = os.path.sep.join( - [testsdir, 'cloud_tests', 'testcases']) - errors = [] - - yaml_files = [] - for root, _dirnames, filenames in os.walk(integration_testdir): - yaml_files.extend([os.path.join(root, f) - for f in filenames if f.endswith(".yaml")]) - self.assertTrue(len(yaml_files) > 0) - - for filename in yaml_files: - test_cfg = safe_load(open(filename)) - cloud_config = test_cfg.get('cloud_config') - if cloud_config: - cloud_config = safe_load( - cloud_config.replace("#cloud-config\n", "")) - try: - validate_cloudconfig_schema( - cloud_config, schema, strict=True) - except SchemaValidationError as e: - errors.append( - '{0}: {1}'.format( - filename, e)) - if errors: - raise AssertionError(', '.join(errors)) - - def _get_schema_doc_examples(): examples_dir = Path( cloudinit.__file__).parent.parent / 'doc' / 'examples' diff --git a/tox.ini b/tox.ini index 45ccadce..214fb623 100644 --- a/tox.ini +++ b/tox.ini @@ -27,7 +27,7 @@ deps = # test-requirements because unit tests are now present in cloudinit tree -r{toxinidir}/test-requirements.txt -r{toxinidir}/integration-requirements.txt -commands = {envpython} -m pylint {posargs:cloudinit tests --ignore=cloud_tests tools} +commands = {envpython} -m pylint {posargs:cloudinit tests tools} [testenv:py3] @@ -123,7 +123,7 @@ commands = {envpython} -m flake8 {posargs:cloudinit/ tests/ tools/ setup.py} deps = flake8 [testenv:tip-pylint] -commands = {envpython} -m pylint {posargs:cloudinit tests --ignore=cloud_tests tools} +commands = {envpython} -m pylint {posargs:cloudinit tests tools} deps = # requirements pylint @@ -131,13 +131,6 @@ deps = -r{toxinidir}/test-requirements.txt -r{toxinidir}/integration-requirements.txt -[testenv:citest] -basepython = python3 -commands = {envpython} -m tests.cloud_tests {posargs} -passenv = HOME TRAVIS -deps = - -r{toxinidir}/cloud-tests-requirements.txt - # Until Xenial tox support is dropped or bumps to tox:2.3.2, reflect changes to # deps into testenv:integration-tests-ci: commands, passenv and deps. # This is due to (https://github.com/tox-dev/tox/issues/208) which means that -- cgit v1.2.3