Diffstat (limited to 'tests')
-rw-r--r--  tests/cloud_tests/__init__.py | 7
-rw-r--r--  tests/cloud_tests/__main__.py | 45
-rw-r--r--  tests/cloud_tests/args.py | 150
-rw-r--r--  tests/cloud_tests/bddeb.py | 118
-rw-r--r--  tests/cloud_tests/collect.py | 114
-rw-r--r--  tests/cloud_tests/config.py | 139
-rw-r--r--  tests/cloud_tests/configs/bugs/lp1628337.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/examples/add_apt_repositories.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_conf.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_primary.yaml | 7
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_proxy.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_security.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml | 5
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml | 9
-rw-r--r--  tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/apt_pipelining_os.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/byobu.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/keys_to_console.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/landscape.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/locale.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/lxd_bridge.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/lxd_dir.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/ntp.yaml | 9
-rw-r--r--  tests/cloud_tests/configs/modules/ntp_pools.yaml | 8
-rw-r--r--  tests/cloud_tests/configs/modules/ntp_servers.yaml | 5
-rw-r--r--  tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml | 11
-rw-r--r--  tests/cloud_tests/configs/modules/set_hostname.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/set_password.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/set_password_expire.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/snappy.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml | 5
-rw-r--r--  tests/cloud_tests/configs/modules/ssh_import_id.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/ssh_keys_generate.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/ssh_keys_provided.yaml | 3
-rw-r--r--  tests/cloud_tests/configs/modules/timezone.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/user_groups.yaml | 2
-rw-r--r--  tests/cloud_tests/configs/modules/write_files.yaml | 4
-rw-r--r--  tests/cloud_tests/images/__init__.py | 7
-rw-r--r--  tests/cloud_tests/images/base.py | 68
-rw-r--r--  tests/cloud_tests/images/lxd.py | 176
-rw-r--r--  tests/cloud_tests/instances/__init__.py | 6
-rw-r--r--  tests/cloud_tests/instances/base.py | 162
-rw-r--r--  tests/cloud_tests/instances/lxd.py | 132
-rw-r--r--  tests/cloud_tests/manage.py | 29
-rw-r--r--  tests/cloud_tests/platforms.yaml | 50
-rw-r--r--  tests/cloud_tests/platforms/__init__.py | 6
-rw-r--r--  tests/cloud_tests/platforms/base.py | 44
-rw-r--r--  tests/cloud_tests/platforms/lxd.py | 97
-rw-r--r--  tests/cloud_tests/releases.yaml | 306
-rw-r--r--  tests/cloud_tests/run_funcs.py | 75
-rw-r--r--  tests/cloud_tests/setup_image.py | 196
-rw-r--r--  tests/cloud_tests/snapshots/__init__.py | 6
-rw-r--r--  tests/cloud_tests/snapshots/base.py | 43
-rw-r--r--  tests/cloud_tests/snapshots/lxd.py | 51
-rw-r--r--  tests/cloud_tests/stage.py | 52
-rw-r--r--  tests/cloud_tests/testcases.yaml | 1
-rw-r--r--  tests/cloud_tests/testcases/__init__.py | 16
-rw-r--r--  tests/cloud_tests/testcases/base.py | 51
-rw-r--r--  tests/cloud_tests/testcases/bugs/__init__.py | 4
-rw-r--r--  tests/cloud_tests/testcases/bugs/lp1511485.py | 6
-rw-r--r--  tests/cloud_tests/testcases/bugs/lp1628337.py | 8
-rw-r--r--  tests/cloud_tests/testcases/examples/__init__.py | 4
-rw-r--r--  tests/cloud_tests/testcases/examples/add_apt_repositories.py | 8
-rw-r--r--  tests/cloud_tests/testcases/examples/alter_completion_message.py | 23
-rw-r--r--  tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py | 10
-rw-r--r--  tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py | 12
-rw-r--r--  tests/cloud_tests/testcases/examples/including_user_groups.py | 16
-rw-r--r--  tests/cloud_tests/testcases/examples/install_arbitrary_packages.py | 8
-rw-r--r--  tests/cloud_tests/testcases/examples/install_run_chef_recipes.py | 6
-rw-r--r--  tests/cloud_tests/testcases/examples/run_apt_upgrade.py | 6
-rw-r--r--  tests/cloud_tests/testcases/examples/run_commands.py | 6
-rw-r--r--  tests/cloud_tests/testcases/examples/run_commands_first_boot.py | 6
-rw-r--r--  tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py | 12
-rw-r--r--  tests/cloud_tests/testcases/main/__init__.py | 4
-rw-r--r--  tests/cloud_tests/testcases/main/command_output_simple.py | 9
-rw-r--r--  tests/cloud_tests/testcases/modules/__init__.py | 4
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_conf.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_primary.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_proxy.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_security.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_sources_key.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py | 12
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_sources_list.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_pipelining_disable.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/apt_pipelining_os.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/bootcmd.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/byobu.py | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/ca_certs.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/debug_disable.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/debug_enable.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/final_message.py | 23
-rw-r--r--  tests/cloud_tests/testcases/modules/keys_to_console.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/locale.py | 15
-rw-r--r--  tests/cloud_tests/testcases/modules/lxd_bridge.py | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/lxd_dir.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/ntp.py | 13
-rw-r--r--  tests/cloud_tests/testcases/modules/ntp_pools.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/ntp_servers.py | 2
-rw-r--r--  tests/cloud_tests/testcases/modules/package_update_upgrade_install.py | 12
-rw-r--r--  tests/cloud_tests/testcases/modules/runcmd.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/salt_minion.py | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/seed_random_data.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/set_hostname.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/set_hostname_fqdn.py | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/set_password.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/set_password_expire.py | 8
-rw-r--r--  tests/cloud_tests/testcases/modules/set_password_list.py | 5
-rw-r--r--  tests/cloud_tests/testcases/modules/set_password_list_string.py | 5
-rw-r--r--  tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py | 16
-rw-r--r--  tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py | 14
-rw-r--r--  tests/cloud_tests/testcases/modules/ssh_import_id.py | 19
-rw-r--r--  tests/cloud_tests/testcases/modules/ssh_keys_generate.py | 22
-rw-r--r--  tests/cloud_tests/testcases/modules/ssh_keys_provided.py | 24
-rw-r--r--  tests/cloud_tests/testcases/modules/timezone.py | 6
-rw-r--r--  tests/cloud_tests/testcases/modules/user_groups.py | 16
-rw-r--r--  tests/cloud_tests/testcases/modules/write_files.py | 12
-rw-r--r--  tests/cloud_tests/util.py | 235
-rw-r--r--  tests/cloud_tests/verify.py | 22
-rw-r--r--  tests/unittests/helpers.py | 47
-rw-r--r--  tests/unittests/test_datasource/test_aliyun.py | 51
-rw-r--r--  tests/unittests/test_datasource/test_azure.py | 250
-rw-r--r--  tests/unittests/test_datasource/test_common.py | 5
-rw-r--r--  tests/unittests/test_datasource/test_ec2.py | 202
-rw-r--r--  tests/unittests/test_datasource/test_gce.py | 17
-rw-r--r--  tests/unittests/test_datasource/test_scaleway.py | 262
-rw-r--r--  tests/unittests/test_distros/test_create_users.py | 30
-rw-r--r--  tests/unittests/test_distros/test_debian.py | 82
-rw-r--r--  tests/unittests/test_distros/test_netconfig.py | 9
-rw-r--r--  tests/unittests/test_ds_identify.py | 57
-rw-r--r--  tests/unittests/test_handler/test_handler_disk_setup.py | 32
-rw-r--r--  tests/unittests/test_handler/test_handler_ntp.py | 127
-rw-r--r--  tests/unittests/test_handler/test_handler_write_files.py | 37
-rw-r--r--  tests/unittests/test_handler/test_schema.py | 232
-rw-r--r--  tests/unittests/test_net.py | 1262
-rw-r--r--  tests/unittests/test_runs/test_simple_run.py | 18
-rw-r--r--  tests/unittests/test_util.py | 48
143 files changed, 4601 insertions, 1254 deletions
diff --git a/tests/cloud_tests/__init__.py b/tests/cloud_tests/__init__.py
index 099c357f..07148c12 100644
--- a/tests/cloud_tests/__init__.py
+++ b/tests/cloud_tests/__init__.py
@@ -1,17 +1,18 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
import logging
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
TESTCASES_DIR = os.path.join(BASE_DIR, 'testcases')
TEST_CONF_DIR = os.path.join(BASE_DIR, 'configs')
+TREE_BASE = os.sep.join(BASE_DIR.split(os.sep)[:-2])
def _initialize_logging():
- """
- configure logging for cloud_tests
- """
+ """Configure logging for cloud_tests."""
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(
diff --git a/tests/cloud_tests/__main__.py b/tests/cloud_tests/__main__.py
index ed654ad3..260ddb3f 100644
--- a/tests/cloud_tests/__main__.py
+++ b/tests/cloud_tests/__main__.py
@@ -1,19 +1,17 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main entry point."""
+
import argparse
import logging
-import shutil
import sys
-import tempfile
-from tests.cloud_tests import (args, collect, manage, verify)
+from tests.cloud_tests import args, bddeb, collect, manage, run_funcs, verify
from tests.cloud_tests import LOG
def configure_log(args):
- """
- configure logging
- """
+ """Configure logging."""
level = logging.INFO
if args.verbose:
level = logging.DEBUG
@@ -22,41 +20,15 @@ def configure_log(args):
LOG.setLevel(level)
-def run(args):
- """
- run full test suite
- """
- failed = 0
- args.data_dir = tempfile.mkdtemp(prefix='cloud_test_data_')
- LOG.debug('using tmpdir %s', args.data_dir)
- try:
- failed += collect.collect(args)
- failed += verify.verify(args)
- except Exception:
- failed += 1
- raise
- finally:
- # TODO: make this configurable via environ or cmdline
- if failed:
- LOG.warning('some tests failed, leaving data in %s', args.data_dir)
- else:
- shutil.rmtree(args.data_dir)
- return failed
-
-
def main():
- """
- entry point for cloud test suite
- """
+ """Entry point for cloud test suite."""
# configure parser
parser = argparse.ArgumentParser(prog='cloud_tests')
subparsers = parser.add_subparsers(dest="subcmd")
subparsers.required = True
def add_subparser(name, description, arg_sets):
- """
- add arguments to subparser
- """
+ """Add arguments to subparser."""
subparser = subparsers.add_parser(name, help=description)
for (_args, _kwargs) in (a for arg_set in arg_sets for a in arg_set):
subparser.add_argument(*_args, **_kwargs)
@@ -80,9 +52,12 @@ def main():
# run handler
LOG.debug('running with args: %s\n', parsed)
return {
+ 'bddeb': bddeb.bddeb,
'collect': collect.collect,
'create': manage.create,
- 'run': run,
+ 'run': run_funcs.run,
+ 'tree_collect': run_funcs.tree_collect,
+ 'tree_run': run_funcs.tree_run,
'verify': verify.verify,
}[parsed.subcmd](parsed)
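
The dispatch at the end of main() above maps each argparse subcommand name to its handler and calls it immediately with the parsed namespace. For readers unfamiliar with that style, here is a minimal, self-contained sketch of the same pattern; the 'greet' and 'echo' subcommands are hypothetical and not part of cloud_tests.

    # Standalone sketch of the subcommand-dispatch pattern used in
    # __main__.py above. The 'greet'/'echo' subcommands are made up.
    import argparse
    import sys


    def greet(parsed):
        """Handler for the hypothetical 'greet' subcommand."""
        print('hello, {}'.format(parsed.name))
        return 0


    def echo(parsed):
        """Handler for the hypothetical 'echo' subcommand."""
        print(parsed.text)
        return 0


    def main():
        parser = argparse.ArgumentParser(prog='demo')
        subparsers = parser.add_subparsers(dest='subcmd')
        subparsers.required = True

        greet_parser = subparsers.add_parser('greet', help='print a greeting')
        greet_parser.add_argument('--name', default='world')
        echo_parser = subparsers.add_parser('echo', help='print given text')
        echo_parser.add_argument('text')

        parsed = parser.parse_args()
        # look up the handler by subcommand name and call it with the
        # parsed namespace, exactly as __main__.py does above
        return {
            'greet': greet,
            'echo': echo,
        }[parsed.subcmd](parsed)


    if __name__ == '__main__':
        sys.exit(main())
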
diff --git a/tests/cloud_tests/args.py b/tests/cloud_tests/args.py
index 371b0444..369d60db 100644
--- a/tests/cloud_tests/args.py
+++ b/tests/cloud_tests/args.py
@@ -1,23 +1,43 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Argparse argument setup and sanitization."""
+
import os
from tests.cloud_tests import config, util
-from tests.cloud_tests import LOG
+from tests.cloud_tests import LOG, TREE_BASE
ARG_SETS = {
+ 'BDDEB': (
+ (('--bddeb-args',),
+ {'help': 'args to pass through to bddeb',
+ 'action': 'store', 'default': None, 'required': False}),
+ (('--build-os',),
+ {'help': 'OS to use as build system (default is xenial)',
+ 'action': 'store', 'choices': config.ENABLED_DISTROS,
+ 'default': 'xenial', 'required': False}),
+ (('--build-platform',),
+ {'help': 'platform to use for build system (default is lxd)',
+ 'action': 'store', 'choices': config.ENABLED_PLATFORMS,
+ 'default': 'lxd', 'required': False}),
+ (('--cloud-init',),
+ {'help': 'path to base of cloud-init tree', 'metavar': 'DIR',
+ 'action': 'store', 'required': False, 'default': TREE_BASE}),),
'COLLECT': (
(('-p', '--platform'),
{'help': 'platform(s) to run tests on', 'metavar': 'PLATFORM',
- 'action': 'append', 'choices': config.list_enabled_platforms(),
+ 'action': 'append', 'choices': config.ENABLED_PLATFORMS,
'default': []}),
(('-n', '--os-name'),
{'help': 'the name(s) of the OS(s) to test', 'metavar': 'NAME',
- 'action': 'append', 'choices': config.list_enabled_distros(),
+ 'action': 'append', 'choices': config.ENABLED_DISTROS,
'default': []}),
(('-t', '--test-config'),
{'help': 'test config file(s) to use', 'metavar': 'FILE',
- 'action': 'append', 'default': []}),),
+ 'action': 'append', 'default': []}),
+ (('--feature-override',),
+ {'help': 'feature flags override(s), <flagname>=<true/false>',
+ 'action': 'append', 'default': [], 'required': False}),),
'CREATE': (
(('-c', '--config'),
{'help': 'cloud-config yaml for testcase', 'metavar': 'DATA',
@@ -41,7 +61,15 @@ ARG_SETS = {
'OUTPUT': (
(('-d', '--data-dir'),
{'help': 'directory to store test data in',
- 'action': 'store', 'metavar': 'DIR', 'required': True}),),
+ 'action': 'store', 'metavar': 'DIR', 'required': False}),
+ (('--preserve-data',),
+ {'help': 'do not remove collected data after successful run',
+ 'action': 'store_true', 'default': False, 'required': False}),),
+ 'OUTPUT_DEB': (
+ (('--deb',),
+ {'help': 'path to write output deb to', 'metavar': 'FILE',
+ 'action': 'store', 'required': False,
+ 'default': 'cloud-init_all.deb'}),),
'RESULT': (
(('-r', '--result'),
{'help': 'file to write results to',
@@ -61,31 +89,54 @@ ARG_SETS = {
{'help': 'ppa to enable (implies -u)', 'metavar': 'NAME',
'action': 'store'}),
(('-u', '--upgrade'),
- {'help': 'upgrade before starting tests', 'action': 'store_true',
- 'default': False}),),
+ {'help': 'upgrade or install cloud-init from repo',
+ 'action': 'store_true', 'default': False}),
+ (('--upgrade-full',),
+ {'help': 'do full system upgrade from repo (implies -u)',
+ 'action': 'store_true', 'default': False}),),
+
}
SUBCMDS = {
+ 'bddeb': ('build cloud-init deb from tree',
+ ('BDDEB', 'OUTPUT_DEB', 'INTERFACE')),
'collect': ('collect test data',
('COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT', 'SETUP')),
'create': ('create new test case', ('CREATE', 'INTERFACE')),
- 'run': ('run test suite', ('COLLECT', 'INTERFACE', 'RESULT', 'SETUP')),
+ 'run': ('run test suite',
+ ('COLLECT', 'INTERFACE', 'RESULT', 'OUTPUT', 'SETUP')),
+ 'tree_collect': ('collect using current working tree',
+ ('BDDEB', 'COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT')),
+ 'tree_run': ('run using current working tree',
+ ('BDDEB', 'COLLECT', 'INTERFACE', 'OUTPUT', 'RESULT')),
'verify': ('verify test data', ('INTERFACE', 'OUTPUT', 'RESULT')),
}
def _empty_normalizer(args):
+ """Do not normalize arguments."""
+ return args
+
+
+def normalize_bddeb_args(args):
+ """Normalize BDDEB arguments.
+
+ @param args: parsed args
+ @return_value: updated args, or None if errors encountered
"""
- do not normalize arguments
- """
+ # make sure cloud-init dir is accessible
+ if not (args.cloud_init and os.path.isdir(args.cloud_init)):
+ LOG.error('invalid cloud-init tree path')
+ return None
+
return args
def normalize_create_args(args):
- """
- normalize CREATE arguments
- args: parsed args
- return_value: updated args, or None if errors occurred
+ """Normalize CREATE arguments.
+
+ @param args: parsed args
+ @return_value: updated args, or None if errors occurred
"""
# ensure valid name for new test
if len(args.name.split('/')) != 2:
@@ -114,22 +165,22 @@ def normalize_create_args(args):
def normalize_collect_args(args):
- """
- normalize COLLECT arguments
- args: parsed args
- return_value: updated args, or None if errors occurred
+ """Normalize COLLECT arguments.
+
+ @param args: parsed args
+ @return_value: updated args, or None if errors occurred
"""
# platform should default to all supported
if len(args.platform) == 0:
- args.platform = config.list_enabled_platforms()
+ args.platform = config.ENABLED_PLATFORMS
args.platform = util.sorted_unique(args.platform)
# os name should default to all enabled
# if os name is provided ensure that all provided are supported
if len(args.os_name) == 0:
- args.os_name = config.list_enabled_distros()
+ args.os_name = config.ENABLED_DISTROS
else:
- supported = config.list_enabled_distros()
+ supported = config.ENABLED_DISTROS
invalid = [os_name for os_name in args.os_name
if os_name not in supported]
if len(invalid) != 0:
@@ -158,18 +209,33 @@ def normalize_collect_args(args):
args.test_config = valid
args.test_config = util.sorted_unique(args.test_config)
+ # parse feature flag overrides and ensure all are valid
+ if args.feature_override:
+ overrides = args.feature_override
+ args.feature_override = util.parse_conf_list(
+ overrides, boolean=True, valid=config.list_feature_flags())
+ if not args.feature_override:
+ LOG.error('invalid feature flag override(s): %s', overrides)
+ return None
+ else:
+ args.feature_override = {}
+
return args
def normalize_output_args(args):
+ """Normalize OUTPUT arguments.
+
+ @param args: parsed args
+ @return_value: updated args, or None if errors occurred
"""
- normalize OUTPUT arguments
- args: parsed args
- return_value: updated args, or None if errors occurred
- """
+ if args.data_dir:
+ args.data_dir = os.path.abspath(args.data_dir)
+ if not os.path.exists(args.data_dir):
+ os.mkdir(args.data_dir)
+
if not args.data_dir:
- LOG.error('--data-dir must be specified')
- return None
+ args.data_dir = None
# ensure clean output dir if collect
# ensure data exists if verify
@@ -177,19 +243,31 @@ def normalize_output_args(args):
if not util.is_clean_writable_dir(args.data_dir):
LOG.error('data_dir must be empty/new and must be writable')
return None
- elif args.subcmd == 'verify':
- if not os.path.exists(args.data_dir):
- LOG.error('data_dir %s does not exist', args.data_dir)
- return None
return args
-def normalize_setup_args(args):
+def normalize_output_deb_args(args):
+ """Normalize OUTPUT_DEB arguments.
+
+ @param args: parsed args
+ @return_value: updated args, or None if errors occurred
"""
- normalize SETUP arguments
- args: parsed args
- return_value: updated_args, or None if errors occurred
+ # make sure to use abspath for deb
+ args.deb = os.path.abspath(args.deb)
+
+ if not args.deb.endswith('.deb'):
+ LOG.error('output filename does not end in ".deb"')
+ return None
+
+ return args
+
+
+def normalize_setup_args(args):
+ """Normalize SETUP arguments.
+
+ @param args: parsed args
+ @return_value: updated_args, or None if errors occurred
"""
# ensure deb or rpm valid if specified
for pkg in (args.deb, args.rpm):
@@ -210,10 +288,12 @@ def normalize_setup_args(args):
NORMALIZERS = {
+ 'BDDEB': normalize_bddeb_args,
'COLLECT': normalize_collect_args,
'CREATE': normalize_create_args,
'INTERFACE': _empty_normalizer,
'OUTPUT': normalize_output_args,
+ 'OUTPUT_DEB': normalize_output_deb_args,
'RESULT': _empty_normalizer,
'SETUP': normalize_setup_args,
}
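
The NORMALIZERS table above pairs each argument set with a sanitizer that returns the updated namespace or None on error. The driver that chains them is not part of this diff, so the sketch below shows only one plausible way such normalizers could be run in sequence; normalize_args and fake_args here are illustrative assumptions, not cloud_tests code.

    # Hedged sketch of chaining per-arg-set normalizers; not the actual
    # cloud_tests driver. Each normalizer returns the args namespace or
    # None to signal a fatal problem.
    import argparse


    def normalize_output_args(args):
        """Require a data dir, loosely mirroring the OUTPUT arg set above."""
        if not getattr(args, 'data_dir', None):
            print('error: --data-dir is required')
            return None
        return args


    def _empty_normalizer(args):
        """Do not normalize arguments."""
        return args


    NORMALIZERS = {
        'OUTPUT': normalize_output_args,
        'RESULT': _empty_normalizer,
    }


    def normalize_args(args, arg_sets):
        """Run every normalizer needed by a subcommand, stopping on error."""
        for name in arg_sets:
            args = NORMALIZERS[name](args)
            if args is None:
                return None
        return args


    fake_args = argparse.Namespace(data_dir='/tmp/cloud_test_data')
    print(normalize_args(fake_args, ('OUTPUT', 'RESULT')))
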
diff --git a/tests/cloud_tests/bddeb.py b/tests/cloud_tests/bddeb.py
new file mode 100644
index 00000000..53dbf74e
--- /dev/null
+++ b/tests/cloud_tests/bddeb.py
@@ -0,0 +1,118 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Used to build a deb."""
+
+from functools import partial
+import os
+import tempfile
+
+from cloudinit import util as c_util
+from tests.cloud_tests import (config, LOG)
+from tests.cloud_tests import (platforms, images, snapshots, instances)
+from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single)
+
+build_deps = ['devscripts', 'equivs', 'git', 'tar']
+
+
+def _out(cmd_res):
+ """Get clean output from cmd result."""
+ return cmd_res[0].strip()
+
+
+def build_deb(args, instance):
+ """Build deb on system and copy out to location at args.deb.
+
+ @param args: cmdline arguments
+ @return_value: tuple of results and fail count
+ """
+ # update remote system package list and install build deps
+ LOG.debug('installing build deps')
+ pkgs = ' '.join(build_deps)
+ cmd = 'apt-get update && apt-get install --yes {}'.format(pkgs)
+ instance.execute(['/bin/sh', '-c', cmd])
+ # TODO Remove this call once we have a ci-deps Makefile target
+ instance.execute(['mk-build-deps', '--install', '-t',
+ 'apt-get --no-install-recommends --yes', 'cloud-init'])
+
+ # local tmpfile that must be deleted
+ local_tarball = tempfile.NamedTemporaryFile().name
+
+ # paths to use in remote system
+ output_link = '/root/cloud-init_all.deb'
+ remote_tarball = _out(instance.execute(['mktemp']))
+ extract_dir = _out(instance.execute(['mktemp', '--directory']))
+ bddeb_path = os.path.join(extract_dir, 'packages', 'bddeb')
+ git_env = {'GIT_DIR': os.path.join(extract_dir, '.git'),
+ 'GIT_WORK_TREE': extract_dir}
+
+ LOG.debug('creating tarball of cloud-init at: %s', local_tarball)
+ c_util.subp(['tar', 'cf', local_tarball, '--owner', 'root',
+ '--group', 'root', '-C', args.cloud_init, '.'])
+ LOG.debug('copying to remote system at: %s', remote_tarball)
+ instance.push_file(local_tarball, remote_tarball)
+
+ LOG.debug('extracting tarball in remote system at: %s', extract_dir)
+ instance.execute(['tar', 'xf', remote_tarball, '-C', extract_dir])
+ instance.execute(['git', 'commit', '-a', '-m', 'tmp', '--allow-empty'],
+ env=git_env)
+
+ LOG.debug('building deb in remote system at: %s', output_link)
+ bddeb_args = args.bddeb_args.split() if args.bddeb_args else []
+ instance.execute([bddeb_path, '-d'] + bddeb_args, env=git_env)
+
+ # copy the deb back to the host system
+ LOG.debug('copying built deb to host at: %s', args.deb)
+ instance.pull_file(output_link, args.deb)
+
+
+def setup_build(args):
+ """Set build system up then run build.
+
+ @param args: cmdline arguments
+ @return_value: tuple of results and fail count
+ """
+ res = ({}, 1)
+
+ # set up platform
+ LOG.info('setting up platform: %s', args.build_platform)
+ platform_config = config.load_platform_config(args.build_platform)
+ platform_call = partial(platforms.get_platform, args.build_platform,
+ platform_config)
+ with PlatformComponent(platform_call) as platform:
+
+ # set up image
+ LOG.info('acquiring image for os: %s', args.build_os)
+ img_conf = config.load_os_config(platform.platform_name, args.build_os)
+ image_call = partial(images.get_image, platform, img_conf)
+ with PlatformComponent(image_call) as image:
+
+ # set up snapshot
+ snapshot_call = partial(snapshots.get_snapshot, image)
+ with PlatformComponent(snapshot_call) as snapshot:
+
+ # create instance with cloud-config to set it up
+ LOG.info('creating instance to build deb in')
+ empty_cloud_config = "#cloud-config\n{}"
+ instance_call = partial(
+ instances.get_instance, snapshot, empty_cloud_config,
+ use_desc='build cloud-init deb')
+ with PlatformComponent(instance_call) as instance:
+
+ # build the deb
+ res = run_single('build deb on system',
+ partial(build_deb, args, instance))
+
+ return res
+
+
+def bddeb(args):
+ """Entry point for build deb.
+
+ @param args: cmdline arguments
+ @return_value: fail count
+ """
+ LOG.info('preparing to build cloud-init deb')
+ (res, failed) = run_stage('build deb', [partial(setup_build, args)])
+ return failed
+
+# vi: ts=4 expandtab
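
setup_build() above nests four context managers (platform, image, snapshot, instance), each acquiring a resource on entry and tearing it down on exit even when the build fails. The real helper is tests.cloud_tests.stage.PlatformComponent, which does not appear in this diff; the snippet below is only a generic stand-in showing the shape of that pattern.

    # Generic stand-in for the acquire/release pattern behind the nested
    # PlatformComponent blocks in setup_build(); not the real stage module.
    from functools import partial


    class Component(object):
        """Build an object on __enter__ and destroy it on __exit__."""

        def __init__(self, get_func):
            self.get_func = get_func
            self.obj = None

        def __enter__(self):
            self.obj = self.get_func()
            return self.obj

        def __exit__(self, etype, value, trace):
            if self.obj is not None:
                self.obj.destroy()


    class Resource(object):
        """Hypothetical resource standing in for platform/image/instance."""

        def __init__(self, name):
            self.name = name
            print('created {}'.format(name))

        def destroy(self):
            print('destroyed {}'.format(self.name))


    with Component(partial(Resource, 'platform')) as platform:
        with Component(partial(Resource, 'image')) as image:
            # innermost work happens here; teardown runs in reverse order
            # even if this block raises
            print('building with {} and {}'.format(platform.name, image.name))
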
diff --git a/tests/cloud_tests/collect.py b/tests/cloud_tests/collect.py
index 02fc0e52..b44e8bdd 100644
--- a/tests/cloud_tests/collect.py
+++ b/tests/cloud_tests/collect.py
@@ -1,34 +1,39 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from tests.cloud_tests import (config, LOG, setup_image, util)
-from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single)
-from tests.cloud_tests import (platforms, images, snapshots, instances)
+"""Used to collect data from platforms during tests."""
from functools import partial
import os
+from cloudinit import util as c_util
+from tests.cloud_tests import (config, LOG, setup_image, util)
+from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single)
+from tests.cloud_tests import (platforms, images, snapshots, instances)
+
def collect_script(instance, base_dir, script, script_name):
- """
- collect script data
- instance: instance to run script on
- base_dir: base directory for output data
- script: script contents
- script_name: name of script to run
- return_value: None, may raise errors
+ """Collect script data.
+
+ @param instance: instance to run script on
+ @param base_dir: base directory for output data
+ @param script: script contents
+ @param script_name: name of script to run
+ @return_value: None, may raise errors
"""
LOG.debug('running collect script: %s', script_name)
- util.write_file(os.path.join(base_dir, script_name),
- instance.run_script(script))
+ (out, err, exit) = instance.run_script(
+ script, rcs=range(0, 256),
+ description='collect: {}'.format(script_name))
+ c_util.write_file(os.path.join(base_dir, script_name), out)
def collect_test_data(args, snapshot, os_name, test_name):
- """
- collect data for test case
- args: cmdline arguments
- snapshot: instantiated snapshot
- test_name: name or path of test to run
- return_value: tuple of results and fail count
+ """Collect data for test case.
+
+ @param args: cmdline arguments
+ @param snapshot: instantiated snapshot
+ @param test_name: name or path of test to run
+ @return_value: tuple of results and fail count
"""
res = ({}, 1)
@@ -39,15 +44,27 @@ def collect_test_data(args, snapshot, os_name, test_name):
test_scripts = test_config['collect_scripts']
test_output_dir = os.sep.join(
(args.data_dir, snapshot.platform_name, os_name, test_name))
- boot_timeout = (test_config.get('boot_timeout')
- if isinstance(test_config.get('boot_timeout'), int) else
- snapshot.config.get('timeout'))
# if test is not enabled, skip and return 0 failures
if not test_config.get('enabled', False):
LOG.warning('test config %s is not enabled, skipping', test_name)
return ({}, 0)
+ # if testcase requires a feature flag that the image does not support,
+ # skip the testcase with a warning
+ req_features = test_config.get('required_features', [])
+ if any(feature not in snapshot.features for feature in req_features):
+ LOG.warn('test config %s requires features not supported by image, '
+ 'skipping.\nrequired features: %s\nsupported features: %s',
+ test_name, req_features, snapshot.features)
+ return ({}, 0)
+
+ # if there are user data overrides required for this test case, apply them
+ overrides = snapshot.config.get('user_data_overrides', {})
+ if overrides:
+ LOG.debug('updating user data for collect with: %s', overrides)
+ user_data = util.update_user_data(user_data, overrides)
+
# create test instance
component = PlatformComponent(
partial(instances.get_instance, snapshot, user_data,
@@ -56,7 +73,7 @@ def collect_test_data(args, snapshot, os_name, test_name):
LOG.info('collecting test data for test: %s', test_name)
with component as instance:
start_call = partial(run_single, 'boot instance', partial(
- instance.start, wait=True, wait_time=boot_timeout))
+ instance.start, wait=True, wait_for_cloud_init=True))
collect_calls = [partial(run_single, 'script {}'.format(script_name),
partial(collect_script, instance,
test_output_dir, script, script_name))
@@ -69,11 +86,11 @@ def collect_test_data(args, snapshot, os_name, test_name):
def collect_snapshot(args, image, os_name):
- """
- collect data for snapshot of image
- args: cmdline arguments
- image: instantiated image with set up complete
- return_value tuple of results and fail count
+ """Collect data for snapshot of image.
+
+ @param args: cmdline arguments
+ @param image: instantiated image with set up complete
+ @return_value tuple of results and fail count
"""
res = ({}, 1)
@@ -91,19 +108,18 @@ def collect_snapshot(args, image, os_name):
def collect_image(args, platform, os_name):
- """
- collect data for image
- args: cmdline arguments
- platform: instantiated platform
- os_name: name of distro to collect for
- return_value: tuple of results and fail count
+ """Collect data for image.
+
+ @param args: cmdline arguments
+ @param platform: instantiated platform
+ @param os_name: name of distro to collect for
+ @return_value: tuple of results and fail count
"""
res = ({}, 1)
- os_config = config.load_os_config(os_name)
- if not os_config.get('enabled'):
- raise ValueError('OS {} not enabled'.format(os_name))
-
+ os_config = config.load_os_config(
+ platform.platform_name, os_name, require_enabled=True,
+ feature_overrides=args.feature_override)
component = PlatformComponent(
partial(images.get_image, platform, os_config))
@@ -118,18 +134,16 @@ def collect_image(args, platform, os_name):
def collect_platform(args, platform_name):
- """
- collect data for platform
- args: cmdline arguments
- platform_name: platform to collect for
- return_value: tuple of results and fail count
+ """Collect data for platform.
+
+ @param args: cmdline arguments
+ @param platform_name: platform to collect for
+ @return_value: tuple of results and fail count
"""
res = ({}, 1)
- platform_config = config.load_platform_config(platform_name)
- if not platform_config.get('enabled'):
- raise ValueError('Platform {} not enabled'.format(platform_name))
-
+ platform_config = config.load_platform_config(
+ platform_name, require_enabled=True)
component = PlatformComponent(
partial(platforms.get_platform, platform_name, platform_config))
@@ -143,10 +157,10 @@ def collect_platform(args, platform_name):
def collect(args):
- """
- entry point for collection
- args: cmdline arguments
- return_value: fail count
+ """Entry point for collection.
+
+ @param args: cmdline arguments
+ @return_value: fail count
"""
(res, failed) = run_stage(
'collect data', [partial(collect_platform, args, platform_name)
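
The required_features check added to collect_test_data() skips a testcase whenever the image's feature set does not cover everything the test config asks for. A tiny standalone illustration of that gate, using made-up feature names:

    # Minimal sketch of the feature gate in collect_test_data(); the
    # feature names below are examples, not an authoritative list.
    def should_skip(required_features, supported_features):
        """Return True if any required feature is missing from the image."""
        return any(f not in supported_features for f in required_features)


    image_features = ['apt', 'lsb_release', 'ppa']
    print(should_skip(['apt', 'snap'], image_features))  # True -> skip test
    print(should_skip(['apt', 'ppa'], image_features))   # False -> run test
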
diff --git a/tests/cloud_tests/config.py b/tests/cloud_tests/config.py
index f3a13c9a..4d5dc801 100644
--- a/tests/cloud_tests/config.py
+++ b/tests/cloud_tests/config.py
@@ -1,5 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Used to setup test configuration."""
+
import glob
import os
@@ -14,46 +16,44 @@ RELEASES_CONF = os.path.join(BASE_DIR, 'releases.yaml')
TESTCASE_CONF = os.path.join(BASE_DIR, 'testcases.yaml')
+def get(base, key):
+ """Get config entry 'key' from base, ensuring is dictionary."""
+ return base[key] if key in base and base[key] is not None else {}
+
+
+def enabled(config):
+ """Test if config item is enabled."""
+ return isinstance(config, dict) and config.get('enabled', False)
+
+
def path_to_name(path):
- """
- convert abs or rel path to test config to path under configs/
- if already a test name, do nothing
- """
+ """Convert abs or rel path to test config to path under 'sconfigs/'."""
dir_path, file_name = os.path.split(os.path.normpath(path))
name = os.path.splitext(file_name)[0]
return os.sep.join((os.path.basename(dir_path), name))
def name_to_path(name):
- """
- convert test config path under configs/ to full config path,
- if already a full path, do nothing
- """
+ """Convert test config path under configs/ to full config path."""
name = os.path.normpath(name)
if not name.endswith(CONF_EXT):
name = name + CONF_EXT
return name if os.path.isabs(name) else os.path.join(TEST_CONF_DIR, name)
-def name_sanatize(name):
- """
- sanatize test name to be used as a module name
- """
+def name_sanitize(name):
+ """Sanitize test name to be used as a module name."""
return name.replace('-', '_')
def name_to_module(name):
- """
- convert test name to a loadable module name under testcases/
- """
- name = name_sanatize(path_to_name(name))
+ """Convert test name to a loadable module name under 'testcases/'."""
+ name = name_sanitize(path_to_name(name))
return name.replace(os.path.sep, '.')
def merge_config(base, override):
- """
- merge config and base
- """
+ """Merge config and base."""
res = base.copy()
res.update(override)
res.update({k: merge_config(base.get(k, {}), v)
@@ -61,53 +61,102 @@ def merge_config(base, override):
return res
-def load_platform_config(platform):
+def merge_feature_groups(feature_conf, feature_groups, overrides):
+ """Combine feature groups and overrides to construct a supported list.
+
+ @param feature_conf: feature config from releases.yaml
+ @param feature_groups: feature groups the release is a member of
+ @param overrides: overrides specified by the release's config
+ @return_value: dict of {feature: true/false} settings
"""
- load configuration for platform
+ res = dict().fromkeys(feature_conf['all'])
+ for group in feature_groups:
+ res.update(feature_conf['groups'][group])
+ res.update(overrides)
+ return res
+
+
+def load_platform_config(platform_name, require_enabled=False):
+ """Load configuration for platform.
+
+ @param platform_name: name of platform to retrieve config for
+ @param require_enabled: if true, raise error if 'enabled' not True
+ @return_value: config dict
"""
main_conf = c_util.read_conf(PLATFORM_CONF)
- return merge_config(main_conf.get('default_platform_config'),
- main_conf.get('platforms')[platform])
+ conf = merge_config(main_conf['default_platform_config'],
+ main_conf['platforms'][platform_name])
+ if require_enabled and not enabled(conf):
+ raise ValueError('Platform is not enabled')
+ return conf
-def load_os_config(os_name):
- """
- load configuration for os
+def load_os_config(platform_name, os_name, require_enabled=False,
+ feature_overrides={}):
+ """Load configuration for os.
+
+ @param platform_name: platform name to load os config for
+ @param os_name: name of os to retrieve config for
+ @param require_enabled: if true, raise error if 'enabled' not True
+ @param feature_overrides: feature flag overrides to merge with features
+ @return_value: config dict
"""
main_conf = c_util.read_conf(RELEASES_CONF)
- return merge_config(main_conf.get('default_release_config'),
- main_conf.get('releases')[os_name])
+ default = main_conf['default_release_config']
+ image = main_conf['releases'][os_name]
+ conf = merge_config(merge_config(get(default, 'default'),
+ get(default, platform_name)),
+ merge_config(get(image, 'default'),
+ get(image, platform_name)))
+
+ feature_conf = main_conf['features']
+ feature_groups = conf.get('feature_groups', [])
+ overrides = merge_config(get(conf, 'features'), feature_overrides)
+ conf['features'] = merge_feature_groups(
+ feature_conf, feature_groups, overrides)
+
+ if require_enabled and not enabled(conf):
+ raise ValueError('OS is not enabled')
+ return conf
def load_test_config(path):
- """
- load a test config file by either abs path or rel path
- """
+ """Load a test config file by either abs path or rel path."""
return merge_config(c_util.read_conf(TESTCASE_CONF)['base_test_data'],
c_util.read_conf(name_to_path(path)))
+def list_feature_flags():
+ """List all supported feature flags."""
+ feature_conf = get(c_util.read_conf(RELEASES_CONF), 'features')
+ return feature_conf.get('all', [])
+
+
def list_enabled_platforms():
- """
- list all platforms enabled for testing
- """
- platforms = c_util.read_conf(PLATFORM_CONF).get('platforms')
- return [k for k, v in platforms.items() if v.get('enabled')]
+ """List all platforms enabled for testing."""
+ platforms = get(c_util.read_conf(PLATFORM_CONF), 'platforms')
+ return [k for k, v in platforms.items() if enabled(v)]
-def list_enabled_distros():
- """
- list all distros enabled for testing
- """
- releases = c_util.read_conf(RELEASES_CONF).get('releases')
- return [k for k, v in releases.items() if v.get('enabled')]
+def list_enabled_distros(platforms):
+ """List all distros enabled for testing on specified platforms."""
+ def platform_has_enabled(config):
+ """List if platform is enabled."""
+ return any(enabled(merge_config(get(config, 'default'),
+ get(config, platform)))
+ for platform in platforms)
+
+ releases = get(c_util.read_conf(RELEASES_CONF), 'releases')
+ return [k for k, v in releases.items() if platform_has_enabled(v)]
def list_test_configs():
- """
- list all available test config files by abspath
- """
+ """List all available test config files by abspath."""
return [os.path.abspath(f) for f in
glob.glob(os.sep.join((TEST_CONF_DIR, '*', '*.yaml')))]
+
+ENABLED_PLATFORMS = sorted(list_enabled_platforms())
+ENABLED_DISTROS = sorted(list_enabled_distros(ENABLED_PLATFORMS))
+
# vi: ts=4 expandtab
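
Most of the feature-flag plumbing above leans on merge_config(), which merges nested dictionaries so that release- and platform-specific settings override defaults without discarding sibling keys. The snippet below exercises that exact function with hypothetical data to show the behaviour.

    # merge_config() as defined above, run standalone on hypothetical data:
    # keys in 'override' win, and nested dicts are merged, not replaced.
    def merge_config(base, override):
        res = base.copy()
        res.update(override)
        res.update({k: merge_config(base.get(k, {}), v)
                    for k, v in override.items() if isinstance(v, dict)})
        return res


    default = {'enabled': True, 'features': {'apt': True, 'snap': False}}
    release = {'features': {'snap': True}}
    print(merge_config(default, release))
    # {'enabled': True, 'features': {'apt': True, 'snap': True}}
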
diff --git a/tests/cloud_tests/configs/bugs/lp1628337.yaml b/tests/cloud_tests/configs/bugs/lp1628337.yaml
index 1d6bf483..e39b3cd8 100644
--- a/tests/cloud_tests/configs/bugs/lp1628337.yaml
+++ b/tests/cloud_tests/configs/bugs/lp1628337.yaml
@@ -1,6 +1,9 @@
#
# LP Bug 1628337: cloud-init tries to install NTP before even configuring the archives
#
+required_features:
+ - apt
+ - lsb_release
cloud_config: |
#cloud-config
ntp:
diff --git a/tests/cloud_tests/configs/examples/add_apt_repositories.yaml b/tests/cloud_tests/configs/examples/add_apt_repositories.yaml
index b8964357..4b8575f7 100644
--- a/tests/cloud_tests/configs/examples/add_apt_repositories.yaml
+++ b/tests/cloud_tests/configs/examples/add_apt_repositories.yaml
@@ -4,6 +4,8 @@
# 2016-11-17: Disabled as covered by module based tests
#
enabled: False
+required_features:
+ - apt
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_conf.yaml b/tests/cloud_tests/configs/modules/apt_configure_conf.yaml
index 163ae3fc..de453000 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_conf.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_conf.yaml
@@ -1,6 +1,8 @@
#
# Provide a configuration for APT
#
+required_features:
+ - apt
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml b/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml
index 73e4a538..98800673 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml
@@ -1,6 +1,9 @@
#
# Disables everything in sources.list
#
+required_features:
+ - apt
+ - lsb_release
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_primary.yaml b/tests/cloud_tests/configs/modules/apt_configure_primary.yaml
index 2ec30ca1..41bcf2fd 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_primary.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_primary.yaml
@@ -1,6 +1,9 @@
#
# Setup a custome primary sources.list
#
+required_features:
+ - apt
+ - apt_src_cont
cloud_config: |
#cloud-config
apt:
@@ -16,4 +19,8 @@ collect_scripts:
#!/bin/bash
grep -v '^#' /etc/apt/sources.list | sed '/^\s*$/d' | grep -c gtlib.gatech.edu
+ sources.list: |
+ #!/bin/bash
+ cat /etc/apt/sources.list
+
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml b/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml
index e7371305..be6c6f81 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml
@@ -1,6 +1,8 @@
#
# Set apt proxy
#
+required_features:
+ - apt
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_security.yaml b/tests/cloud_tests/configs/modules/apt_configure_security.yaml
index f6a2c828..83dd51df 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_security.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_security.yaml
@@ -1,6 +1,9 @@
#
# Add security to sources.list
#
+required_features:
+ - apt
+ - ubuntu_repos
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml b/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml
index e7568a6a..bde9398a 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml
@@ -1,6 +1,9 @@
#
# Add a sources.list entry with a given key (Debian Jessie)
#
+required_features:
+ - apt
+ - lsb_release
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml b/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml
index 1a4a238f..25088135 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml
@@ -1,12 +1,15 @@
#
# Add a sources.list entry with a key from a keyserver
#
+required_features:
+ - apt
+ - lsb_release
cloud_config: |
#cloud-config
apt:
sources:
source1:
- keyid: 0165013E
+ keyid: 1FF0D8535EF7E719E5C81B9C083D06FBE4D304DF
keyserver: keyserver.ubuntu.com
source: "deb http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu $RELEASE main"
collect_scripts:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml b/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml
index 057fc72c..143cb080 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml
@@ -1,6 +1,9 @@
#
# Generate a sources.list
#
+required_features:
+ - apt
+ - lsb_release
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml b/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml
index dee9dc70..9efdae52 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml
+++ b/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml
@@ -1,6 +1,12 @@
#
# Add a PPA to source.list
#
+# NOTE: on older ubuntu releases the sources file added is named
+# 'curtin-dev-test-archive-trusty', without 'ubuntu' in the middle
+required_features:
+ - apt
+ - ppa
+ - ppa_file_name
cloud_config: |
#cloud-config
apt:
@@ -16,5 +22,8 @@ collect_scripts:
apt-key: |
#!/bin/bash
apt-key finger
+ sources_full: |
+ #!/bin/bash
+ cat /etc/apt/sources.list
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml b/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml
index 5fa0cee9..bd9b5d08 100644
--- a/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml
+++ b/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml
@@ -1,6 +1,8 @@
#
# Disable apt pipelining value
#
+required_features:
+ - apt
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml b/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml
index 87d183e7..cbed3ba3 100644
--- a/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml
+++ b/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml
@@ -1,6 +1,8 @@
#
# Set apt pipelining value to OS
#
+required_features:
+ - apt
cloud_config: |
#cloud-config
apt:
diff --git a/tests/cloud_tests/configs/modules/byobu.yaml b/tests/cloud_tests/configs/modules/byobu.yaml
index fd648c77..a9aa1f3f 100644
--- a/tests/cloud_tests/configs/modules/byobu.yaml
+++ b/tests/cloud_tests/configs/modules/byobu.yaml
@@ -1,6 +1,8 @@
#
# Install and enable byobu system wide and default user
#
+required_features:
+ - byobu
cloud_config: |
#cloud-config
byobu_by_default: enable
diff --git a/tests/cloud_tests/configs/modules/keys_to_console.yaml b/tests/cloud_tests/configs/modules/keys_to_console.yaml
index a90e42c1..5d86e739 100644
--- a/tests/cloud_tests/configs/modules/keys_to_console.yaml
+++ b/tests/cloud_tests/configs/modules/keys_to_console.yaml
@@ -1,6 +1,8 @@
#
# Hide printing of ssh key and fingerprints for specific keys
#
+required_features:
+ - syslog
cloud_config: |
#cloud-config
ssh_fp_console_blacklist: [ssh-dss, ssh-dsa, ecdsa-sha2-nistp256]
diff --git a/tests/cloud_tests/configs/modules/landscape.yaml b/tests/cloud_tests/configs/modules/landscape.yaml
index e6f4955a..ed2c37c4 100644
--- a/tests/cloud_tests/configs/modules/landscape.yaml
+++ b/tests/cloud_tests/configs/modules/landscape.yaml
@@ -4,6 +4,8 @@
# 2016-11-17: Disabled due to this not working
#
enabled: false
+required_features:
+ - landscape
cloud_config: |
#cloud-conifg
landscape:
diff --git a/tests/cloud_tests/configs/modules/locale.yaml b/tests/cloud_tests/configs/modules/locale.yaml
index af5ad636..e01518a1 100644
--- a/tests/cloud_tests/configs/modules/locale.yaml
+++ b/tests/cloud_tests/configs/modules/locale.yaml
@@ -1,6 +1,9 @@
#
# Set locale to non-default option and verify
#
+required_features:
+ - engb_locale
+ - locale_gen
cloud_config: |
#cloud-config
locale: en_GB.UTF-8
diff --git a/tests/cloud_tests/configs/modules/lxd_bridge.yaml b/tests/cloud_tests/configs/modules/lxd_bridge.yaml
index 568bb700..e6b7e76a 100644
--- a/tests/cloud_tests/configs/modules/lxd_bridge.yaml
+++ b/tests/cloud_tests/configs/modules/lxd_bridge.yaml
@@ -1,6 +1,8 @@
#
# LXD configured with directory backend and IPv4 bridge
#
+required_features:
+ - lxd
cloud_config: |
#cloud-config
lxd:
diff --git a/tests/cloud_tests/configs/modules/lxd_dir.yaml b/tests/cloud_tests/configs/modules/lxd_dir.yaml
index 99b92195..f93a3fa7 100644
--- a/tests/cloud_tests/configs/modules/lxd_dir.yaml
+++ b/tests/cloud_tests/configs/modules/lxd_dir.yaml
@@ -1,6 +1,8 @@
#
# LXD configured with directory backend
#
+required_features:
+ - lxd
cloud_config: |
#cloud-config
lxd:
diff --git a/tests/cloud_tests/configs/modules/ntp.yaml b/tests/cloud_tests/configs/modules/ntp.yaml
index d0941578..fbef431b 100644
--- a/tests/cloud_tests/configs/modules/ntp.yaml
+++ b/tests/cloud_tests/configs/modules/ntp.yaml
@@ -7,14 +7,15 @@ cloud_config: |
pools: {}
servers: {}
collect_scripts:
- ntp_installed_empty: |
+ ntp_installed: |
#!/bin/bash
- dpkg -l | grep ntp | wc -l
+ ntpd --version > /dev/null 2>&1
+ echo $?
ntp_conf_dist_empty: |
#!/bin/bash
ls /etc/ntp.conf.dist | wc -l
- ntp_conf_empty: |
+ ntp_conf_pool_list: |
#!/bin/bash
- grep '^pool' /etc/ntp.conf
+ grep 'pool.ntp.org' /etc/ntp.conf | grep -v ^#
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/configs/modules/ntp_pools.yaml b/tests/cloud_tests/configs/modules/ntp_pools.yaml
index e040cc32..3a93faa2 100644
--- a/tests/cloud_tests/configs/modules/ntp_pools.yaml
+++ b/tests/cloud_tests/configs/modules/ntp_pools.yaml
@@ -1,6 +1,11 @@
#
# NTP config using specific pools
#
+# NOTE: lsb_release listed here because with recent cloud-init deb with
+# (LP: 1628337) resolved, cloud-init will attempt to configure archives.
+# this fails without lsb_release as UNAVAILABLE is used for $RELEASE
+required_features:
+ - lsb_release
cloud_config: |
#cloud-config
ntp:
@@ -11,7 +16,8 @@ cloud_config: |
collect_scripts:
ntp_installed_pools: |
#!/bin/bash
- dpkg -l | grep ntp | wc -l
+ ntpd --version > /dev/null 2>&1
+ echo $?
ntp_conf_dist_pools: |
#!/bin/bash
ls /etc/ntp.conf.dist | wc -l
diff --git a/tests/cloud_tests/configs/modules/ntp_servers.yaml b/tests/cloud_tests/configs/modules/ntp_servers.yaml
index e0564a03..d59d45a8 100644
--- a/tests/cloud_tests/configs/modules/ntp_servers.yaml
+++ b/tests/cloud_tests/configs/modules/ntp_servers.yaml
@@ -1,6 +1,8 @@
#
# NTP config using specific servers
#
+required_features:
+ - lsb_release
cloud_config: |
#cloud-config
ntp:
@@ -10,7 +12,8 @@ cloud_config: |
collect_scripts:
ntp_installed_servers: |
#!/bin/sh
- dpkg -l | grep -c ntp
+ ntpd --version > /dev/null 2>&1
+ echo $?
ntp_conf_dist_servers: |
#!/bin/sh
cat /etc/ntp.conf.dist | wc -l
diff --git a/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml b/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml
index d027d540..71d24b83 100644
--- a/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml
+++ b/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml
@@ -1,6 +1,17 @@
#
# Update/upgrade via apt and then install a pair of packages
#
+# NOTE: this should not require apt feature, use 'which' rather than 'dpkg -l'
+# NOTE: the testcase for this looks for the command in history.log as
+# /usr/bin/apt-get..., which is not how it always appears. it should
+# instead look for just apt-get...
+# NOTE: this testcase should not require 'apt_up_out', and should look for a
+# call to 'apt-get upgrade' or 'apt-get dist-upgrade' in cloud-init.log
+# rather than 'Calculating upgrade...' in output
+required_features:
+ - apt
+ - apt_hist_fmt
+ - apt_up_out
cloud_config: |
#cloud-config
packages:
diff --git a/tests/cloud_tests/configs/modules/set_hostname.yaml b/tests/cloud_tests/configs/modules/set_hostname.yaml
index 5aae1506..c96344cf 100644
--- a/tests/cloud_tests/configs/modules/set_hostname.yaml
+++ b/tests/cloud_tests/configs/modules/set_hostname.yaml
@@ -1,6 +1,8 @@
#
# Set the hostname and update /etc/hosts
#
+required_features:
+ - hostname
cloud_config: |
#cloud-config
hostname: myhostname
diff --git a/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml b/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml
index 0014c197..daf75931 100644
--- a/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml
+++ b/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml
@@ -1,6 +1,8 @@
#
# Set the hostname and update /etc/hosts
#
+required_features:
+ - hostname
cloud_config: |
#cloud-config
manage_etc_hosts: true
diff --git a/tests/cloud_tests/configs/modules/set_password.yaml b/tests/cloud_tests/configs/modules/set_password.yaml
index 8fa46d9f..04d7c58a 100644
--- a/tests/cloud_tests/configs/modules/set_password.yaml
+++ b/tests/cloud_tests/configs/modules/set_password.yaml
@@ -1,6 +1,8 @@
#
# Set password of default user
#
+required_features:
+ - ubuntu_user
cloud_config: |
#cloud-config
password: password
diff --git a/tests/cloud_tests/configs/modules/set_password_expire.yaml b/tests/cloud_tests/configs/modules/set_password_expire.yaml
index 926731f0..789604b0 100644
--- a/tests/cloud_tests/configs/modules/set_password_expire.yaml
+++ b/tests/cloud_tests/configs/modules/set_password_expire.yaml
@@ -1,6 +1,8 @@
#
# Expire password for all users
#
+required_features:
+ - sshd
cloud_config: |
#cloud-config
chpasswd: { expire: True }
diff --git a/tests/cloud_tests/configs/modules/snappy.yaml b/tests/cloud_tests/configs/modules/snappy.yaml
index 0e7dc852..43f93295 100644
--- a/tests/cloud_tests/configs/modules/snappy.yaml
+++ b/tests/cloud_tests/configs/modules/snappy.yaml
@@ -1,6 +1,8 @@
#
# Install snappy
#
+required_features:
+ - snap
cloud_config: |
#cloud-config
snappy:
diff --git a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml b/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml
index 33943bdd..746653ec 100644
--- a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml
+++ b/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml
@@ -1,6 +1,8 @@
#
# Disable fingerprint printing
#
+required_features:
+ - syslog
cloud_config: |
#cloud-config
ssh_genkeytypes: []
diff --git a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml b/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml
index 4c970778..9f5dc34a 100644
--- a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml
+++ b/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml
@@ -1,6 +1,11 @@
#
# Print auth keys with different hash than md5
#
+# NOTE: testcase checks for '256 SHA256:.*(ECDSA)' on output line on trusty
+# this fails as line in output reads '256:.*(ECDSA)'
+required_features:
+ - syslog
+ - ssh_key_fmt
cloud_config: |
#cloud-config
ssh_genkeytypes:
diff --git a/tests/cloud_tests/configs/modules/ssh_import_id.yaml b/tests/cloud_tests/configs/modules/ssh_import_id.yaml
index 6e5a1635..b62d3f69 100644
--- a/tests/cloud_tests/configs/modules/ssh_import_id.yaml
+++ b/tests/cloud_tests/configs/modules/ssh_import_id.yaml
@@ -1,6 +1,9 @@
#
# Import a user's ssh key via gh or lp
#
+required_features:
+ - ubuntu_user
+ - sudo
cloud_config: |
#cloud-config
ssh_import_id:
diff --git a/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml b/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml
index 637d7835..659fd939 100644
--- a/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml
+++ b/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml
@@ -1,6 +1,8 @@
#
# SSH keys generated using cloud-init
#
+required_features:
+ - ubuntu_user
cloud_config: |
#cloud-config
ssh_genkeytypes:
diff --git a/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml b/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml
index 25df6452..5ceb3623 100644
--- a/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml
+++ b/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml
@@ -2,6 +2,9 @@
# SSH keys provided via cloud config
#
enabled: False
+required_features:
+ - ubuntu_user
+ - sudo
cloud_config: |
#cloud-config
disable_root: false
diff --git a/tests/cloud_tests/configs/modules/timezone.yaml b/tests/cloud_tests/configs/modules/timezone.yaml
index 8c96ed47..5112aa9f 100644
--- a/tests/cloud_tests/configs/modules/timezone.yaml
+++ b/tests/cloud_tests/configs/modules/timezone.yaml
@@ -1,6 +1,8 @@
#
# Set system timezone
#
+required_features:
+ - daylight_time
cloud_config: |
#cloud-config
timezone: US/Aleutian
diff --git a/tests/cloud_tests/configs/modules/user_groups.yaml b/tests/cloud_tests/configs/modules/user_groups.yaml
index 92655958..71cc9da3 100644
--- a/tests/cloud_tests/configs/modules/user_groups.yaml
+++ b/tests/cloud_tests/configs/modules/user_groups.yaml
@@ -1,6 +1,8 @@
#
# Create groups and users with various options
#
+required_features:
+ - ubuntu_user
cloud_config: |
#cloud-config
# Add groups to the system
diff --git a/tests/cloud_tests/configs/modules/write_files.yaml b/tests/cloud_tests/configs/modules/write_files.yaml
index 4bb2991a..ce936b7b 100644
--- a/tests/cloud_tests/configs/modules/write_files.yaml
+++ b/tests/cloud_tests/configs/modules/write_files.yaml
@@ -1,6 +1,10 @@
#
# Write various file types
#
+# NOTE: on trusty 'file' has an output formatting error for binary files and
+# has 2 spaces in 'LSB executable', which causes a failure here
+required_features:
+ - no_file_fmt_e
cloud_config: |
#cloud-config
write_files:
diff --git a/tests/cloud_tests/images/__init__.py b/tests/cloud_tests/images/__init__.py
index b27d6931..106c59f3 100644
--- a/tests/cloud_tests/images/__init__.py
+++ b/tests/cloud_tests/images/__init__.py
@@ -1,11 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
def get_image(platform, config):
- """
- get image from platform object using os_name, looking up img_conf in main
- config file
- """
+ """Get image from platform object using os_name."""
return platform.get_image(config)
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/images/base.py b/tests/cloud_tests/images/base.py
index 394b11ff..0a1e0563 100644
--- a/tests/cloud_tests/images/base.py
+++ b/tests/cloud_tests/images/base.py
@@ -1,65 +1,69 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base class for images."""
+
class Image(object):
- """
- Base class for images
- """
+ """Base class for images."""
+
platform_name = None
- def __init__(self, name, config, platform):
- """
- setup
+ def __init__(self, platform, config):
+ """Set up image.
+
+ @param platform: platform object
+ @param config: image configuration
"""
- self.name = name
- self.config = config
self.platform = platform
+ self.config = config
def __str__(self):
- """
- a brief description of the image
- """
+ """A brief description of the image."""
return '-'.join((self.properties['os'], self.properties['release']))
@property
def properties(self):
- """
- {} containing: 'arch', 'os', 'version', 'release'
- """
+ """{} containing: 'arch', 'os', 'version', 'release'."""
raise NotImplementedError
- # FIXME: instead of having execute and push_file and other instance methods
- # here which pass through to a hidden instance, it might be better
- # to expose an instance that the image can be modified through
- def execute(self, command, stdin=None, stdout=None, stderr=None, env={}):
+ @property
+ def features(self):
+ """Feature flags supported by this image.
+
+ @return_value: list of feature names
"""
- execute command in image, modifying image
+ return [k for k, v in self.config.get('features', {}).items() if v]
+
+ @property
+ def setup_overrides(self):
+ """Setup options that need to be overridden for the image.
+
+ @return_value: dictionary to update args with
"""
+        # NOTE: more sophisticated options may be required at some point
+ return self.config.get('setup_overrides', {})
+
+ def execute(self, *args, **kwargs):
+ """Execute command in image, modifying image."""
raise NotImplementedError
def push_file(self, local_path, remote_path):
- """
- copy file at 'local_path' to instance at 'remote_path', modifying image
- """
+ """Copy file at 'local_path' to instance at 'remote_path'."""
raise NotImplementedError
- def run_script(self, script):
- """
- run script in image, modifying image
- return_value: script output
+ def run_script(self, *args, **kwargs):
+ """Run script in image, modifying image.
+
+ @return_value: script output
"""
raise NotImplementedError
def snapshot(self):
- """
- create snapshot of image, block until done
- """
+ """Create snapshot of image, block until done."""
raise NotImplementedError
def destroy(self):
- """
- clean up data associated with image
- """
+ """Clean up data associated with image."""
pass
# vi: ts=4 expandtab
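
The two properties added above are what the rest of the suite consumes: 'features' reports every flag set true in the image's config, and 'setup_overrides' hands back a dict that setup_image.py (later in this patch) merges into the cmdline args before image setup. A minimal standalone sketch of those lookups, using a hypothetical config dict:

    config = {
        'features': {'apt': True, 'snap': False},     # per-image feature flags
        'setup_overrides': {'upgrade': True},         # setup args to override
    }
    features = [k for k, v in config.get('features', {}).items() if v]
    setup_overrides = config.get('setup_overrides', {})
    assert features == ['apt']                        # only flags set true are reported
    assert setup_overrides == {'upgrade': True}
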
diff --git a/tests/cloud_tests/images/lxd.py b/tests/cloud_tests/images/lxd.py
index 7a416141..fd4e93c2 100644
--- a/tests/cloud_tests/images/lxd.py
+++ b/tests/cloud_tests/images/lxd.py
@@ -1,43 +1,67 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""LXD Image Base Class."""
+
+import os
+import shutil
+import tempfile
+
+from cloudinit import util as c_util
from tests.cloud_tests.images import base
from tests.cloud_tests.snapshots import lxd as lxd_snapshot
+from tests.cloud_tests import util
class LXDImage(base.Image):
- """
- LXD backed image
- """
+ """LXD backed image."""
+
platform_name = "lxd"
- def __init__(self, name, config, platform, pylxd_image):
- """
- setup
+ def __init__(self, platform, config, pylxd_image):
+ """Set up image.
+
+ @param platform: platform object
+ @param config: image configuration
"""
- self.platform = platform
- self._pylxd_image = pylxd_image
+ self.modified = False
self._instance = None
- super(LXDImage, self).__init__(name, config, platform)
+ self._pylxd_image = None
+ self.pylxd_image = pylxd_image
+ super(LXDImage, self).__init__(platform, config)
@property
def pylxd_image(self):
- self._pylxd_image.sync()
+        """Return the pylxd image, syncing its state first if set."""
+ if self._pylxd_image:
+ self._pylxd_image.sync()
return self._pylxd_image
+ @pylxd_image.setter
+ def pylxd_image(self, pylxd_image):
+ if self._instance:
+ self._instance.destroy()
+ self._instance = None
+ if (self._pylxd_image and
+ (self._pylxd_image is not pylxd_image) and
+ (not self.config.get('cache_base_image') or self.modified)):
+ self._pylxd_image.delete(wait=True)
+ self.modified = False
+ self._pylxd_image = pylxd_image
+
@property
def instance(self):
+        """Return a running container used to modify this image."""
if not self._instance:
self._instance = self.platform.launch_container(
- image=self.pylxd_image.fingerprint,
- image_desc=str(self), use_desc='image-modification')
- self._instance.start(wait=True, wait_time=self.config.get('timeout'))
+ self.properties, self.config, self.features,
+ use_desc='image-modification', image_desc=str(self),
+ image=self.pylxd_image.fingerprint)
+ self._instance.start()
return self._instance
@property
def properties(self):
- """
- {} containing: 'arch', 'os', 'version', 'release'
- """
+ """{} containing: 'arch', 'os', 'version', 'release'."""
properties = self.pylxd_image.properties
return {
'arch': properties.get('architecture'),
@@ -46,47 +70,121 @@ class LXDImage(base.Image):
'release': properties.get('release'),
}
- def execute(self, *args, **kwargs):
+ def export_image(self, output_dir):
+ """Export image from lxd image store to (split) tarball on disk.
+
+ @param output_dir: dir to store tarballs in
+ @return_value: tuple of path to metadata tarball and rootfs tarball
"""
- execute command in image, modifying image
+ # pylxd's image export feature doesn't do split exports, so use cmdline
+ c_util.subp(['lxc', 'image', 'export', self.pylxd_image.fingerprint,
+ output_dir], capture=True)
+ tarballs = [p for p in os.listdir(output_dir) if p.endswith('tar.xz')]
+ metadata = os.path.join(
+ output_dir, next(p for p in tarballs if p.startswith('meta-')))
+ rootfs = os.path.join(
+ output_dir, next(p for p in tarballs if not p.startswith('meta-')))
+ return (metadata, rootfs)
+
+ def import_image(self, metadata, rootfs):
+ """Import image to lxd image store from (split) tarball on disk.
+
+ Note, this will replace and delete the current pylxd_image
+
+ @param metadata: metadata tarball
+ @param rootfs: rootfs tarball
+ @return_value: imported image fingerprint
+ """
+ alias = util.gen_instance_name(
+ image_desc=str(self), use_desc='update-metadata')
+ c_util.subp(['lxc', 'image', 'import', metadata, rootfs,
+ '--alias', alias], capture=True)
+ self.pylxd_image = self.platform.query_image_by_alias(alias)
+ return self.pylxd_image.fingerprint
+
+ def update_templates(self, template_config, template_data):
+ """Update the image's template configuration.
+
+ Note, this will replace and delete the current pylxd_image
+
+ @param template_config: config overrides for template metadata
+ @param template_data: template data to place into templates/
"""
+ # set up tmp files
+ export_dir = tempfile.mkdtemp(prefix='cloud_test_util_')
+ extract_dir = tempfile.mkdtemp(prefix='cloud_test_util_')
+ new_metadata = os.path.join(export_dir, 'new-meta.tar.xz')
+ metadata_yaml = os.path.join(extract_dir, 'metadata.yaml')
+ template_dir = os.path.join(extract_dir, 'templates')
+
+ try:
+ # extract old data
+ (metadata, rootfs) = self.export_image(export_dir)
+ shutil.unpack_archive(metadata, extract_dir)
+
+ # update metadata
+ metadata = c_util.read_conf(metadata_yaml)
+ templates = metadata.get('templates', {})
+ templates.update(template_config)
+ metadata['templates'] = templates
+ util.yaml_dump(metadata, metadata_yaml)
+
+ # write out template files
+ for name, content in template_data.items():
+ path = os.path.join(template_dir, name)
+ c_util.write_file(path, content)
+
+ # store new data, mark new image as modified
+ util.flat_tar(new_metadata, extract_dir)
+ self.import_image(new_metadata, rootfs)
+ self.modified = True
+
+ finally:
+ # remove tmpfiles
+ shutil.rmtree(export_dir)
+ shutil.rmtree(extract_dir)
+
+ def execute(self, *args, **kwargs):
+ """Execute command in image, modifying image."""
return self.instance.execute(*args, **kwargs)
def push_file(self, local_path, remote_path):
- """
- copy file at 'local_path' to instance at 'remote_path', modifying image
- """
+ """Copy file at 'local_path' to instance at 'remote_path'."""
return self.instance.push_file(local_path, remote_path)
- def run_script(self, script):
- """
- run script in image, modifying image
- return_value: script output
+ def run_script(self, *args, **kwargs):
+ """Run script in image, modifying image.
+
+ @return_value: script output
"""
- return self.instance.run_script(script)
+ return self.instance.run_script(*args, **kwargs)
def snapshot(self):
- """
- create snapshot of image, block until done
- """
- # clone current instance, start and freeze clone
+ """Create snapshot of image, block until done."""
+ # get empty user data to pass in to instance
+ # if overrides for user data provided, use them
+ empty_userdata = util.update_user_data(
+ {}, self.config.get('user_data_overrides', {}))
+ conf = {'user.user-data': empty_userdata}
+ # clone current instance
instance = self.platform.launch_container(
+ self.properties, self.config, self.features,
container=self.instance.name, image_desc=str(self),
- use_desc='snapshot')
- instance.start(wait=True, wait_time=self.config.get('timeout'))
+ use_desc='snapshot', container_config=conf)
+ # wait for cloud-init before boot_clean_script is run to ensure
+ # /var/lib/cloud is removed cleanly
+ instance.start(wait=True, wait_for_cloud_init=True)
if self.config.get('boot_clean_script'):
instance.run_script(self.config.get('boot_clean_script'))
+ # freeze current instance and return snapshot
instance.freeze()
return lxd_snapshot.LXDSnapshot(
- self.properties, self.config, self.platform, instance)
+ self.platform, self.properties, self.config,
+ self.features, instance)
def destroy(self):
- """
- clean up data associated with image
- """
- if self._instance:
- self._instance.destroy()
- self.pylxd_image.delete(wait=True)
+ """Clean up data associated with image."""
+ self.pylxd_image = None
super(LXDImage, self).destroy()
# vi: ts=4 expandtab
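
export_image() leans on the naming convention of a split 'lxc image export': two tarballs land in the output directory and the metadata one carries a 'meta-' prefix, which is how the code above tells them apart. A condensed sketch of that pairing, with hypothetical file names:

    tarballs = ['meta-0a1b2c.tar.xz', '0a1b2c.tar.xz']    # hypothetical export output
    metadata = next(p for p in tarballs if p.startswith('meta-'))
    rootfs = next(p for p in tarballs if not p.startswith('meta-'))
    assert (metadata, rootfs) == ('meta-0a1b2c.tar.xz', '0a1b2c.tar.xz')
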
diff --git a/tests/cloud_tests/instances/__init__.py b/tests/cloud_tests/instances/__init__.py
index 85bea99f..fc2e9cbc 100644
--- a/tests/cloud_tests/instances/__init__.py
+++ b/tests/cloud_tests/instances/__init__.py
@@ -1,10 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
def get_instance(snapshot, *args, **kwargs):
- """
- get instance from snapshot
- """
+ """Get instance from snapshot."""
return snapshot.launch(*args, **kwargs)
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/instances/base.py b/tests/cloud_tests/instances/base.py
index 9559d286..959e9cce 100644
--- a/tests/cloud_tests/instances/base.py
+++ b/tests/cloud_tests/instances/base.py
@@ -1,120 +1,148 @@
# This file is part of cloud-init. See LICENSE file for license information.
-import os
-import uuid
+"""Base instance."""
class Instance(object):
- """
- Base instance object
- """
+ """Base instance object."""
+
platform_name = None
- def __init__(self, name):
- """
- setup
+ def __init__(self, platform, name, properties, config, features):
+ """Set up instance.
+
+ @param platform: platform object
+ @param name: hostname of instance
+ @param properties: image properties
+ @param config: image config
+ @param features: supported feature flags
"""
+ self.platform = platform
self.name = name
+ self.properties = properties
+ self.config = config
+ self.features = features
- def execute(self, command, stdin=None, stdout=None, stderr=None, env={}):
- """
- command: the command to execute as root inside the image
- stdin, stderr, stdout: file handles
- env: environment variables
+ def execute(self, command, stdout=None, stderr=None, env={},
+ rcs=None, description=None):
+ """Execute command in instance, recording output, error and exit code.
- Execute assumes functional networking and execution as root with the
+ Assumes functional networking and execution as root with the
target filesystem being available at /.
- return_value: tuple containing stdout data, stderr data, exit code
+ @param command: the command to execute as root inside the image
+ @param stdout, stderr: file handles to write output and error to
+ @param env: environment variables
+ @param rcs: allowed return codes from command
+ @param description: purpose of command
+ @return_value: tuple containing stdout data, stderr data, exit code
"""
raise NotImplementedError
- def read_data(self, remote_path, encode=False):
- """
- read_data from instance filesystem
- remote_path: path in instance
- decode: return as string
- return_value: data as str or bytes
+ def read_data(self, remote_path, decode=False):
+ """Read data from instance filesystem.
+
+ @param remote_path: path in instance
+ @param decode: return as string
+ @return_value: data as str or bytes
"""
raise NotImplementedError
def write_data(self, remote_path, data):
- """
- write data to instance filesystem
- remote_path: path in instance
- data: data to write, either str or bytes
+ """Write data to instance filesystem.
+
+ @param remote_path: path in instance
+ @param data: data to write, either str or bytes
"""
raise NotImplementedError
def pull_file(self, remote_path, local_path):
- """
- copy file at 'remote_path', from instance to 'local_path'
+ """Copy file at 'remote_path', from instance to 'local_path'.
+
+ @param remote_path: path on remote instance
+ @param local_path: path on local instance
"""
with open(local_path, 'wb') as fp:
- fp.write(self.read_data(remote_path), encode=True)
+ fp.write(self.read_data(remote_path))
def push_file(self, local_path, remote_path):
- """
- copy file at 'local_path' to instance at 'remote_path'
+ """Copy file at 'local_path' to instance at 'remote_path'.
+
+ @param local_path: path on local instance
+ @param remote_path: path on remote instance
"""
with open(local_path, 'rb') as fp:
self.write_data(remote_path, fp.read())
- def run_script(self, script):
+ def run_script(self, script, rcs=None, description=None):
+ """Run script in target and return stdout.
+
+ @param script: script contents
+ @param rcs: allowed return codes from script
+ @param description: purpose of script
+ @return_value: stdout from script
"""
- run script in target and return stdout
+ script_path = self.tmpfile()
+ try:
+ self.write_data(script_path, script)
+ return self.execute(
+ ['/bin/bash', script_path], rcs=rcs, description=description)
+ finally:
+ self.execute(['rm', script_path], rcs=rcs)
+
+ def tmpfile(self):
+ """Get a tmp file in the target.
+
+ @return_value: path to new file in target
"""
- script_path = os.path.join('/tmp', str(uuid.uuid1()))
- self.write_data(script_path, script)
- (out, err, exit_code) = self.execute(['/bin/bash', script_path])
- return out
+ return self.execute(['mktemp'])[0].strip()
def console_log(self):
- """
- return_value: bytes of this instance’s console
+ """Instance console.
+
+ @return_value: bytes of this instance’s console
"""
raise NotImplementedError
def reboot(self, wait=True):
- """
- reboot instance
- """
+ """Reboot instance."""
raise NotImplementedError
def shutdown(self, wait=True):
- """
- shutdown instance
- """
+ """Shutdown instance."""
raise NotImplementedError
- def start(self, wait=True):
- """
- start instance
- """
+ def start(self, wait=True, wait_for_cloud_init=False):
+ """Start instance."""
raise NotImplementedError
def destroy(self):
- """
- clean up instance
- """
+ """Clean up instance."""
pass
- def _wait_for_cloud_init(self, wait_time):
- """
- wait until system has fully booted and cloud-init has finished
+ def _wait_for_system(self, wait_for_cloud_init):
+ """Wait until system has fully booted and cloud-init has finished.
+
+        @param wait_for_cloud_init: also wait for cloud-init to complete
+        @return_value: None, may raise OSError if boot_timeout is exceeded
"""
- if not wait_time:
- return
-
- found_msg = 'found'
- cmd = ('for ((i=0;i<{wait};i++)); do [ -f "{file}" ] && '
- '{{ echo "{msg}";break; }} || sleep 1; done').format(
- file='/run/cloud-init/result.json',
- wait=wait_time, msg=found_msg)
-
- (out, err, exit) = self.execute(['/bin/bash', '-c', cmd])
- if out.strip() != found_msg:
- raise OSError('timeout: after {}s, cloud-init has not started'
- .format(wait_time))
+ def clean_test(test):
+            """Clean formatting for a system-ready test script."""
+ return ' '.join(l for l in test.strip().splitlines()
+ if not l.lstrip().startswith('#'))
+
+ time = self.config['boot_timeout']
+ tests = [self.config['system_ready_script']]
+ if wait_for_cloud_init:
+ tests.append(self.config['cloud_init_ready_script'])
+
+ formatted_tests = ' && '.join(clean_test(t) for t in tests)
+ test_cmd = ('for ((i=0;i<{time};i++)); do {test} && exit 0; sleep 1; '
+ 'done; exit 1;').format(time=time, test=formatted_tests)
+ cmd = ['/bin/bash', '-c', test_cmd]
+
+ if self.execute(cmd, rcs=(0, 1))[-1] != 0:
+ raise OSError('timeout: after {}s system not started'.format(time))
+
# vi: ts=4 expandtab
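
_wait_for_system() no longer hard-codes a check for /run/cloud-init/result.json; it builds a single polling command out of the per-release 'system_ready_script' and, when requested, 'cloud_init_ready_script' entries (defined in releases.yaml later in this patch), after clean_test() strips comment lines and joins each script onto one line. With the default boot_timeout of 120 and the default system_ready_script, the generated loop boils down to the following; the cloud_init_ready_script is appended with ' && ' when wait_for_cloud_init is set:

    time = 120
    test = ("[ $(systemctl is-system-running) = 'running' -o "
            "$(systemctl is-system-running) = 'degraded' ]")
    test_cmd = ('for ((i=0;i<{time};i++)); do {test} && exit 0; sleep 1; '
                'done; exit 1;').format(time=time, test=test)
    # executed as ['/bin/bash', '-c', test_cmd]; an exit code of 1 after the
    # loop means the instance never became ready within boot_timeout seconds
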
diff --git a/tests/cloud_tests/instances/lxd.py b/tests/cloud_tests/instances/lxd.py
index f0aa1214..b9c2cc6b 100644
--- a/tests/cloud_tests/instances/lxd.py
+++ b/tests/cloud_tests/instances/lxd.py
@@ -1,115 +1,135 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base LXD instance."""
+
from tests.cloud_tests.instances import base
+from tests.cloud_tests import util
class LXDInstance(base.Instance):
- """
- LXD container backed instance
- """
+ """LXD container backed instance."""
+
platform_name = "lxd"
- def __init__(self, name, platform, pylxd_container):
- """
- setup
+ def __init__(self, platform, name, properties, config, features,
+ pylxd_container):
+ """Set up instance.
+
+ @param platform: platform object
+ @param name: hostname of instance
+ @param properties: image properties
+ @param config: image config
+ @param features: supported feature flags
"""
- self.platform = platform
self._pylxd_container = pylxd_container
- super(LXDInstance, self).__init__(name)
+ super(LXDInstance, self).__init__(
+ platform, name, properties, config, features)
@property
def pylxd_container(self):
+        """Return the pylxd container, syncing its state first."""
self._pylxd_container.sync()
return self._pylxd_container
- def execute(self, command, stdin=None, stdout=None, stderr=None, env={}):
- """
- command: the command to execute as root inside the image
- stdin, stderr, stdout: file handles
- env: environment variables
+ def execute(self, command, stdout=None, stderr=None, env={},
+ rcs=None, description=None):
+ """Execute command in instance, recording output, error and exit code.
- Execute assumes functional networking and execution as root with the
+ Assumes functional networking and execution as root with the
target filesystem being available at /.
- return_value: tuple containing stdout data, stderr data, exit code
+ @param command: the command to execute as root inside the image
+        @param stdout: file handle to write output to
+        @param stderr: file handle to write error to
+ @param env: environment variables
+ @param rcs: allowed return codes from command
+ @param description: purpose of command
+ @return_value: tuple containing stdout data, stderr data, exit code
"""
- # TODO: the pylxd api handler for container.execute needs to be
- # extended to properly pass in stdin
- # TODO: the pylxd api handler for container.execute needs to be
- # extended to get the return code, for now just use 0
+ # ensure instance is running and execute the command
self.start()
- if stdin:
- raise NotImplementedError
res = self.pylxd_container.execute(command, environment=env)
- for (f, data) in (i for i in zip((stdout, stderr), res) if i[0]):
- f.write(data)
- return res + (0,)
+
+ # get out, exit and err from pylxd return
+ if hasattr(res, 'exit_code'):
+ # pylxd 2.2 returns ContainerExecuteResult, named tuple of
+ # (exit_code, out, err)
+ (exit, out, err) = res
+ else:
+ # pylxd 2.1.3 and earlier only return out and err, no exit
+ # LOG.warning('using pylxd version < 2.2')
+ (out, err) = res
+ exit = 0
+
+ # write data to file descriptors if needed
+ if stdout:
+ stdout.write(out)
+ if stderr:
+ stderr.write(err)
+
+ # if the command exited with a code not allowed in rcs, then fail
+ if exit not in (rcs if rcs else (0,)):
+ error_desc = ('Failed command to: {}'.format(description)
+ if description else None)
+ raise util.InTargetExecuteError(
+ out, err, exit, command, self.name, error_desc)
+
+ return (out, err, exit)
def read_data(self, remote_path, decode=False):
- """
- read data from instance filesystem
- remote_path: path in instance
- decode: return as string
- return_value: data as str or bytes
+ """Read data from instance filesystem.
+
+ @param remote_path: path in instance
+ @param decode: return as string
+ @return_value: data as str or bytes
"""
data = self.pylxd_container.files.get(remote_path)
return data.decode() if decode and isinstance(data, bytes) else data
def write_data(self, remote_path, data):
- """
- write data to instance filesystem
- remote_path: path in instance
- data: data to write, either str or bytes
+ """Write data to instance filesystem.
+
+ @param remote_path: path in instance
+ @param data: data to write, either str or bytes
"""
self.pylxd_container.files.put(remote_path, data)
def console_log(self):
- """
- return_value: bytes of this instance’s console
+ """Console log.
+
+ @return_value: bytes of this instance’s console
"""
raise NotImplementedError
def reboot(self, wait=True):
- """
- reboot instance
- """
+ """Reboot instance."""
self.shutdown(wait=wait)
self.start(wait=wait)
def shutdown(self, wait=True):
- """
- shutdown instance
- """
+ """Shutdown instance."""
if self.pylxd_container.status != 'Stopped':
self.pylxd_container.stop(wait=wait)
- def start(self, wait=True, wait_time=None):
- """
- start instance
- """
+ def start(self, wait=True, wait_for_cloud_init=False):
+ """Start instance."""
if self.pylxd_container.status != 'Running':
self.pylxd_container.start(wait=wait)
- if wait and isinstance(wait_time, int):
- self._wait_for_cloud_init(wait_time)
+ if wait:
+ self._wait_for_system(wait_for_cloud_init)
def freeze(self):
- """
- freeze instance
- """
+ """Freeze instance."""
if self.pylxd_container.status != 'Frozen':
self.pylxd_container.freeze(wait=True)
def unfreeze(self):
- """
- unfreeze instance
- """
+ """Unfreeze instance."""
if self.pylxd_container.status == 'Frozen':
self.pylxd_container.unfreeze(wait=True)
def destroy(self):
- """
- clean up instance
- """
+ """Clean up instance."""
self.unfreeze()
self.shutdown()
self.pylxd_container.delete(wait=True)
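
The reworked execute() has to cope with two pylxd APIs: 2.2 returns a ContainerExecuteResult named tuple of (exit_code, stdout, stderr), while 2.1.3 and earlier return only (stdout, stderr) with no exit status, which is why older versions are treated as having exited 0. The shim, condensed into a standalone helper for illustration:

    def normalize_execute_result(res):
        """Return (out, err, exit) regardless of pylxd version."""
        if hasattr(res, 'exit_code'):
            exit_code, out, err = res      # pylxd >= 2.2: ContainerExecuteResult
        else:
            out, err = res                 # pylxd <= 2.1.3: no exit code available
            exit_code = 0
        return out, err, exit_code
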
diff --git a/tests/cloud_tests/manage.py b/tests/cloud_tests/manage.py
index 5342612b..5f0cfd23 100644
--- a/tests/cloud_tests/manage.py
+++ b/tests/cloud_tests/manage.py
@@ -1,11 +1,15 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Create test cases automatically given a user_data script."""
+
+import os
+import textwrap
+
+from cloudinit import util as c_util
from tests.cloud_tests.config import VERIFY_EXT
from tests.cloud_tests import (config, util)
from tests.cloud_tests import TESTCASES_DIR
-import os
-import textwrap
_verifier_fmt = textwrap.dedent(
"""
@@ -35,29 +39,24 @@ _config_fmt = textwrap.dedent(
def write_testcase_config(args, fmt_args, testcase_file):
- """
- write the testcase config file
- """
+ """Write the testcase config file."""
testcase_config = {'enabled': args.enable, 'collect_scripts': {}}
if args.config:
testcase_config['cloud_config'] = args.config
fmt_args['config'] = util.yaml_format(testcase_config)
- util.write_file(testcase_file, _config_fmt.format(**fmt_args), omode='w')
+ c_util.write_file(testcase_file, _config_fmt.format(**fmt_args), omode='w')
def write_verifier(args, fmt_args, verifier_file):
- """
- write the verifier script
- """
+ """Write the verifier script."""
fmt_args['test_class'] = 'Test{}'.format(
- config.name_sanatize(fmt_args['test_name']).title())
- util.write_file(verifier_file, _verifier_fmt.format(**fmt_args), omode='w')
+ config.name_sanitize(fmt_args['test_name']).title())
+ c_util.write_file(verifier_file,
+ _verifier_fmt.format(**fmt_args), omode='w')
def create(args):
- """
- create a new testcase
- """
+ """Create a new testcase."""
(test_category, test_name) = args.name.split('/')
fmt_args = {'test_name': test_name, 'test_category': test_category,
'test_description': str(args.description)}
@@ -65,7 +64,7 @@ def create(args):
testcase_file = config.name_to_path(args.name)
verifier_file = os.path.join(
TESTCASES_DIR, test_category,
- config.name_sanatize(test_name) + VERIFY_EXT)
+ config.name_sanitize(test_name) + VERIFY_EXT)
write_testcase_config(args, fmt_args, testcase_file)
write_verifier(args, fmt_args, verifier_file)
diff --git a/tests/cloud_tests/platforms.yaml b/tests/cloud_tests/platforms.yaml
index 5972b32b..b91834ab 100644
--- a/tests/cloud_tests/platforms.yaml
+++ b/tests/cloud_tests/platforms.yaml
@@ -10,7 +10,55 @@ default_platform_config:
platforms:
lxd:
enabled: true
- get_image_timeout: 600
+ # overrides for image templates
+ template_overrides:
+ /var/lib/cloud/seed/nocloud-net/meta-data:
+ when:
+ - create
+ - copy
+ template: cloud-init-meta.tpl
+ /var/lib/cloud/seed/nocloud-net/network-config:
+ when:
+ - create
+ - copy
+ template: cloud-init-network.tpl
+ /var/lib/cloud/seed/nocloud-net/user-data:
+ when:
+ - create
+ - copy
+ template: cloud-init-user.tpl
+ properties:
+ default: |
+ #cloud-config
+ {}
+ /var/lib/cloud/seed/nocloud-net/vendor-data:
+ when:
+ - create
+ - copy
+ template: cloud-init-vendor.tpl
+ properties:
+ default: |
+ #cloud-config
+ {}
+ # overrides image template files
+ template_files:
+ cloud-init-meta.tpl: |
+ #cloud-config
+ instance-id: {{ container.name }}
+ local-hostname: {{ container.name }}
+ {{ config_get("user.meta-data", "") }}
+ cloud-init-network.tpl: |
+ {% if config_get("user.network-config", "") == "" %}version: 1
+ config:
+ - type: physical
+ name: eth0
+ subnets:
+ - type: {% if config_get("user.network_mode", "") == "link-local" %}manual{% else %}dhcp{% endif %}
+ control: auto{% else %}{{ config_get("user.network-config", "") }}{% endif %}
+ cloud-init-user.tpl: |
+ {{ config_get("user.user-data", properties.default) }}
+ cloud-init-vendor.tpl: |
+ {{ config_get("user.vendor-data", properties.default) }}
ec2: {}
azure: {}
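
The two sections added here exist because images from linuxcontainers.org ship without the nocloud seed (see the lxd defaults in releases.yaml below): when a release sets 'override_templates', LXDPlatform.get_image() feeds 'template_overrides' and 'template_files' to LXDImage.update_templates(), which merges the overrides into the 'templates' mapping of the image's metadata.yaml and writes the referenced .tpl files into templates/ inside the metadata tarball. Expressed as the Python structure update_templates() merges in, one entry from the config above looks like:

    template_config = {
        '/var/lib/cloud/seed/nocloud-net/meta-data': {
            'when': ['create', 'copy'],          # render on container create and copy
            'template': 'cloud-init-meta.tpl',   # file placed under templates/
        },
    }
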
diff --git a/tests/cloud_tests/platforms/__init__.py b/tests/cloud_tests/platforms/__init__.py
index f9f56035..443f6d44 100644
--- a/tests/cloud_tests/platforms/__init__.py
+++ b/tests/cloud_tests/platforms/__init__.py
@@ -1,5 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
from tests.cloud_tests.platforms import lxd
PLATFORMS = {
@@ -8,9 +10,7 @@ PLATFORMS = {
def get_platform(platform_name, config):
- """
- Get the platform object for 'platform_name' and init
- """
+ """Get the platform object for 'platform_name' and init."""
platform_cls = PLATFORMS.get(platform_name)
if not platform_cls:
raise ValueError('invalid platform name: {}'.format(platform_name))
diff --git a/tests/cloud_tests/platforms/base.py b/tests/cloud_tests/platforms/base.py
index 615e2e06..28975368 100644
--- a/tests/cloud_tests/platforms/base.py
+++ b/tests/cloud_tests/platforms/base.py
@@ -1,53 +1,27 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base platform class."""
+
class Platform(object):
- """
- Base class for platforms
- """
+ """Base class for platforms."""
+
platform_name = None
def __init__(self, config):
- """
- Set up platform
- """
+ """Set up platform."""
self.config = config
def get_image(self, img_conf):
- """
- Get image using 'img_conf', where img_conf is a dict containing all
- image configuration parameters
-
- in this dict there must be a 'platform_ident' key containing
- configuration for identifying each image on a per platform basis
-
- see implementations for get_image() for details about the contents
- of the platform's config entry
+ """Get image using specified image configuration.
- note: see 'releases' main_config.yaml for example entries
-
- img_conf: configuration for image
- return_value: cloud_tests.images instance
+ @param img_conf: configuration for image
+ @return_value: cloud_tests.images instance
"""
raise NotImplementedError
def destroy(self):
- """
- Clean up platform data
- """
+ """Clean up platform data."""
pass
- def _extract_img_platform_config(self, img_conf):
- """
- extract platform configuration for current platform from img_conf
- """
- platform_ident = img_conf.get('platform_ident')
- if not platform_ident:
- raise ValueError('invalid img_conf, missing \'platform_ident\'')
- ident = platform_ident.get(self.platform_name)
- if not ident:
- raise ValueError('img_conf: {} missing config for platform {}'
- .format(img_conf, self.platform_name))
- return ident
-
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/platforms/lxd.py b/tests/cloud_tests/platforms/lxd.py
index 847cc549..ead0955b 100644
--- a/tests/cloud_tests/platforms/lxd.py
+++ b/tests/cloud_tests/platforms/lxd.py
@@ -1,5 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base LXD platform."""
+
from pylxd import (Client, exceptions)
from tests.cloud_tests.images import lxd as lxd_image
@@ -11,48 +13,49 @@ DEFAULT_SSTREAMS_SERVER = "https://images.linuxcontainers.org:8443"
class LXDPlatform(base.Platform):
- """
- Lxd test platform
- """
+ """LXD test platform."""
+
platform_name = 'lxd'
def __init__(self, config):
- """
- Set up platform
- """
+ """Set up platform."""
super(LXDPlatform, self).__init__(config)
# TODO: allow configuration of remote lxd host via env variables
# set up lxd connection
self.client = Client()
def get_image(self, img_conf):
+ """Get image using specified image configuration.
+
+ @param img_conf: configuration for image
+ @return_value: cloud_tests.images instance
"""
- Get image
- img_conf: dict containing config for image. platform_ident must have:
- alias: alias to use for simplestreams server
- sstreams_server: simplestreams server to use, or None for default
- return_value: cloud_tests.images instance
- """
- lxd_conf = self._extract_img_platform_config(img_conf)
- image = self.client.images.create_from_simplestreams(
- lxd_conf.get('sstreams_server', DEFAULT_SSTREAMS_SERVER),
- lxd_conf['alias'])
- return lxd_image.LXDImage(
- image.properties['description'], img_conf, self, image)
-
- def launch_container(self, image=None, container=None, ephemeral=False,
- config=None, block=True,
- image_desc=None, use_desc=None):
- """
- launch a container
- image: image fingerprint to launch from
- container: container to copy
- ephemeral: delete image after first shutdown
- config: config options for instance as dict
- block: wait until container created
- image_desc: description of image being launched
- use_desc: description of container's use
- return_value: cloud_tests.instances instance
+ pylxd_image = self.client.images.create_from_simplestreams(
+ img_conf.get('sstreams_server', DEFAULT_SSTREAMS_SERVER),
+ img_conf['alias'])
+ image = lxd_image.LXDImage(self, img_conf, pylxd_image)
+ if img_conf.get('override_templates', False):
+ image.update_templates(self.config.get('template_overrides', {}),
+ self.config.get('template_files', {}))
+ return image
+
+ def launch_container(self, properties, config, features,
+ image=None, container=None, ephemeral=False,
+ container_config=None, block=True, image_desc=None,
+ use_desc=None):
+ """Launch a container.
+
+ @param properties: image properties
+ @param config: image configuration
+ @param features: image features
+ @param image: image fingerprint to launch from
+ @param container: container to copy
+ @param ephemeral: delete image after first shutdown
+ @param container_config: config options for instance as dict
+ @param block: wait until container created
+ @param image_desc: description of image being launched
+ @param use_desc: description of container's use
+ @return_value: cloud_tests.instances instance
"""
if not (image or container):
raise ValueError("either image or container must be specified")
@@ -61,16 +64,18 @@ class LXDPlatform(base.Platform):
use_desc=use_desc,
used_list=self.list_containers()),
'ephemeral': bool(ephemeral),
- 'config': config if isinstance(config, dict) else {},
+ 'config': (container_config
+ if isinstance(container_config, dict) else {}),
'source': ({'type': 'image', 'fingerprint': image} if image else
{'type': 'copy', 'source': container})
}, wait=block)
- return lxd_instance.LXDInstance(container.name, self, container)
+ return lxd_instance.LXDInstance(self, container.name, properties,
+ config, features, container)
def container_exists(self, container_name):
- """
- check if container with name 'container_name' exists
- return_value: True if exists else False
+ """Check if container with name 'container_name' exists.
+
+ @return_value: True if exists else False
"""
res = True
try:
@@ -82,16 +87,22 @@ class LXDPlatform(base.Platform):
return res
def list_containers(self):
- """
- list names of all containers
- return_value: list of names
+ """List names of all containers.
+
+ @return_value: list of names
"""
return [container.name for container in self.client.containers.all()]
- def destroy(self):
- """
- Clean up platform data
+ def query_image_by_alias(self, alias):
+ """Get image by alias in local image store.
+
+ @param alias: alias of image
+ @return_value: pylxd image (not cloud_tests.images instance)
"""
+ return self.client.images.get_by_alias(alias)
+
+ def destroy(self):
+ """Clean up platform data."""
super(LXDPlatform, self).destroy()
# vi: ts=4 expandtab
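
launch_container() now always takes the image properties, config and features so they can be handed straight to LXDInstance; the container itself is still created from one of two 'source' shapes, depending on whether it boots a fresh image or clones an existing container (as snapshot() does). For reference, the two shapes passed to client.containers.create(), with hypothetical values:

    source_from_image = {'type': 'image', 'fingerprint': '0a1b2c'}        # hypothetical fingerprint
    source_from_copy = {'type': 'copy', 'source': 'cloud-test-snapshot'}  # hypothetical container name
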
diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml
index 183f78c1..c8dd1427 100644
--- a/tests/cloud_tests/releases.yaml
+++ b/tests/cloud_tests/releases.yaml
@@ -1,86 +1,240 @@
# ============================= Release Config ================================
default_release_config:
- # all are disabled by default
- enabled: false
- # timeout for booting image and running cloud init
- timeout: 120
- # platform_ident values for the image, with data to identify the image
- # on that platform. see platforms.base for more information
- platform_ident: {}
- # a script to run after a boot that is used to modify an image, before
- # making a snapshot of the image. may be useful for removing data left
- # behind from cloud-init booting, such as logs, to ensure that data from
- # snapshot.launch() will not include a cloud-init.log from a boot used to
- # create the snapshot, if cloud-init has not run
- boot_clean_script: |
- #!/bin/bash
- rm -rf /var/log/cloud-init.log /var/log/cloud-init-output.log \
- /var/lib/cloud/ /run/cloud-init/ /var/log/syslog
+ # global default configuration options
+ default:
+ # all are disabled by default
+ enabled: false
+ # timeout for booting image and running cloud init
+ boot_timeout: 120
+ # a script to run after a boot that is used to modify an image, before
+ # making a snapshot of the image. may be useful for removing data left
+ # behind from cloud-init booting, such as logs, to ensure that data
+ # from snapshot.launch() will not include a cloud-init.log from a boot
+ # used to create the snapshot, if cloud-init has not run
+ boot_clean_script: |
+ #!/bin/bash
+ rm -rf /var/log/cloud-init.log /var/log/cloud-init-output.log \
+ /var/lib/cloud/ /run/cloud-init/ /var/log/syslog
+ # test script to determine if system is booted fully
+ system_ready_script: |
+ # permit running or degraded state as both indicate complete boot
+ [ $(systemctl is-system-running) = 'running' -o
+ $(systemctl is-system-running) = 'degraded' ]
+ # test script to determine if cloud-init has finished
+ cloud_init_ready_script: |
+ [ -f '/run/cloud-init/result.json' ]
+        # feature flag groups and additional feature settings for a release;
+        # the individual flags are described under 'features' below
+ feature_groups: []
+ features: {}
+
+ # lxd specific default configuration options
+ lxd:
+ # default sstreams server to use for lxd image retrieval
+ sstreams_server: https://us.images.linuxcontainers.org:8443
+ # keep base image, avoids downloading again next run
+ cache_base_image: true
+ # lxd images from linuxcontainers.org do not have the nocloud seed
+ # templates in place, so the image metadata must be modified
+ override_templates: true
+ # arg overrides to set image up
+ setup_overrides:
+ # lxd images from linuxcontainers.org do not come with
+ # cloud-init, so must pull cloud-init in from repo using
+ # setup_image.upgrade
+ upgrade: true
+
+features:
+ # all currently supported feature flags
+ all:
+ - apt # image supports apt package manager
+ - byobu # byobu is available in repositories
+ - landscape # landscape-client available in repos
+ - lxd # lxd is available in the image
+ - ppa # image supports ppas
+ - rpm # image supports rpms
+ - snap # supports snapd
+ # NOTE: the following feature flags are to work around bugs in the
+ # images, and can be removed when no longer needed
+ - hostname # setting system hostname works
+ # NOTE: the following feature flags are to work around issues in the
+ # testcases, and can be removed when no longer needed
+ - apt_src_cont # default contents and format of sources.list matches
+ # ubuntu sources.list
+ - apt_hist_fmt # apt command history entries use full paths to apt
+ # executable rather than relative paths
+ - daylight_time # timezones are daylight not standard time
+ - apt_up_out # 'Calculating upgrade..' present in log output from
+ # apt-get dist-upgrade output
+ - engb_locale # locale en_GB.UTF-8 is available
+ - locale_gen # the /etc/locale.gen file exists
+ - no_ntpdate # 'ntpdate' is not installed by default
+ - no_file_fmt_e # the 'file' utility does not have a formatting error
+ - ppa_file_name # the name of the source file added to sources.list.d has
+ # the expected format for newer ubuntu releases
+ - sshd # requires ssh server to be installed by default
+ - ssh_key_fmt # ssh auth keys printed to console have expected format
+ - syslog # test case requires syslog to be written by default
+ - ubuntu_ntp # expect ubuntu.pool.ntp.org to be used as ntp server
+    - ubuntu_repos   # test case requires ubuntu repositories to be used
+ - ubuntu_user # test case needs user with the name 'ubuntu' to exist
+ # NOTE: the following feature flags are to work around issues that may
+ # be considered bugs in cloud-init
+ - lsb_release # image has lsb_release installed, maybe should install
+ # if missing by default
+ - sudo # image has sudo installed, should not be required
+ # feature flag groups
+ groups:
+ base:
+ hostname: true
+ no_file_fmt_e: true
+ ubuntu_specific:
+ apt_src_cont: true
+ apt_hist_fmt: true
+ byobu: true
+ daylight_time: true
+ engb_locale: true
+ landscape: true
+ locale_gen: true
+ lsb_release: true
+ lxd: true
+ ppa: true
+ ppa_file_name: true
+ snap: true
+ sshd: true
+ ssh_key_fmt: true
+ sudo: true
+ syslog: true
+ ubuntu_ntp: true
+ ubuntu_repos: true
+ ubuntu_user: true
+ debian_base:
+ apt: true
+ apt_up_out: true
+ no_ntpdate: true
+ rhel_base:
+ rpm: true
releases:
- trusty:
- enabled: true
- platform_ident:
- lxd:
- # if sstreams_server is omitted, default is used, defined in
- # tests.cloud_tests.platforms.lxd.DEFAULT_SSTREAMS_SERVER as:
- # sstreams_server: https://us.images.linuxcontainers.org:8443
- #alias: ubuntu/trusty/default
- alias: t
- sstreams_server: https://cloud-images.ubuntu.com/daily
- xenial:
- enabled: true
- platform_ident:
- lxd:
- #alias: ubuntu/xenial/default
- alias: x
- sstreams_server: https://cloud-images.ubuntu.com/daily
- yakkety:
- enabled: true
- platform_ident:
- lxd:
- #alias: ubuntu/yakkety/default
- alias: y
- sstreams_server: https://cloud-images.ubuntu.com/daily
- zesty:
- enabled: true
- platform_ident:
- lxd:
- #alias: ubuntu/zesty/default
- alias: z
- sstreams_server: https://cloud-images.ubuntu.com/daily
+ # UBUNTU =================================================================
artful:
- enabled: true
- platform_ident:
- lxd:
- #alias: ubuntu/artful/default
- alias: a
- sstreams_server: https://cloud-images.ubuntu.com/daily
- jessie:
- platform_ident:
- lxd:
- alias: debian/jessie/default
- sid:
- platform_ident:
- lxd:
- alias: debian/sid/default
+ # EOL: Jul 2018
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ - ubuntu_specific
+ lxd:
+ sstreams_server: https://cloud-images.ubuntu.com/daily
+ alias: artful
+ setup_overrides: null
+ override_templates: false
+ zesty:
+ # EOL: Jan 2018
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ - ubuntu_specific
+ lxd:
+ sstreams_server: https://cloud-images.ubuntu.com/daily
+ alias: zesty
+ setup_overrides: null
+ override_templates: false
+ xenial:
+ # EOL: Apr 2021
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ - ubuntu_specific
+ lxd:
+ sstreams_server: https://cloud-images.ubuntu.com/daily
+ alias: xenial
+ setup_overrides: null
+ override_templates: false
+ trusty:
+ # EOL: Apr 2019
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ - ubuntu_specific
+ features:
+ apt_up_out: false
+ locale_gen: false
+ lxd: false
+ ppa_file_name: false
+ snap: false
+ ssh_key_fmt: false
+ no_ntpdate: false
+ no_file_fmt_e: false
+ system_ready_script: |
+ #!/bin/bash
+ # upstart based, so use old style runlevels
+ [ $(runlevel | awk '{print $2}') = '2' ]
+ lxd:
+ sstreams_server: https://cloud-images.ubuntu.com/daily
+ alias: trusty
+ setup_overrides: null
+ override_templates: false
+ # DEBIAN =================================================================
stretch:
- platform_ident:
- lxd:
- alias: debian/stretch/default
- wheezy:
- platform_ident:
- lxd:
- alias: debian/wheezy/default
+ # EOL: Not yet released
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ lxd:
+ alias: debian/stretch/default
+ jessie:
+ # EOL: Jun 2020
+        # NOTE: the cloud-init version shipped with jessie is out of date;
+        # tests work if an up-to-date deb is used
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - debian_base
+ lxd:
+ alias: debian/jessie/default
+ # CENTOS =================================================================
centos70:
- timeout: 180
- platform_ident:
- lxd:
- alias: centos/7/default
+ # EOL: Jun 2024 (2020 - end of full updates)
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - rhel_base
+ user_data_overrides:
+ preserve_hostname: true
+ lxd:
+ features:
+ # NOTE: (LP: #1575779)
+ hostname: false
+ alias: centos/7/default
centos66:
- timeout: 180
- platform_ident:
- lxd:
- alias: centos/6/default
+ # EOL: Nov 2020
+ default:
+ enabled: true
+ feature_groups:
+ - base
+ - rhel_base
+            # still supported, but only bugfixes after May 2017
+ system_ready_script: |
+ #!/bin/bash
+ [ $(runlevel | awk '{print $2}') = '3' ]
+ user_data_overrides:
+ preserve_hostname: true
+ lxd:
+ features:
+ # NOTE: (LP: #1575779)
+ hostname: false
+ alias: centos/6/default
# vi: ts=4 expandtab
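
A release's effective feature set is assembled from the 'groups' table plus any release-level 'features' entries; trusty, for example, pulls in the ubuntu_specific group and then switches several group flags back off. The code that folds these together is outside the hunks shown here, but the intended resolution is roughly the following sketch (trimmed-down data, overrides assumed to win over group defaults):

    groups = {
        'base': {'hostname': True, 'no_file_fmt_e': True},
        'ubuntu_specific': {'lxd': True, 'snap': True},
    }
    release = {                                          # trusty-style entry
        'feature_groups': ['base', 'ubuntu_specific'],
        'features': {'lxd': False, 'snap': False, 'no_file_fmt_e': False},
    }
    flags = {}
    for name in release['feature_groups']:
        flags.update(groups.get(name, {}))               # group defaults first
    flags.update(release.get('features', {}))            # per-release overrides win
    enabled = sorted(k for k, v in flags.items() if v)   # -> ['hostname']
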
diff --git a/tests/cloud_tests/run_funcs.py b/tests/cloud_tests/run_funcs.py
new file mode 100644
index 00000000..8ae91120
--- /dev/null
+++ b/tests/cloud_tests/run_funcs.py
@@ -0,0 +1,75 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Run functions."""
+
+import os
+
+from tests.cloud_tests import bddeb, collect, util, verify
+
+
+def tree_collect(args):
+ """Collect data using deb build from current tree.
+
+ @param args: cmdline args
+ @return_value: fail count
+ """
+ failed = 0
+ tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data)
+
+ with tmpdir as data_dir:
+ args.data_dir = data_dir
+ args.deb = os.path.join(tmpdir.tmpdir, 'cloud-init_all.deb')
+ try:
+ failed += bddeb.bddeb(args)
+ failed += collect.collect(args)
+ except Exception:
+ failed += 1
+ raise
+
+ return failed
+
+
+def tree_run(args):
+ """Run test suite using deb build from current tree.
+
+ @param args: cmdline args
+ @return_value: fail count
+ """
+ failed = 0
+ tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data)
+
+ with tmpdir as data_dir:
+ args.data_dir = data_dir
+ args.deb = os.path.join(tmpdir.tmpdir, 'cloud-init_all.deb')
+ try:
+ failed += bddeb.bddeb(args)
+ failed += collect.collect(args)
+ failed += verify.verify(args)
+ except Exception:
+ failed += 1
+ raise
+
+ return failed
+
+
+def run(args):
+ """Run test suite.
+
+ @param args: cmdline args
+ @return_value: fail count
+ """
+ failed = 0
+ tmpdir = util.TempDir(tmpdir=args.data_dir, preserve=args.preserve_data)
+
+ with tmpdir as data_dir:
+ args.data_dir = data_dir
+ try:
+ failed += collect.collect(args)
+ failed += verify.verify(args)
+ except Exception:
+ failed += 1
+ raise
+
+ return failed
+
+# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/setup_image.py b/tests/cloud_tests/setup_image.py
index 5d6c6387..8053a093 100644
--- a/tests/cloud_tests/setup_image.py
+++ b/tests/cloud_tests/setup_image.py
@@ -1,18 +1,42 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from tests.cloud_tests import LOG
-from tests.cloud_tests import stage, util
+"""Setup image for testing."""
from functools import partial
import os
+from tests.cloud_tests import LOG
+from tests.cloud_tests import stage, util
-def install_deb(args, image):
+
+def installed_package_version(image, package, ensure_installed=True):
+ """Get installed version of package.
+
+ @param image: cloud_tests.images instance to operate on
+ @param package: name of package
+ @param ensure_installed: raise error if not installed
+    @return_value: installed package version string
"""
- install deb into image
- args: cmdline arguments, must contain --deb
- image: cloud_tests.images instance to operate on
- return_value: None, may raise errors
+ os_family = util.get_os_family(image.properties['os'])
+ if os_family == 'debian':
+ cmd = ['dpkg-query', '-W', "--showformat='${Version}'", package]
+ elif os_family == 'redhat':
+ cmd = ['rpm', '-q', '--queryformat', "'%{VERSION}'", package]
+ else:
+ raise NotImplementedError
+
+ msg = 'query version for package: {}'.format(package)
+ (out, err, exit) = image.execute(
+ cmd, description=msg, rcs=(0,) if ensure_installed else range(0, 256))
+ return out.strip()
+
+
+def install_deb(args, image):
+ """Install deb into image.
+
+ @param args: cmdline arguments, must contain --deb
+ @param image: cloud_tests.images instance to operate on
+ @return_value: None, may raise errors
"""
# ensure system is compatible with package format
os_family = util.get_os_family(image.properties['os'])
@@ -21,20 +45,18 @@ def install_deb(args, image):
'family: {}'.format(args.deb, os_family))
# install deb
- LOG.debug('installing deb: %s into target', args.deb)
+ msg = 'install deb: "{}" into target'.format(args.deb)
+ LOG.debug(msg)
remote_path = os.path.join('/tmp', os.path.basename(args.deb))
image.push_file(args.deb, remote_path)
- (out, err, exit) = image.execute(['dpkg', '-i', remote_path])
- if exit != 0:
- raise OSError('failed install deb: {}\n\tstdout: {}\n\tstderr: {}'
- .format(args.deb, out, err))
+ cmd = 'dpkg -i {} || apt-get install --yes -f'.format(remote_path)
+ image.execute(['/bin/sh', '-c', cmd], description=msg)
# check installed deb version matches package
fmt = ['-W', "--showformat='${Version}'"]
(out, err, exit) = image.execute(['dpkg-deb'] + fmt + [remote_path])
expected_version = out.strip()
- (out, err, exit) = image.execute(['dpkg-query'] + fmt + ['cloud-init'])
- found_version = out.strip()
+ found_version = installed_package_version(image, 'cloud-init')
if expected_version != found_version:
raise OSError('install deb version "{}" does not match expected "{}"'
.format(found_version, expected_version))
@@ -44,32 +66,28 @@ def install_deb(args, image):
def install_rpm(args, image):
+ """Install rpm into image.
+
+ @param args: cmdline arguments, must contain --rpm
+ @param image: cloud_tests.images instance to operate on
+ @return_value: None, may raise errors
"""
- install rpm into image
- args: cmdline arguments, must contain --rpm
- image: cloud_tests.images instance to operate on
- return_value: None, may raise errors
- """
- # ensure system is compatible with package format
os_family = util.get_os_family(image.properties['os'])
- if os_family not in ['redhat', 'sles']:
+ if os_family != 'redhat':
raise NotImplementedError('install rpm: {} not supported on os '
'family: {}'.format(args.rpm, os_family))
# install rpm
- LOG.debug('installing rpm: %s into target', args.rpm)
+ msg = 'install rpm: "{}" into target'.format(args.rpm)
+ LOG.debug(msg)
remote_path = os.path.join('/tmp', os.path.basename(args.rpm))
image.push_file(args.rpm, remote_path)
- (out, err, exit) = image.execute(['rpm', '-U', remote_path])
- if exit != 0:
- raise OSError('failed to install rpm: {}\n\tstdout: {}\n\tstderr: {}'
- .format(args.rpm, out, err))
+ image.execute(['rpm', '-U', remote_path], description=msg)
fmt = ['--queryformat', '"%{VERSION}"']
(out, err, exit) = image.execute(['rpm', '-q'] + fmt + [remote_path])
expected_version = out.strip()
- (out, err, exit) = image.execute(['rpm', '-q'] + fmt + ['cloud-init'])
- found_version = out.strip()
+ found_version = installed_package_version(image, 'cloud-init')
if expected_version != found_version:
raise OSError('install rpm version "{}" does not match expected "{}"'
.format(found_version, expected_version))
@@ -79,14 +97,32 @@ def install_rpm(args, image):
def upgrade(args, image):
+ """Upgrade or install cloud-init from repo.
+
+ @param args: cmdline arguments
+ @param image: cloud_tests.images instance to operate on
+ @return_value: None, may raise errors
"""
- run the system's upgrade command
- args: cmdline arguments
- image: cloud_tests.images instance to operate on
- return_value: None, may raise errors
+ os_family = util.get_os_family(image.properties['os'])
+ if os_family == 'debian':
+ cmd = 'apt-get update && apt-get install cloud-init --yes'
+ elif os_family == 'redhat':
+ cmd = 'sleep 10 && yum install cloud-init --assumeyes'
+ else:
+ raise NotImplementedError
+
+ msg = 'upgrading cloud-init'
+ LOG.debug(msg)
+ image.execute(['/bin/sh', '-c', cmd], description=msg)
+
+
+def upgrade_full(args, image):
+ """Run the system's full upgrade command.
+
+ @param args: cmdline arguments
+ @param image: cloud_tests.images instance to operate on
+ @return_value: None, may raise errors
"""
- # determine appropriate upgrade command for os_family
- # TODO: maybe use cloudinit.distros for this?
os_family = util.get_os_family(image.properties['os'])
if os_family == 'debian':
cmd = 'apt-get update && apt-get upgrade --yes'
@@ -96,53 +132,48 @@ def upgrade(args, image):
raise NotImplementedError('upgrade command not configured for distro '
'from family: {}'.format(os_family))
- # upgrade system
- LOG.debug('upgrading system')
- (out, err, exit) = image.execute(['/bin/sh', '-c', cmd])
- if exit != 0:
- raise OSError('failed to upgrade system\n\tstdout: {}\n\tstderr:{}'
- .format(out, err))
+ msg = 'full system upgrade'
+ LOG.debug(msg)
+ image.execute(['/bin/sh', '-c', cmd], description=msg)
def run_script(args, image):
+ """Run a script in the target image.
+
+ @param args: cmdline arguments, must contain --script
+ @param image: cloud_tests.images instance to operate on
+ @return_value: None, may raise errors
"""
- run a script in the target image
- args: cmdline arguments, must contain --script
- image: cloud_tests.images instance to operate on
- return_value: None, may raise errors
- """
- # TODO: get exit status back from script and add error handling here
- LOG.debug('running setup image script in target image')
- image.run_script(args.script)
+ msg = 'run setup image script in target image'
+ LOG.debug(msg)
+ image.run_script(args.script, description=msg)
def enable_ppa(args, image):
- """
- enable a ppa in the target image
- args: cmdline arguments, must contain --ppa
- image: cloud_tests.image instance to operate on
- return_value: None, may raise errors
+ """Enable a ppa in the target image.
+
+ @param args: cmdline arguments, must contain --ppa
+ @param image: cloud_tests.image instance to operate on
+ @return_value: None, may raise errors
"""
# ppa only supported on ubuntu (maybe debian?)
- if image.properties['os'] != 'ubuntu':
+ if image.properties['os'].lower() != 'ubuntu':
raise NotImplementedError('enabling a ppa is only available on ubuntu')
# add ppa with add-apt-repository and update
ppa = 'ppa:{}'.format(args.ppa)
- LOG.debug('enabling %s', ppa)
+ msg = 'enable ppa: "{}" in target'.format(ppa)
+ LOG.debug(msg)
cmd = 'add-apt-repository --yes {} && apt-get update'.format(ppa)
- (out, err, exit) = image.execute(['/bin/sh', '-c', cmd])
- if exit != 0:
- raise OSError('enable ppa for {} failed\n\tstdout: {}\n\tstderr: {}'
- .format(ppa, out, err))
+ image.execute(['/bin/sh', '-c', cmd], description=msg)
def enable_repo(args, image):
- """
- enable a repository in the target image
- args: cmdline arguments, must contain --repo
- image: cloud_tests.image instance to operate on
- return_value: None, may raise errors
+ """Enable a repository in the target image.
+
+ @param args: cmdline arguments, must contain --repo
+ @param image: cloud_tests.image instance to operate on
+ @return_value: None, may raise errors
"""
# find enable repo command for the distro
os_family = util.get_os_family(image.properties['os'])
@@ -155,20 +186,23 @@ def enable_repo(args, image):
raise NotImplementedError('enable repo command not configured for '
'distro from family: {}'.format(os_family))
- LOG.debug('enabling repo: "%s"', args.repo)
- (out, err, exit) = image.execute(['/bin/sh', '-c', cmd])
- if exit != 0:
- raise OSError('enable repo {} failed\n\tstdout: {}\n\tstderr: {}'
- .format(args.repo, out, err))
+ msg = 'enable repo: "{}" in target'.format(args.repo)
+ LOG.debug(msg)
+ image.execute(['/bin/sh', '-c', cmd], description=msg)
def setup_image(args, image):
+ """Set up image as specified in args.
+
+ @param args: cmdline arguments
+ @param image: cloud_tests.image instance to operate on
+ @return_value: tuple of results and fail count
"""
- set up image as specified in args
- args: cmdline arguments
- image: cloud_tests.image instance to operate on
- return_value: tuple of results and fail count
- """
+ # update the args if necessary for this image
+ overrides = image.setup_overrides
+ LOG.debug('updating args for setup with: %s', overrides)
+ args = util.update_args(args, overrides, preserve_old=True)
+
# mapping of setup cmdline arg name to setup function
# represented as a tuple rather than a dict or odict as lookup by name not
# needed, and order is important as --script and --upgrade go at the end
@@ -179,17 +213,19 @@ def setup_image(args, image):
('repo', enable_repo, 'setup func for --repo, enable repo'),
('ppa', enable_ppa, 'setup func for --ppa, enable ppa'),
('script', run_script, 'setup func for --script, run script'),
- ('upgrade', upgrade, 'setup func for --upgrade, upgrade pkgs'),
+ ('upgrade', upgrade, 'setup func for --upgrade, upgrade cloud-init'),
+ ('upgrade-full', upgrade_full, 'setup func for --upgrade-full'),
)
# determine which setup functions needed
calls = [partial(stage.run_single, desc, partial(func, args, image))
for name, func, desc in handlers if getattr(args, name, None)]
- image_name = 'image: distro={}, release={}'.format(
- image.properties['os'], image.properties['release'])
- LOG.info('setting up %s', image_name)
- return stage.run_stage('set up for {}'.format(image_name), calls,
- continue_after_error=False)
+ LOG.info('setting up %s', image)
+ res = stage.run_stage(
+ 'set up for {}'.format(image), calls, continue_after_error=False)
+ LOG.debug('after setup complete, installed cloud-init version is: %s',
+ installed_package_version(image, 'cloud-init'))
+ return res
# vi: ts=4 expandtab
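
Both install paths now share the same sanity check: the version reported by the package file on disk (via dpkg-deb or rpm -q on the file) has to equal what installed_package_version() reads back from inside the target after installation. Reduced to its core, with hypothetical version strings:

    expected_version = '17.1-18-gd4f70470-1'    # hypothetical: queried from the package file
    found_version = '17.1-18-gd4f70470-1'       # hypothetical: installed_package_version() result
    if expected_version != found_version:
        raise OSError('installed version "{}" does not match expected "{}"'
                      .format(found_version, expected_version))
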
diff --git a/tests/cloud_tests/snapshots/__init__.py b/tests/cloud_tests/snapshots/__init__.py
index 2ab654de..93a54f5e 100644
--- a/tests/cloud_tests/snapshots/__init__.py
+++ b/tests/cloud_tests/snapshots/__init__.py
@@ -1,10 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
def get_snapshot(image):
- """
- get snapshot from image
- """
+ """Get snapshot from image."""
return image.snapshot()
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/snapshots/base.py b/tests/cloud_tests/snapshots/base.py
index d715f037..94328982 100644
--- a/tests/cloud_tests/snapshots/base.py
+++ b/tests/cloud_tests/snapshots/base.py
@@ -1,44 +1,45 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base snapshot."""
+
class Snapshot(object):
- """
- Base class for snapshots
- """
+ """Base class for snapshots."""
+
platform_name = None
- def __init__(self, properties, config):
- """
- Set up snapshot
+ def __init__(self, platform, properties, config, features):
+ """Set up snapshot.
+
+ @param platform: platform object
+ @param properties: image properties
+ @param config: image config
+ @param features: supported feature flags
"""
+ self.platform = platform
self.properties = properties
self.config = config
+ self.features = features
def __str__(self):
- """
- a brief description of the snapshot
- """
+ """A brief description of the snapshot."""
return '-'.join((self.properties['os'], self.properties['release']))
def launch(self, user_data, meta_data=None, block=True, start=True,
use_desc=None):
- """
- launch instance
-
- user_data: user-data for the instance
- instance_id: instance-id for the instance
- block: wait until instance is created
- start: start instance and wait until fully started
- use_desc: description of snapshot instance use
+ """Launch instance.
- return_value: an Instance
+ @param user_data: user-data for the instance
+        @param meta_data: meta-data for the instance
+ @param block: wait until instance is created
+ @param start: start instance and wait until fully started
+ @param use_desc: description of snapshot instance use
+ @return_value: an Instance
"""
raise NotImplementedError
def destroy(self):
- """
- Clean up snapshot data
- """
+ """Clean up snapshot data."""
pass
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/snapshots/lxd.py b/tests/cloud_tests/snapshots/lxd.py
index eabbce3f..39c55c5e 100644
--- a/tests/cloud_tests/snapshots/lxd.py
+++ b/tests/cloud_tests/snapshots/lxd.py
@@ -1,49 +1,52 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Base LXD snapshot."""
+
from tests.cloud_tests.snapshots import base
class LXDSnapshot(base.Snapshot):
- """
- LXD image copy backed snapshot
- """
+ """LXD image copy backed snapshot."""
+
platform_name = "lxd"
- def __init__(self, properties, config, platform, pylxd_frozen_instance):
- """
- Set up snapshot
+ def __init__(self, platform, properties, config, features,
+ pylxd_frozen_instance):
+ """Set up snapshot.
+
+ @param platform: platform object
+ @param properties: image properties
+ @param config: image config
+ @param features: supported feature flags
"""
- self.platform = platform
self.pylxd_frozen_instance = pylxd_frozen_instance
- super(LXDSnapshot, self).__init__(properties, config)
+ super(LXDSnapshot, self).__init__(
+ platform, properties, config, features)
def launch(self, user_data, meta_data=None, block=True, start=True,
use_desc=None):
- """
- launch instance
-
- user_data: user-data for the instance
- instance_id: instance-id for the instance
- block: wait until instance is created
- start: start instance and wait until fully started
- use_desc: description of snapshot instance use
-
- return_value: an Instance
+ """Launch instance.
+
+ @param user_data: user-data for the instance
+    @param meta_data: meta-data for the instance
+ @param block: wait until instance is created
+ @param start: start instance and wait until fully started
+ @param use_desc: description of snapshot instance use
+ @return_value: an Instance
"""
inst_config = {'user.user-data': user_data}
if meta_data:
inst_config['user.meta-data'] = meta_data
instance = self.platform.launch_container(
- container=self.pylxd_frozen_instance.name, config=inst_config,
- block=block, image_desc=str(self), use_desc=use_desc)
+ self.properties, self.config, self.features, block=block,
+ image_desc=str(self), container=self.pylxd_frozen_instance.name,
+ use_desc=use_desc, container_config=inst_config)
if start:
- instance.start(wait=True, wait_time=self.config.get('timeout'))
+ instance.start()
return instance
def destroy(self):
- """
- Clean up snapshot data
- """
+ """Clean up snapshot data."""
self.pylxd_frozen_instance.destroy()
super(LXDSnapshot, self).destroy()
diff --git a/tests/cloud_tests/stage.py b/tests/cloud_tests/stage.py
index 584cdaee..74a7d46d 100644
--- a/tests/cloud_tests/stage.py
+++ b/tests/cloud_tests/stage.py
@@ -1,5 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Stage a run."""
+
import sys
import time
import traceback
@@ -8,38 +10,29 @@ from tests.cloud_tests import LOG
class PlatformComponent(object):
- """
- context manager to safely handle platform components, ensuring that
- .destroy() is called
- """
+ """Context manager to safely handle platform components."""
def __init__(self, get_func):
- """
- store get_<platform component> function as partial taking no args
- """
+ """Store get_<platform component> function as partial with no args."""
self.get_func = get_func
def __enter__(self):
- """
- create instance of platform component
- """
+ """Create instance of platform component."""
self.instance = self.get_func()
return self.instance
def __exit__(self, etype, value, trace):
- """
- destroy instance
- """
+ """Destroy instance."""
if self.instance is not None:
self.instance.destroy()
def run_single(name, call):
- """
- run a single function, keeping track of results and failures and time
- name: name of part
- call: call to make
- return_value: a tuple of result and fail count
+ """Run a single function, keeping track of results and time.
+
+ @param name: name of part
+ @param call: call to make
+ @return_value: a tuple of result and fail count
"""
res = {
'name': name,
@@ -67,17 +60,18 @@ def run_single(name, call):
def run_stage(parent_name, calls, continue_after_error=True):
- """
- run a stage of collection, keeping track of results and failures
- parent_name: name of stage calls are under
- calls: list of function call taking no params. must return a tuple
- of results and failures. may raise exceptions
- continue_after_error: whether or not to proceed to the next call after
- catching an exception or recording a failure
- return_value: a tuple of results and failures, with result containing
- results from the function call under 'stages', and a list
- of errors (if any on this level), and elapsed time
- running stage, and the name
+ """Run a stage of collection, keeping track of results and failures.
+
+ @param parent_name: name of stage calls are under
+ @param calls: list of function call taking no params. must return a tuple
+ of results and failures. may raise exceptions
+ @param continue_after_error: whether or not to proceed to the next call
+ after catching an exception or recording a
+ failure
+ @return_value: a tuple of results and failures, with result containing
+ results from the function call under 'stages', and a list
+ of errors (if any on this level), and elapsed time
+ running stage, and the name
"""
res = {
'name': parent_name,
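Note: PlatformComponent exists so that destroy() runs even when a stage fails partway through. A minimal standalone sketch of how it combines with run_single and run_stage; FakeComponent, get_component and the stage names are invented for illustration, and importing stage assumes a cloud-init checkout on sys.path.

from functools import partial

from tests.cloud_tests import stage


class FakeComponent(object):
    """Invented stand-in for an image/instance/snapshot component."""

    def destroy(self):
        print('component destroyed')


def get_component():
    """Zero-argument getter, as PlatformComponent expects."""
    print('component created')
    return FakeComponent()


def use_component(component):
    print('using %s' % component.__class__.__name__)


with stage.PlatformComponent(get_component) as component:
    calls = [partial(stage.run_single, 'use component',
                     partial(use_component, component))]
    results = stage.run_stage('example stage', calls,
                              continue_after_error=False)
print(results)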
diff --git a/tests/cloud_tests/testcases.yaml b/tests/cloud_tests/testcases.yaml
index c22b08ef..7183e017 100644
--- a/tests/cloud_tests/testcases.yaml
+++ b/tests/cloud_tests/testcases.yaml
@@ -2,6 +2,7 @@
base_test_data:
script_timeout: 20
enabled: True
+ required_features: []
cloud_config: |
#cloud-config
collect_scripts:
diff --git a/tests/cloud_tests/testcases/__init__.py b/tests/cloud_tests/testcases/__init__.py
index a1d86d45..47217ce6 100644
--- a/tests/cloud_tests/testcases/__init__.py
+++ b/tests/cloud_tests/testcases/__init__.py
@@ -1,5 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Main init."""
+
import importlib
import inspect
import unittest
@@ -9,12 +11,12 @@ from tests.cloud_tests.testcases.base import CloudTestCase as base_test
def discover_tests(test_name):
- """
- discover tests in test file for 'testname'
- return_value: list of test classes
+ """Discover tests in test file for 'testname'.
+
+ @return_value: list of test classes
"""
testmod_name = 'tests.cloud_tests.testcases.{}'.format(
- config.name_sanatize(test_name))
+ config.name_sanitize(test_name))
try:
testmod = importlib.import_module(testmod_name)
except NameError:
@@ -26,9 +28,9 @@ def discover_tests(test_name):
def get_suite(test_name, data, conf):
- """
- get test suite with all tests for 'testname'
- return_value: a test suite
+ """Get test suite with all tests for 'testname'.
+
+ @return_value: a test suite
"""
suite = unittest.TestSuite()
for test_class in discover_tests(test_name):
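Note: discover_tests resolves the verifier module with importlib after passing the name through the renamed config.name_sanitize. Below is a self-contained sketch of that lookup pattern; name_sanitize and discover_test_classes here are invented stand-ins, and the package path is only an assumption about the layout, not the real implementation.

import importlib
import inspect
import unittest


def name_sanitize(name):
    """Invented stand-in for config.name_sanitize."""
    return name.replace('-', '_')


def discover_test_classes(test_name, package='tests.cloud_tests.testcases'):
    """Import a verifier module and return its TestCase classes."""
    mod = importlib.import_module(
        '{}.{}'.format(package, name_sanitize(test_name)))
    return [cls for _, cls in inspect.getmembers(mod, inspect.isclass)
            if issubclass(cls, unittest.TestCase) and
            cls.__module__ == mod.__name__]


# From a cloud-init checkout this would return [TestLP1511485]:
# discover_test_classes('bugs.lp1511485')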
diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py
index 64d5507a..bb545ab9 100644
--- a/tests/cloud_tests/testcases/base.py
+++ b/tests/cloud_tests/testcases/base.py
@@ -1,61 +1,55 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from cloudinit import util as c_util
+"""Base test case module."""
import crypt
import json
import unittest
+from cloudinit import util as c_util
+
class CloudTestCase(unittest.TestCase):
- """
- base test class for verifiers
- """
+ """Base test class for verifiers."""
+
data = None
conf = None
_cloud_config = None
def shortDescription(self):
+ """Prevent nose from using docstrings."""
return None
@property
def cloud_config(self):
- """
- get the cloud-config used by the test
- """
+ """Get the cloud-config used by the test."""
if not self._cloud_config:
self._cloud_config = c_util.load_yaml(self.conf)
return self._cloud_config
def get_config_entry(self, name):
- """
- get a config entry from cloud-config ensuring that it is present
- """
+ """Get a config entry from cloud-config ensuring that it is present."""
if name not in self.cloud_config:
raise AssertionError('Key "{}" not in cloud config'.format(name))
return self.cloud_config[name]
def get_data_file(self, name):
- """
- get data file failing test if it is not present
- """
+ """Get data file failing test if it is not present."""
if name not in self.data:
raise AssertionError('File "{}" missing from collect data'
.format(name))
return self.data[name]
def get_instance_id(self):
- """
- get recorded instance id
- """
+ """Get recorded instance id."""
return self.get_data_file('instance-id').strip()
def get_status_data(self, data, version=None):
- """
- parse result.json and status.json like data files
- data: data to load
- version: cloud-init output version, defaults to 'v1'
- return_value: dict of data or None if missing
+ """Parse result.json and status.json like data files.
+
+ @param data: data to load
+ @param version: cloud-init output version, defaults to 'v1'
+ @return_value: dict of data or None if missing
"""
if not version:
version = 'v1'
@@ -63,16 +57,12 @@ class CloudTestCase(unittest.TestCase):
return data.get(version)
def get_datasource(self):
- """
- get datasource name
- """
+ """Get datasource name."""
data = self.get_status_data(self.get_data_file('result.json'))
return data.get('datasource')
def test_no_stages_errors(self):
- """
- ensure that there were no errors in any stage
- """
+ """Ensure that there were no errors in any stage."""
status = self.get_status_data(self.get_data_file('status.json'))
for stage in ('init', 'init-local', 'modules-config', 'modules-final'):
self.assertIn(stage, status)
@@ -84,7 +74,10 @@ class CloudTestCase(unittest.TestCase):
class PasswordListTest(CloudTestCase):
+ """Base password test case class."""
+
def test_shadow_passwords(self):
+ """Test shadow passwords."""
shadow = self.get_data_file('shadow')
users = {}
dupes = []
@@ -121,7 +114,7 @@ class PasswordListTest(CloudTestCase):
self.assertNotEqual(users['harry'], users['dick'])
def test_shadow_expected_users(self):
- """Test every tom, dick, and harry user in shadow"""
+ """Test every tom, dick, and harry user in shadow."""
out = self.get_data_file('shadow')
self.assertIn('tom:', out)
self.assertIn('dick:', out)
@@ -130,7 +123,7 @@ class PasswordListTest(CloudTestCase):
self.assertIn('mikey:', out)
def test_sshd_config(self):
- """Test sshd config allows passwords"""
+ """Test sshd config allows passwords."""
out = self.get_data_file('sshd_config')
self.assertIn('PasswordAuthentication yes', out)
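Note: each verifier reads everything from a data dict of collected file contents. A cut-down, self-contained stand-in (not the real CloudTestCase) showing how get_data_file and get_status_data fit together; MiniCloudTestCase and its simplified JSON handling are invented for illustration.

import json
import unittest


class MiniCloudTestCase(unittest.TestCase):
    """Cut-down, invented stand-in for CloudTestCase."""

    data = None  # dict: collected file name -> file contents

    def get_data_file(self, name):
        """Return a collected file, failing the test if it is missing."""
        if name not in self.data:
            raise AssertionError('File "{}" missing from collect data'
                                 .format(name))
        return self.data[name]

    def get_status_data(self, data, version=None):
        """Parse result.json/status.json style content (simplified)."""
        return json.loads(data).get(version or 'v1')

    def test_datasource_recorded(self):
        """result.json should name the datasource used for the run."""
        status = self.get_status_data(self.get_data_file('result.json'))
        self.assertEqual('DataSourceNoCloud', status.get('datasource'))


MiniCloudTestCase.data = {'result.json': json.dumps(
    {'v1': {'datasource': 'DataSourceNoCloud', 'errors': []}})}

unittest.TextTestRunner(verbosity=2).run(
    unittest.TestLoader().loadTestsFromTestCase(MiniCloudTestCase))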
diff --git a/tests/cloud_tests/testcases/bugs/__init__.py b/tests/cloud_tests/testcases/bugs/__init__.py
index 5251d7c1..c6452f9c 100644
--- a/tests/cloud_tests/testcases/bugs/__init__.py
+++ b/tests/cloud_tests/testcases/bugs/__init__.py
@@ -1,7 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""
-Test verifiers for cloud-init bugs
+"""Test verifiers for cloud-init bugs.
+
See configs/bugs/README.md for more information
"""
diff --git a/tests/cloud_tests/testcases/bugs/lp1511485.py b/tests/cloud_tests/testcases/bugs/lp1511485.py
index ac5ccb42..670d3aff 100644
--- a/tests/cloud_tests/testcases/bugs/lp1511485.py
+++ b/tests/cloud_tests/testcases/bugs/lp1511485.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestLP1511485(base.CloudTestCase):
- """Test LP# 1511485"""
+ """Test LP# 1511485."""
def test_final_message(self):
- """Test final message exists"""
+ """Test final message exists."""
out = self.get_data_file('cloud-init-output.log')
self.assertIn('Final message from cloud-config', out)
diff --git a/tests/cloud_tests/testcases/bugs/lp1628337.py b/tests/cloud_tests/testcases/bugs/lp1628337.py
index af0ffc75..a2c90481 100644
--- a/tests/cloud_tests/testcases/bugs/lp1628337.py
+++ b/tests/cloud_tests/testcases/bugs/lp1628337.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestLP1628337(base.CloudTestCase):
- """Test LP# 1511485"""
+ """Test LP# 1511485."""
def test_fetch_indices(self):
- """Verify no apt errors"""
+ """Verify no apt errors."""
out = self.get_data_file('cloud-init-output.log')
self.assertNotIn('W: Failed to fetch', out)
self.assertNotIn('W: Some index files failed to download. '
@@ -16,7 +16,7 @@ class TestLP1628337(base.CloudTestCase):
out)
def test_ntp(self):
- """Verify can find ntp and install it"""
+ """Verify can find ntp and install it."""
out = self.get_data_file('cloud-init-output.log')
self.assertNotIn('E: Unable to locate package ntp', out)
diff --git a/tests/cloud_tests/testcases/examples/__init__.py b/tests/cloud_tests/testcases/examples/__init__.py
index b3af7f8a..39af88c2 100644
--- a/tests/cloud_tests/testcases/examples/__init__.py
+++ b/tests/cloud_tests/testcases/examples/__init__.py
@@ -1,7 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""
-Test verifiers for cloud-init examples
+"""Test verifiers for cloud-init examples.
+
See configs/examples/README.md for more information
"""
diff --git a/tests/cloud_tests/testcases/examples/add_apt_repositories.py b/tests/cloud_tests/testcases/examples/add_apt_repositories.py
index 15b8f01c..71eede97 100644
--- a/tests/cloud_tests/testcases/examples/add_apt_repositories.py
+++ b/tests/cloud_tests/testcases/examples/add_apt_repositories.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigurePrimary(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_ubuntu_sources(self):
- """Test no default Ubuntu entries exist"""
+ """Test no default Ubuntu entries exist."""
out = self.get_data_file('ubuntu.sources.list')
self.assertEqual(0, int(out))
def test_gatech_sources(self):
- """Test GaTech entires exist"""
+ """Test GaTech entires exist."""
out = self.get_data_file('gatech.sources.list')
self.assertEqual(20, int(out))
diff --git a/tests/cloud_tests/testcases/examples/alter_completion_message.py b/tests/cloud_tests/testcases/examples/alter_completion_message.py
index b06ad01b..b7b5d5e0 100644
--- a/tests/cloud_tests/testcases/examples/alter_completion_message.py
+++ b/tests/cloud_tests/testcases/examples/alter_completion_message.py
@@ -1,34 +1,27 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestFinalMessage(base.CloudTestCase):
- """
- test cloud init module `cc_final_message`
- """
+ """Test cloud init module `cc_final_message`."""
+
subs_char = '$'
def get_final_message_config(self):
- """
- get config for final message
- """
+ """Get config for final message."""
self.assertIn('final_message', self.cloud_config)
return self.cloud_config['final_message']
def get_final_message(self):
- """
- get final message from log
- """
+ """Get final message from log."""
out = self.get_data_file('cloud-init-output.log')
lines = len(self.get_final_message_config().splitlines())
return '\n'.join(out.splitlines()[-1 * lines:])
def test_final_message_string(self):
- """
- ensure final handles regular strings
- """
+ """Ensure final handles regular strings."""
for actual, config in zip(
self.get_final_message().splitlines(),
self.get_final_message_config().splitlines()):
@@ -36,9 +29,7 @@ class TestFinalMessage(base.CloudTestCase):
self.assertEqual(actual, config)
def test_final_message_subs(self):
- """
- test variable substitution in final message
- """
+ """Test variable substitution in final message."""
# TODO: add verification of other substitutions
patterns = {'$datasource': self.get_datasource()}
for key, expected in patterns.items():
diff --git a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py
index 8a4a0db0..38540eb8 100644
--- a/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py
+++ b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestTrustedCA(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_cert_count_ca(self):
- """Test correct count of CAs in .crt"""
+ """Test correct count of CAs in .crt."""
out = self.get_data_file('cert_count_ca')
self.assertIn('7 /etc/ssl/certs/ca-certificates.crt', out)
def test_cert_count_cloudinit(self):
- """Test correct count of CAs in .pem"""
+ """Test correct count of CAs in .pem."""
out = self.get_data_file('cert_count_cloudinit')
self.assertIn('7 /etc/ssl/certs/cloud-init-ca-certs.pem', out)
def test_cloudinit_certs(self):
- """Test text of cert"""
+ """Test text of cert."""
out = self.get_data_file('cloudinit_certs')
self.assertIn('-----BEGIN CERTIFICATE-----', out)
self.assertIn('YOUR-ORGS-TRUSTED-CA-CERT-HERE', out)
diff --git a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py
index 4f651703..691a316b 100644
--- a/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py
+++ b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.py
@@ -1,29 +1,29 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSSHKeys(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_cert_count(self):
- """Test cert count"""
+ """Test cert count."""
out = self.get_data_file('cert_count')
self.assertEqual(20, int(out))
def test_dsa_public(self):
- """Test DSA key has ending"""
+ """Test DSA key has ending."""
out = self.get_data_file('dsa_public')
self.assertIn('ZN4XnifuO5krqAybngIy66PMEoQ= smoser@localhost', out)
def test_rsa_public(self):
- """Test RSA key has specific ending"""
+ """Test RSA key has specific ending."""
out = self.get_data_file('rsa_public')
self.assertIn('PemAWthxHO18QJvWPocKJtlsDNi3 smoser@localhost', out)
def test_auth_keys(self):
- """Test authorized keys has specific ending"""
+ """Test authorized keys has specific ending."""
out = self.get_data_file('auth_keys')
self.assertIn('QPOt5Q8zWd9qG7PBl9+eiH5qV7NZ mykey@host', out)
self.assertIn('Hj29SCmXp5Kt5/82cD/VN3NtHw== smoser@brickies', out)
diff --git a/tests/cloud_tests/testcases/examples/including_user_groups.py b/tests/cloud_tests/testcases/examples/including_user_groups.py
index e5732322..67af527b 100644
--- a/tests/cloud_tests/testcases/examples/including_user_groups.py
+++ b/tests/cloud_tests/testcases/examples/including_user_groups.py
@@ -1,42 +1,42 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestUserGroups(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_group_ubuntu(self):
- """Test ubuntu group exists"""
+ """Test ubuntu group exists."""
out = self.get_data_file('group_ubuntu')
self.assertRegex(out, r'ubuntu:x:[0-9]{4}:')
def test_group_cloud_users(self):
- """Test cloud users group exists"""
+ """Test cloud users group exists."""
out = self.get_data_file('group_cloud_users')
self.assertRegex(out, r'cloud-users:x:[0-9]{4}:barfoo')
def test_user_ubuntu(self):
- """Test ubuntu user exists"""
+ """Test ubuntu user exists."""
out = self.get_data_file('user_ubuntu')
self.assertRegex(
out, r'ubuntu:x:[0-9]{4}:[0-9]{4}:Ubuntu:/home/ubuntu:/bin/bash')
def test_user_foobar(self):
- """Test foobar user exists"""
+ """Test foobar user exists."""
out = self.get_data_file('user_foobar')
self.assertRegex(
out, r'foobar:x:[0-9]{4}:[0-9]{4}:Foo B. Bar:/home/foobar:')
def test_user_barfoo(self):
- """Test barfoo user exists"""
+ """Test barfoo user exists."""
out = self.get_data_file('user_barfoo')
self.assertRegex(
out, r'barfoo:x:[0-9]{4}:[0-9]{4}:Bar B. Foo:/home/barfoo:')
def test_user_cloudy(self):
- """Test cloudy user exists"""
+ """Test cloudy user exists."""
out = self.get_data_file('user_cloudy')
self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:')
diff --git a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py
index 660d1aa3..df133844 100644
--- a/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py
+++ b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestInstall(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_htop(self):
- """Verify htop installed"""
+ """Verify htop installed."""
out = self.get_data_file('htop')
self.assertEqual(1, int(out))
def test_tree(self):
- """Verify tree installed"""
+ """Verify tree installed."""
out = self.get_data_file('treeutils')
self.assertEqual(1, int(out))
diff --git a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py
index b36486f0..4ec26b8f 100644
--- a/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py
+++ b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestChefExample(base.CloudTestCase):
- """Test chef module"""
+ """Test chef module."""
def test_chef_basic(self):
- """Test chef installed"""
+ """Test chef installed."""
out = self.get_data_file('chef_installed')
self.assertIn('install ok', out)
diff --git a/tests/cloud_tests/testcases/examples/run_apt_upgrade.py b/tests/cloud_tests/testcases/examples/run_apt_upgrade.py
index 4c04d315..744e49cb 100644
--- a/tests/cloud_tests/testcases/examples/run_apt_upgrade.py
+++ b/tests/cloud_tests/testcases/examples/run_apt_upgrade.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestUpgrade(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_upgrade(self):
- """Test upgrade exists in apt history"""
+ """Test upgrade exists in apt history."""
out = self.get_data_file('cloud-init.log')
self.assertIn(
'[CLOUDINIT] util.py[DEBUG]: apt-upgrade '
diff --git a/tests/cloud_tests/testcases/examples/run_commands.py b/tests/cloud_tests/testcases/examples/run_commands.py
index 0be21d0f..01d5d4fc 100644
--- a/tests/cloud_tests/testcases/examples/run_commands.py
+++ b/tests/cloud_tests/testcases/examples/run_commands.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestRunCmd(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_run_cmd(self):
- """Test run command worked"""
+ """Test run command worked."""
out = self.get_data_file('run_cmd')
self.assertIn('cloud-init run cmd test', out)
diff --git a/tests/cloud_tests/testcases/examples/run_commands_first_boot.py b/tests/cloud_tests/testcases/examples/run_commands_first_boot.py
index baa23130..3f3d8f84 100644
--- a/tests/cloud_tests/testcases/examples/run_commands_first_boot.py
+++ b/tests/cloud_tests/testcases/examples/run_commands_first_boot.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestBootCmd(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_bootcmd_host(self):
- """Test boot command worked"""
+ """Test boot command worked."""
out = self.get_data_file('hosts')
self.assertIn('192.168.1.130 us.archive.ubuntu.com', out)
diff --git a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py
index 97dfeec3..7bd520f6 100644
--- a/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py
+++ b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.py
@@ -1,29 +1,29 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestWriteFiles(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_b64(self):
- """Test b64 encoded file reads as ascii"""
+ """Test b64 encoded file reads as ascii."""
out = self.get_data_file('file_b64')
self.assertIn('ASCII text', out)
def test_binary(self):
- """Test binary file reads as executable"""
+ """Test binary file reads as executable."""
out = self.get_data_file('file_binary')
self.assertIn('ELF 64-bit LSB executable, x86-64, version 1', out)
def test_gzip(self):
- """Test gzip file shows up as a shell script"""
+ """Test gzip file shows up as a shell script."""
out = self.get_data_file('file_gzip')
self.assertIn('POSIX shell script, ASCII text executable', out)
def test_text(self):
- """Test text shows up as ASCII text"""
+ """Test text shows up as ASCII text."""
out = self.get_data_file('file_text')
self.assertIn('ASCII text', out)
diff --git a/tests/cloud_tests/testcases/main/__init__.py b/tests/cloud_tests/testcases/main/__init__.py
index 5888990d..0a592637 100644
--- a/tests/cloud_tests/testcases/main/__init__.py
+++ b/tests/cloud_tests/testcases/main/__init__.py
@@ -1,7 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""
-Test verifiers for cloud-init main features
+"""Test verifiers for cloud-init main features.
+
See configs/main/README.md for more information
"""
diff --git a/tests/cloud_tests/testcases/main/command_output_simple.py b/tests/cloud_tests/testcases/main/command_output_simple.py
index c0461a08..fe4c7670 100644
--- a/tests/cloud_tests/testcases/main/command_output_simple.py
+++ b/tests/cloud_tests/testcases/main/command_output_simple.py
@@ -1,17 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestCommandOutputSimple(base.CloudTestCase):
- """
- test functionality of simple output redirection
- """
+ """Test functionality of simple output redirection."""
def test_output_file(self):
- """
- ensure that the output file is not empty and has all stages
- """
+ """Ensure that the output file is not empty and has all stages."""
data = self.get_data_file('cloud-init-test-output')
self.assertNotEqual(len(data), 0, "specified log empty")
self.assertEqual(self.get_config_entry('final_message'),
diff --git a/tests/cloud_tests/testcases/modules/__init__.py b/tests/cloud_tests/testcases/modules/__init__.py
index 9560fb26..6ab8114d 100644
--- a/tests/cloud_tests/testcases/modules/__init__.py
+++ b/tests/cloud_tests/testcases/modules/__init__.py
@@ -1,7 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""
-Test verifiers for cloud-init cc modules
+"""Test verifiers for cloud-init cc modules.
+
See configs/modules/README.md for more information
"""
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_conf.py b/tests/cloud_tests/testcases/modules/apt_configure_conf.py
index 5d96d95c..3bf93447 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_conf.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_conf.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureConf(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_apt_conf_assumeyes(self):
- """Test config assumes true"""
+ """Test config assumes true."""
out = self.get_data_file('94cloud-init-config')
self.assertIn('Assume-Yes "true";', out)
def test_apt_conf_fixbroken(self):
- """Test config fixes broken"""
+ """Test config fixes broken."""
out = self.get_data_file('94cloud-init-config')
self.assertIn('Fix-Broken "true";', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py
index 0e2dfdeb..eabe4607 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureDisableSuites(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_empty_sourcelist(self):
- """Test source list is empty"""
+ """Test source list is empty."""
out = self.get_data_file('sources.list')
self.assertEqual('', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_primary.py b/tests/cloud_tests/testcases/modules/apt_configure_primary.py
index 2918785d..c1c4bbc0 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_primary.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_primary.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigurePrimary(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_ubuntu_sources(self):
- """Test no default Ubuntu entries exist"""
+ """Test no default Ubuntu entries exist."""
out = self.get_data_file('ubuntu.sources.list')
self.assertEqual(0, int(out))
def test_gatech_sources(self):
- """Test GaTech entires exist"""
+ """Test GaTech entires exist."""
out = self.get_data_file('gatech.sources.list')
self.assertEqual(20, int(out))
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_proxy.py b/tests/cloud_tests/testcases/modules/apt_configure_proxy.py
index 93ae64c6..0c61b6cc 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_proxy.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_proxy.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureProxy(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_proxy_config(self):
- """Test proxy options added to apt config"""
+ """Test proxy options added to apt config."""
out = self.get_data_file('90cloud-init-aptproxy')
self.assertIn(
'Acquire::http::Proxy "http://squid.internal:3128";', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_security.py b/tests/cloud_tests/testcases/modules/apt_configure_security.py
index 19c79c64..7d7e2585 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_security.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_security.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureSecurity(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_security_mirror(self):
- """Test security lines added and uncommented in source.list"""
+ """Test security lines added and uncommented in source.list."""
out = self.get_data_file('sources.list')
self.assertEqual(6, int(out))
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py
index d2ee2611..d9061f3c 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.py
@@ -1,21 +1,21 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureSourcesKey(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_apt_key_list(self):
- """Test key list updated"""
+ """Test key list updated."""
out = self.get_data_file('apt_key_list')
self.assertIn(
'1FF0 D853 5EF7 E719 E5C8 1B9C 083D 06FB E4D3 04DF', out)
self.assertIn('Launchpad PPA for cloud init development team', out)
def test_source_list(self):
- """Test source.list updated"""
+ """Test source.list updated."""
out = self.get_data_file('sources.list')
self.assertIn(
'http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py
index 3931a92c..ddc86174 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.py
@@ -1,21 +1,21 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureSourcesKeyserver(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_apt_key_list(self):
- """Test specific key added"""
+ """Test specific key added."""
out = self.get_data_file('apt_key_list')
self.assertIn(
- '1BC3 0F71 5A3B 8612 47A8 1A5E 55FE 7C8C 0165 013E', out)
- self.assertIn('Launchpad PPA for curtin developers', out)
+ '1FF0 D853 5EF7 E719 E5C8 1B9C 083D 06FB E4D3 04DF', out)
+ self.assertIn('Launchpad PPA for cloud init development team', out)
def test_source_list(self):
- """Test source.list updated"""
+ """Test source.list updated."""
out = self.get_data_file('sources.list')
self.assertIn(
'http://ppa.launchpad.net/cloud-init-dev/test-archive/ubuntu', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py
index a0bb5e6b..129d2264 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureSourcesList(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_sources_list(self):
- """Test sources.list includes sources"""
+ """Test sources.list includes sources."""
out = self.get_data_file('sources.list')
self.assertRegex(out, r'deb http:\/\/archive.ubuntu.com\/ubuntu '
'[a-z].* main restricted')
diff --git a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py
index dcdb3767..d299e9ad 100644
--- a/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.py
@@ -1,20 +1,20 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptconfigureSourcesPPA(base.CloudTestCase):
- """Test apt-configure module"""
+ """Test apt-configure module."""
def test_ppa(self):
- """test specific ppa added"""
+ """Test specific ppa added."""
out = self.get_data_file('sources.list')
self.assertIn(
'http://ppa.launchpad.net/curtin-dev/test-archive/ubuntu', out)
def test_ppa_key(self):
- """test ppa key added"""
+ """Test ppa key added."""
out = self.get_data_file('apt-key')
self.assertIn(
'1BC3 0F71 5A3B 8612 47A8 1A5E 55FE 7C8C 0165 013E', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py
index 446c597d..c98eedef 100644
--- a/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py
+++ b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptPipeliningDisable(base.CloudTestCase):
- """Test apt-pipelining module"""
+ """Test apt-pipelining module."""
def test_disable_pipelining(self):
- """Test pipelining disabled"""
+ """Test pipelining disabled."""
out = self.get_data_file('90cloud-init-pipelining')
self.assertIn('Acquire::http::Pipeline-Depth "0";', out)
diff --git a/tests/cloud_tests/testcases/modules/apt_pipelining_os.py b/tests/cloud_tests/testcases/modules/apt_pipelining_os.py
index ad2a8884..740dc7c0 100644
--- a/tests/cloud_tests/testcases/modules/apt_pipelining_os.py
+++ b/tests/cloud_tests/testcases/modules/apt_pipelining_os.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestAptPipeliningOS(base.CloudTestCase):
- """Test apt-pipelining module"""
+ """Test apt-pipelining module."""
def test_os_pipelining(self):
- """Test pipelining set to os"""
+ """Test pipelining set to os."""
out = self.get_data_file('90cloud-init-pipelining')
self.assertIn('Acquire::http::Pipeline-Depth "0";', out)
diff --git a/tests/cloud_tests/testcases/modules/bootcmd.py b/tests/cloud_tests/testcases/modules/bootcmd.py
index 47a51e0a..f5b86b03 100644
--- a/tests/cloud_tests/testcases/modules/bootcmd.py
+++ b/tests/cloud_tests/testcases/modules/bootcmd.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestBootCmd(base.CloudTestCase):
- """Test bootcmd module"""
+ """Test bootcmd module."""
def test_bootcmd_host(self):
- """Test boot cmd worked"""
+ """Test boot cmd worked."""
out = self.get_data_file('hosts')
self.assertIn('192.168.1.130 us.archive.ubuntu.com', out)
diff --git a/tests/cloud_tests/testcases/modules/byobu.py b/tests/cloud_tests/testcases/modules/byobu.py
index 204b37b9..005ca014 100644
--- a/tests/cloud_tests/testcases/modules/byobu.py
+++ b/tests/cloud_tests/testcases/modules/byobu.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestByobu(base.CloudTestCase):
- """Test Byobu module"""
+ """Test Byobu module."""
def test_byobu_installed(self):
- """Test byobu installed"""
+ """Test byobu installed."""
out = self.get_data_file('byobu_installed')
self.assertIn('/usr/bin/byobu', out)
def test_byobu_profile_enabled(self):
- """Test byobu profile.d file exists"""
+ """Test byobu profile.d file exists."""
out = self.get_data_file('byobu_profile_enabled')
self.assertIn('/etc/profile.d/Z97-byobu.sh', out)
def test_byobu_launch_exists(self):
- """Test byobu-launch exists"""
+ """Test byobu-launch exists."""
out = self.get_data_file('byobu_launch_exists')
self.assertIn('/usr/bin/byobu-launch', out)
diff --git a/tests/cloud_tests/testcases/modules/ca_certs.py b/tests/cloud_tests/testcases/modules/ca_certs.py
index 7448e480..e75f0413 100644
--- a/tests/cloud_tests/testcases/modules/ca_certs.py
+++ b/tests/cloud_tests/testcases/modules/ca_certs.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestCaCerts(base.CloudTestCase):
- """Test ca certs module"""
+ """Test ca certs module."""
def test_cert_count(self):
- """Test the count is proper"""
+ """Test the count is proper."""
out = self.get_data_file('cert_count')
self.assertEqual(5, int(out))
def test_cert_installed(self):
- """Test line from our cert exists"""
+ """Test line from our cert exists."""
out = self.get_data_file('cert')
self.assertIn('a36c744454555024e7f82edc420fd2c8', out)
diff --git a/tests/cloud_tests/testcases/modules/debug_disable.py b/tests/cloud_tests/testcases/modules/debug_disable.py
index 9899fdfe..e40e4b89 100644
--- a/tests/cloud_tests/testcases/modules/debug_disable.py
+++ b/tests/cloud_tests/testcases/modules/debug_disable.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestDebugDisable(base.CloudTestCase):
- """Disable debug messages"""
+ """Disable debug messages."""
def test_debug_disable(self):
- """Test verbose output missing from logs"""
+ """Test verbose output missing from logs."""
out = self.get_data_file('cloud-init.log')
self.assertNotIn(
out, r'Skipping module named [a-z].* verbose printing disabled')
diff --git a/tests/cloud_tests/testcases/modules/debug_enable.py b/tests/cloud_tests/testcases/modules/debug_enable.py
index 21c89524..28d26062 100644
--- a/tests/cloud_tests/testcases/modules/debug_enable.py
+++ b/tests/cloud_tests/testcases/modules/debug_enable.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestDebugEnable(base.CloudTestCase):
- """Test debug messages"""
+ """Test debug messages."""
def test_debug_enable(self):
- """Test debug messages in cloud-init log"""
+ """Test debug messages in cloud-init log."""
out = self.get_data_file('cloud-init.log')
self.assertIn('[DEBUG]', out)
diff --git a/tests/cloud_tests/testcases/modules/final_message.py b/tests/cloud_tests/testcases/modules/final_message.py
index b06ad01b..b7b5d5e0 100644
--- a/tests/cloud_tests/testcases/modules/final_message.py
+++ b/tests/cloud_tests/testcases/modules/final_message.py
@@ -1,34 +1,27 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestFinalMessage(base.CloudTestCase):
- """
- test cloud init module `cc_final_message`
- """
+ """Test cloud init module `cc_final_message`."""
+
subs_char = '$'
def get_final_message_config(self):
- """
- get config for final message
- """
+ """Get config for final message."""
self.assertIn('final_message', self.cloud_config)
return self.cloud_config['final_message']
def get_final_message(self):
- """
- get final message from log
- """
+ """Get final message from log."""
out = self.get_data_file('cloud-init-output.log')
lines = len(self.get_final_message_config().splitlines())
return '\n'.join(out.splitlines()[-1 * lines:])
def test_final_message_string(self):
- """
- ensure final handles regular strings
- """
+ """Ensure final handles regular strings."""
for actual, config in zip(
self.get_final_message().splitlines(),
self.get_final_message_config().splitlines()):
@@ -36,9 +29,7 @@ class TestFinalMessage(base.CloudTestCase):
self.assertEqual(actual, config)
def test_final_message_subs(self):
- """
- test variable substitution in final message
- """
+ """Test variable substitution in final message."""
# TODO: add verification of other substitutions
patterns = {'$datasource': self.get_datasource()}
for key, expected in patterns.items():
diff --git a/tests/cloud_tests/testcases/modules/keys_to_console.py b/tests/cloud_tests/testcases/modules/keys_to_console.py
index b36c96cf..88b6812e 100644
--- a/tests/cloud_tests/testcases/modules/keys_to_console.py
+++ b/tests/cloud_tests/testcases/modules/keys_to_console.py
@@ -1,20 +1,20 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestKeysToConsole(base.CloudTestCase):
- """Test proper keys are included and excluded to console"""
+ """Test proper keys are included and excluded to console."""
def test_excluded_keys(self):
- """Test excluded keys missing"""
+ """Test excluded keys missing."""
out = self.get_data_file('syslog')
self.assertNotIn('DSA', out)
self.assertNotIn('ECDSA', out)
def test_expected_keys(self):
- """Test expected keys exist"""
+ """Test expected keys exist."""
out = self.get_data_file('syslog')
self.assertIn('ED25519', out)
self.assertIn('RSA', out)
diff --git a/tests/cloud_tests/testcases/modules/locale.py b/tests/cloud_tests/testcases/modules/locale.py
index bf4e1b07..cb9e1dce 100644
--- a/tests/cloud_tests/testcases/modules/locale.py
+++ b/tests/cloud_tests/testcases/modules/locale.py
@@ -1,19 +1,22 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
+from cloudinit import util
+
class TestLocale(base.CloudTestCase):
- """Test locale is set properly"""
+ """Test locale is set properly."""
def test_locale(self):
- """Test locale is set properly"""
- out = self.get_data_file('locale_default')
- self.assertIn('LANG="en_GB.UTF-8"', out)
+ """Test locale is set properly."""
+ data = util.load_shell_content(self.get_data_file('locale_default'))
+ self.assertIn("LANG", data)
+ self.assertEqual('en_GB.UTF-8', data['LANG'])
def test_locale_a(self):
- """Test locale -a has both options"""
+ """Test locale -a has both options."""
out = self.get_data_file('locale_a')
self.assertIn('en_GB.utf8', out)
self.assertIn('en_US.utf8', out)
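Note: the locale test now parses the collected /etc/default/locale content instead of matching a quoted string; util.load_shell_content returns a dict of the shell variable assignments. A rough standalone approximation of that behaviour follows; load_vars is an invented stand-in, not the cloudinit.util implementation.

def load_vars(content):
    """Invented stand-in: parse KEY="value" shell assignments into a dict."""
    data = {}
    for line in content.splitlines():
        line = line.strip()
        if not line or line.startswith('#') or '=' not in line:
            continue
        key, _, value = line.partition('=')
        data[key] = value.strip('"\'')
    return data


locale_default = 'LANG="en_GB.UTF-8"\n'
data = load_vars(locale_default)
assert data['LANG'] == 'en_GB.UTF-8'
print(data)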
diff --git a/tests/cloud_tests/testcases/modules/lxd_bridge.py b/tests/cloud_tests/testcases/modules/lxd_bridge.py
index 4087e2f2..c0262ba3 100644
--- a/tests/cloud_tests/testcases/modules/lxd_bridge.py
+++ b/tests/cloud_tests/testcases/modules/lxd_bridge.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestLxdBridge(base.CloudTestCase):
- """Test LXD module"""
+ """Test LXD module."""
def test_lxd(self):
- """Test lxd installed"""
+ """Test lxd installed."""
out = self.get_data_file('lxd')
self.assertIn('/usr/bin/lxd', out)
def test_lxc(self):
- """Test lxc installed"""
+ """Test lxc installed."""
out = self.get_data_file('lxc')
self.assertIn('/usr/bin/lxc', out)
def test_bridge(self):
- """Test bridge config"""
+ """Test bridge config."""
out = self.get_data_file('lxc-bridge')
self.assertIn('lxdbr0', out)
self.assertIn('10.100.100.1/24', out)
diff --git a/tests/cloud_tests/testcases/modules/lxd_dir.py b/tests/cloud_tests/testcases/modules/lxd_dir.py
index 51a9a1f1..1495674e 100644
--- a/tests/cloud_tests/testcases/modules/lxd_dir.py
+++ b/tests/cloud_tests/testcases/modules/lxd_dir.py
@@ -1,19 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestLxdDir(base.CloudTestCase):
- """Test LXD module"""
+ """Test LXD module."""
def test_lxd(self):
- """Test lxd installed"""
+ """Test lxd installed."""
out = self.get_data_file('lxd')
self.assertIn('/usr/bin/lxd', out)
def test_lxc(self):
- """Test lxc installed"""
+ """Test lxc installed."""
out = self.get_data_file('lxc')
self.assertIn('/usr/bin/lxc', out)
diff --git a/tests/cloud_tests/testcases/modules/ntp.py b/tests/cloud_tests/testcases/modules/ntp.py
index 82d32880..b50e52fe 100644
--- a/tests/cloud_tests/testcases/modules/ntp.py
+++ b/tests/cloud_tests/testcases/modules/ntp.py
@@ -1,6 +1,6 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
@@ -9,8 +9,8 @@ class TestNtp(base.CloudTestCase):
def test_ntp_installed(self):
"""Test ntp installed"""
- out = self.get_data_file('ntp_installed_empty')
- self.assertEqual(1, int(out))
+ out = self.get_data_file('ntp_installed')
+ self.assertEqual(0, int(out))
def test_ntp_dist_entries(self):
"""Test dist config file is empty"""
@@ -19,10 +19,7 @@ class TestNtp(base.CloudTestCase):
def test_ntp_entires(self):
"""Test config entries"""
- out = self.get_data_file('ntp_conf_empty')
- self.assertIn('pool 0.ubuntu.pool.ntp.org iburst', out)
- self.assertIn('pool 1.ubuntu.pool.ntp.org iburst', out)
- self.assertIn('pool 2.ubuntu.pool.ntp.org iburst', out)
- self.assertIn('pool 3.ubuntu.pool.ntp.org iburst', out)
+ out = self.get_data_file('ntp_conf_pool_list')
+ self.assertIn('pool.ntp.org iburst', out)
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/ntp_pools.py b/tests/cloud_tests/testcases/modules/ntp_pools.py
index ff6d8fa4..152fd3f1 100644
--- a/tests/cloud_tests/testcases/modules/ntp_pools.py
+++ b/tests/cloud_tests/testcases/modules/ntp_pools.py
@@ -1,16 +1,16 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestNtpPools(base.CloudTestCase):
- """Test ntp module"""
+ """Test ntp module."""
def test_ntp_installed(self):
"""Test ntp installed"""
out = self.get_data_file('ntp_installed_pools')
- self.assertEqual(1, int(out))
+ self.assertEqual(0, int(out))
def test_ntp_dist_entries(self):
"""Test dist config file is empty"""
diff --git a/tests/cloud_tests/testcases/modules/ntp_servers.py b/tests/cloud_tests/testcases/modules/ntp_servers.py
index 4010cf80..8d2a68b3 100644
--- a/tests/cloud_tests/testcases/modules/ntp_servers.py
+++ b/tests/cloud_tests/testcases/modules/ntp_servers.py
@@ -10,7 +10,7 @@ class TestNtpServers(base.CloudTestCase):
def test_ntp_installed(self):
"""Test ntp installed"""
out = self.get_data_file('ntp_installed_servers')
- self.assertEqual(1, int(out))
+ self.assertEqual(0, int(out))
def test_ntp_dist_entries(self):
"""Test dist config file is empty"""
diff --git a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py
index 00353ead..a92dec22 100644
--- a/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py
+++ b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestPackageInstallUpdateUpgrade(base.CloudTestCase):
- """Test package install update upgrade module"""
+ """Test package install update upgrade module."""
def test_installed_htop(self):
- """Test htop got installed"""
+ """Test htop got installed."""
out = self.get_data_file('dpkg_htop')
self.assertEqual(1, int(out))
def test_installed_tree(self):
- """Test tree got installed"""
+ """Test tree got installed."""
out = self.get_data_file('dpkg_tree')
self.assertEqual(1, int(out))
def test_apt_history(self):
- """Test apt history for update command"""
+ """Test apt history for update command."""
out = self.get_data_file('apt_history_cmdline')
self.assertIn(
'Commandline: /usr/bin/apt-get --option=Dpkg::Options'
@@ -26,7 +26,7 @@ class TestPackageInstallUpdateUpgrade(base.CloudTestCase):
'--assume-yes --quiet install htop tree', out)
def test_cloud_init_output(self):
- """Test cloud-init-output for install & upgrade stuff"""
+ """Test cloud-init-output for install & upgrade stuff."""
out = self.get_data_file('cloud-init-output.log')
self.assertIn('Setting up tree (', out)
self.assertIn('Setting up htop (', out)
diff --git a/tests/cloud_tests/testcases/modules/runcmd.py b/tests/cloud_tests/testcases/modules/runcmd.py
index 780cd186..9fce3062 100644
--- a/tests/cloud_tests/testcases/modules/runcmd.py
+++ b/tests/cloud_tests/testcases/modules/runcmd.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestRunCmd(base.CloudTestCase):
- """Test runcmd module"""
+ """Test runcmd module."""
def test_run_cmd(self):
- """Test run command worked"""
+ """Test run command worked."""
out = self.get_data_file('run_cmd')
self.assertIn('cloud-init run cmd test', out)
diff --git a/tests/cloud_tests/testcases/modules/salt_minion.py b/tests/cloud_tests/testcases/modules/salt_minion.py
index 3ef30f7e..c697db2d 100644
--- a/tests/cloud_tests/testcases/modules/salt_minion.py
+++ b/tests/cloud_tests/testcases/modules/salt_minion.py
@@ -1,26 +1,26 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class Test(base.CloudTestCase):
- """Test salt minion module"""
+ """Test salt minion module."""
def test_minon_master(self):
- """Test master value in config"""
+ """Test master value in config."""
out = self.get_data_file('minion')
self.assertIn('master: salt.mydomain.com', out)
def test_minion_pem(self):
- """Test private key"""
+ """Test private key."""
out = self.get_data_file('minion.pem')
self.assertIn('------BEGIN PRIVATE KEY------', out)
self.assertIn('<key data>', out)
self.assertIn('------END PRIVATE KEY-------', out)
def test_minion_pub(self):
- """Test public key"""
+ """Test public key."""
out = self.get_data_file('minion.pub')
self.assertIn('------BEGIN PUBLIC KEY-------', out)
self.assertIn('<key data>', out)
diff --git a/tests/cloud_tests/testcases/modules/seed_random_data.py b/tests/cloud_tests/testcases/modules/seed_random_data.py
index b2121569..db433d26 100644
--- a/tests/cloud_tests/testcases/modules/seed_random_data.py
+++ b/tests/cloud_tests/testcases/modules/seed_random_data.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSeedRandom(base.CloudTestCase):
- """Test seed random module"""
+ """Test seed random module."""
def test_random_seed_data(self):
- """Test random data passed in exists"""
+ """Test random data passed in exists."""
out = self.get_data_file('seed_data')
self.assertIn('MYUb34023nD:LFDK10913jk;dfnk:Df', out)
diff --git a/tests/cloud_tests/testcases/modules/set_hostname.py b/tests/cloud_tests/testcases/modules/set_hostname.py
index 9501b069..6e96a75c 100644
--- a/tests/cloud_tests/testcases/modules/set_hostname.py
+++ b/tests/cloud_tests/testcases/modules/set_hostname.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestHostname(base.CloudTestCase):
- """Test hostname module"""
+ """Test hostname module."""
def test_hostname(self):
- """Test hostname command shows correct output"""
+ """Test hostname command shows correct output."""
out = self.get_data_file('hostname')
self.assertIn('myhostname', out)
diff --git a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py
index d89c299d..398f3d40 100644
--- a/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py
+++ b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestHostnameFqdn(base.CloudTestCase):
- """Test Hostname module"""
+ """Test Hostname module."""
def test_hostname(self):
- """Test hostname output"""
+ """Test hostname output."""
out = self.get_data_file('hostname')
self.assertIn('myhostname', out)
def test_hostname_fqdn(self):
- """Test hostname fqdn output"""
+ """Test hostname fqdn output."""
out = self.get_data_file('fqdn')
self.assertIn('host.myorg.com', out)
def test_hosts(self):
- """Test /etc/hosts file"""
+ """Test /etc/hosts file."""
out = self.get_data_file('hosts')
self.assertIn('127.0.1.1 host.myorg.com myhostname', out)
self.assertIn('127.0.0.1 localhost', out)
diff --git a/tests/cloud_tests/testcases/modules/set_password.py b/tests/cloud_tests/testcases/modules/set_password.py
index 1411a296..a29b2261 100644
--- a/tests/cloud_tests/testcases/modules/set_password.py
+++ b/tests/cloud_tests/testcases/modules/set_password.py
@@ -1,21 +1,21 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestPassword(base.CloudTestCase):
- """Test password module"""
+ """Test password module."""
# TODO add test to make sure password is actually "password"
def test_shadow(self):
- """Test ubuntu user in shadow"""
+ """Test ubuntu user in shadow."""
out = self.get_data_file('shadow')
self.assertIn('ubuntu:', out)
def test_sshd_config(self):
- """Test sshd config allows passwords"""
+ """Test sshd config allows passwords."""
out = self.get_data_file('sshd_config')
self.assertIn('PasswordAuthentication yes', out)
diff --git a/tests/cloud_tests/testcases/modules/set_password_expire.py b/tests/cloud_tests/testcases/modules/set_password_expire.py
index 1ac9c23f..a1c3aa08 100644
--- a/tests/cloud_tests/testcases/modules/set_password_expire.py
+++ b/tests/cloud_tests/testcases/modules/set_password_expire.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestPasswordExpire(base.CloudTestCase):
- """Test password module"""
+ """Test password module."""
def test_shadow(self):
- """Test user frozen in shadow"""
+ """Test user frozen in shadow."""
out = self.get_data_file('shadow')
self.assertIn('harry:!:', out)
self.assertIn('dick:!:', out)
@@ -16,7 +16,7 @@ class TestPasswordExpire(base.CloudTestCase):
self.assertIn('harry:!:', out)
def test_sshd_config(self):
- """Test sshd config allows passwords"""
+ """Test sshd config allows passwords."""
out = self.get_data_file('sshd_config')
self.assertIn('PasswordAuthentication no', out)
diff --git a/tests/cloud_tests/testcases/modules/set_password_list.py b/tests/cloud_tests/testcases/modules/set_password_list.py
index 6819d259..375cd27d 100644
--- a/tests/cloud_tests/testcases/modules/set_password_list.py
+++ b/tests/cloud_tests/testcases/modules/set_password_list.py
@@ -1,11 +1,12 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestPasswordList(base.PasswordListTest, base.CloudTestCase):
- """Test password setting via list in chpasswd/list"""
+ """Test password setting via list in chpasswd/list."""
+
__test__ = True
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/set_password_list_string.py b/tests/cloud_tests/testcases/modules/set_password_list_string.py
index 2c34fada..8c2634c5 100644
--- a/tests/cloud_tests/testcases/modules/set_password_list_string.py
+++ b/tests/cloud_tests/testcases/modules/set_password_list_string.py
@@ -1,11 +1,12 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestPasswordListString(base.PasswordListTest, base.CloudTestCase):
- """Test password setting via string in chpasswd/list"""
+ """Test password setting via string in chpasswd/list."""
+
__test__ = True
# vi: ts=4 expandtab
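The two chpasswd cases above only flip `__test__ = True`; the shared assertions presumably live in a base.PasswordListTest mixin that keeps itself out of test collection. A hedged sketch of that pattern follows; the class body, method name, and data file are illustrative, not the real base class.

from tests.cloud_tests.testcases.base import CloudTestCase


class PasswordListTest(CloudTestCase):
    """Shared checks reused by the chpasswd list and list-string variants."""

    __test__ = False  # not collected directly; subclasses flip this to True

    def test_shadow_has_entries(self):
        # 'shadow' is a collected-data name used elsewhere in this suite;
        # the exact assertion here is made up for illustration
        out = self.get_data_file('shadow')
        self.assertIn(':', out)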
diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py
index a0f8896b..82223217 100644
--- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py
+++ b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.py
@@ -1,24 +1,24 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeyFingerprintsDisable(base.CloudTestCase):
- """Test ssh key fingerprints module"""
+ """Test ssh key fingerprints module."""
def test_cloud_init_log(self):
- """Verify disabled"""
+ """Verify disabled."""
out = self.get_data_file('cloud-init.log')
self.assertIn('Skipping module named ssh-authkey-fingerprints, '
'logging of ssh fingerprints disabled', out)
def test_syslog(self):
- """Verify output of syslog"""
+ """Verify output of syslog."""
out = self.get_data_file('syslog')
- self.assertNotRegexpMatches(out, r'256 SHA256:.*(ECDSA)')
- self.assertNotRegexpMatches(out, r'256 SHA256:.*(ED25519)')
- self.assertNotRegexpMatches(out, r'1024 SHA256:.*(DSA)')
- self.assertNotRegexpMatches(out, r'2048 SHA256:.*(RSA)')
+ self.assertNotRegex(out, r'256 SHA256:.*(ECDSA)')
+ self.assertNotRegex(out, r'256 SHA256:.*(ED25519)')
+ self.assertNotRegex(out, r'1024 SHA256:.*(DSA)')
+ self.assertNotRegex(out, r'2048 SHA256:.*(RSA)')
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py
index 3c44b0cc..3510e75a 100644
--- a/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py
+++ b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.py
@@ -1,18 +1,18 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeyFingerprintsEnable(base.CloudTestCase):
- """Test ssh key fingerprints module"""
+ """Test ssh key fingerprints module."""
def test_syslog(self):
- """Verify output of syslog"""
+ """Verify output of syslog."""
out = self.get_data_file('syslog')
- self.assertRegexpMatches(out, r'256 SHA256:.*(ECDSA)')
- self.assertRegexpMatches(out, r'256 SHA256:.*(ED25519)')
- self.assertNotRegexpMatches(out, r'1024 SHA256:.*(DSA)')
- self.assertNotRegexpMatches(out, r'2048 SHA256:.*(RSA)')
+ self.assertRegex(out, r'256 SHA256:.*(ECDSA)')
+ self.assertRegex(out, r'256 SHA256:.*(ED25519)')
+ self.assertNotRegex(out, r'1024 SHA256:.*(DSA)')
+ self.assertNotRegex(out, r'2048 SHA256:.*(RSA)')
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/ssh_import_id.py b/tests/cloud_tests/testcases/modules/ssh_import_id.py
index 214e710d..ef156f47 100644
--- a/tests/cloud_tests/testcases/modules/ssh_import_id.py
+++ b/tests/cloud_tests/testcases/modules/ssh_import_id.py
@@ -1,26 +1,17 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshImportId(base.CloudTestCase):
- """Test ssh import id module"""
+ """Test ssh import id module."""
def test_authorized_keys(self):
- """Test that ssh keys were imported"""
+ """Test that ssh keys were imported."""
out = self.get_data_file('auth_keys_ubuntu')
- # Rather than checking the key fingerprints, you could just check
- # the ending comment for where it got imported from in case these
- # change in the future :\
- self.assertIn('8sXGTYYw3iQSkOvDUUlIsqdaO+w== powersj@github/'
- '18564351 # ssh-import-id gh:powersj', out)
- self.assertIn('Hj29SCmXp5Kt5/82cD/VN3NtHw== smoser@brickies-'
- 'canonical # ssh-import-id lp:smoser', out)
- self.assertIn('7cUDQSXbabilgnzTjHo9mjd/kZ7cLOHP smoser@bart-'
- 'canonical # ssh-import-id lp:smoser', out)
- self.assertIn('aX0VHGXvHAQlPl4n7+FzAE1UmWFYEGrsSoNvLv3 smose'
- 'r@kaypeah # ssh-import-id lp:smoser', out)
+ self.assertIn('# ssh-import-id gh:powersj', out)
+ self.assertIn('# ssh-import-id lp:smoser', out)
# vi: ts=4 expandtab
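For context on why the looser assertions above suffice: ssh-import-id appends a comment naming the key's origin to each imported authorized_keys line, so matching on that trailing comment survives key rotation. The snippet below is illustrative only; the key material is fake.

# Illustrative authorized_keys entry as written by ssh-import-id.
sample_line = ('ssh-rsa AAAAB3NzaC1yc2E...fake... '
               'powersj@github/18564351 # ssh-import-id gh:powersj')
assert '# ssh-import-id gh:powersj' in sample_line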
diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_generate.py b/tests/cloud_tests/testcases/modules/ssh_keys_generate.py
index 161ace5f..fd6d9ba5 100644
--- a/tests/cloud_tests/testcases/modules/ssh_keys_generate.py
+++ b/tests/cloud_tests/testcases/modules/ssh_keys_generate.py
@@ -1,56 +1,56 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeysGenerate(base.CloudTestCase):
- """Test ssh keys module"""
+ """Test ssh keys module."""
# TODO: Check cloud-init-output for the correct keys being generated
def test_ubuntu_authorized_keys(self):
- """Test passed in key is not in list for ubuntu"""
+ """Test passed in key is not in list for ubuntu."""
out = self.get_data_file('auth_keys_ubuntu')
self.assertEqual('', out)
def test_dsa_public(self):
- """Test dsa public key not generated"""
+ """Test dsa public key not generated."""
out = self.get_data_file('dsa_public')
self.assertEqual('', out)
def test_dsa_private(self):
- """Test dsa private key not generated"""
+ """Test dsa private key not generated."""
out = self.get_data_file('dsa_private')
self.assertEqual('', out)
def test_rsa_public(self):
- """Test rsa public key not generated"""
+ """Test rsa public key not generated."""
out = self.get_data_file('rsa_public')
self.assertEqual('', out)
def test_rsa_private(self):
- """Test rsa public key not generated"""
+ """Test rsa public key not generated."""
out = self.get_data_file('rsa_private')
self.assertEqual('', out)
def test_ecdsa_public(self):
- """Test ecdsa public key generated"""
+ """Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_public')
self.assertIsNotNone(out)
def test_ecdsa_private(self):
- """Test ecdsa public key generated"""
+ """Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_private')
self.assertIsNotNone(out)
def test_ed25519_public(self):
- """Test ed25519 public key generated"""
+ """Test ed25519 public key generated."""
out = self.get_data_file('ed25519_public')
self.assertIsNotNone(out)
def test_ed25519_private(self):
- """Test ed25519 public key generated"""
+ """Test ed25519 public key generated."""
out = self.get_data_file('ed25519_private')
self.assertIsNotNone(out)
diff --git a/tests/cloud_tests/testcases/modules/ssh_keys_provided.py b/tests/cloud_tests/testcases/modules/ssh_keys_provided.py
index 8f18cb94..544649da 100644
--- a/tests/cloud_tests/testcases/modules/ssh_keys_provided.py
+++ b/tests/cloud_tests/testcases/modules/ssh_keys_provided.py
@@ -1,67 +1,67 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeysProvided(base.CloudTestCase):
- """Test ssh keys module"""
+ """Test ssh keys module."""
def test_ubuntu_authorized_keys(self):
- """Test passed in key is not in list for ubuntu"""
+ """Test passed in key is not in list for ubuntu."""
out = self.get_data_file('auth_keys_ubuntu')
self.assertEqual('', out)
def test_root_authorized_keys(self):
- """Test passed in key is in authorized list for root"""
+ """Test passed in key is in authorized list for root."""
out = self.get_data_file('auth_keys_root')
self.assertIn('lzrkPqONphoZx0LDV86w7RUz1ksDzAdcm0tvmNRFMN1a0frDs50'
'6oA3aWK0oDk4Nmvk8sXGTYYw3iQSkOvDUUlIsqdaO+w==', out)
def test_dsa_public(self):
- """Test dsa public key passed in"""
+ """Test dsa public key passed in."""
out = self.get_data_file('dsa_public')
self.assertIn('AAAAB3NzaC1kc3MAAACBAPkWy1zbchVIN7qTgM0/yyY8q4RZS8c'
'NM4ZpeuE5UB/Nnr6OSU/nmbO8LuM', out)
def test_dsa_private(self):
- """Test dsa private key passed in"""
+ """Test dsa private key passed in."""
out = self.get_data_file('dsa_private')
self.assertIn('MIIBuwIBAAKBgQD5Fstc23IVSDe6k4DNP8smPKuEWUvHDTOGaXr'
'hOVAfzZ6+jklP', out)
def test_rsa_public(self):
- """Test rsa public key passed in"""
+ """Test rsa public key passed in."""
out = self.get_data_file('rsa_public')
self.assertIn('AAAAB3NzaC1yc2EAAAADAQABAAABAQC0/Ho+o3eJISydO2JvIgT'
'LnZOtrxPl+fSvJfKDjoOLY0HB2eOjy2s2/2N6d9X9SGZ4', out)
def test_rsa_private(self):
- """Test rsa public key passed in"""
+ """Test rsa public key passed in."""
out = self.get_data_file('rsa_private')
self.assertIn('4DOkqNiUGl80Zp1RgZNohHUXlJMtAbrIlAVEk+mTmg7vjfyp2un'
'RQvLZpMRdywBm', out)
def test_ecdsa_public(self):
- """Test ecdsa public key passed in"""
+ """Test ecdsa public key passed in."""
out = self.get_data_file('ecdsa_public')
self.assertIn('AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAAB'
'BBFsS5Tvky/IC/dXhE/afxxU', out)
def test_ecdsa_private(self):
- """Test ecdsa public key passed in"""
+ """Test ecdsa public key passed in."""
out = self.get_data_file('ecdsa_private')
self.assertIn('AwEHoUQDQgAEWxLlO+TL8gL91eET9p/HFQbqR1A691AkJgZk3jY'
'5mpZqxgX4vcgb', out)
def test_ed25519_public(self):
- """Test ed25519 public key passed in"""
+ """Test ed25519 public key passed in."""
out = self.get_data_file('ed25519_public')
self.assertIn('AAAAC3NzaC1lZDI1NTE5AAAAINudAZSu4vjZpVWzId5pXmZg1M6'
'G15dqjQ2XkNVOEnb5', out)
def test_ed25519_private(self):
- """Test ed25519 public key passed in"""
+ """Test ed25519 public key passed in."""
out = self.get_data_file('ed25519_private')
self.assertIn('XAAAAAtzc2gtZWQyNTUxOQAAACDbnQGUruL42aVVsyHeaV5mYNT'
'OhteXao0Nl5DVThJ2+Q', out)
diff --git a/tests/cloud_tests/testcases/modules/timezone.py b/tests/cloud_tests/testcases/modules/timezone.py
index bf91d490..654fa53d 100644
--- a/tests/cloud_tests/testcases/modules/timezone.py
+++ b/tests/cloud_tests/testcases/modules/timezone.py
@@ -1,14 +1,14 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestTimezone(base.CloudTestCase):
- """Test timezone module"""
+ """Test timezone module."""
def test_timezone(self):
- """Test date prints correct timezone"""
+ """Test date prints correct timezone."""
out = self.get_data_file('timezone')
self.assertEqual('HDT', out.rstrip())
diff --git a/tests/cloud_tests/testcases/modules/user_groups.py b/tests/cloud_tests/testcases/modules/user_groups.py
index e5732322..67af527b 100644
--- a/tests/cloud_tests/testcases/modules/user_groups.py
+++ b/tests/cloud_tests/testcases/modules/user_groups.py
@@ -1,42 +1,42 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestUserGroups(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_group_ubuntu(self):
- """Test ubuntu group exists"""
+ """Test ubuntu group exists."""
out = self.get_data_file('group_ubuntu')
self.assertRegex(out, r'ubuntu:x:[0-9]{4}:')
def test_group_cloud_users(self):
- """Test cloud users group exists"""
+ """Test cloud users group exists."""
out = self.get_data_file('group_cloud_users')
self.assertRegex(out, r'cloud-users:x:[0-9]{4}:barfoo')
def test_user_ubuntu(self):
- """Test ubuntu user exists"""
+ """Test ubuntu user exists."""
out = self.get_data_file('user_ubuntu')
self.assertRegex(
out, r'ubuntu:x:[0-9]{4}:[0-9]{4}:Ubuntu:/home/ubuntu:/bin/bash')
def test_user_foobar(self):
- """Test foobar user exists"""
+ """Test foobar user exists."""
out = self.get_data_file('user_foobar')
self.assertRegex(
out, r'foobar:x:[0-9]{4}:[0-9]{4}:Foo B. Bar:/home/foobar:')
def test_user_barfoo(self):
- """Test barfoo user exists"""
+ """Test barfoo user exists."""
out = self.get_data_file('user_barfoo')
self.assertRegex(
out, r'barfoo:x:[0-9]{4}:[0-9]{4}:Bar B. Foo:/home/barfoo:')
def test_user_cloudy(self):
- """Test cloudy user exists"""
+ """Test cloudy user exists."""
out = self.get_data_file('user_cloudy')
self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:')
diff --git a/tests/cloud_tests/testcases/modules/write_files.py b/tests/cloud_tests/testcases/modules/write_files.py
index 97dfeec3..7bd520f6 100644
--- a/tests/cloud_tests/testcases/modules/write_files.py
+++ b/tests/cloud_tests/testcases/modules/write_files.py
@@ -1,29 +1,29 @@
# This file is part of cloud-init. See LICENSE file for license information.
-"""cloud-init Integration Test Verify Script"""
+"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestWriteFiles(base.CloudTestCase):
- """Example cloud-config test"""
+ """Example cloud-config test."""
def test_b64(self):
- """Test b64 encoded file reads as ascii"""
+ """Test b64 encoded file reads as ascii."""
out = self.get_data_file('file_b64')
self.assertIn('ASCII text', out)
def test_binary(self):
- """Test binary file reads as executable"""
+ """Test binary file reads as executable."""
out = self.get_data_file('file_binary')
self.assertIn('ELF 64-bit LSB executable, x86-64, version 1', out)
def test_gzip(self):
- """Test gzip file shows up as a shell script"""
+ """Test gzip file shows up as a shell script."""
out = self.get_data_file('file_gzip')
self.assertIn('POSIX shell script, ASCII text executable', out)
def test_text(self):
- """Test text shows up as ASCII text"""
+ """Test text shows up as ASCII text."""
out = self.get_data_file('file_text')
self.assertIn('ASCII text', out)
diff --git a/tests/cloud_tests/util.py b/tests/cloud_tests/util.py
index 64a86672..2bbe21c7 100644
--- a/tests/cloud_tests/util.py
+++ b/tests/cloud_tests/util.py
@@ -1,28 +1,43 @@
# This file is part of cloud-init. See LICENSE file for license information.
+"""Utilities for re-use across integration tests."""
+
+import copy
import glob
import os
import random
+import shutil
import string
import tempfile
import yaml
-from cloudinit.distros import OSFAMILIES
from cloudinit import util as c_util
from tests.cloud_tests import LOG
+OS_FAMILY_MAPPING = {
+ 'debian': ['debian', 'ubuntu'],
+ 'redhat': ['centos', 'rhel', 'fedora'],
+ 'gentoo': ['gentoo'],
+ 'freebsd': ['freebsd'],
+ 'suse': ['sles'],
+ 'arch': ['arch'],
+}
+
def list_test_data(data_dir):
- """
- find all tests with test data available in data_dir
- data_dir should contain <platforms>/<os_name>/<testnames>/<data>
- return_value: {<platform>: {<os_name>: [<testname>]}}
+ """Find all tests with test data available in data_dir.
+
+ @param data_dir: should contain <platforms>/<os_name>/<testnames>/<data>
+ @return_value: {<platform>: {<os_name>: [<testname>]}}
"""
if not os.path.isdir(data_dir):
raise ValueError("bad data dir")
res = {}
for platform in os.listdir(data_dir):
+ if not os.path.isdir(os.path.join(data_dir, platform)):
+ continue
+
res[platform] = {}
for os_name in os.listdir(os.path.join(data_dir, platform)):
res[platform][os_name] = [
@@ -36,39 +51,33 @@ def list_test_data(data_dir):
def gen_instance_name(prefix='cloud-test', image_desc=None, use_desc=None,
max_len=63, delim='-', max_tries=16, used_list=None,
valid=string.ascii_lowercase + string.digits):
- """
- generate an unique name for a test instance
- prefix: name prefix, defaults to cloud-test, default should be left
- image_desc: short string with image desc, will be truncated to 16 chars
- use_desc: short string with usage desc, will be truncated to 30 chars
- max_len: maximum name length, defaults to 64 chars
- delim: delimiter to use between tokens
- max_tries: maximum tries to find a unique name before giving up
- used_list: already used names, or none to not check
- valid: string of valid characters for name
- return_value: valid, unused name, may raise StopIteration
+ """Generate an unique name for a test instance.
+
+    @param prefix: name prefix, defaults to cloud-test; should usually be kept
+ @param image_desc: short string (len <= 16) with image desc
+ @param use_desc: short string (len <= 30) with usage desc
+    @param max_len: maximum name length, defaults to 63 chars
+ @param delim: delimiter to use between tokens
+ @param max_tries: maximum tries to find a unique name before giving up
+ @param used_list: already used names, or none to not check
+ @param valid: string of valid characters for name
+ @return_value: valid, unused name, may raise StopIteration
"""
unknown = 'unknown'
def join(*args):
- """
- join args with delim
- """
+ """Join args with delim."""
return delim.join(args)
def fill(*args):
- """
- join name elems and fill rest with random data
- """
+ """Join name elems and fill rest with random data."""
name = join(*args)
num = max_len - len(name) - len(delim)
return join(name, ''.join(random.choice(valid) for _ in range(num)))
def clean(elem, max_len):
- """
- filter bad characters out of elem and trim to length
- """
- elem = elem[:max_len] if elem else unknown
+ """Filter bad characters out of elem and trim to length."""
+ elem = elem.lower()[:max_len] if elem else unknown
return ''.join(c if c in valid else delim for c in elem)
return next(name for name in
@@ -78,30 +87,39 @@ def gen_instance_name(prefix='cloud-test', image_desc=None, use_desc=None,
def sorted_unique(iterable, key=None, reverse=False):
- """
- return_value: a sorted list of unique items in iterable
+ """Create unique sorted list.
+
+ @param iterable: the data structure to sort
+    @param key: sort key function, passed through to sorted()
+    @param reverse: if True, sort in descending order
+ @return_value: a sorted list of unique items in iterable
"""
return sorted(set(iterable), key=key, reverse=reverse)
def get_os_family(os_name):
+ """Get os family type for os_name.
+
+ @param os_name: name of os
+ @return_value: family name for os_name
"""
- get os family type for os_name
- """
- return next((k for k, v in OSFAMILIES.items() if os_name in v), None)
+ return next((k for k, v in OS_FAMILY_MAPPING.items()
+ if os_name.lower() in v), None)
def current_verbosity():
- """
- get verbosity currently in effect from log level
- return_value: verbosity, 0-2, 2 = verbose, 0 = quiet
+ """Get verbosity currently in effect from log level.
+
+ @return_value: verbosity, 0-2, 2=verbose, 0=quiet
"""
return max(min(3 - int(LOG.level / 10), 2), 0)
def is_writable_dir(path):
- """
- make sure dir is writable
+ """Make sure dir is writable.
+
+ @param path: path to determine if writable
+ @return_value: boolean with result
"""
try:
c_util.ensure_dir(path)
@@ -112,9 +130,10 @@ def is_writable_dir(path):
def is_clean_writable_dir(path):
- """
- make sure dir is empty and writable, creating it if it does not exist
- return_value: True/False if successful
+ """Make sure dir is empty and writable, creating it if it does not exist.
+
+ @param path: path to check
+ @return_value: True/False if successful
"""
path = os.path.abspath(path)
if not (is_writable_dir(path) and len(os.listdir(path)) == 0):
@@ -123,29 +142,31 @@ def is_clean_writable_dir(path):
def configure_yaml():
+ """Clean yaml."""
yaml.add_representer(str, (lambda dumper, data: dumper.represent_scalar(
'tag:yaml.org,2002:str', data, style='|' if '\n' in data else '')))
-def yaml_format(data):
- """
- format data as yaml
+def yaml_format(data, content_type=None):
+ """Format data as yaml.
+
+ @param data: data to dump
+    @param content_type: if specified, add a header to the dumped data
+ @return_value: yaml string
"""
configure_yaml()
- return yaml.dump(data, indent=2, default_flow_style=False)
+ content_type = (
+ '#{}\n'.format(content_type.strip('#\n')) if content_type else '')
+ return content_type + yaml.dump(data, indent=2, default_flow_style=False)
def yaml_dump(data, path):
- """
- dump data to path in yaml format
- """
- write_file(os.path.abspath(path), yaml_format(data), omode='w')
+ """Dump data to path in yaml format."""
+ c_util.write_file(os.path.abspath(path), yaml_format(data), omode='w')
def merge_results(data, path):
- """
- handle merging results from collect phase and verify phase
- """
+ """Handle merging results from collect phase and verify phase."""
current = {}
if os.path.exists(path):
with open(path, 'r') as fp:
@@ -154,10 +175,118 @@ def merge_results(data, path):
yaml_dump(current, path)
-def write_file(*args, **kwargs):
+def rel_files(basedir):
+ """List of files under directory by relative path, not including dirs.
+
+ @param basedir: directory to search
+    @return_value: list of relative paths
+ """
+ basedir = os.path.normpath(basedir)
+ return [path[len(basedir) + 1:] for path in
+ glob.glob(os.path.join(basedir, '**'), recursive=True)
+ if not os.path.isdir(path)]
+
+
+def flat_tar(output, basedir, owner='root', group='root'):
+ """Create a flat tar archive (no leading ./) from basedir.
+
+ @param output: output tar file to write
+ @param basedir: base directory for archive
+ @param owner: owner of archive files
+ @param group: group archive files belong to
+ @return_value: none
+ """
+ c_util.subp(['tar', 'cf', output, '--owner', owner, '--group', group,
+ '-C', basedir] + rel_files(basedir), capture=True)
+
+
+def parse_conf_list(entries, valid=None, boolean=False):
+ """Parse config in a list of strings in key=value format.
+
+ @param entries: list of key=value strings
+ @param valid: list of valid keys in result, return None if invalid input
+ @param boolean: if true, then interpret all values as booleans
+ @return_value: dict of configuration or None if invalid
"""
- write a file using cloudinit.util.write_file
+ res = {key: value.lower() == 'true' if boolean else value
+ for key, value in (i.split('=') for i in entries)}
+ return res if not valid or all(k in valid for k in res.keys()) else None
+
+
+def update_args(args, updates, preserve_old=True):
+ """Update cmdline arguments from a dictionary.
+
+ @param args: cmdline arguments
+ @param updates: dictionary of {arg_name: new_value} mappings
+ @param preserve_old: if true, create a deep copy of args before updating
+ @return_value: updated cmdline arguments
+ """
+ args = copy.deepcopy(args) if preserve_old else args
+ if updates:
+ vars(args).update(updates)
+ return args
+
+
+def update_user_data(user_data, updates, dump_to_yaml=True):
+ """Update user_data from dictionary.
+
+ @param user_data: user data as yaml string or dict
+ @param updates: dictionary to merge with user data
+ @param dump_to_yaml: return as yaml dumped string if true
+ @return_value: updated user data, as yaml string if dump_to_yaml is true
"""
- c_util.write_file(*args, **kwargs)
+ user_data = (c_util.load_yaml(user_data)
+ if isinstance(user_data, str) else copy.deepcopy(user_data))
+ user_data.update(updates)
+ return (yaml_format(user_data, content_type='cloud-config')
+ if dump_to_yaml else user_data)
+
+
+class InTargetExecuteError(c_util.ProcessExecutionError):
+ """Error type for in target commands that fail."""
+
+ default_desc = 'Unexpected error while running command in target instance'
+
+ def __init__(self, stdout, stderr, exit_code, cmd, instance,
+ description=None):
+ """Init error and parent error class."""
+ if isinstance(cmd, (tuple, list)):
+ cmd = ' '.join(cmd)
+ super(InTargetExecuteError, self).__init__(
+ stdout=stdout, stderr=stderr, exit_code=exit_code, cmd=cmd,
+ reason="Instance: {}".format(instance),
+ description=description if description else self.default_desc)
+
+
+class TempDir(object):
+ """Configurable temporary directory like tempfile.TemporaryDirectory."""
+
+ def __init__(self, tmpdir=None, preserve=False, prefix='cloud_test_data_'):
+ """Initialize.
+
+ @param tmpdir: directory to use as tempdir
+ @param preserve: if true, always preserve data on exit
+ @param prefix: prefix to use for tempfile name
+ """
+ self.tmpdir = tmpdir
+ self.preserve = preserve
+ self.prefix = prefix
+
+ def __enter__(self):
+ """Create tempdir.
+
+ @return_value: tempdir path
+ """
+ if not self.tmpdir:
+ self.tmpdir = tempfile.mkdtemp(prefix=self.prefix)
+ LOG.debug('using tmpdir: %s', self.tmpdir)
+ return self.tmpdir
+
+ def __exit__(self, etype, value, trace):
+ """Destroy tempdir if no errors occurred."""
+ if etype or self.preserve:
+ LOG.info('leaving data in %s', self.tmpdir)
+ else:
+ shutil.rmtree(self.tmpdir)
# vi: ts=4 expandtab
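Since tests/cloud_tests/util.py gains several helpers in one go, here is a brief usage sketch. It is not part of the patch; it only assumes the module imports as shown in the hunk above, and the config values and file names are made up.

import os

from tests.cloud_tests import util

# key=value strings from the command line become a config dict
overrides = util.parse_conf_list(['enabled=true', 'cache=false'], boolean=True)
# -> {'enabled': True, 'cache': False}

# merge extra settings into user data, returned as '#cloud-config' yaml
user_data = util.update_user_data({'runcmd': ['true']},
                                  {'package_update': True})

# unique instance name and os family lookup
name = util.gen_instance_name(image_desc='xenial', use_desc='modules')
family = util.get_os_family('ubuntu')  # -> 'debian'

# throw-away working directory, removed unless an error occurs or
# preserve=True is passed
with util.TempDir() as tmpdir:
    util.yaml_dump({'status': 'collected'}, os.path.join(tmpdir, 'res.yaml'))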
diff --git a/tests/cloud_tests/verify.py b/tests/cloud_tests/verify.py
index 2a63550e..fc1efcfc 100644
--- a/tests/cloud_tests/verify.py
+++ b/tests/cloud_tests/verify.py
@@ -1,18 +1,19 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from tests.cloud_tests import (config, LOG, util, testcases)
+"""Verify test results."""
import os
import unittest
+from tests.cloud_tests import (config, LOG, util, testcases)
+
def verify_data(base_dir, tests):
- """
- verify test data is correct,
- base_dir: base directory for data
- test_config: dict of all test config, from util.load_test_config()
- tests: list of test names
- return_value: {<test_name>: {passed: True/False, failures: []}}
+ """Verify test data is correct.
+
+ @param base_dir: base directory for data
+ @param tests: list of test names
+ @return_value: {<test_name>: {passed: True/False, failures: []}}
"""
runner = unittest.TextTestRunner(verbosity=util.current_verbosity())
res = {}
@@ -53,9 +54,10 @@ def verify_data(base_dir, tests):
def verify(args):
- """
- verify test data
- return_value: 0 for success, or number of failed tests
+ """Verify test data.
+
+    @param args: cmdline arguments, including the directory of test data
+ @return_value: 0 for success, or number of failed tests
"""
failed = 0
res = {}
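For reference, the shape verify_data() returns, and verify() folds into a failure count, looks roughly like the sketch below. Only the outer {test: {passed, failures}} layout comes from the docstring; the test names and the fields inside each failure record are illustrative.

example_res = {
    'modules/runcmd': {'passed': True, 'failures': []},
    'modules/ntp': {'passed': False,
                    'failures': [{'error': 'AssertionError: ...'}]},
}
# verify() reports roughly this: zero on success, otherwise a failure count
failed = sum(len(item['failures']) for item in example_res.values())
assert failed == 1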
diff --git a/tests/unittests/helpers.py b/tests/unittests/helpers.py
index 9ff15993..08c5c469 100644
--- a/tests/unittests/helpers.py
+++ b/tests/unittests/helpers.py
@@ -19,10 +19,6 @@ try:
from contextlib import ExitStack
except ImportError:
from contextlib2 import ExitStack
-try:
- from cStringIO import StringIO
-except ImportError:
- from io import StringIO
from cloudinit import helpers as ch
from cloudinit import util
@@ -86,7 +82,26 @@ def retarget_many_wrapper(new_base, am, old_func):
class TestCase(unittest2.TestCase):
- pass
+ def reset_global_state(self):
+ """Reset any global state to its original settings.
+
+ cloudinit caches some values in cloudinit.util. Unit tests that
+ involved those cached paths were then subject to failure if the order
+ of invocation changed (LP: #1703697).
+
+ This function resets any of these global state variables to their
+ initial state.
+
+ In the future this should really be done with some registry that
+ can then be cleaned in a more obvious way.
+ """
+ util.PROC_CMDLINE = None
+ util._DNS_REDIRECT_IP = None
+ util._LSB_RELEASE = {}
+
+ def setUp(self):
+        super(TestCase, self).setUp()
+ self.reset_global_state()
class CiTestCase(TestCase):
@@ -101,11 +116,13 @@ class CiTestCase(TestCase):
super(CiTestCase, self).setUp()
if self.with_logs:
# Create a log handler so unit tests can search expected logs.
- logger = logging.getLogger()
- self.logs = StringIO()
+ self.logger = logging.getLogger()
+ self.logs = six.StringIO()
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
handler = logging.StreamHandler(self.logs)
- self.old_handlers = logger.handlers
- logger.handlers = [handler]
+ handler.setFormatter(formatter)
+ self.old_handlers = self.logger.handlers
+ self.logger.handlers = [handler]
def tearDown(self):
if self.with_logs:
@@ -359,4 +376,16 @@ except AttributeError:
return wrapper
return decorator
+
+# older versions of mock do not have the useful 'assert_not_called'
+if not hasattr(mock.Mock, 'assert_not_called'):
+ def __mock_assert_not_called(mmock):
+ if mmock.call_count != 0:
+ msg = ("[citest] Expected '%s' to not have been called. "
+ "Called %s times." %
+ (mmock._mock_name or 'mock', mmock.call_count))
+ raise AssertionError(msg)
+ mock.Mock.assert_not_called = __mock_assert_not_called
+
+
# vi: ts=4 expandtab
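A minimal sketch of how a unit test might lean on the CiTestCase changes above: with_logs captures formatted log output in self.logs, and assert_not_called works even on older mock versions thanks to the backport. The class name and the absolute import path are assumptions for illustration, not part of the patch.

import logging

import mock

from tests.unittests import helpers


class ExampleLogTest(helpers.CiTestCase):
    """Hypothetical test using the log capture added above."""

    with_logs = True

    def test_warning_captured_and_callback_unused(self):
        callback = mock.Mock()
        logging.getLogger().warning('something odd happened')
        self.assertIn('WARNING: something odd happened',
                      self.logs.getvalue())
        callback.assert_not_called()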
diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py
index c16d1a6e..990bff2c 100644
--- a/tests/unittests/test_datasource/test_aliyun.py
+++ b/tests/unittests/test_datasource/test_aliyun.py
@@ -2,6 +2,7 @@
import functools
import httpretty
+import mock
import os
from .. import helpers as test_helpers
@@ -111,15 +112,29 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase):
self.assertEqual(self.default_metadata['hostname'],
self.ds.get_hostname())
+ @mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun")
@httpretty.activate
- def test_with_mock_server(self):
+ def test_with_mock_server(self, m_is_aliyun):
+ m_is_aliyun.return_value = True
self.regist_default_server()
- self.ds.get_data()
+ ret = self.ds.get_data()
+ self.assertEqual(True, ret)
+ self.assertEqual(1, m_is_aliyun.call_count)
self._test_get_data()
self._test_get_sshkey()
self._test_get_iid()
self._test_host_name()
+ @mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun")
+ @httpretty.activate
+ def test_returns_false_when_not_on_aliyun(self, m_is_aliyun):
+ """If is_aliyun returns false, then get_data should return False."""
+ m_is_aliyun.return_value = False
+ self.regist_default_server()
+ ret = self.ds.get_data()
+ self.assertEqual(1, m_is_aliyun.call_count)
+ self.assertEqual(False, ret)
+
def test_parse_public_keys(self):
public_keys = {}
self.assertEqual(ay.parse_public_keys(public_keys), [])
@@ -149,4 +164,36 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase):
self.assertEqual(ay.parse_public_keys(public_keys),
public_keys['key-pair-0']['openssh-key'])
+
+class TestIsAliYun(test_helpers.CiTestCase):
+ ALIYUN_PRODUCT = 'Alibaba Cloud ECS'
+ read_dmi_data_expected = [mock.call('system-product-name')]
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_true_on_aliyun_product(self, m_read_dmi_data):
+ """Should return true if the dmi product data has expected value."""
+ m_read_dmi_data.return_value = self.ALIYUN_PRODUCT
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(True, ret)
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_false_on_empty_string(self, m_read_dmi_data):
+ """Should return false on empty value returned."""
+ m_read_dmi_data.return_value = ""
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(False, ret)
+
+ @mock.patch("cloudinit.sources.DataSourceAliYun.util.read_dmi_data")
+ def test_false_on_unknown_string(self, m_read_dmi_data):
+ """Should return false on an unrelated string."""
+ m_read_dmi_data.return_value = "cubs win"
+ ret = ay._is_aliyun()
+ self.assertEqual(self.read_dmi_data_expected,
+ m_read_dmi_data.call_args_list)
+ self.assertEqual(False, ret)
+
# vi: ts=4 expandtab
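A hedged sketch of the detection logic the new TestIsAliYun cases appear to exercise: _is_aliyun() is expected to read the DMI system-product-name and compare it against the Alibaba Cloud ECS marker. This is inferred from the assertions above, not the datasource's actual code.

from cloudinit import util

ALIYUN_PRODUCT = 'Alibaba Cloud ECS'


def _is_aliyun_sketch():
    # read_dmi_data returns the DMI value, or None when it is unavailable
    return util.read_dmi_data('system-product-name') == ALIYUN_PRODUCT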
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 852ec703..20e70fb7 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -76,7 +76,9 @@ def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None):
return content
-class TestAzureDataSource(TestCase):
+class TestAzureDataSource(CiTestCase):
+
+ with_logs = True
def setUp(self):
super(TestAzureDataSource, self).setUp()
@@ -160,6 +162,12 @@ scbus-1 on xpt0 bus 0
self.instance_id = 'test-instance-id'
+ def _dmi_mocks(key):
+ if key == 'system-uuid':
+ return self.instance_id
+ elif key == 'chassis-asset-tag':
+ return '7783-7084-3265-9085-8269-3286-77'
+
self.apply_patches([
(dsaz, 'list_possible_azure_ds_devs', dsdevs),
(dsaz, 'invoke_agent', _invoke_agent),
@@ -170,16 +178,22 @@ scbus-1 on xpt0 bus 0
(dsaz, 'set_hostname', mock.MagicMock()),
(dsaz, 'get_metadata_from_fabric', self.get_metadata_from_fabric),
(dsaz.util, 'read_dmi_data', mock.MagicMock(
- return_value=self.instance_id)),
+ side_effect=_dmi_mocks)),
])
- dsrc = dsaz.DataSourceAzureNet(
+ dsrc = dsaz.DataSourceAzure(
data.get('sys_cfg', {}), distro=None, paths=self.paths)
if agent_command is not None:
dsrc.ds_cfg['agent_command'] = agent_command
return dsrc
+ def _get_and_setup(self, dsrc):
+ ret = dsrc.get_data()
+ if ret:
+ dsrc.setup(True)
+ return ret
+
def xml_equals(self, oxml, nxml):
"""Compare two sets of XML to make sure they are equal"""
@@ -241,6 +255,24 @@ fdescfs /dev/fd fdescfs rw 0 0
res = get_path_dev_freebsd('/etc', mnt_list)
self.assertIsNotNone(res)
+ @mock.patch('cloudinit.sources.DataSourceAzure.util.read_dmi_data')
+ def test_non_azure_dmi_chassis_asset_tag(self, m_read_dmi_data):
+ """Report non-azure when DMI's chassis asset tag doesn't match.
+
+ Return False when the asset tag doesn't match Azure's static
+ AZURE_CHASSIS_ASSET_TAG.
+ """
+ # Return a non-matching asset tag value
+ nonazure_tag = dsaz.AZURE_CHASSIS_ASSET_TAG + 'X'
+ m_read_dmi_data.return_value = nonazure_tag
+ dsrc = dsaz.DataSourceAzure(
+ {}, distro=None, paths=self.paths)
+ self.assertFalse(dsrc.get_data())
+ self.assertEqual(
+ "DEBUG: Non-Azure DMI asset tag '{0}' discovered.\n".format(
+ nonazure_tag),
+ self.logs.getvalue())
+
def test_basic_seed_dir(self):
odata = {'HostName': "myhost", 'UserName': "myuser"}
data = {'ovfcontent': construct_valid_ovf_env(data=odata),
@@ -273,7 +305,7 @@ fdescfs /dev/fd fdescfs rw 0 0
data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], cfg['agent_command'])
@@ -286,7 +318,7 @@ fdescfs /dev/fd fdescfs rw 0 0
data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], cfg['agent_command'])
@@ -296,7 +328,7 @@ fdescfs /dev/fd fdescfs rw 0 0
'sys_cfg': sys_cfg}
dsrc = self._get_ds(data)
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
self.assertEqual(data['agent_invoked'], '_COMMAND')
@@ -368,7 +400,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
self.assertIn(mypk, dsrc.cfg['_pubkeys'])
@@ -383,7 +415,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
@@ -399,7 +431,7 @@ fdescfs /dev/fd fdescfs rw 0 0
pubkeys=pubkeys)}
dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
- ret = dsrc.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
for mypk in mypklist:
@@ -493,18 +525,20 @@ fdescfs /dev/fd fdescfs rw 0 0
dsrc.get_data()
def test_exception_fetching_fabric_data_doesnt_propagate(self):
- ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
- ds.ds_cfg['agent_command'] = '__builtin__'
+ """Errors communicating with fabric should warn, but return True."""
+ dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ dsrc.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.side_effect = Exception
- self.assertFalse(ds.get_data())
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
def test_fabric_data_included_in_metadata(self):
- ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
- ds.ds_cfg['agent_command'] = '__builtin__'
+ dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ dsrc.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.return_value = {'test': 'value'}
- ret = ds.get_data()
+ ret = self._get_and_setup(dsrc)
self.assertTrue(ret)
- self.assertEqual('value', ds.metadata['test'])
+ self.assertEqual('value', dsrc.metadata['test'])
def test_instance_id_from_dmidecode_used(self):
ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
@@ -517,6 +551,95 @@ fdescfs /dev/fd fdescfs rw 0 0
ds.get_data()
self.assertEqual(self.instance_id, ds.metadata['instance-id'])
+ @mock.patch("cloudinit.sources.DataSourceAzure.util.is_FreeBSD")
+ @mock.patch("cloudinit.sources.DataSourceAzure._check_freebsd_cdrom")
+ def test_list_possible_azure_ds_devs(self, m_check_fbsd_cdrom,
+ m_is_FreeBSD):
+ """On FreeBSD, possible devs should show /dev/cd0."""
+ m_is_FreeBSD.return_value = True
+ m_check_fbsd_cdrom.return_value = True
+ self.assertEqual(dsaz.list_possible_azure_ds_devs(), ['/dev/cd0'])
+ self.assertEqual(
+ [mock.call("/dev/cd0")], m_check_fbsd_cdrom.call_args_list)
+
+ @mock.patch('cloudinit.net.get_interface_mac')
+ @mock.patch('cloudinit.net.get_devicelist')
+ @mock.patch('cloudinit.net.device_driver')
+ @mock.patch('cloudinit.net.generate_fallback_config')
+ def test_network_config(self, mock_fallback, mock_dd,
+ mock_devlist, mock_get_mac):
+ odata = {'HostName': "myhost", 'UserName': "myuser"}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+ 'sys_cfg': {}}
+
+ fallback_config = {
+ 'version': 1,
+ 'config': [{
+ 'type': 'physical', 'name': 'eth0',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'hv_netsvc'},
+ 'subnets': [{'type': 'dhcp'}],
+ }]
+ }
+ mock_fallback.return_value = fallback_config
+
+ mock_devlist.return_value = ['eth0']
+ mock_dd.return_value = ['hv_netsvc']
+ mock_get_mac.return_value = '00:11:22:33:44:55'
+
+ dsrc = self._get_ds(data)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+
+ netconfig = dsrc.network_config
+ self.assertEqual(netconfig, fallback_config)
+ mock_fallback.assert_called_with(blacklist_drivers=['mlx4_core'],
+ config_driver=True)
+
+ @mock.patch('cloudinit.net.get_interface_mac')
+ @mock.patch('cloudinit.net.get_devicelist')
+ @mock.patch('cloudinit.net.device_driver')
+ @mock.patch('cloudinit.net.generate_fallback_config')
+ def test_network_config_blacklist(self, mock_fallback, mock_dd,
+ mock_devlist, mock_get_mac):
+ odata = {'HostName': "myhost", 'UserName': "myuser"}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+ 'sys_cfg': {}}
+
+ fallback_config = {
+ 'version': 1,
+ 'config': [{
+ 'type': 'physical', 'name': 'eth0',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'hv_netsvc'},
+ 'subnets': [{'type': 'dhcp'}],
+ }]
+ }
+ blacklist_config = {
+ 'type': 'physical',
+ 'name': 'eth1',
+ 'mac_address': '00:11:22:33:44:55',
+ 'params': {'driver': 'mlx4_core'}
+ }
+ mock_fallback.return_value = fallback_config
+
+ mock_devlist.return_value = ['eth0', 'eth1']
+ mock_dd.side_effect = [
+ 'hv_netsvc', # list composition, skipped
+ 'mlx4_core', # list composition, match
+ 'mlx4_core', # config get driver name
+ ]
+ mock_get_mac.return_value = '00:11:22:33:44:55'
+
+ dsrc = self._get_ds(data)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+
+ netconfig = dsrc.network_config
+ expected_config = fallback_config
+ expected_config['config'].append(blacklist_config)
+ self.assertEqual(netconfig, expected_config)
+
class TestAzureBounce(TestCase):
@@ -531,9 +654,17 @@ class TestAzureBounce(TestCase):
self.patches.enter_context(
mock.patch.object(dsaz, 'get_metadata_from_fabric',
mock.MagicMock(return_value={})))
+
+ def _dmi_mocks(key):
+ if key == 'system-uuid':
+ return 'test-instance-id'
+ elif key == 'chassis-asset-tag':
+ return '7783-7084-3265-9085-8269-3286-77'
+ raise RuntimeError('should not get here')
+
self.patches.enter_context(
mock.patch.object(dsaz.util, 'read_dmi_data',
- mock.MagicMock(return_value='test-instance-id')))
+ mock.MagicMock(side_effect=_dmi_mocks)))
def setUp(self):
super(TestAzureBounce, self).setUp()
@@ -558,12 +689,18 @@ class TestAzureBounce(TestCase):
if ovfcontent is not None:
populate_dir(os.path.join(self.paths.seed_dir, "azure"),
{'ovf-env.xml': ovfcontent})
- dsrc = dsaz.DataSourceAzureNet(
+ dsrc = dsaz.DataSourceAzure(
{}, distro=None, paths=self.paths)
if agent_command is not None:
dsrc.ds_cfg['agent_command'] = agent_command
return dsrc
+ def _get_and_setup(self, dsrc):
+ ret = dsrc.get_data()
+ if ret:
+ dsrc.setup(True)
+ return ret
+
def get_ovf_env_with_dscfg(self, hostname, cfg):
odata = {
'HostName': hostname,
@@ -607,17 +744,20 @@ class TestAzureBounce(TestCase):
host_name = 'unchanged-host-name'
self.get_hostname.return_value = host_name
cfg = {'hostname_bounce': {'policy': 'force'}}
- self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg),
- agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg),
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, perform_hostname_bounce.call_count)
def test_different_hostnames_sets_hostname(self):
expected_hostname = 'azure-expected-host-name'
self.get_hostname.return_value = 'default-host-name'
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg(expected_hostname, {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(expected_hostname,
self.set_hostname.call_args_list[0][0][0])
@@ -626,19 +766,21 @@ class TestAzureBounce(TestCase):
self, perform_hostname_bounce):
expected_hostname = 'azure-expected-host-name'
self.get_hostname.return_value = 'default-host-name'
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg(expected_hostname, {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, perform_hostname_bounce.call_count)
def test_different_hostnames_sets_hostname_back(self):
initial_host_name = 'default-host-name'
self.get_hostname.return_value = initial_host_name
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg('some-host-name', {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(initial_host_name,
self.set_hostname.call_args_list[-1][0][0])
@@ -648,10 +790,11 @@ class TestAzureBounce(TestCase):
perform_hostname_bounce.side_effect = Exception
initial_host_name = 'default-host-name'
self.get_hostname.return_value = initial_host_name
- self._get_ds(
+ dsrc = self._get_ds(
self.get_ovf_env_with_dscfg('some-host-name', {}),
- agent_command=['not', '__builtin__'],
- ).get_data()
+ agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(initial_host_name,
self.set_hostname.call_args_list[-1][0][0])
@@ -662,7 +805,9 @@ class TestAzureBounce(TestCase):
self.get_hostname.return_value = old_hostname
cfg = {'hostname_bounce': {'interface': interface, 'policy': 'force'}}
data = self.get_ovf_env_with_dscfg(hostname, cfg)
- self._get_ds(data, agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, self.subp.call_count)
bounce_env = self.subp.call_args[1]['env']
self.assertEqual(interface, bounce_env['interface'])
@@ -674,7 +819,9 @@ class TestAzureBounce(TestCase):
dsaz.BUILTIN_DS_CONFIG['hostname_bounce']['command'] = cmd
cfg = {'hostname_bounce': {'policy': 'force'}}
data = self.get_ovf_env_with_dscfg('some-hostname', cfg)
- self._get_ds(data, agent_command=['not', '__builtin__']).get_data()
+ dsrc = self._get_ds(data, agent_command=['not', '__builtin__'])
+ ret = self._get_and_setup(dsrc)
+ self.assertTrue(ret)
self.assertEqual(1, self.subp.call_count)
bounce_args = self.subp.call_args[1]['args']
self.assertEqual(cmd, bounce_args)
@@ -696,6 +843,33 @@ class TestAzureBounce(TestCase):
self.assertEqual(0, self.set_hostname.call_count)
+class TestLoadAzureDsDir(CiTestCase):
+ """Tests for load_azure_ds_dir."""
+
+ def setUp(self):
+ self.source_dir = self.tmp_dir()
+ super(TestLoadAzureDsDir, self).setUp()
+
+ def test_missing_ovf_env_xml_raises_non_azure_datasource_error(self):
+ """load_azure_ds_dir raises an error When ovf-env.xml doesn't exit."""
+ with self.assertRaises(dsaz.NonAzureDataSource) as context_manager:
+ dsaz.load_azure_ds_dir(self.source_dir)
+ self.assertEqual(
+ 'No ovf-env file found',
+ str(context_manager.exception))
+
+ def test_wb_invalid_ovf_env_xml_calls_read_azure_ovf(self):
+ """load_azure_ds_dir calls read_azure_ovf to parse the xml."""
+ ovf_path = os.path.join(self.source_dir, 'ovf-env.xml')
+ with open(ovf_path, 'wb') as stream:
+ stream.write(b'invalid xml')
+ with self.assertRaises(dsaz.BrokenAzureDataSource) as context_manager:
+ dsaz.load_azure_ds_dir(self.source_dir)
+ self.assertEqual(
+ 'Invalid ovf-env.xml: syntax error: line 1, column 0',
+ str(context_manager.exception))
+
+
class TestReadAzureOvf(TestCase):
def test_invalid_xml_raises_non_azure_ds(self):
invalid_xml = "<foo>" + construct_valid_ovf_env(data={})
@@ -903,4 +1077,12 @@ class TestCanDevBeReformatted(CiTestCase):
self.assertEqual(False, value)
self.assertIn("3 or more", msg.lower())
+
+class TestAzureNetExists(CiTestCase):
+ def test_azure_net_must_exist_for_legacy_objpkl(self):
+ """DataSourceAzureNet must exist for old obj.pkl files
+ that reference it."""
+ self.assertTrue(hasattr(dsaz, "DataSourceAzureNet"))
+
+
# vi: ts=4 expandtab
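Many of the Azure test changes above follow one pattern: get_data() is now only discovery and metadata crawling, and the fabric interaction is driven by a separate setup() call, which the _get_and_setup helper mirrors. A hedged illustration of that two-phase flow; the function and parameter names are assumptions based on the tests calling dsrc.setup(True), not the datasource's real API surface.

def run_azure_datasource(dsrc, is_new_instance=True):
    # discovery/crawl only; False is taken to mean "not an Azure instance"
    if not dsrc.get_data():
        return False
    # side effects such as fabric negotiation presumably happen here
    dsrc.setup(is_new_instance)
    return True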
diff --git a/tests/unittests/test_datasource/test_common.py b/tests/unittests/test_datasource/test_common.py
index c08717f3..413e87ac 100644
--- a/tests/unittests/test_datasource/test_common.py
+++ b/tests/unittests/test_datasource/test_common.py
@@ -19,6 +19,7 @@ from cloudinit.sources import (
DataSourceOpenNebula as OpenNebula,
DataSourceOpenStack as OpenStack,
DataSourceOVF as OVF,
+ DataSourceScaleway as Scaleway,
DataSourceSmartOS as SmartOS,
)
from cloudinit.sources import DataSourceNone as DSNone
@@ -26,6 +27,7 @@ from cloudinit.sources import DataSourceNone as DSNone
from .. import helpers as test_helpers
DEFAULT_LOCAL = [
+ Azure.DataSourceAzure,
CloudSigma.DataSourceCloudSigma,
ConfigDrive.DataSourceConfigDrive,
DigitalOcean.DataSourceDigitalOcean,
@@ -36,8 +38,8 @@ DEFAULT_LOCAL = [
]
DEFAULT_NETWORK = [
+ AliYun.DataSourceAliYun,
AltCloud.DataSourceAltCloud,
- Azure.DataSourceAzureNet,
Bigstep.DataSourceBigstep,
CloudStack.DataSourceCloudStack,
DSNone.DataSourceNone,
@@ -47,6 +49,7 @@ DEFAULT_NETWORK = [
NoCloud.DataSourceNoCloudNet,
OpenStack.DataSourceOpenStack,
OVF.DataSourceOVFNet,
+ Scaleway.DataSourceScaleway,
]
diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py
new file mode 100644
index 00000000..12230ae2
--- /dev/null
+++ b/tests/unittests/test_datasource/test_ec2.py
@@ -0,0 +1,202 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import httpretty
+import mock
+
+from .. import helpers as test_helpers
+from cloudinit import helpers
+from cloudinit.sources import DataSourceEc2 as ec2
+
+
+# collected from api version 2009-04-04/ with
+# python3 -c 'import json
+# from cloudinit.ec2_utils import get_instance_metadata as gm
+# print(json.dumps(gm("2009-04-04"), indent=1, sort_keys=True))'
+DEFAULT_METADATA = {
+ "ami-id": "ami-80861296",
+ "ami-launch-index": "0",
+ "ami-manifest-path": "(unknown)",
+ "block-device-mapping": {"ami": "/dev/sda1", "root": "/dev/sda1"},
+ "hostname": "ip-10-0-0-149",
+ "instance-action": "none",
+ "instance-id": "i-0052913950685138c",
+ "instance-type": "t2.micro",
+ "local-hostname": "ip-10-0-0-149",
+ "local-ipv4": "10.0.0.149",
+ "placement": {"availability-zone": "us-east-1b"},
+ "profile": "default-hvm",
+ "public-hostname": "",
+ "public-ipv4": "107.23.188.247",
+ "public-keys": {"brickies": ["ssh-rsa AAAAB3Nz....w== brickies"]},
+ "reservation-id": "r-00a2c173fb5782a08",
+ "security-groups": "wide-open"
+}
+
+
+def _register_ssh_keys(rfunc, base_url, keys_data):
+ """handle ssh key inconsistencies.
+
+ public-keys in the ec2 metadata is inconsistently formatted compared
+ to other entries.
+ Given keys_data of {name1: pubkey1, name2: pubkey2}
+
+ This registers the following urls:
+ base_url 0={name1}\n1={name2} # (for each name)
+ base_url/ 0={name1}\n1={name2} # (for each name)
+ base_url/0 openssh-key
+ base_url/0/ openssh-key
+ base_url/0/openssh-key {pubkey1}
+ base_url/0/openssh-key/ {pubkey1}
+ ...
+ """
+
+ base_url = base_url.rstrip("/")
+ odd_index = '\n'.join(
+ ["{0}={1}".format(n, name)
+ for n, name in enumerate(sorted(keys_data))])
+
+ rfunc(base_url, odd_index)
+ rfunc(base_url + "/", odd_index)
+
+ for n, name in enumerate(sorted(keys_data)):
+ val = keys_data[name]
+ if isinstance(val, list):
+ val = '\n'.join(val)
+ burl = base_url + "/%s" % n
+ rfunc(burl, "openssh-key")
+ rfunc(burl + "/", "openssh-key")
+ rfunc(burl + "/%s/openssh-key" % name, val)
+ rfunc(burl + "/%s/openssh-key/" % name, val)
+
+
+def register_mock_metaserver(base_url, data):
+ """Register with httpretty a ec2 metadata like service serving 'data'.
+
+ If given a dictionary, it will populate urls under base_url for
+ that dictionary. For example, input of
+ {"instance-id": "i-abc", "mac": "00:16:3e:00:00:00"}
+ populates
+ base_url with 'instance-id\nmac'
+ base_url/ with 'instance-id\nmac'
+ base_url/instance-id with i-abc
+ base_url/mac with 00:16:3e:00:00:00
+ In the index, references to lists or dictionaries have a trailing /.
+ """
+ def register_helper(register, base_url, body):
+ base_url = base_url.rstrip("/")
+ if isinstance(body, str):
+ register(base_url, body)
+ elif isinstance(body, list):
+ register(base_url, '\n'.join(body) + '\n')
+ register(base_url + '/', '\n'.join(body) + '\n')
+ elif isinstance(body, dict):
+ vals = []
+ for k, v in body.items():
+ if k == 'public-keys':
+ _register_ssh_keys(
+ register, base_url + '/public-keys/', v)
+ continue
+ suffix = k.rstrip("/")
+ if not isinstance(v, (str, list)):
+ suffix += "/"
+ vals.append(suffix)
+ url = base_url + '/' + suffix
+ register_helper(register, url, v)
+ register(base_url, '\n'.join(vals) + '\n')
+ register(base_url + '/', '\n'.join(vals) + '\n')
+ elif body is None:
+ register(base_url, 'not found', status_code=404)
+
+ def myreg(*argc, **kwargs):
+ # print("register_url(%s, %s)" % (argc, kwargs))
+ return httpretty.register_uri(httpretty.GET, *argc, **kwargs)
+
+ register_helper(myreg, base_url, data)
+
+
+class TestEc2(test_helpers.HttprettyTestCase):
+ valid_platform_data = {
+ 'uuid': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412',
+ 'uuid_source': 'dmi',
+ 'serial': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412',
+ }
+
+ def setUp(self):
+ super(TestEc2, self).setUp()
+ self.metadata_addr = ec2.DataSourceEc2.metadata_urls[0]
+ self.api_ver = '2009-04-04'
+
+ @property
+ def metadata_url(self):
+ return '/'.join([self.metadata_addr, self.api_ver, 'meta-data', ''])
+
+ @property
+ def userdata_url(self):
+ return '/'.join([self.metadata_addr, self.api_ver, 'user-data'])
+
+ def _patch_add_cleanup(self, mpath, *args, **kwargs):
+ p = mock.patch(mpath, *args, **kwargs)
+ p.start()
+ self.addCleanup(p.stop)
+
+ def _setup_ds(self, sys_cfg, platform_data, md, ud=None):
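+ # Build an Ec2 datasource with platform detection and the metadata
+ # service mocked out as requested by the caller.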
+ distro = {}
+ paths = helpers.Paths({})
+ if sys_cfg is None:
+ sys_cfg = {}
+ ds = ec2.DataSourceEc2(sys_cfg=sys_cfg, distro=distro, paths=paths)
+ if platform_data is not None:
+ self._patch_add_cleanup(
+ "cloudinit.sources.DataSourceEc2._collect_platform_data",
+ return_value=platform_data)
+
+ if md:
+ register_mock_metaserver(self.metadata_url, md)
+ register_mock_metaserver(self.userdata_url, ud)
+
+ return ds
+
+ @httpretty.activate
+ def test_valid_platform_with_strict_true(self):
+ """Valid platform data should return true with strict_id true."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+ @httpretty.activate
+ def test_valid_platform_with_strict_false(self):
+ """Valid platform data should return true with strict_id false."""
+ ds = self._setup_ds(
+ platform_data=self.valid_platform_data,
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+ @httpretty.activate
+ def test_unknown_platform_with_strict_true(self):
+ """Unknown platform data with strict_id true should return False."""
+ uuid = 'ab439480-72bf-11d3-91fc-b8aded755F9a'
+ ds = self._setup_ds(
+ platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
+ sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(False, ret)
+
+ @httpretty.activate
+ def test_unknown_platform_with_strict_false(self):
+ """Unknown platform data with strict_id false should return True."""
+ uuid = 'ab439480-72bf-11d3-91fc-b8aded755F9a'
+ ds = self._setup_ds(
+ platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
+ sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
+ md=DEFAULT_METADATA)
+ ret = ds.get_data()
+ self.assertEqual(True, ret)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py
index 6fd1341d..ad608bec 100644
--- a/tests/unittests/test_datasource/test_gce.py
+++ b/tests/unittests/test_datasource/test_gce.py
@@ -72,11 +72,11 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
self.ds = DataSourceGCE.DataSourceGCE(
settings.CFG_BUILTIN, None,
helpers.Paths({}))
- self.m_platform_reports_gce = mock.patch(
- 'cloudinit.sources.DataSourceGCE.platform_reports_gce',
- return_value=True)
- self.m_platform_reports_gce.start()
- self.addCleanup(self.m_platform_reports_gce.stop)
+ ppatch = self.m_platform_reports_gce = mock.patch(
+ 'cloudinit.sources.DataSourceGCE.platform_reports_gce')
+ self.m_platform_reports_gce = ppatch.start()
+ self.m_platform_reports_gce.return_value = True
+ self.addCleanup(ppatch.stop)
super(TestDataSourceGCE, self).setUp()
def test_connection(self):
@@ -163,9 +163,12 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
self.assertEqual(True, r)
self.assertEqual('bar', self.ds.availability_zone)
- def test_get_data_returns_false_if_not_on_gce(self):
+ @mock.patch("cloudinit.sources.DataSourceGCE.GoogleMetadataFetcher")
+ def test_get_data_returns_false_if_not_on_gce(self, m_fetcher):
self.m_platform_reports_gce.return_value = False
- self.assertEqual(False, self.ds.get_data())
+ ret = self.ds.get_data()
+ self.assertEqual(False, ret)
+ m_fetcher.assert_not_called()
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_datasource/test_scaleway.py b/tests/unittests/test_datasource/test_scaleway.py
new file mode 100644
index 00000000..65d83ad7
--- /dev/null
+++ b/tests/unittests/test_datasource/test_scaleway.py
@@ -0,0 +1,262 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import json
+
+import httpretty
+import requests
+
+from cloudinit import helpers
+from cloudinit import settings
+from cloudinit.sources import DataSourceScaleway
+
+from ..helpers import mock, HttprettyTestCase, TestCase
+
+
+class DataResponses(object):
+ """
+ Possible responses of the API endpoint
+ 169.254.42.42/user_data/cloud-init and
+ 169.254.42.42/vendor_data/cloud-init.
+ """
+
+ FAKE_USER_DATA = '#!/bin/bash\necho "user-data"'
+
+ @staticmethod
+ def rate_limited(method, uri, headers):
+ return 429, headers, ''
+
+ @staticmethod
+ def api_error(method, uri, headers):
+ return 500, headers, ''
+
+ @classmethod
+ def get_ok(cls, method, uri, headers):
+ return 200, headers, cls.FAKE_USER_DATA
+
+ @staticmethod
+ def empty(method, uri, headers):
+ """
+ No user data for this server.
+ """
+ return 404, headers, ''
+
+
+class MetadataResponses(object):
+ """
+ Possible responses of the metadata API.
+ """
+
+ FAKE_METADATA = {
+ 'id': '00000000-0000-0000-0000-000000000000',
+ 'hostname': 'scaleway.host',
+ 'ssh_public_keys': [{
+ 'key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA',
+ 'fingerprint': '2048 06:ae:... login (RSA)'
+ }, {
+ 'key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABCCCCC',
+ 'fingerprint': '2048 06:ff:... login2 (RSA)'
+ }]
+ }
+
+ @classmethod
+ def get_ok(cls, method, uri, headers):
+ return 200, headers, json.dumps(cls.FAKE_METADATA)
+
+
+class TestOnScaleway(TestCase):
+
+ def install_mocks(self, fake_dmi, fake_file_exists, fake_cmdline):
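+ # Each fake_* argument is a (mock, enabled) tuple; 'enabled' selects
+ # the value that makes the corresponding check report Scaleway.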
+ mock, faked = fake_dmi
+ mock.return_value = 'Scaleway' if faked else 'Whatever'
+
+ mock, faked = fake_file_exists
+ mock.return_value = faked
+
+ mock, faked = fake_cmdline
+ mock.return_value = \
+ 'initrd=initrd showopts scaleway nousb' if faked \
+ else 'BOOT_IMAGE=/vmlinuz-3.11.0-26-generic'
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_not_on_scaleway(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertFalse(DataSourceScaleway.on_scaleway())
+
+ # When not on Scaleway, get_data() returns False.
+ datasource = DataSourceScaleway.DataSourceScaleway(
+ settings.CFG_BUILTIN, None, helpers.Paths({})
+ )
+ self.assertFalse(datasource.get_data())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_dmi(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ dmidecode returns "Scaleway".
+ """
+ # dmidecode returns "Scaleway"
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, True),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_var_run_scaleway(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ /var/run/scaleway exists.
+ """
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, True),
+ fake_cmdline=(m_get_cmdline, False)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('os.path.exists')
+ @mock.patch('cloudinit.util.read_dmi_data')
+ def test_on_scaleway_cmdline(self, m_read_dmi_data, m_file_exists,
+ m_get_cmdline):
+ """
+ "scaleway" in /proc/cmdline.
+ """
+ self.install_mocks(
+ fake_dmi=(m_read_dmi_data, False),
+ fake_file_exists=(m_file_exists, False),
+ fake_cmdline=(m_get_cmdline, True)
+ )
+ self.assertTrue(DataSourceScaleway.on_scaleway())
+
+
+def get_source_address_adapter(*args, **kwargs):
+ """
+ The Scaleway user/vendor data API requires requests to come from a
+ privileged source port.
+
+ If the unittests are run as non-root, the user doesn't have permission
+ to bind to ports below 1024.
+
+ This function drops the privileged source address binding, since the
+ HTTP call is mocked by httpretty anyway.
+ """
+ kwargs.pop('source_address')
+ return requests.adapters.HTTPAdapter(*args, **kwargs)
+
+
+class TestDataSourceScaleway(HttprettyTestCase):
+
+ def setUp(self):
+ self.datasource = DataSourceScaleway.DataSourceScaleway(
+ settings.CFG_BUILTIN, None, helpers.Paths({})
+ )
+ super(TestDataSourceScaleway, self).setUp()
+
+ self.metadata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['metadata_url']
+ self.userdata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['userdata_url']
+ self.vendordata_url = \
+ DataSourceScaleway.BUILTIN_DS_CONFIG['vendordata_url']
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_ok(self, sleep, m_get_cmdline):
+ """
+ get_data() returns metadata, user data and vendor data.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ # Make user data API return a valid response
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.userdata_url,
+ body=DataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.get_ok)
+ self.datasource.get_data()
+
+ self.assertEqual(self.datasource.get_instance_id(),
+ MetadataResponses.FAKE_METADATA['id'])
+ self.assertEqual(self.datasource.get_public_ssh_keys(), [
+ elem['key'] for elem in
+ MetadataResponses.FAKE_METADATA['ssh_public_keys']
+ ])
+ self.assertEqual(self.datasource.get_hostname(),
+ MetadataResponses.FAKE_METADATA['hostname'])
+ self.assertEqual(self.datasource.get_userdata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertEqual(self.datasource.get_vendordata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertIsNone(self.datasource.availability_zone)
+ self.assertIsNone(self.datasource.region)
+ self.assertEqual(sleep.call_count, 0)
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_404(self, sleep, m_get_cmdline):
+ """
+ get_data() returns metadata, but neither user data nor vendor data.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ # Make user and vendor data APIs return HTTP/404, which means there is
+ # no user / vendor data for the server.
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.userdata_url,
+ body=DataResponses.empty)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.empty)
+ self.datasource.get_data()
+ self.assertIsNone(self.datasource.get_userdata_raw())
+ self.assertIsNone(self.datasource.get_vendordata_raw())
+ self.assertEqual(sleep.call_count, 0)
+
+ @httpretty.activate
+ @mock.patch('cloudinit.sources.DataSourceScaleway.SourceAddressAdapter',
+ get_source_address_adapter)
+ @mock.patch('cloudinit.util.get_cmdline')
+ @mock.patch('time.sleep', return_value=None)
+ def test_metadata_rate_limit(self, sleep, m_get_cmdline):
+ """
+ get_data() is rate limited twice by the API when fetching user data,
+ then succeeds on the third attempt.
+ """
+ m_get_cmdline.return_value = 'scaleway'
+
+ httpretty.register_uri(httpretty.GET, self.metadata_url,
+ body=MetadataResponses.get_ok)
+ httpretty.register_uri(httpretty.GET, self.vendordata_url,
+ body=DataResponses.empty)
+
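+ # The first two requests for user data are answered with HTTP 429
+ # (rate limited); the third returns the data.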
+ httpretty.register_uri(
+ httpretty.GET, self.userdata_url,
+ responses=[
+ httpretty.Response(body=DataResponses.rate_limited),
+ httpretty.Response(body=DataResponses.rate_limited),
+ httpretty.Response(body=DataResponses.get_ok),
+ ]
+ )
+ self.datasource.get_data()
+ self.assertEqual(self.datasource.get_userdata_raw(),
+ DataResponses.FAKE_USER_DATA)
+ self.assertEqual(sleep.call_count, 2)
diff --git a/tests/unittests/test_distros/test_create_users.py b/tests/unittests/test_distros/test_create_users.py
index 9ded4f6c..1d02f7bd 100644
--- a/tests/unittests/test_distros/test_create_users.py
+++ b/tests/unittests/test_distros/test_create_users.py
@@ -38,6 +38,8 @@ class MyBaseDistro(distros.Distro):
raise NotImplementedError()
+@mock.patch("cloudinit.distros.util.system_is_snappy", return_value=False)
+@mock.patch("cloudinit.distros.util.subp")
class TestCreateUser(TestCase):
def setUp(self):
super(TestCase, self).setUp()
@@ -53,8 +55,7 @@ class TestCreateUser(TestCase):
logcmd[i + 1] = 'REDACTED'
return mock.call(args, logstring=logcmd)
- @mock.patch("cloudinit.distros.util.subp")
- def test_basic(self, m_subp):
+ def test_basic(self, m_subp, m_is_snappy):
user = 'foouser'
self.dist.create_user(user)
self.assertEqual(
@@ -62,8 +63,7 @@ class TestCreateUser(TestCase):
[self._useradd2call([user, '-m']),
mock.call(['passwd', '-l', user])])
- @mock.patch("cloudinit.distros.util.subp")
- def test_no_home(self, m_subp):
+ def test_no_home(self, m_subp, m_is_snappy):
user = 'foouser'
self.dist.create_user(user, no_create_home=True)
self.assertEqual(
@@ -71,8 +71,7 @@ class TestCreateUser(TestCase):
[self._useradd2call([user, '-M']),
mock.call(['passwd', '-l', user])])
- @mock.patch("cloudinit.distros.util.subp")
- def test_system_user(self, m_subp):
+ def test_system_user(self, m_subp, m_is_snappy):
# system user should have no home and get --system
user = 'foouser'
self.dist.create_user(user, system=True)
@@ -81,8 +80,7 @@ class TestCreateUser(TestCase):
[self._useradd2call([user, '--system', '-M']),
mock.call(['passwd', '-l', user])])
- @mock.patch("cloudinit.distros.util.subp")
- def test_explicit_no_home_false(self, m_subp):
+ def test_explicit_no_home_false(self, m_subp, m_is_snappy):
user = 'foouser'
self.dist.create_user(user, no_create_home=False)
self.assertEqual(
@@ -90,16 +88,14 @@ class TestCreateUser(TestCase):
[self._useradd2call([user, '-m']),
mock.call(['passwd', '-l', user])])
- @mock.patch("cloudinit.distros.util.subp")
- def test_unlocked(self, m_subp):
+ def test_unlocked(self, m_subp, m_is_snappy):
user = 'foouser'
self.dist.create_user(user, lock_passwd=False)
self.assertEqual(
m_subp.call_args_list,
[self._useradd2call([user, '-m'])])
- @mock.patch("cloudinit.distros.util.subp")
- def test_set_password(self, m_subp):
+ def test_set_password(self, m_subp, m_is_snappy):
user = 'foouser'
password = 'passfoo'
self.dist.create_user(user, passwd=password)
@@ -109,8 +105,7 @@ class TestCreateUser(TestCase):
mock.call(['passwd', '-l', user])])
@mock.patch("cloudinit.distros.util.is_group")
- @mock.patch("cloudinit.distros.util.subp")
- def test_group_added(self, m_subp, m_is_group):
+ def test_group_added(self, m_is_group, m_subp, m_is_snappy):
m_is_group.return_value = False
user = 'foouser'
self.dist.create_user(user, groups=['group1'])
@@ -121,8 +116,7 @@ class TestCreateUser(TestCase):
self.assertEqual(m_subp.call_args_list, expected)
@mock.patch("cloudinit.distros.util.is_group")
- @mock.patch("cloudinit.distros.util.subp")
- def test_only_new_group_added(self, m_subp, m_is_group):
+ def test_only_new_group_added(self, m_is_group, m_subp, m_is_snappy):
ex_groups = ['existing_group']
groups = ['group1', ex_groups[0]]
m_is_group.side_effect = lambda m: m in ex_groups
@@ -135,8 +129,8 @@ class TestCreateUser(TestCase):
self.assertEqual(m_subp.call_args_list, expected)
@mock.patch("cloudinit.distros.util.is_group")
- @mock.patch("cloudinit.distros.util.subp")
- def test_create_groups_with_whitespace_string(self, m_subp, m_is_group):
+ def test_create_groups_with_whitespace_string(
+ self, m_is_group, m_subp, m_is_snappy):
# groups supported as a comma delimeted string even with white space
m_is_group.return_value = False
user = 'foouser'
diff --git a/tests/unittests/test_distros/test_debian.py b/tests/unittests/test_distros/test_debian.py
new file mode 100644
index 00000000..2330ad52
--- /dev/null
+++ b/tests/unittests/test_distros/test_debian.py
@@ -0,0 +1,82 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from ..helpers import (CiTestCase, mock)
+
+from cloudinit.distros.debian import apply_locale
+from cloudinit import util
+
+
+@mock.patch("cloudinit.distros.debian.util.subp")
+class TestDebianApplyLocale(CiTestCase):
+ def test_no_rerun(self, m_subp):
+ """If system has defined locale, no re-run is expected."""
+ spath = self.tmp_path("default-locale")
+ m_subp.return_value = (None, None)
+ locale = 'en_US.UTF-8'
+ util.write_file(spath, 'LANG=%s\n' % locale, omode="w")
+ apply_locale(locale, sys_path=spath)
+ m_subp.assert_not_called()
+
+ def test_rerun_if_different(self, m_subp):
+ """If system has different locale, locale-gen should be called."""
+ spath = self.tmp_path("default-locale")
+ m_subp.return_value = (None, None)
+ locale = 'en_US.UTF-8'
+ util.write_file(spath, 'LANG=fr_FR.UTF-8', omode="w")
+ apply_locale(locale, sys_path=spath)
+ self.assertEqual(
+ [['locale-gen', locale],
+ ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]],
+ [p[0][0] for p in m_subp.call_args_list])
+
+ def test_rerun_if_no_file(self, m_subp):
+ """If system has no locale file, locale-gen should be called."""
+ spath = self.tmp_path("default-locale")
+ m_subp.return_value = (None, None)
+ locale = 'en_US.UTF-8'
+ apply_locale(locale, sys_path=spath)
+ self.assertEqual(
+ [['locale-gen', locale],
+ ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]],
+ [p[0][0] for p in m_subp.call_args_list])
+
+ def test_rerun_on_unset_system_locale(self, m_subp):
+ """If system has unset locale, locale-gen should be called."""
+ m_subp.return_value = (None, None)
+ spath = self.tmp_path("default-locale")
+ locale = 'en_US.UTF-8'
+ util.write_file(spath, 'LANG=', omode="w")
+ apply_locale(locale, sys_path=spath)
+ self.assertEqual(
+ [['locale-gen', locale],
+ ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]],
+ [p[0][0] for p in m_subp.call_args_list])
+
+ def test_rerun_on_mismatched_keys(self, m_subp):
+ """If key is LC_ALL and system has only LANG, rerun is expected."""
+ m_subp.return_value = (None, None)
+ spath = self.tmp_path("default-locale")
+ locale = 'en_US.UTF-8'
+ util.write_file(spath, 'LANG=', omode="w")
+ apply_locale(locale, sys_path=spath, keyname='LC_ALL')
+ self.assertEqual(
+ [['locale-gen', locale],
+ ['update-locale', '--locale-file=' + spath,
+ 'LC_ALL=%s' % locale]],
+ [p[0][0] for p in m_subp.call_args_list])
+
+ def test_falseish_locale_raises_valueerror(self, m_subp):
+ """locale as None or "" is invalid and should raise ValueError."""
+
+ with self.assertRaises(ValueError) as ctext_m:
+ apply_locale(None)
+ m_subp.assert_not_called()
+
+ self.assertEqual(
+ 'Failed to provide locale value.', str(ctext_m.exception))
+
+ with self.assertRaises(ValueError) as ctext_m:
+ apply_locale("")
+ m_subp.assert_not_called()
+ self.assertEqual(
+ 'Failed to provide locale value.', str(ctext_m.exception))
diff --git a/tests/unittests/test_distros/test_netconfig.py b/tests/unittests/test_distros/test_netconfig.py
index be9a8318..2f505d93 100644
--- a/tests/unittests/test_distros/test_netconfig.py
+++ b/tests/unittests/test_distros/test_netconfig.py
@@ -92,10 +92,9 @@ iface lo inet loopback
auto eth0
iface eth0 inet static
- address 192.168.1.5
+ address 192.168.1.5/24
broadcast 192.168.1.0
gateway 192.168.1.254
- netmask 255.255.255.0
auto eth1
iface eth1 inet dhcp
@@ -156,7 +155,7 @@ network:
ethernets:
eth7:
addresses:
- - 192.168.1.5/255.255.255.0
+ - 192.168.1.5/24
gateway4: 192.168.1.254
eth9:
dhcp4: true
@@ -477,7 +476,9 @@ NETWORKING=yes
# Created by cloud-init on instance boot automatically, do not edit.
#
BOOTPROTO=none
+DEFROUTE=yes
DEVICE=eth0
+GATEWAY=192.168.1.254
IPADDR=192.168.1.5
NETMASK=255.255.255.0
NM_CONTROLLED=no
@@ -626,9 +627,11 @@ IPV6_AUTOCONF=no
# Created by cloud-init on instance boot automatically, do not edit.
#
BOOTPROTO=none
+DEFROUTE=yes
DEVICE=eth0
IPV6ADDR=2607:f0d0:1002:0011::2/64
IPV6INIT=yes
+IPV6_DEFAULTGW=2607:f0d0:1002:0011::1
NM_CONTROLLED=no
ONBOOT=yes
TYPE=Ethernet
diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py
index f5694b26..8ccfe55c 100644
--- a/tests/unittests/test_ds_identify.py
+++ b/tests/unittests/test_ds_identify.py
@@ -39,9 +39,11 @@ RC_FOUND = 0
RC_NOT_FOUND = 1
DS_NONE = 'None'
+P_CHASSIS_ASSET_TAG = "sys/class/dmi/id/chassis_asset_tag"
P_PRODUCT_NAME = "sys/class/dmi/id/product_name"
P_PRODUCT_SERIAL = "sys/class/dmi/id/product_serial"
P_PRODUCT_UUID = "sys/class/dmi/id/product_uuid"
+P_SEED_DIR = "var/lib/cloud/seed"
P_DSID_CFG = "etc/cloud/ds-identify.cfg"
MOCK_VIRT_IS_KVM = {'name': 'detect_virt', 'RET': 'kvm', 'ret': 0}
@@ -160,6 +162,30 @@ class TestDsIdentify(CiTestCase):
_print_run_output(rc, out, err, cfg, files)
return rc, out, err, cfg, files
+ def test_wb_print_variables(self):
+ """_print_info reports an array of discovered variables to stderr."""
+ data = VALID_CFG['Azure-dmi-detection']
+ _, _, err, _, _ = self._call_via_dict(data)
+ expected_vars = [
+ 'DMI_PRODUCT_NAME', 'DMI_SYS_VENDOR', 'DMI_PRODUCT_SERIAL',
+ 'DMI_PRODUCT_UUID', 'PID_1_PRODUCT_NAME', 'DMI_CHASSIS_ASSET_TAG',
+ 'FS_LABELS', 'KERNEL_CMDLINE', 'VIRT', 'UNAME_KERNEL_NAME',
+ 'UNAME_KERNEL_RELEASE', 'UNAME_KERNEL_VERSION', 'UNAME_MACHINE',
+ 'UNAME_NODENAME', 'UNAME_OPERATING_SYSTEM', 'DSNAME', 'DSLIST',
+ 'MODE', 'ON_FOUND', 'ON_MAYBE', 'ON_NOTFOUND']
+ for var in expected_vars:
+ self.assertIn('{0}='.format(var), err)
+
+ def test_azure_dmi_detection_from_chassis_asset_tag(self):
+ """Azure datasource is detected from DMI chassis-asset-tag"""
+ self._test_ds_found('Azure-dmi-detection')
+
+ def test_azure_seed_file_detection(self):
+ """Azure datasource is detected due to presence of a seed file.
+
+ The seed file tested is /var/lib/cloud/seed/azure/ovf-env.xml."""
+ self._test_ds_found('Azure-seed-detection')
+
def test_aws_ec2_hvm(self):
"""EC2: hvm instances use dmi serial and uuid starting with 'ec2'."""
self._test_ds_found('Ec2-hvm')
@@ -220,6 +246,20 @@ class TestDsIdentify(CiTestCase):
mydata['files'][cfgpath] = 'datasource_list: ["Ec2", "None"]\n'
self._check_via_dict(mydata, rc=RC_FOUND, dslist=['Ec2', DS_NONE])
+ def test_aliyun_identified(self):
+ """Test that Aliyun cloud is identified by product id."""
+ self._test_ds_found('AliYun')
+
+ def test_aliyun_over_ec2(self):
+ """Even if all other factors identified Ec2, AliYun should be used."""
+ mydata = copy.deepcopy(VALID_CFG['Ec2-xen'])
+ self._test_ds_found('AliYun')
+ prod_name = VALID_CFG['AliYun']['files'][P_PRODUCT_NAME]
+ mydata['files'][P_PRODUCT_NAME] = prod_name
+ policy = "search,found=first,maybe=none,notfound=disabled"
+ self._check_via_dict(mydata, rc=RC_FOUND, dslist=['AliYun', DS_NONE],
+ policy_dmi=policy)
+
def blkid_out(disks=None):
"""Convert a list of disk dictionaries into blkid content."""
@@ -254,6 +294,23 @@ def _print_run_output(rc, out, err, cfg, files):
VALID_CFG = {
+ 'AliYun': {
+ 'ds': 'AliYun',
+ 'files': {P_PRODUCT_NAME: 'Alibaba Cloud ECS\n'},
+ },
+ 'Azure-dmi-detection': {
+ 'ds': 'Azure',
+ 'files': {
+ P_CHASSIS_ASSET_TAG: '7783-7084-3265-9085-8269-3286-77\n',
+ }
+ },
+ 'Azure-seed-detection': {
+ 'ds': 'Azure',
+ 'files': {
+ P_CHASSIS_ASSET_TAG: 'No-match\n',
+ os.path.join(P_SEED_DIR, 'azure', 'ovf-env.xml'): 'present\n',
+ }
+ },
'Ec2-hvm': {
'ds': 'Ec2',
'mocks': [{'name': 'detect_virt', 'RET': 'kvm', 'ret': 0}],
diff --git a/tests/unittests/test_handler/test_handler_disk_setup.py b/tests/unittests/test_handler/test_handler_disk_setup.py
index 916a0d7a..8a6d49ed 100644
--- a/tests/unittests/test_handler/test_handler_disk_setup.py
+++ b/tests/unittests/test_handler/test_handler_disk_setup.py
@@ -3,7 +3,7 @@
import random
from cloudinit.config import cc_disk_setup
-from ..helpers import ExitStack, mock, TestCase
+from ..helpers import CiTestCase, ExitStack, mock, TestCase
class TestIsDiskUsed(TestCase):
@@ -174,32 +174,32 @@ class TestUpdateFsSetupDevices(TestCase):
return_value=('/dev/xdb1', False))
@mock.patch('cloudinit.config.cc_disk_setup.device_type', return_value=None)
@mock.patch('cloudinit.config.cc_disk_setup.util.subp', return_value=('', ''))
-class TestMkfsCommandHandling(TestCase):
+class TestMkfsCommandHandling(CiTestCase):
+
+ with_logs = True
def test_with_cmd(self, subp, *args):
"""mkfs honors cmd and logs warnings when extra_opts or overwrite are
provided."""
- with self.assertLogs(
- 'cloudinit.config.cc_disk_setup') as logs:
- cc_disk_setup.mkfs({
- 'cmd': 'mkfs -t %(filesystem)s -L %(label)s %(device)s',
- 'filesystem': 'ext4',
- 'device': '/dev/xdb1',
- 'label': 'with_cmd',
- 'extra_opts': ['should', 'generate', 'warning'],
- 'overwrite': 'should generate warning too'
- })
+ cc_disk_setup.mkfs({
+ 'cmd': 'mkfs -t %(filesystem)s -L %(label)s %(device)s',
+ 'filesystem': 'ext4',
+ 'device': '/dev/xdb1',
+ 'label': 'with_cmd',
+ 'extra_opts': ['should', 'generate', 'warning'],
+ 'overwrite': 'should generate warning too'
+ })
self.assertIn(
- 'WARNING:cloudinit.config.cc_disk_setup:fs_setup:extra_opts ' +
+ 'extra_opts ' +
'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
'/dev/xdb1',
- logs.output)
+ self.logs.getvalue())
self.assertIn(
- 'WARNING:cloudinit.config.cc_disk_setup:fs_setup:overwrite ' +
+ 'overwrite ' +
'ignored because cmd was specified: mkfs -t ext4 -L with_cmd ' +
'/dev/xdb1',
- logs.output)
+ self.logs.getvalue())
subp.assert_called_once_with(
'mkfs -t ext4 -L with_cmd /dev/xdb1', shell=True)
diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py
index bc4277b7..7f278646 100644
--- a/tests/unittests/test_handler/test_handler_ntp.py
+++ b/tests/unittests/test_handler/test_handler_ntp.py
@@ -3,7 +3,7 @@
from cloudinit.config import cc_ntp
from cloudinit.sources import DataSourceNone
from cloudinit import (distros, helpers, cloud, util)
-from ..helpers import FilesystemMockingTestCase, mock
+from ..helpers import FilesystemMockingTestCase, mock, skipIf
import os
@@ -16,6 +16,13 @@ servers {{servers}}
pools {{pools}}
"""
+try:
+ import jsonschema
+ assert jsonschema # avoid pyflakes error F401: import unused
+ _missing_jsonschema_dep = False
+except ImportError:
+ _missing_jsonschema_dep = True
+
class TestNtp(FilesystemMockingTestCase):
@@ -55,7 +62,7 @@ class TestNtp(FilesystemMockingTestCase):
def test_ntp_rename_ntp_conf(self):
"""When NTP_CONF exists, rename_ntp moves it."""
ntpconf = self.tmp_path("ntp.conf", self.new_root)
- os.mknod(ntpconf)
+ util.write_file(ntpconf, "")
with mock.patch("cloudinit.config.cc_ntp.NTP_CONF", ntpconf):
cc_ntp.rename_ntp_conf()
self.assertFalse(os.path.exists(ntpconf))
@@ -209,7 +216,121 @@ class TestNtp(FilesystemMockingTestCase):
"""When no ntp section is defined handler logs a warning and noops."""
cc_ntp.handle('cc_ntp', {}, None, None, [])
self.assertEqual(
- 'Skipping module named cc_ntp, not present or disabled by cfg\n',
+ 'DEBUG: Skipping module named cc_ntp, '
+ 'not present or disabled by cfg\n',
self.logs.getvalue())
+ def test_ntp_handler_schema_validation_allows_empty_ntp_config(self):
+ """Ntp schema validation allows for an empty ntp: configuration."""
+ invalid_config = {'ntp': {}}
+ distro = 'ubuntu'
+ cc = self._get_cloud(distro)
+ ntp_conf = os.path.join(self.new_root, 'ntp.conf')
+ with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
+ stream.write(NTP_TEMPLATE)
+ with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
+ cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
+ self.assertNotIn('Invalid config:', self.logs.getvalue())
+ with open(ntp_conf) as stream:
+ content = stream.read()
+ default_pools = [
+ "{0}.{1}.pool.ntp.org".format(x, distro)
+ for x in range(0, cc_ntp.NR_POOL_SERVERS)]
+ self.assertEqual(
+ "servers []\npools {0}\n".format(default_pools),
+ content)
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_ntp_handler_schema_validation_warns_non_string_item_type(self):
+ """Ntp schema validation warns of non-strings in pools or servers.
+
+ Schema validation is not strict, so ntp config is still rendered.
+ """
+ invalid_config = {'ntp': {'pools': [123], 'servers': ['valid', None]}}
+ cc = self._get_cloud('ubuntu')
+ ntp_conf = os.path.join(self.new_root, 'ntp.conf')
+ with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
+ stream.write(NTP_TEMPLATE)
+ with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
+ cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
+ self.assertIn(
+ "Invalid config:\nntp.pools.0: 123 is not of type 'string'\n"
+ "ntp.servers.1: None is not of type 'string'",
+ self.logs.getvalue())
+ with open(ntp_conf) as stream:
+ content = stream.read()
+ self.assertEqual("servers ['valid', None]\npools [123]\n", content)
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_ntp_handler_schema_validation_warns_of_non_array_type(self):
+ """Ntp schema validation warns of non-array pools or servers types.
+
+ Schema validation is not strict, so ntp config is still rendered.
+ """
+ invalid_config = {'ntp': {'pools': 123, 'servers': 'non-array'}}
+ cc = self._get_cloud('ubuntu')
+ ntp_conf = os.path.join(self.new_root, 'ntp.conf')
+ with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
+ stream.write(NTP_TEMPLATE)
+ with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
+ cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
+ self.assertIn(
+ "Invalid config:\nntp.pools: 123 is not of type 'array'\n"
+ "ntp.servers: 'non-array' is not of type 'array'",
+ self.logs.getvalue())
+ with open(ntp_conf) as stream:
+ content = stream.read()
+ self.assertEqual("servers non-array\npools 123\n", content)
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_ntp_handler_schema_validation_warns_invalid_key_present(self):
+ """Ntp schema validation warns of invalid keys present in ntp config.
+
+ Schema validation is not strict, so ntp config is still rendered.
+ """
+ invalid_config = {
+ 'ntp': {'invalidkey': 1, 'pools': ['0.mycompany.pool.ntp.org']}}
+ cc = self._get_cloud('ubuntu')
+ ntp_conf = os.path.join(self.new_root, 'ntp.conf')
+ with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
+ stream.write(NTP_TEMPLATE)
+ with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
+ cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
+ self.assertIn(
+ "Invalid config:\nntp: Additional properties are not allowed "
+ "('invalidkey' was unexpected)",
+ self.logs.getvalue())
+ with open(ntp_conf) as stream:
+ content = stream.read()
+ self.assertEqual(
+ "servers []\npools ['0.mycompany.pool.ntp.org']\n",
+ content)
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_ntp_handler_schema_validation_warns_of_duplicates(self):
+ """Ntp schema validation warns of duplicates in servers or pools.
+
+ Schema validation is not strict, so ntp config is still rendered.
+ """
+ invalid_config = {
+ 'ntp': {'pools': ['0.mypool.org', '0.mypool.org'],
+ 'servers': ['10.0.0.1', '10.0.0.1']}}
+ cc = self._get_cloud('ubuntu')
+ ntp_conf = os.path.join(self.new_root, 'ntp.conf')
+ with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream:
+ stream.write(NTP_TEMPLATE)
+ with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf):
+ cc_ntp.handle('cc_ntp', invalid_config, cc, None, [])
+ self.assertIn(
+ "Invalid config:\nntp.pools: ['0.mypool.org', '0.mypool.org'] has "
+ "non-unique elements\nntp.servers: ['10.0.0.1', '10.0.0.1'] has "
+ "non-unique elements",
+ self.logs.getvalue())
+ with open(ntp_conf) as stream:
+ content = stream.read()
+ self.assertEqual(
+ "servers ['10.0.0.1', '10.0.0.1']\n"
+ "pools ['0.mypool.org', '0.mypool.org']\n",
+ content)
+
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_write_files.py b/tests/unittests/test_handler/test_handler_write_files.py
index fb252d1d..1129e77d 100644
--- a/tests/unittests/test_handler/test_handler_write_files.py
+++ b/tests/unittests/test_handler/test_handler_write_files.py
@@ -1,10 +1,10 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from cloudinit.config.cc_write_files import write_files
+from cloudinit.config.cc_write_files import write_files, decode_perms
from cloudinit import log as logging
from cloudinit import util
-from ..helpers import FilesystemMockingTestCase
+from ..helpers import CiTestCase, FilesystemMockingTestCase
import base64
import gzip
@@ -49,13 +49,13 @@ class TestWriteFiles(FilesystemMockingTestCase):
expected = "hello world\n"
filename = "/tmp/my.file"
write_files(
- "test_simple", [{"content": expected, "path": filename}], LOG)
+ "test_simple", [{"content": expected, "path": filename}])
self.assertEqual(util.load_file(filename), expected)
def test_yaml_binary(self):
self.patchUtils(self.tmp)
data = util.load_yaml(YAML_TEXT)
- write_files("testname", data['write_files'], LOG)
+ write_files("testname", data['write_files'])
for path, content in YAML_CONTENT_EXPECTED.items():
self.assertEqual(util.load_file(path), content)
@@ -87,7 +87,7 @@ class TestWriteFiles(FilesystemMockingTestCase):
files.append(cur)
expected.append((cur['path'], data))
- write_files("test_decoding", files, LOG)
+ write_files("test_decoding", files)
for path, content in expected:
self.assertEqual(util.load_file(path, decode=False), content)
@@ -98,6 +98,33 @@ class TestWriteFiles(FilesystemMockingTestCase):
self.assertEqual(len(expected), flen_expected)
+class TestDecodePerms(CiTestCase):
+
+ with_logs = True
+
+ def test_none_returns_default(self):
+ """If None is passed as perms, then default should be returned."""
+ default = object()
+ found = decode_perms(None, default)
+ self.assertEqual(default, found)
+
+ def test_integer(self):
+ """A valid integer should return itself."""
+ found = decode_perms(0o755, None)
+ self.assertEqual(0o755, found)
+
+ def test_valid_octal_string(self):
+ """A string should be read as octal."""
+ found = decode_perms("644", None)
+ self.assertEqual(0o644, found)
+
+ def test_invalid_octal_string_returns_default_and_warns(self):
+ """A string with invalid octal should warn and return default."""
+ found = decode_perms("999", None)
+ self.assertIsNone(found)
+ self.assertIn("WARNING: Undecodable", self.logs.getvalue())
+
+
def _gzip_bytes(data):
buf = six.BytesIO()
fp = None
diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py
new file mode 100644
index 00000000..eda4802a
--- /dev/null
+++ b/tests/unittests/test_handler/test_schema.py
@@ -0,0 +1,232 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.config.schema import (
+ CLOUD_CONFIG_HEADER, SchemaValidationError, get_schema_doc,
+ validate_cloudconfig_file, validate_cloudconfig_schema,
+ main)
+from cloudinit.util import write_file
+
+from ..helpers import CiTestCase, mock, skipIf
+
+from copy import copy
+from six import StringIO
+from textwrap import dedent
+
+try:
+ import jsonschema
+ assert jsonschema # avoid pyflakes error F401: import unused
+ _missing_jsonschema_dep = False
+except ImportError:
+ _missing_jsonschema_dep = True
+
+
+class SchemaValidationErrorTest(CiTestCase):
+ """Test validate_cloudconfig_schema"""
+
+ def test_schema_validation_error_expects_schema_errors(self):
+ """SchemaValidationError is initialized from schema_errors."""
+ errors = (('key.path', 'unexpected key "junk"'),
+ ('key2.path', '"-123" is not a valid "hostname" format'))
+ exception = SchemaValidationError(schema_errors=errors)
+ self.assertIsInstance(exception, Exception)
+ self.assertEqual(exception.schema_errors, errors)
+ self.assertEqual(
+ 'Cloud config schema errors: key.path: unexpected key "junk", '
+ 'key2.path: "-123" is not a valid "hostname" format',
+ str(exception))
+ self.assertTrue(isinstance(exception, ValueError))
+
+
+class ValidateCloudConfigSchemaTest(CiTestCase):
+ """Tests for validate_cloudconfig_schema."""
+
+ with_logs = True
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_validateconfig_schema_non_strict_emits_warnings(self):
+ """When strict is False validate_cloudconfig_schema emits warnings."""
+ schema = {'properties': {'p1': {'type': 'string'}}}
+ validate_cloudconfig_schema({'p1': -1}, schema, strict=False)
+ self.assertIn(
+ "Invalid config:\np1: -1 is not of type 'string'\n",
+ self.logs.getvalue())
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_validateconfig_schema_emits_warning_on_missing_jsonschema(self):
+ """Warning from validate_cloudconfig_schema when missing jsonschema."""
+ schema = {'properties': {'p1': {'type': 'string'}}}
+ with mock.patch.dict('sys.modules', **{'jsonschema': ImportError()}):
+ validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
+ self.assertIn(
+ 'Ignoring schema validation. python-jsonschema is not present',
+ self.logs.getvalue())
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_validateconfig_schema_strict_raises_errors(self):
+ """When strict is True validate_cloudconfig_schema raises errors."""
+ schema = {'properties': {'p1': {'type': 'string'}}}
+ with self.assertRaises(SchemaValidationError) as context_mgr:
+ validate_cloudconfig_schema({'p1': -1}, schema, strict=True)
+ self.assertEqual(
+ "Cloud config schema errors: p1: -1 is not of type 'string'",
+ str(context_mgr.exception))
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_validateconfig_schema_honors_formats(self):
+ """With strict True, validate_cloudconfig_schema errors on format."""
+ schema = {
+ 'properties': {'p1': {'type': 'string', 'format': 'hostname'}}}
+ with self.assertRaises(SchemaValidationError) as context_mgr:
+ validate_cloudconfig_schema({'p1': '-1'}, schema, strict=True)
+ self.assertEqual(
+ "Cloud config schema errors: p1: '-1' is not a 'hostname'",
+ str(context_mgr.exception))
+
+
+class ValidateCloudConfigFileTest(CiTestCase):
+ """Tests for validate_cloudconfig_file."""
+
+ def setUp(self):
+ super(ValidateCloudConfigFileTest, self).setUp()
+ self.config_file = self.tmp_path('cloudcfg.yaml')
+
+ def test_validateconfig_file_error_on_absent_file(self):
+ """On absent config_path, validate_cloudconfig_file errors."""
+ with self.assertRaises(RuntimeError) as context_mgr:
+ validate_cloudconfig_file('/not/here', {})
+ self.assertEqual(
+ 'Configfile /not/here does not exist',
+ str(context_mgr.exception))
+
+ def test_validateconfig_file_error_on_invalid_header(self):
+ """On invalid header, validate_cloudconfig_file errors.
+
+ A SchemaValidationError is raised when the file doesn't begin with
+ CLOUD_CONFIG_HEADER.
+ """
+ write_file(self.config_file, '#junk')
+ with self.assertRaises(SchemaValidationError) as context_mgr:
+ validate_cloudconfig_file(self.config_file, {})
+ self.assertEqual(
+ 'Cloud config schema errors: header: File {0} needs to begin with '
+ '"{1}"'.format(self.config_file, CLOUD_CONFIG_HEADER.decode()),
+ str(context_mgr.exception))
+
+ def test_validateconfig_file_error_on_non_yaml_format(self):
+ """On non-yaml format, validate_cloudconfig_file errors."""
+ write_file(self.config_file, '#cloud-config\n{}}')
+ with self.assertRaises(SchemaValidationError) as context_mgr:
+ validate_cloudconfig_file(self.config_file, {})
+ self.assertIn(
+ 'schema errors: format: File {0} is not valid yaml.'.format(
+ self.config_file),
+ str(context_mgr.exception))
+
+ @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ def test_validateconfig_file_strictly_validates_schema(self):
+ """validate_cloudconfig_file raises errors on invalid schema."""
+ schema = {
+ 'properties': {'p1': {'type': 'string', 'format': 'hostname'}}}
+ write_file(self.config_file, '#cloud-config\np1: "-1"')
+ with self.assertRaises(SchemaValidationError) as context_mgr:
+ validate_cloudconfig_file(self.config_file, schema)
+ self.assertEqual(
+ "Cloud config schema errors: p1: '-1' is not a 'hostname'",
+ str(context_mgr.exception))
+
+
+class GetSchemaDocTest(CiTestCase):
+ """Tests for get_schema_doc."""
+
+ def setUp(self):
+ super(GetSchemaDocTest, self).setUp()
+ self.required_schema = {
+ 'title': 'title', 'description': 'description', 'id': 'id',
+ 'name': 'name', 'frequency': 'frequency',
+ 'distros': ['debian', 'rhel']}
+
+ def test_get_schema_doc_returns_restructured_text(self):
+ """get_schema_doc returns restructured text for a cloudinit schema."""
+ full_schema = copy(self.required_schema)
+ full_schema.update(
+ {'properties': {
+ 'prop1': {'type': 'array', 'description': 'prop-description',
+ 'items': {'type': 'int'}}}})
+ self.assertEqual(
+ dedent("""
+ name
+ ---
+ **Summary:** title
+
+ description
+
+ **Internal name:** ``id``
+
+ **Module frequency:** frequency
+
+ **Supported distros:** debian, rhel
+
+ **Config schema**:
+ **prop1:** (array of int) prop-description\n\n"""),
+ get_schema_doc(full_schema))
+
+ def test_get_schema_doc_returns_restructured_text_with_examples(self):
+ """get_schema_doc returns indented examples when present in schema."""
+ full_schema = copy(self.required_schema)
+ full_schema.update(
+ {'examples': {'ex1': [1, 2, 3]},
+ 'properties': {
+ 'prop1': {'type': 'array', 'description': 'prop-description',
+ 'items': {'type': 'int'}}}})
+ self.assertIn(
+ dedent("""
+ **Config schema**:
+ **prop1:** (array of int) prop-description
+
+ **Examples**::
+
+ ex1"""),
+ get_schema_doc(full_schema))
+
+ def test_get_schema_doc_raises_key_errors(self):
+ """get_schema_doc raises KeyErrors on missing keys."""
+ for key in self.required_schema:
+ invalid_schema = copy(self.required_schema)
+ invalid_schema.pop(key)
+ with self.assertRaises(KeyError) as context_mgr:
+ get_schema_doc(invalid_schema)
+ self.assertIn(key, str(context_mgr.exception))
+
+
+class MainTest(CiTestCase):
+
+ def test_main_missing_args(self):
+ """Main exits non-zero and reports an error on missing parameters."""
+ with mock.patch('sys.argv', ['mycmd']):
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
+ self.assertEqual(1, main(), 'Expected non-zero exit code')
+ self.assertEqual(
+ 'Expected either --config-file argument or --doc\n',
+ m_stderr.getvalue())
+
+ def test_main_prints_docs(self):
+ """When --doc parameter is provided, main generates documentation."""
+ myargs = ['mycmd', '--doc']
+ with mock.patch('sys.argv', myargs):
+ with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
+ self.assertEqual(0, main(), 'Expected 0 exit code')
+ self.assertIn('\nNTP\n---\n', m_stdout.getvalue())
+
+ def test_main_validates_config_file(self):
+ """When --config-file parameter is provided, main validates schema."""
+ myyaml = self.tmp_path('my.yaml')
+ myargs = ['mycmd', '--config-file', myyaml]
+ with open(myyaml, 'wb') as stream:
+ stream.write(b'#cloud-config\nntp:') # shortest ntp schema
+ with mock.patch('sys.argv', myargs):
+ with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
+ self.assertEqual(0, main(), 'Expected 0 exit code')
+ self.assertIn(
+ 'Valid cloud-config file {0}'.format(myyaml), m_stdout.getvalue())
+
+# vi: ts=4 expandtab syntax=python
diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py
index 167ed01e..e49abcc4 100644
--- a/tests/unittests/test_net.py
+++ b/tests/unittests/test_net.py
@@ -1,6 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
from cloudinit import net
+from cloudinit.net import _natural_sort_key
from cloudinit.net import cmdline
from cloudinit.net import eni
from cloudinit.net import netplan
@@ -149,20 +150,19 @@ ONBOOT=yes
TYPE=Ethernet
USERCTL=no
""".lstrip()),
- ('etc/sysconfig/network-scripts/route-eth0',
- """
-# Created by cloud-init on instance boot automatically, do not edit.
-#
-ADDRESS0=0.0.0.0
-GATEWAY0=172.19.3.254
-NETMASK0=0.0.0.0
-""".lstrip()),
('etc/resolv.conf',
"""
; Created by cloud-init on instance boot automatically, do not edit.
;
nameserver 172.19.0.12
""".lstrip()),
+ ('etc/NetworkManager/conf.d/99-cloud-init.conf',
+ """
+# Created by cloud-init on instance boot automatically, do not edit.
+#
+[main]
+dns = none
+""".lstrip()),
('etc/udev/rules.d/70-persistent-net.rules',
"".join(['SUBSYSTEM=="net", ACTION=="add", DRIVERS=="?*", ',
'ATTR{address}=="fa:16:3e:ed:9a:59", NAME="eth0"\n']))]
@@ -224,6 +224,13 @@ USERCTL=no
;
nameserver 172.19.0.12
""".lstrip()),
+ ('etc/NetworkManager/conf.d/99-cloud-init.conf',
+ """
+# Created by cloud-init on instance boot automatically, do not edit.
+#
+[main]
+dns = none
+""".lstrip()),
('etc/udev/rules.d/70-persistent-net.rules',
"".join(['SUBSYSTEM=="net", ACTION=="add", DRIVERS=="?*", ',
'ATTR{address}=="fa:16:3e:ed:9a:59", NAME="eth0"\n']))]
@@ -291,7 +298,7 @@ DEVICE=eth0
GATEWAY=172.19.3.254
HWADDR=fa:16:3e:ed:9a:59
IPADDR=172.19.1.34
-IPV6ADDR=2001:DB8::10
+IPV6ADDR=2001:DB8::10/64
IPV6ADDR_SECONDARIES="2001:DB9::10/64 2001:DB10::10/64"
IPV6INIT=yes
IPV6_DEFAULTGW=2001:DB8::1
@@ -307,6 +314,13 @@ USERCTL=no
;
nameserver 172.19.0.12
""".lstrip()),
+ ('etc/NetworkManager/conf.d/99-cloud-init.conf',
+ """
+# Created by cloud-init on instance boot automatically, do not edit.
+#
+[main]
+dns = none
+""".lstrip()),
('etc/udev/rules.d/70-persistent-net.rules',
"".join(['SUBSYSTEM=="net", ACTION=="add", DRIVERS=="?*", ',
'ATTR{address}=="fa:16:3e:ed:9a:59", NAME="eth0"\n']))]
@@ -341,17 +355,15 @@ iface lo inet loopback
auto eth0
iface eth0 inet static
- address 1.2.3.12
+ address 1.2.3.12/29
broadcast 1.2.3.15
dns-nameservers 69.9.160.191 69.9.191.4
gateway 1.2.3.9
- netmask 255.255.255.248
auto eth1
iface eth1 inet static
- address 10.248.2.4
+ address 10.248.2.4/29
broadcast 10.248.2.7
- netmask 255.255.255.248
""".lstrip()
NETWORK_CONFIGS = {
@@ -410,6 +422,28 @@ NETWORK_CONFIGS = {
via: 65.61.151.37
set-name: eth99
""").rstrip(' '),
+ 'expected_sysconfig': {
+ 'ifcfg-eth1': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth1
+ HWADDR=cf:d6:af:48:e8:80
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth99': textwrap.dedent("""\
+ BOOTPROTO=dhcp
+ DEFROUTE=yes
+ DEVICE=eth99
+ GATEWAY=65.61.151.37
+ HWADDR=c0:d6:9f:2c:e8:80
+ IPADDR=192.168.21.3
+ NETMASK=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ },
'yaml': textwrap.dedent("""
version: 1
config:
@@ -470,6 +504,62 @@ NETWORK_CONFIGS = {
- {'type': 'dhcp6'}
""").rstrip(' '),
},
+ 'v4_and_v6_static': {
+ 'expected_eni': textwrap.dedent("""\
+ auto lo
+ iface lo inet loopback
+
+ auto iface0
+ iface iface0 inet static
+ address 192.168.14.2/24
+ mtu 9000
+
+ # control-alias iface0
+ iface iface0 inet6 static
+ address 2001:1::1/64
+ mtu 1500
+ """).rstrip(' '),
+ 'expected_netplan': textwrap.dedent("""
+ network:
+ version: 2
+ ethernets:
+ iface0:
+ addresses:
+ - 192.168.14.2/24
+ - 2001:1::1/64
+ mtu: 9000
+ mtu6: 1500
+ """).rstrip(' '),
+ 'yaml': textwrap.dedent("""\
+ version: 1
+ config:
+ - type: 'physical'
+ name: 'iface0'
+ subnets:
+ - type: static
+ address: 192.168.14.2/24
+ mtu: 9000
+ - type: static
+ address: 2001:1::1/64
+ mtu: 1500
+ """).rstrip(' '),
+ 'expected_sysconfig': {
+ 'ifcfg-iface0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=iface0
+ IPADDR=192.168.14.2
+ IPV6ADDR=2001:1::1/64
+ IPV6INIT=yes
+ NETMASK=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no
+ MTU=9000
+ IPV6_MTU=1500
+ """),
+ },
+ },
'all': {
'expected_eni': ("""\
auto lo
@@ -511,12 +601,26 @@ iface bond0 inet6 dhcp
auto br0
iface br0 inet static
address 192.168.14.2/24
+ bridge_ageing 250
+ bridge_bridgeprio 22
+ bridge_fd 1
+ bridge_gcint 2
+ bridge_hello 1
+ bridge_maxage 10
+ bridge_pathcost eth3 50
+ bridge_pathcost eth4 75
+ bridge_portprio eth3 28
+ bridge_portprio eth4 14
bridge_ports eth3 eth4
bridge_stp off
+ bridge_waitport 1 eth3
+ bridge_waitport 2 eth4
# control-alias br0
iface br0 inet6 static
address 2001:1::1/64
+ post-up route add -A inet6 default gw 2001:4800:78ff:1b::1 || true
+ pre-down route del -A inet6 default gw 2001:4800:78ff:1b::1 || true
auto bond0.200
iface bond0.200 inet dhcp
@@ -642,6 +746,18 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
interfaces:
- eth3
- eth4
+ parameters:
+ ageing-time: 250
+ forward-delay: 1
+ hello-time: 1
+ max-age: 10
+ path-cost:
+ eth3: 50
+ eth4: 75
+ priority: 22
+ routes:
+ - to: ::/0
+ via: 2001:4800:78ff:1b::1
vlans:
bond0.200:
dhcp4: true
@@ -664,6 +780,119 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
- sacchromyces.maas
- brettanomyces.maas
""").rstrip(' '),
+ 'expected_sysconfig': {
+ 'ifcfg-bond0': textwrap.dedent("""\
+ BONDING_MASTER=yes
+ BONDING_OPTS="mode=active-backup """
+ """xmit_hash_policy=layer3+4 """
+ """miimon=100"
+ BONDING_SLAVE0=eth1
+ BONDING_SLAVE1=eth2
+ BOOTPROTO=dhcp
+ DEVICE=bond0
+ DHCPV6C=yes
+ IPV6INIT=yes
+ MACADDR=aa:bb:cc:dd:ee:ff
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Bond
+ USERCTL=no"""),
+ 'ifcfg-bond0.200': textwrap.dedent("""\
+ BOOTPROTO=dhcp
+ DEVICE=bond0.200
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ PHYSDEV=bond0
+ TYPE=Ethernet
+ USERCTL=no
+ VLAN=yes"""),
+ 'ifcfg-br0': textwrap.dedent("""\
+ AGEING=250
+ BOOTPROTO=none
+ DEFROUTE=yes
+ DEVICE=br0
+ IPADDR=192.168.14.2
+ IPV6ADDR=2001:1::1/64
+ IPV6INIT=yes
+ IPV6_DEFAULTGW=2001:4800:78ff:1b::1
+ NETMASK=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ PRIO=22
+ STP=off
+ TYPE=Bridge
+ USERCTL=no"""),
+ 'ifcfg-eth0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth0
+ HWADDR=c0:d6:9f:2c:e8:80
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth0.101': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEFROUTE=yes
+ DEVICE=eth0.101
+ GATEWAY=192.168.0.1
+ IPADDR=192.168.0.2
+ IPADDR1=192.168.2.10
+ MTU=1500
+ NETMASK=255.255.255.0
+ NETMASK1=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ PHYSDEV=eth0
+ TYPE=Ethernet
+ USERCTL=no
+ VLAN=yes"""),
+ 'ifcfg-eth1': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth1
+ HWADDR=aa:d6:9f:2c:e8:80
+ MASTER=bond0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ SLAVE=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth2': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth2
+ HWADDR=c0:bb:9f:2c:e8:80
+ MASTER=bond0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ SLAVE=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth3': textwrap.dedent("""\
+ BOOTPROTO=none
+ BRIDGE=br0
+ DEVICE=eth3
+ HWADDR=66:bb:9f:2c:e8:80
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth4': textwrap.dedent("""\
+ BOOTPROTO=none
+ BRIDGE=br0
+ DEVICE=eth4
+ HWADDR=98:bb:9f:2c:e8:80
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-eth5': textwrap.dedent("""\
+ BOOTPROTO=dhcp
+ DEVICE=eth5
+ HWADDR=98:bb:9f:2c:e8:8a
+ NM_CONTROLLED=no
+ ONBOOT=no
+ TYPE=Ethernet
+ USERCTL=no""")
+ },
'yaml': textwrap.dedent("""
version: 1
config:
@@ -752,14 +981,32 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
forwarding: 1
# basically anything in /proc/sys/net/ipv6/conf/.../
params:
- bridge_stp: 'off'
- bridge_fd: 0
+ bridge_ageing: 250
+ bridge_bridgeprio: 22
+ bridge_fd: 1
+ bridge_gcint: 2
+ bridge_hello: 1
+ bridge_maxage: 10
bridge_maxwait: 0
+ bridge_pathcost:
+ - eth3 50
+ - eth4 75
+ bridge_portprio:
+ - eth3 28
+ - eth4 14
+ bridge_stp: 'off'
+ bridge_waitport:
+ - 1 eth3
+ - 2 eth4
subnets:
- type: static
address: 192.168.14.2/24
- type: static
address: 2001:1::1/64 # default to /64
+ routes:
+ - gateway: 2001:4800:78ff:1b::1
+ netmask: '::'
+ network: '::'
# A global nameserver.
- type: nameserver
address: 8.8.8.8
@@ -778,9 +1025,308 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
gateway: 11.0.0.1
metric: 3
""").lstrip(),
- }
+ },
+ 'bond': {
+ 'yaml': textwrap.dedent("""
+ version: 1
+ config:
+ - type: physical
+ name: bond0s0
+ mac_address: "aa:bb:cc:dd:e8:00"
+ - type: physical
+ name: bond0s1
+ mac_address: "aa:bb:cc:dd:e8:01"
+ - type: bond
+ name: bond0
+ mac_address: "aa:bb:cc:dd:e8:ff"
+ bond_interfaces:
+ - bond0s0
+ - bond0s1
+ params:
+ bond-mode: active-backup
+ bond_miimon: 100
+ bond-xmit-hash-policy: "layer3+4"
+ subnets:
+ - type: static
+ address: 192.168.0.2/24
+ gateway: 192.168.0.1
+ routes:
+ - gateway: 192.168.0.3
+ netmask: 255.255.255.0
+ network: 10.1.3.0
+ - type: static
+ address: 192.168.1.2/24
+ - type: static
+ address: 2001:1::1/92
+ """),
+ 'expected_sysconfig': {
+ 'ifcfg-bond0': textwrap.dedent("""\
+ BONDING_MASTER=yes
+ BONDING_OPTS="mode=active-backup xmit_hash_policy=layer3+4 miimon=100"
+ BONDING_SLAVE0=bond0s0
+ BONDING_SLAVE1=bond0s1
+ BOOTPROTO=none
+ DEFROUTE=yes
+ DEVICE=bond0
+ GATEWAY=192.168.0.1
+ MACADDR=aa:bb:cc:dd:e8:ff
+ IPADDR=192.168.0.2
+ IPADDR1=192.168.1.2
+ IPV6ADDR=2001:1::1/92
+ IPV6INIT=yes
+ NETMASK=255.255.255.0
+ NETMASK1=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Bond
+ USERCTL=no
+ """),
+ 'ifcfg-bond0s0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=bond0s0
+ HWADDR=aa:bb:cc:dd:e8:00
+ MASTER=bond0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ SLAVE=yes
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ 'route6-bond0': textwrap.dedent("""\
+ """),
+ 'route-bond0': textwrap.dedent("""\
+ ADDRESS0=10.1.3.0
+ GATEWAY0=192.168.0.3
+ NETMASK0=255.255.255.0
+ """),
+ 'ifcfg-bond0s1': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=bond0s1
+ HWADDR=aa:bb:cc:dd:e8:01
+ MASTER=bond0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ SLAVE=yes
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ },
+ },
+ 'vlan': {
+ 'yaml': textwrap.dedent("""
+ version: 1
+ config:
+ - type: physical
+ name: en0
+ mac_address: "aa:bb:cc:dd:e8:00"
+ - type: vlan
+ name: en0.99
+ vlan_link: en0
+ vlan_id: 99
+ subnets:
+ - type: static
+ address: '192.168.2.2/24'
+ - type: static
+ address: '192.168.1.2/24'
+ gateway: 192.168.1.1
+ - type: static
+ address: 2001:1::bbbb/96
+ routes:
+ - gateway: 2001:1::1
+ netmask: '::'
+ network: '::'
+ """),
+ 'expected_sysconfig': {
+ 'ifcfg-en0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=en0
+ HWADDR=aa:bb:cc:dd:e8:00
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no"""),
+ 'ifcfg-en0.99': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEFROUTE=yes
+ DEVICE=en0.99
+ GATEWAY=192.168.1.1
+ IPADDR=192.168.2.2
+ IPADDR1=192.168.1.2
+ IPV6ADDR=2001:1::bbbb/96
+ IPV6INIT=yes
+ IPV6_DEFAULTGW=2001:1::1
+ NETMASK=255.255.255.0
+ NETMASK1=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ PHYSDEV=en0
+ TYPE=Ethernet
+ USERCTL=no
+ VLAN=yes"""),
+ },
+ },
+ 'bridge': {
+ 'yaml': textwrap.dedent("""
+ version: 1
+ config:
+ - type: physical
+ name: eth0
+ mac_address: "52:54:00:12:34:00"
+ subnets:
+ - type: static
+ address: 2001:1::100/96
+ - type: physical
+ name: eth1
+ mac_address: "52:54:00:12:34:01"
+ subnets:
+ - type: static
+ address: 2001:1::101/96
+ - type: bridge
+ name: br0
+ bridge_interfaces:
+ - eth0
+ - eth1
+ params:
+ bridge_stp: 'off'
+ bridge_bridgeprio: 22
+ subnets:
+ - type: static
+ address: 192.168.2.2/24"""),
+ 'expected_sysconfig': {
+ 'ifcfg-br0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=br0
+ IPADDR=192.168.2.2
+ NETMASK=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ PRIO=22
+ STP=off
+ TYPE=Bridge
+ USERCTL=no
+ """),
+ 'ifcfg-eth0': textwrap.dedent("""\
+ BOOTPROTO=none
+ BRIDGE=br0
+ DEVICE=eth0
+ HWADDR=52:54:00:12:34:00
+ IPV6ADDR=2001:1::100/96
+ IPV6INIT=yes
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ 'ifcfg-eth1': textwrap.dedent("""\
+ BOOTPROTO=none
+ BRIDGE=br0
+ DEVICE=eth1
+ HWADDR=52:54:00:12:34:01
+ IPV6ADDR=2001:1::101/96
+ IPV6INIT=yes
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ },
+ },
+ 'manual': {
+ 'yaml': textwrap.dedent("""
+ version: 1
+ config:
+ - type: physical
+ name: eth0
+ mac_address: "52:54:00:12:34:00"
+ subnets:
+ - type: static
+ address: 192.168.1.2/24
+ control: manual
+ - type: physical
+ name: eth1
+ mtu: 1480
+ mac_address: "52:54:00:12:34:aa"
+ subnets:
+ - type: manual
+ - type: physical
+ name: eth2
+ mac_address: "52:54:00:12:34:ff"
+ subnets:
+ - type: manual
+ control: manual
+ """),
+ 'expected_eni': textwrap.dedent("""\
+ auto lo
+ iface lo inet loopback
+
+ # control-manual eth0
+ iface eth0 inet static
+ address 192.168.1.2/24
+
+ auto eth1
+ iface eth1 inet manual
+ mtu 1480
+
+ # control-manual eth2
+ iface eth2 inet manual
+ """),
+ 'expected_netplan': textwrap.dedent("""\
+
+ network:
+ version: 2
+ ethernets:
+ eth0:
+ addresses:
+ - 192.168.1.2/24
+ match:
+ macaddress: '52:54:00:12:34:00'
+ set-name: eth0
+ eth1:
+ match:
+ macaddress: 52:54:00:12:34:aa
+ mtu: 1480
+ set-name: eth1
+ eth2:
+ match:
+ macaddress: 52:54:00:12:34:ff
+ set-name: eth2
+ """),
+ 'expected_sysconfig': {
+ 'ifcfg-eth0': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth0
+ HWADDR=52:54:00:12:34:00
+ IPADDR=192.168.1.2
+ NETMASK=255.255.255.0
+ NM_CONTROLLED=no
+ ONBOOT=no
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ 'ifcfg-eth1': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth1
+ HWADDR=52:54:00:12:34:aa
+ MTU=1480
+ NM_CONTROLLED=no
+ ONBOOT=yes
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ 'ifcfg-eth2': textwrap.dedent("""\
+ BOOTPROTO=none
+ DEVICE=eth2
+ HWADDR=52:54:00:12:34:ff
+ NM_CONTROLLED=no
+ ONBOOT=no
+ TYPE=Ethernet
+ USERCTL=no
+ """),
+ },
+ },
}
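+# Each NETWORK_CONFIGS entry above pairs a 'yaml' network config with the
+# renderer output expected from it ('expected_eni', 'expected_netplan',
+# 'expected_sysconfig'); the round-trip test classes below look entries
+# up by name.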
+
CONFIG_V1_EXPLICIT_LOOPBACK = {
'version': 1,
'config': [{'name': 'eth0', 'type': 'physical',
@@ -790,39 +1336,231 @@ CONFIG_V1_EXPLICIT_LOOPBACK = {
]}
-def _setup_test(tmp_dir, mock_get_devicelist, mock_read_sys_net,
- mock_sys_dev_path):
- mock_get_devicelist.return_value = ['eth1000']
- dev_characteristics = {
- 'eth1000': {
- "bridge": False,
- "carrier": False,
- "dormant": False,
- "operstate": "down",
- "address": "07-1C-C6-75-A4-BE",
- }
+CONFIG_V1_SIMPLE_SUBNET = {
+ 'version': 1,
+ 'config': [{'mac_address': '52:54:00:12:34:00',
+ 'name': 'interface0',
+ 'subnets': [{'address': '10.0.2.15',
+ 'gateway': '10.0.2.2',
+ 'netmask': '255.255.255.0',
+ 'type': 'static'}],
+ 'type': 'physical'}]}
+
+
+DEFAULT_DEV_ATTRS = {
+ 'eth1000': {
+ "bridge": False,
+ "carrier": False,
+ "dormant": False,
+ "operstate": "down",
+ "address": "07-1C-C6-75-A4-BE",
+ "device/driver": None,
+ "device/device": None,
}
+}
+
+
+def _setup_test(tmp_dir, mock_get_devicelist, mock_read_sys_net,
+ mock_sys_dev_path, dev_attrs=None):
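+    """Populate a fake sysfs tree under tmp_dir and point the mocked
+    get_devicelist/read_sys_net/sys_dev_path helpers at dev_attrs
+    (DEFAULT_DEV_ATTRS, a single 'eth1000' device that is down, unless
+    a dev_attrs dict is passed in)."""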
+ if not dev_attrs:
+ dev_attrs = DEFAULT_DEV_ATTRS
+
+ mock_get_devicelist.return_value = dev_attrs.keys()
def fake_read(devname, path, translate=None,
on_enoent=None, on_keyerror=None,
on_einval=None):
- return dev_characteristics[devname][path]
+ return dev_attrs[devname][path]
mock_read_sys_net.side_effect = fake_read
def sys_dev_path(devname, path=""):
- return tmp_dir + devname + "/" + path
+ return tmp_dir + "/" + devname + "/" + path
- for dev in dev_characteristics:
+ for dev in dev_attrs:
os.makedirs(os.path.join(tmp_dir, dev))
with open(os.path.join(tmp_dir, dev, 'operstate'), 'w') as fh:
- fh.write("down")
+ fh.write(dev_attrs[dev]['operstate'])
+ os.makedirs(os.path.join(tmp_dir, dev, "device"))
+ for key in ['device/driver']:
+ if key in dev_attrs[dev] and dev_attrs[dev][key]:
+ target = dev_attrs[dev][key]
+ link = os.path.join(tmp_dir, dev, key)
+ print('symlink %s -> %s' % (link, target))
+ os.symlink(target, link)
mock_sys_dev_path.side_effect = sys_dev_path
+class TestGenerateFallbackConfig(CiTestCase):
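+    """Tests for net.generate_fallback_config(config_driver=True): the
+    chosen NIC is rendered as dhcp in the eni output and the udev
+    netrules match on DRIVERS== as well as the mac address."""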
+
+ @mock.patch("cloudinit.net.sys_dev_path")
+ @mock.patch("cloudinit.net.read_sys_net")
+ @mock.patch("cloudinit.net.get_devicelist")
+ def test_device_driver(self, mock_get_devicelist, mock_read_sys_net,
+ mock_sys_dev_path):
+ devices = {
+ 'eth0': {
+ 'bridge': False, 'carrier': False, 'dormant': False,
+ 'operstate': 'down', 'address': '00:11:22:33:44:55',
+ 'device/driver': 'hv_netsvc', 'device/device': '0x3'},
+ 'eth1': {
+ 'bridge': False, 'carrier': False, 'dormant': False,
+ 'operstate': 'down', 'address': '00:11:22:33:44:55',
+ 'device/driver': 'mlx4_core', 'device/device': '0x7'},
+ }
+
+ tmp_dir = self.tmp_dir()
+ _setup_test(tmp_dir, mock_get_devicelist,
+ mock_read_sys_net, mock_sys_dev_path,
+ dev_attrs=devices)
+
+ network_cfg = net.generate_fallback_config(config_driver=True)
+ ns = network_state.parse_net_config_data(network_cfg,
+ skip_broken=False)
+
+ render_dir = os.path.join(tmp_dir, "render")
+ os.makedirs(render_dir)
+
+ # pass netrules_path so the eni renderer writes the udev rules file
+ renderer = eni.Renderer(
+ {'eni_path': 'interfaces', 'netrules_path': 'netrules'})
+ renderer.render_network_state(ns, render_dir)
+
+ self.assertTrue(os.path.exists(os.path.join(render_dir,
+ 'interfaces')))
+ with open(os.path.join(render_dir, 'interfaces')) as fh:
+ contents = fh.read()
+ print(contents)
+ expected = """
+auto lo
+iface lo inet loopback
+
+auto eth0
+iface eth0 inet dhcp
+"""
+ self.assertEqual(expected.lstrip(), contents.lstrip())
+
+ self.assertTrue(os.path.exists(os.path.join(render_dir, 'netrules')))
+ with open(os.path.join(render_dir, 'netrules')) as fh:
+ contents = fh.read()
+ print(contents)
+ expected_rule = [
+ 'SUBSYSTEM=="net"',
+ 'ACTION=="add"',
+ 'DRIVERS=="hv_netsvc"',
+ 'ATTR{address}=="00:11:22:33:44:55"',
+ 'NAME="eth0"',
+ ]
+ self.assertEqual(", ".join(expected_rule) + '\n', contents.lstrip())
+
+ @mock.patch("cloudinit.net.sys_dev_path")
+ @mock.patch("cloudinit.net.read_sys_net")
+ @mock.patch("cloudinit.net.get_devicelist")
+ def test_device_driver_blacklist(self, mock_get_devicelist,
+ mock_read_sys_net, mock_sys_dev_path):
+ devices = {
+ 'eth1': {
+ 'bridge': False, 'carrier': False, 'dormant': False,
+ 'operstate': 'down', 'address': '00:11:22:33:44:55',
+ 'device/driver': 'hv_netsvc', 'device/device': '0x3'},
+ 'eth0': {
+ 'bridge': False, 'carrier': False, 'dormant': False,
+ 'operstate': 'down', 'address': '00:11:22:33:44:55',
+ 'device/driver': 'mlx4_core', 'device/device': '0x7'},
+ }
+
+ tmp_dir = self.tmp_dir()
+ _setup_test(tmp_dir, mock_get_devicelist,
+ mock_read_sys_net, mock_sys_dev_path,
+ dev_attrs=devices)
+
+ blacklist = ['mlx4_core']
+ network_cfg = net.generate_fallback_config(blacklist_drivers=blacklist,
+ config_driver=True)
+ ns = network_state.parse_net_config_data(network_cfg,
+ skip_broken=False)
+
+ render_dir = os.path.join(tmp_dir, "render")
+ os.makedirs(render_dir)
+
+ # pass netrules_path so the eni renderer writes the udev rules file
+ renderer = eni.Renderer(
+ {'eni_path': 'interfaces', 'netrules_path': 'netrules'})
+ renderer.render_network_state(ns, render_dir)
+
+ self.assertTrue(os.path.exists(os.path.join(render_dir,
+ 'interfaces')))
+ with open(os.path.join(render_dir, 'interfaces')) as fh:
+ contents = fh.read()
+ print(contents)
+ expected = """
+auto lo
+iface lo inet loopback
+
+auto eth1
+iface eth1 inet dhcp
+"""
+ self.assertEqual(expected.lstrip(), contents.lstrip())
+
+ self.assertTrue(os.path.exists(os.path.join(render_dir, 'netrules')))
+ with open(os.path.join(render_dir, 'netrules')) as fh:
+ contents = fh.read()
+ print(contents)
+ expected_rule = [
+ 'SUBSYSTEM=="net"',
+ 'ACTION=="add"',
+ 'DRIVERS=="hv_netsvc"',
+ 'ATTR{address}=="00:11:22:33:44:55"',
+ 'NAME="eth1"',
+ ]
+ self.assertEqual(", ".join(expected_rule) + '\n', contents.lstrip())
+
+
class TestSysConfigRendering(CiTestCase):
+ scripts_dir = '/etc/sysconfig/network-scripts'
+ header = ('# Created by cloud-init on instance boot automatically, '
+ 'do not edit.\n#\n')
+
+ def _render_and_read(self, network_config=None, state=None, dir=None):
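+        """Render network_config (or a pre-parsed state) with the
+        sysconfig renderer into dir and return the rendered tree as a
+        dict via dir2dict."""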
+ if dir is None:
+ dir = self.tmp_dir()
+
+ if network_config:
+ ns = network_state.parse_net_config_data(network_config)
+ elif state:
+ ns = state
+ else:
+ raise ValueError("Expected data or state, got neither")
+
+ renderer = sysconfig.Renderer()
+ renderer.render_network_state(ns, dir)
+ return dir2dict(dir)
+
+ def _compare_files_to_expected(self, expected, found):
+ orig_maxdiff = self.maxDiff
+ expected_d = dict(
+ (os.path.join(self.scripts_dir, k), util.load_shell_content(v))
+ for k, v in expected.items())
+
+ # only compare the files in scripts_dir
+ scripts_found = dict(
+ (k, util.load_shell_content(v)) for k, v in found.items()
+ if k.startswith(self.scripts_dir))
+ try:
+ self.maxDiff = None
+ self.assertEqual(expected_d, scripts_found)
+ finally:
+ self.maxDiff = orig_maxdiff
+
+ def _assert_headers(self, found):
+ missing = [f for f in found
+ if (f.startswith(self.scripts_dir) and
+ not found[f].startswith(self.header))]
+ if missing:
+ raise AssertionError("Missing headers in: %s" % missing)
+
@mock.patch("cloudinit.net.sys_dev_path")
@mock.patch("cloudinit.net.read_sys_net")
@mock.patch("cloudinit.net.get_devicelist")
@@ -950,6 +1688,32 @@ USERCTL=no
with open(os.path.join(render_dir, fn)) as fh:
self.assertEqual(expected_content, fh.read())
+ def test_network_config_v1_samples(self):
+ ns = network_state.parse_net_config_data(CONFIG_V1_SIMPLE_SUBNET)
+ render_dir = self.tmp_path("render")
+ os.makedirs(render_dir)
+ renderer = sysconfig.Renderer()
+ renderer.render_network_state(ns, render_dir)
+ found = dir2dict(render_dir)
+ nspath = '/etc/sysconfig/network-scripts/'
+ self.assertNotIn(nspath + 'ifcfg-lo', found.keys())
+ expected = """\
+# Created by cloud-init on instance boot automatically, do not edit.
+#
+BOOTPROTO=none
+DEFROUTE=yes
+DEVICE=interface0
+GATEWAY=10.0.2.2
+HWADDR=52:54:00:12:34:00
+IPADDR=10.0.2.15
+NETMASK=255.255.255.0
+NM_CONTROLLED=no
+ONBOOT=yes
+TYPE=Ethernet
+USERCTL=no
+"""
+ self.assertEqual(expected, found[nspath + 'ifcfg-interface0'])
+
def test_config_with_explicit_loopback(self):
ns = network_state.parse_net_config_data(CONFIG_V1_EXPLICIT_LOOPBACK)
render_dir = self.tmp_path("render")
@@ -971,6 +1735,48 @@ USERCTL=no
"""
self.assertEqual(expected, found[nspath + 'ifcfg-eth0'])
+ def test_bond_config(self):
+ entry = NETWORK_CONFIGS['bond']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_vlan_config(self):
+ entry = NETWORK_CONFIGS['vlan']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_bridge_config(self):
+ entry = NETWORK_CONFIGS['bridge']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_manual_config(self):
+ entry = NETWORK_CONFIGS['manual']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_all_config(self):
+ entry = NETWORK_CONFIGS['all']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_small_config(self):
+ entry = NETWORK_CONFIGS['small']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
+ def test_v4_and_v6_static_config(self):
+ entry = NETWORK_CONFIGS['v4_and_v6_static']
+ found = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self._compare_files_to_expected(entry['expected_sysconfig'], found)
+ self._assert_headers(found)
+
class TestEniNetRendering(CiTestCase):
@@ -992,9 +1798,7 @@ class TestEniNetRendering(CiTestCase):
os.makedirs(render_dir)
renderer = eni.Renderer(
- {'links_path_prefix': None,
- 'eni_path': 'interfaces', 'netrules_path': None,
- })
+ {'eni_path': 'interfaces', 'netrules_path': None})
renderer.render_network_state(ns, render_dir)
self.assertTrue(os.path.exists(os.path.join(render_dir,
@@ -1366,6 +2170,13 @@ class TestNetplanRoundTrip(CiTestCase):
entry['expected_netplan'].splitlines(),
files['/etc/netplan/50-cloud-init.yaml'].splitlines())
+ def testsimple_render_v4_and_v6_static(self):
+ entry = NETWORK_CONFIGS['v4_and_v6_static']
+ files = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self.assertEqual(
+ entry['expected_netplan'].splitlines(),
+ files['/etc/netplan/50-cloud-init.yaml'].splitlines())
+
def testsimple_render_all(self):
entry = NETWORK_CONFIGS['all']
files = self._render_and_read(network_config=yaml.load(entry['yaml']))
@@ -1373,10 +2184,17 @@ class TestNetplanRoundTrip(CiTestCase):
entry['expected_netplan'].splitlines(),
files['/etc/netplan/50-cloud-init.yaml'].splitlines())
+ def testsimple_render_manual(self):
+ entry = NETWORK_CONFIGS['manual']
+ files = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self.assertEqual(
+ entry['expected_netplan'].splitlines(),
+ files['/etc/netplan/50-cloud-init.yaml'].splitlines())
+
class TestEniRoundTrip(CiTestCase):
def _render_and_read(self, network_config=None, state=None, eni_path=None,
- links_prefix=None, netrules_path=None, dir=None):
+ netrules_path=None, dir=None):
if dir is None:
dir = self.tmp_dir()
@@ -1391,8 +2209,7 @@ class TestEniRoundTrip(CiTestCase):
eni_path = 'etc/network/interfaces'
renderer = eni.Renderer(
- config={'eni_path': eni_path, 'links_path_prefix': links_prefix,
- 'netrules_path': netrules_path})
+ config={'eni_path': eni_path, 'netrules_path': netrules_path})
renderer.render_network_state(ns, dir)
return dir2dict(dir)
@@ -1425,6 +2242,27 @@ class TestEniRoundTrip(CiTestCase):
entry['expected_eni'].splitlines(),
files['/etc/network/interfaces'].splitlines())
+ def testsimple_render_v4_and_v6_static(self):
+ entry = NETWORK_CONFIGS['v4_and_v6_static']
+ files = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self.assertEqual(
+ entry['expected_eni'].splitlines(),
+ files['/etc/network/interfaces'].splitlines())
+
+ def testsimple_render_manual(self):
+ """Test rendering of 'manual' for 'type' and 'control'.
+
+ 'type: manual' in a subnet is odd, but it is the way that was used
+ to declare that a network device should get a mtu set on it even
+ if there were no addresses to configure. Also strange is the fact
+ that in order to apply that MTU the ifupdown device must be set
+ to 'auto', or the MTU would not be set."""
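+        # NETWORK_CONFIGS['manual']['expected_eni'] above reflects this:
+        # eth1 gets an 'auto' stanza plus 'mtu 1480', while the
+        # control-manual interfaces are rendered without 'auto'.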
+ entry = NETWORK_CONFIGS['manual']
+ files = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ self.assertEqual(
+ entry['expected_eni'].splitlines(),
+ files['/etc/network/interfaces'].splitlines())
+
def test_routes_rendered(self):
# as reported in bug 1649652
conf = [
@@ -1516,6 +2354,118 @@ class TestNetRenderers(CiTestCase):
priority=['sysconfig', 'eni'])
+class TestGetInterfaces(CiTestCase):
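+    # Fixture notes: enp0s1, bond1, bond1.101 and eth1 all share one mac,
+    # bridge1 and bridge1-nic share another, and tun0 has no mac at all,
+    # so the tests can exercise duplicate-mac, stolen-mac, no-mac and
+    # bridge filtering in net.get_interfaces().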
+ _data = {'bonds': ['bond1'],
+ 'bridges': ['bridge1'],
+ 'vlans': ['bond1.101'],
+ 'own_macs': ['enp0s1', 'enp0s2', 'bridge1-nic', 'bridge1',
+ 'bond1.101', 'lo', 'eth1'],
+ 'macs': {'enp0s1': 'aa:aa:aa:aa:aa:01',
+ 'enp0s2': 'aa:aa:aa:aa:aa:02',
+ 'bond1': 'aa:aa:aa:aa:aa:01',
+ 'bond1.101': 'aa:aa:aa:aa:aa:01',
+ 'bridge1': 'aa:aa:aa:aa:aa:03',
+ 'bridge1-nic': 'aa:aa:aa:aa:aa:03',
+ 'lo': '00:00:00:00:00:00',
+ 'greptap0': '00:00:00:00:00:00',
+ 'eth1': 'aa:aa:aa:aa:aa:01',
+ 'tun0': None},
+ 'drivers': {'enp0s1': 'virtio_net',
+ 'enp0s2': 'e1000',
+ 'bond1': None,
+ 'bond1.101': None,
+ 'bridge1': None,
+ 'bridge1-nic': None,
+ 'lo': None,
+ 'greptap0': None,
+ 'eth1': 'mlx4_core',
+ 'tun0': None}}
+ data = {}
+
+ def _se_get_devicelist(self):
+ return list(self.data['devices'])
+
+ def _se_device_driver(self, name):
+ return self.data['drivers'][name]
+
+ def _se_device_devid(self, name):
+ return '0x%s' % sorted(list(self.data['drivers'].keys())).index(name)
+
+ def _se_get_interface_mac(self, name):
+ return self.data['macs'][name]
+
+ def _se_is_bridge(self, name):
+ return name in self.data['bridges']
+
+ def _se_is_vlan(self, name):
+ return name in self.data['vlans']
+
+ def _se_interface_has_own_mac(self, name):
+ return name in self.data['own_macs']
+
+ def _mock_setup(self):
+ self.data = copy.deepcopy(self._data)
+ self.data['devices'] = set(list(self.data['macs'].keys()))
+ mocks = ('get_devicelist', 'get_interface_mac', 'is_bridge',
+ 'interface_has_own_mac', 'is_vlan', 'device_driver',
+ 'device_devid')
+ self.mocks = {}
+ for n in mocks:
+ m = mock.patch('cloudinit.net.' + n,
+ side_effect=getattr(self, '_se_' + n))
+ self.addCleanup(m.stop)
+ self.mocks[n] = m.start()
+
+ def test_gi_includes_duplicate_macs(self):
+ self._mock_setup()
+ ret = net.get_interfaces()
+
+ self.assertIn('enp0s1', self._se_get_devicelist())
+ self.assertIn('eth1', self._se_get_devicelist())
+ found = [ent for ent in ret if 'aa:aa:aa:aa:aa:01' in ent]
+ self.assertEqual(len(found), 2)
+
+ def test_gi_excludes_any_without_mac_address(self):
+ self._mock_setup()
+ ret = net.get_interfaces()
+
+ self.assertIn('tun0', self._se_get_devicelist())
+ found = [ent for ent in ret if 'tun0' in ent]
+ self.assertEqual(len(found), 0)
+
+ def test_gi_excludes_stolen_macs(self):
+ self._mock_setup()
+ ret = net.get_interfaces()
+ self.mocks['interface_has_own_mac'].assert_has_calls(
+ [mock.call('enp0s1'), mock.call('bond1')], any_order=True)
+ expected = [
+ ('enp0s2', 'aa:aa:aa:aa:aa:02', 'e1000', '0x5'),
+ ('enp0s1', 'aa:aa:aa:aa:aa:01', 'virtio_net', '0x4'),
+ ('eth1', 'aa:aa:aa:aa:aa:01', 'mlx4_core', '0x6'),
+ ('lo', '00:00:00:00:00:00', None, '0x8'),
+ ('bridge1-nic', 'aa:aa:aa:aa:aa:03', None, '0x3'),
+ ]
+ self.assertEqual(sorted(expected), sorted(ret))
+
+ def test_gi_excludes_bridges(self):
+ self._mock_setup()
+ # add a device 'b1', make every device report that it has its
+ # "own mac", and mark everything other than 'b1' as a bridge;
+ # then expect b1 to be the only interface returned.
+ self.data['macs']['b1'] = 'aa:aa:aa:aa:aa:b1'
+ self.data['drivers']['b1'] = None
+ self.data['devices'].add('b1')
+ self.data['bonds'] = []
+ self.data['own_macs'] = self.data['devices']
+ self.data['bridges'] = [f for f in self.data['devices'] if f != "b1"]
+ ret = net.get_interfaces()
+ self.assertEqual([('b1', 'aa:aa:aa:aa:aa:b1', None, '0x0')], ret)
+ self.mocks['is_bridge'].assert_has_calls(
+ [mock.call('bridge1'), mock.call('enp0s1'), mock.call('bond1'),
+ mock.call('b1')],
+ any_order=True)
+
+
class TestGetInterfacesByMac(CiTestCase):
_data = {'bonds': ['bond1'],
'bridges': ['bridge1'],
@@ -1627,6 +2577,19 @@ class TestGetInterfacesByMac(CiTestCase):
self.assertEqual('lo', ret[empty_mac])
+class TestInterfacesSorting(CiTestCase):
+
+ def test_natural_order(self):
+ data = ['ens5', 'ens6', 'ens3', 'ens20', 'ens13', 'ens2']
+ self.assertEqual(
+ sorted(data, key=_natural_sort_key),
+ ['ens2', 'ens3', 'ens5', 'ens6', 'ens13', 'ens20'])
+ data2 = ['enp2s0', 'enp2s3', 'enp0s3', 'enp0s13', 'enp0s8', 'enp1s2']
+ self.assertEqual(
+ sorted(data2, key=_natural_sort_key),
+ ['enp0s3', 'enp0s8', 'enp0s13', 'enp1s2', 'enp2s0', 'enp2s3'])
+
+
def _gzip_data(data):
with io.BytesIO() as iobuf:
gzfp = gzip.GzipFile(mode="wb", fileobj=iobuf)
@@ -1634,4 +2597,229 @@ def _gzip_data(data):
gzfp.close()
return iobuf.getvalue()
+
+class TestRenameInterfaces(CiTestCase):
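+    """Tests for net._rename_interfaces using canned 'current_info' and
+    a mocked util.subp; only the generated 'ip link' commands are
+    checked."""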
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_all(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'interface0', 'virtio_net', '0x3'),
+ ('00:11:22:33:44:aa', 'interface2', 'virtio_net', '0x5'),
+ ]
+ current_info = {
+ 'ens3': {
+ 'downable': True,
+ 'device_id': '0x3',
+ 'driver': 'virtio_net',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'ens3',
+ 'up': False},
+ 'ens5': {
+ 'downable': True,
+ 'device_id': '0x5',
+ 'driver': 'virtio_net',
+ 'mac': '00:11:22:33:44:aa',
+ 'name': 'ens5',
+ 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'ens3', 'name', 'interface0'],
+ capture=True),
+ mock.call(['ip', 'link', 'set', 'ens5', 'name', 'interface2'],
+ capture=True),
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_no_driver_no_device_id(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'interface0', None, None),
+ ('00:11:22:33:44:aa', 'interface1', None, None),
+ ]
+ current_info = {
+ 'eth0': {
+ 'downable': True,
+ 'device_id': None,
+ 'driver': None,
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth0',
+ 'up': False},
+ 'eth1': {
+ 'downable': True,
+ 'device_id': None,
+ 'driver': None,
+ 'mac': '00:11:22:33:44:aa',
+ 'name': 'eth1',
+ 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'eth0', 'name', 'interface0'],
+ capture=True),
+ mock.call(['ip', 'link', 'set', 'eth1', 'name', 'interface1'],
+ capture=True),
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_all_bounce(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'interface0', 'virtio_net', '0x3'),
+ ('00:11:22:33:44:aa', 'interface2', 'virtio_net', '0x5'),
+ ]
+ current_info = {
+ 'ens3': {
+ 'downable': True,
+ 'device_id': '0x3',
+ 'driver': 'virtio_net',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'ens3',
+ 'up': True},
+ 'ens5': {
+ 'downable': True,
+ 'device_id': '0x5',
+ 'driver': 'virtio_net',
+ 'mac': '00:11:22:33:44:aa',
+ 'name': 'ens5',
+ 'up': True},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'ens3', 'down'], capture=True),
+ mock.call(['ip', 'link', 'set', 'ens3', 'name', 'interface0'],
+ capture=True),
+ mock.call(['ip', 'link', 'set', 'ens5', 'down'], capture=True),
+ mock.call(['ip', 'link', 'set', 'ens5', 'name', 'interface2'],
+ capture=True),
+ mock.call(['ip', 'link', 'set', 'interface0', 'up'], capture=True),
+ mock.call(['ip', 'link', 'set', 'interface2', 'up'], capture=True)
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_duplicate_macs(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'eth0', 'hv_netsvc', '0x3'),
+ ('00:11:22:33:44:55', 'vf1', 'mlx4_core', '0x5'),
+ ]
+ current_info = {
+ 'eth0': {
+ 'downable': True,
+ 'device_id': '0x3',
+ 'driver': 'hv_netsvc',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth0',
+ 'up': False},
+ 'eth1': {
+ 'downable': True,
+ 'device_id': '0x5',
+ 'driver': 'mlx4_core',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth1',
+ 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'eth1', 'name', 'vf1'],
+ capture=True),
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_duplicate_macs_driver_no_devid(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'eth0', 'hv_netsvc', None),
+ ('00:11:22:33:44:55', 'vf1', 'mlx4_core', None),
+ ]
+ current_info = {
+ 'eth0': {
+ 'downable': True,
+ 'device_id': '0x3',
+ 'driver': 'hv_netsvc',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth0',
+ 'up': False},
+ 'eth1': {
+ 'downable': True,
+ 'device_id': '0x5',
+ 'driver': 'mlx4_core',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth1',
+ 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'eth1', 'name', 'vf1'],
+ capture=True),
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_multi_mac_dups(self, mock_subp):
+ renames = [
+ ('00:11:22:33:44:55', 'eth0', 'hv_netsvc', '0x3'),
+ ('00:11:22:33:44:55', 'vf1', 'mlx4_core', '0x5'),
+ ('00:11:22:33:44:55', 'vf2', 'mlx4_core', '0x7'),
+ ]
+ current_info = {
+ 'eth0': {
+ 'downable': True,
+ 'device_id': '0x3',
+ 'driver': 'hv_netsvc',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth0',
+ 'up': False},
+ 'eth1': {
+ 'downable': True,
+ 'device_id': '0x5',
+ 'driver': 'mlx4_core',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth1',
+ 'up': False},
+ 'eth2': {
+ 'downable': True,
+ 'device_id': '0x7',
+ 'driver': 'mlx4_core',
+ 'mac': '00:11:22:33:44:55',
+ 'name': 'eth2',
+ 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+ print(mock_subp.call_args_list)
+ mock_subp.assert_has_calls([
+ mock.call(['ip', 'link', 'set', 'eth1', 'name', 'vf1'],
+ capture=True),
+ mock.call(['ip', 'link', 'set', 'eth2', 'name', 'vf2'],
+ capture=True),
+ ])
+
+ @mock.patch('cloudinit.util.subp')
+ def test_rename_macs_case_insensitive(self, mock_subp):
+ """_rename_interfaces must support upper or lower case macs."""
+ renames = [
+ ('aa:aa:aa:aa:aa:aa', 'en0', None, None),
+ ('BB:BB:BB:BB:BB:BB', 'en1', None, None),
+ ('cc:cc:cc:cc:cc:cc', 'en2', None, None),
+ ('DD:DD:DD:DD:DD:DD', 'en3', None, None),
+ ]
+ current_info = {
+ 'eth0': {'downable': True, 'mac': 'AA:AA:AA:AA:AA:AA',
+ 'name': 'eth0', 'up': False},
+ 'eth1': {'downable': True, 'mac': 'bb:bb:bb:bb:bb:bb',
+ 'name': 'eth1', 'up': False},
+ 'eth2': {'downable': True, 'mac': 'cc:cc:cc:cc:cc:cc',
+ 'name': 'eth2', 'up': False},
+ 'eth3': {'downable': True, 'mac': 'DD:DD:DD:DD:DD:DD',
+ 'name': 'eth3', 'up': False},
+ }
+ net._rename_interfaces(renames, current_info=current_info)
+
+ expected = [
+ mock.call(['ip', 'link', 'set', 'eth%d' % i, 'name', 'en%d' % i],
+ capture=True)
+ for i in range(len(renames))]
+ mock_subp.assert_has_calls(expected)
+
+
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_runs/test_simple_run.py b/tests/unittests/test_runs/test_simple_run.py
index 31324204..55f15b55 100644
--- a/tests/unittests/test_runs/test_simple_run.py
+++ b/tests/unittests/test_runs/test_simple_run.py
@@ -16,24 +16,6 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase):
self.patchOS(root)
self.patchUtils(root)
- def _pp_root(self, root, repatch=True):
- for (dirpath, dirnames, filenames) in os.walk(root):
- print(dirpath)
- for f in filenames:
- joined = os.path.join(dirpath, f)
- if os.path.islink(joined):
- print("f %s - (symlink)" % (f))
- else:
- print("f %s" % (f))
- for d in dirnames:
- joined = os.path.join(dirpath, d)
- if os.path.islink(joined):
- print("d %s - (symlink)" % (d))
- else:
- print("d %s" % (d))
- if repatch:
- self._patchIn(root)
-
def test_none_ds(self):
new_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, new_root)
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 014aa6a3..f38a664c 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -20,6 +20,9 @@ except ImportError:
import mock
+BASH = util.which('bash')
+
+
class FakeSelinux(object):
def __init__(self, match_what):
@@ -362,6 +365,9 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
self.addCleanup(shutil.rmtree, self.new_root)
self.patchOS(self.new_root)
self.patchUtils(self.new_root)
+ p = mock.patch("cloudinit.util.is_container", return_value=False)
+ self.addCleanup(p.stop)
+ self._m_is_container = p.start()
def _create_sysfs_parent_directory(self):
util.ensure_dir(os.path.join('sys', 'class', 'dmi', 'id'))
@@ -450,6 +456,26 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
self._create_sysfs_file(sysfs_key, dmi_value)
self.assertEqual(expected, util.read_dmi_data(dmi_key))
+ def test_container_returns_none(self):
+ """In a container read_dmi_data should always return None."""
+
+ # first verify we get the value if not in container
+ self._m_is_container.return_value = False
+ key, val = ("system-product-name", "my_product")
+ self._create_sysfs_file('product_name', val)
+ self.assertEqual(val, util.read_dmi_data(key))
+
+ # then verify in container returns None
+ self._m_is_container.return_value = True
+ self.assertIsNone(util.read_dmi_data(key))
+
+ def test_container_returns_none_on_unknown(self):
+ """In a container even bogus keys return None."""
+ self._m_is_container.return_value = True
+ self._create_sysfs_file('product_name', "should-be-ignored")
+ self.assertIsNone(util.read_dmi_data("bogus"))
+ self.assertIsNone(util.read_dmi_data("system-product-name"))
+
class TestMultiLog(helpers.FilesystemMockingTestCase):
@@ -544,17 +570,17 @@ class TestReadSeeded(helpers.TestCase):
class TestSubp(helpers.TestCase):
- stdin2err = ['bash', '-c', 'cat >&2']
+ stdin2err = [BASH, '-c', 'cat >&2']
stdin2out = ['cat']
utf8_invalid = b'ab\xaadef'
utf8_valid = b'start \xc3\xa9 end'
utf8_valid_2 = b'd\xc3\xa9j\xc8\xa7'
- printenv = ['bash', '-c', 'for n in "$@"; do echo "$n=${!n}"; done', '--']
+ printenv = [BASH, '-c', 'for n in "$@"; do echo "$n=${!n}"; done', '--']
def printf_cmd(self, *args):
# bash's printf supports \xaa. So does /usr/bin/printf
# but by using bash, we remove dependency on another program.
- return(['bash', '-c', 'printf "$@"', 'printf'] + list(args))
+ return([BASH, '-c', 'printf "$@"', 'printf'] + list(args))
def test_subp_handles_utf8(self):
# The given bytes contain utf-8 accented characters as seen in e.g.
@@ -781,4 +807,20 @@ class TestSystemIsSnappy(helpers.FilesystemMockingTestCase):
self.reRoot(root_d)
self.assertTrue(util.system_is_snappy())
+
+class TestLoadShellContent(helpers.TestCase):
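+    """Tests for util.load_shell_content, which parses KEY=VALUE shell
+    assignments into a dict (as used elsewhere in these tests to compare
+    rendered sysconfig files)."""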
+ def test_comments_handled_correctly(self):
+ """Shell comments should be allowed in the content."""
+ self.assertEqual(
+ {'key1': 'val1', 'key2': 'val2', 'key3': 'val3 #tricky'},
+ util.load_shell_content('\n'.join([
+ "#top of file comment",
+ "key1=val1 #this is a comment",
+ "# second comment",
+ 'key2="val2" # inlin comment'
+ '#badkey=wark',
+ 'key3="val3 #tricky"',
+ ''])))
+
+
# vi: ts=4 expandtab