Diffstat (limited to 'tests')
 169 files changed, 3550 insertions(+), 698 deletions(-)
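Several hunks below change Instance.execute() (in instances/base.py, instances/lxd.py and the new instances/nocloudkvm.py) so that it accepts a plain string as well as an argv list, wrapping strings as ['sh', '-c', command]. A minimal standalone sketch of that wrapping behaviour follows; the helper name _normalize_command is hypothetical and not part of the patch:

    # Illustrative sketch only; _normalize_command is a hypothetical name,
    # not something added by this patch. It mirrors the wrapping done in the
    # updated execute() methods: a string becomes ['sh', '-c', command],
    # while a list is passed through unchanged.
    def _normalize_command(command):
        if isinstance(command, str):
            return ['sh', '-c', command]
        return list(command)

    print(_normalize_command('apt-get update && apt-get install --yes git'))
    # ['sh', '-c', 'apt-get update && apt-get install --yes git']
    print(_normalize_command(['dpkg-query', '-W', 'cloud-init']))
    # ['dpkg-query', '-W', 'cloud-init']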
diff --git a/tests/cloud_tests/__init__.py b/tests/cloud_tests/__init__.py index 07148c12..98c1d6c7 100644 --- a/tests/cloud_tests/__init__.py +++ b/tests/cloud_tests/__init__.py @@ -7,7 +7,7 @@ import os BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TESTCASES_DIR = os.path.join(BASE_DIR, 'testcases') -TEST_CONF_DIR = os.path.join(BASE_DIR, 'configs') +TEST_CONF_DIR = os.path.join(BASE_DIR, 'testcases') TREE_BASE = os.sep.join(BASE_DIR.split(os.sep)[:-2]) diff --git a/tests/cloud_tests/__main__.py b/tests/cloud_tests/__main__.py index 260ddb3f..7ee29cad 100644 --- a/tests/cloud_tests/__main__.py +++ b/tests/cloud_tests/__main__.py @@ -4,6 +4,7 @@ import argparse import logging +import os import sys from tests.cloud_tests import args, bddeb, collect, manage, run_funcs, verify @@ -50,7 +51,7 @@ def main(): return -1 # run handler - LOG.debug('running with args: %s\n', parsed) + LOG.debug('running with args: %s', parsed) return { 'bddeb': bddeb.bddeb, 'collect': collect.collect, @@ -63,6 +64,8 @@ def main(): if __name__ == "__main__": + if os.geteuid() == 0: + sys.exit('Do not run as root') sys.exit(main()) # vi: ts=4 expandtab diff --git a/tests/cloud_tests/args.py b/tests/cloud_tests/args.py index 369d60db..c6c1877b 100644 --- a/tests/cloud_tests/args.py +++ b/tests/cloud_tests/args.py @@ -170,9 +170,9 @@ def normalize_collect_args(args): @param args: parsed args @return_value: updated args, or None if errors occurred """ - # platform should default to all supported + # platform should default to lxd if len(args.platform) == 0: - args.platform = config.ENABLED_PLATFORMS + args.platform = ['lxd'] args.platform = util.sorted_unique(args.platform) # os name should default to all enabled diff --git a/tests/cloud_tests/bddeb.py b/tests/cloud_tests/bddeb.py index 53dbf74e..fba8a0c7 100644 --- a/tests/cloud_tests/bddeb.py +++ b/tests/cloud_tests/bddeb.py @@ -11,7 +11,7 @@ from tests.cloud_tests import (config, LOG) from tests.cloud_tests import (platforms, images, snapshots, instances) from tests.cloud_tests.stage import (PlatformComponent, run_stage, run_single) -build_deps = ['devscripts', 'equivs', 'git', 'tar'] +pre_reqs = ['devscripts', 'equivs', 'git', 'tar'] def _out(cmd_res): @@ -26,13 +26,9 @@ def build_deb(args, instance): @return_value: tuple of results and fail count """ # update remote system package list and install build deps - LOG.debug('installing build deps') - pkgs = ' '.join(build_deps) - cmd = 'apt-get update && apt-get install --yes {}'.format(pkgs) - instance.execute(['/bin/sh', '-c', cmd]) - # TODO Remove this call once we have a ci-deps Makefile target - instance.execute(['mk-build-deps', '--install', '-t', - 'apt-get --no-install-recommends --yes', 'cloud-init']) + LOG.debug('installing pre-reqs') + pkgs = ' '.join(pre_reqs) + instance.execute('apt-get update && apt-get install --yes {}'.format(pkgs)) # local tmpfile that must be deleted local_tarball = tempfile.NamedTemporaryFile().name @@ -40,7 +36,7 @@ def build_deb(args, instance): # paths to use in remote system output_link = '/root/cloud-init_all.deb' remote_tarball = _out(instance.execute(['mktemp'])) - extract_dir = _out(instance.execute(['mktemp', '--directory'])) + extract_dir = '/root' bddeb_path = os.path.join(extract_dir, 'packages', 'bddeb') git_env = {'GIT_DIR': os.path.join(extract_dir, '.git'), 'GIT_WORK_TREE': extract_dir} @@ -56,6 +52,11 @@ def build_deb(args, instance): instance.execute(['git', 'commit', '-a', '-m', 'tmp', '--allow-empty'], env=git_env) + LOG.debug('installing deps') + 
deps_path = os.path.join(extract_dir, 'tools', 'read-dependencies') + instance.execute([deps_path, '--install', '--test-distro', + '--distro', 'ubuntu', '--python-version', '3']) + LOG.debug('building deb in remote system at: %s', output_link) bddeb_args = args.bddeb_args.split() if args.bddeb_args else [] instance.execute([bddeb_path, '-d'] + bddeb_args, env=git_env) diff --git a/tests/cloud_tests/collect.py b/tests/cloud_tests/collect.py index b44e8bdd..4a2422ed 100644 --- a/tests/cloud_tests/collect.py +++ b/tests/cloud_tests/collect.py @@ -120,6 +120,7 @@ def collect_image(args, platform, os_name): os_config = config.load_os_config( platform.platform_name, os_name, require_enabled=True, feature_overrides=args.feature_override) + LOG.debug('os config: %s', os_config) component = PlatformComponent( partial(images.get_image, platform, os_config)) @@ -144,6 +145,8 @@ def collect_platform(args, platform_name): platform_config = config.load_platform_config( platform_name, require_enabled=True) + platform_config['data_dir'] = args.data_dir + LOG.debug('platform config: %s', platform_config) component = PlatformComponent( partial(platforms.get_platform, platform_name, platform_config)) diff --git a/tests/cloud_tests/config.py b/tests/cloud_tests/config.py index 4d5dc801..52fc2bda 100644 --- a/tests/cloud_tests/config.py +++ b/tests/cloud_tests/config.py @@ -112,6 +112,7 @@ def load_os_config(platform_name, os_name, require_enabled=False, feature_conf = main_conf['features'] feature_groups = conf.get('feature_groups', []) overrides = merge_config(get(conf, 'features'), feature_overrides) + conf['arch'] = c_util.get_architecture() conf['features'] = merge_feature_groups( feature_conf, feature_groups, overrides) diff --git a/tests/cloud_tests/images/nocloudkvm.py b/tests/cloud_tests/images/nocloudkvm.py new file mode 100644 index 00000000..a7af0e59 --- /dev/null +++ b/tests/cloud_tests/images/nocloudkvm.py @@ -0,0 +1,88 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""NoCloud KVM Image Base Class.""" + +from tests.cloud_tests.images import base +from tests.cloud_tests.snapshots import nocloudkvm as nocloud_kvm_snapshot + + +class NoCloudKVMImage(base.Image): + """NoCloud KVM backed image.""" + + platform_name = "nocloud-kvm" + + def __init__(self, platform, config, img_path): + """Set up image. 
+ + @param platform: platform object + @param config: image configuration + @param img_path: path to the image + """ + self.modified = False + self._instance = None + self._img_path = img_path + + super(NoCloudKVMImage, self).__init__(platform, config) + + @property + def instance(self): + """Returns an instance of an image.""" + if not self._instance: + if not self._img_path: + raise RuntimeError() + + self._instance = self.platform.create_image( + self.properties, self.config, self.features, self._img_path, + image_desc=str(self), use_desc='image-modification') + return self._instance + + @property + def properties(self): + """Dictionary containing: 'arch', 'os', 'version', 'release'.""" + return { + 'arch': self.config['arch'], + 'os': self.config['family'], + 'release': self.config['release'], + 'version': self.config['version'], + } + + def execute(self, *args, **kwargs): + """Execute command in image, modifying image.""" + return self.instance.execute(*args, **kwargs) + + def push_file(self, local_path, remote_path): + """Copy file at 'local_path' to instance at 'remote_path'.""" + return self.instance.push_file(local_path, remote_path) + + def run_script(self, *args, **kwargs): + """Run script in image, modifying image. + + @return_value: script output + """ + return self.instance.run_script(*args, **kwargs) + + def snapshot(self): + """Create snapshot of image, block until done.""" + if not self._img_path: + raise RuntimeError() + + instance = self.platform.create_image( + self.properties, self.config, self.features, + self._img_path, image_desc=str(self), use_desc='snapshot') + + return nocloud_kvm_snapshot.NoCloudKVMSnapshot( + self.platform, self.properties, self.config, + self.features, instance) + + def destroy(self): + """Unset path to signal image is no longer used. + + The removal of the images and all other items is handled by the + framework. In some cases we want to keep the images, so let the + framework decide whether to keep or destroy everything. + """ + self._img_path = None + self._instance.destroy() + super(NoCloudKVMImage, self).destroy() + +# vi: ts=4 expandtab diff --git a/tests/cloud_tests/instances/base.py b/tests/cloud_tests/instances/base.py index 959e9cce..9bdda608 100644 --- a/tests/cloud_tests/instances/base.py +++ b/tests/cloud_tests/instances/base.py @@ -23,7 +23,7 @@ class Instance(object): self.config = config self.features = features - def execute(self, command, stdout=None, stderr=None, env={}, + def execute(self, command, stdout=None, stderr=None, env=None, rcs=None, description=None): """Execute command in instance, recording output, error and exit code. @@ -31,6 +31,8 @@ class Instance(object): target filesystem being available at /. @param command: the command to execute as root inside the image + if command is a string, then it will be executed as: + ['sh', '-c', command] @param stdout, stderr: file handles to write output and error to @param env: environment variables @param rcs: allowed return codes from command @@ -88,7 +90,7 @@ class Instance(object): return self.execute( ['/bin/bash', script_path], rcs=rcs, description=description) finally: - self.execute(['rm', script_path], rcs=rcs) + self.execute(['rm', '-f', script_path], rcs=rcs) def tmpfile(self): """Get a tmp file in the target. 
@@ -137,9 +139,9 @@ class Instance(object): tests.append(self.config['cloud_init_ready_script']) formatted_tests = ' && '.join(clean_test(t) for t in tests) - test_cmd = ('for ((i=0;i<{time};i++)); do {test} && exit 0; sleep 1; ' - 'done; exit 1;').format(time=time, test=formatted_tests) - cmd = ['/bin/bash', '-c', test_cmd] + cmd = ('i=0; while [ $i -lt {time} ] && i=$(($i+1)); do {test} && ' + 'exit 0; sleep 1; done; exit 1').format(time=time, + test=formatted_tests) if self.execute(cmd, rcs=(0, 1))[-1] != 0: raise OSError('timeout: after {}s system not started'.format(time)) diff --git a/tests/cloud_tests/instances/lxd.py b/tests/cloud_tests/instances/lxd.py index b9c2cc6b..a43918c2 100644 --- a/tests/cloud_tests/instances/lxd.py +++ b/tests/cloud_tests/instances/lxd.py @@ -31,7 +31,7 @@ class LXDInstance(base.Instance): self._pylxd_container.sync() return self._pylxd_container - def execute(self, command, stdout=None, stderr=None, env={}, + def execute(self, command, stdout=None, stderr=None, env=None, rcs=None, description=None): """Execute command in instance, recording output, error and exit code. @@ -39,6 +39,8 @@ class LXDInstance(base.Instance): target filesystem being available at /. @param command: the command to execute as root inside the image + if command is a string, then it will be executed as: + ['sh', '-c', command] @param stdout: file handler to write output @param stderr: file handler to write error @param env: environment variables @@ -46,6 +48,12 @@ class LXDInstance(base.Instance): @param description: purpose of command @return_value: tuple containing stdout data, stderr data, exit code """ + if env is None: + env = {} + + if isinstance(command, str): + command = ['sh', '-c', command] + # ensure instance is running and execute the command self.start() res = self.pylxd_container.execute(command, environment=env) diff --git a/tests/cloud_tests/instances/nocloudkvm.py b/tests/cloud_tests/instances/nocloudkvm.py new file mode 100644 index 00000000..8a0e5319 --- /dev/null +++ b/tests/cloud_tests/instances/nocloudkvm.py @@ -0,0 +1,217 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""Base NoCloud KVM instance.""" + +import os +import paramiko +import socket +import subprocess +import time + +from cloudinit import util as c_util +from tests.cloud_tests.instances import base +from tests.cloud_tests import util + + +class NoCloudKVMInstance(base.Instance): + """NoCloud KVM backed instance.""" + + platform_name = "nocloud-kvm" + + def __init__(self, platform, name, properties, config, features, + user_data, meta_data): + """Set up instance. 
+ + @param platform: platform object + @param name: image path + @param properties: dictionary of properties + @param config: dictionary of configuration values + @param features: dictionary of supported feature flags + """ + self.user_data = user_data + self.meta_data = meta_data + self.ssh_key_file = os.path.join(platform.config['data_dir'], + platform.config['private_key']) + self.ssh_port = None + self.pid = None + self.pid_file = None + + super(NoCloudKVMInstance, self).__init__( + platform, name, properties, config, features) + + def destroy(self): + """Clean up instance.""" + if self.pid: + try: + c_util.subp(['kill', '-9', self.pid]) + except util.ProcessExectuionError: + pass + + if self.pid_file: + os.remove(self.pid_file) + + self.pid = None + super(NoCloudKVMInstance, self).destroy() + + def execute(self, command, stdout=None, stderr=None, env=None, + rcs=None, description=None): + """Execute command in instance. + + Assumes functional networking and execution as root with the + target filesystem being available at /. + + @param command: the command to execute as root inside the image + if command is a string, then it will be executed as: + ['sh', '-c', command] + @param stdout, stderr: file handles to write output and error to + @param env: environment variables + @param rcs: allowed return codes from command + @param description: purpose of command + @return_value: tuple containing stdout data, stderr data, exit code + """ + if env is None: + env = {} + + if isinstance(command, str): + command = ['sh', '-c', command] + + if self.pid: + return self.ssh(command) + else: + return self.mount_image_callback(command) + (0,) + + def mount_image_callback(self, cmd): + """Run mount-image-callback.""" + out, err = c_util.subp(['sudo', 'mount-image-callback', + '--system-mounts', '--system-resolvconf', + self.name, '--', 'chroot', + '_MOUNTPOINT_'] + cmd) + + return out, err + + def generate_seed(self, tmpdir): + """Generate nocloud seed from user-data""" + seed_file = os.path.join(tmpdir, '%s_seed.img' % self.name) + user_data_file = os.path.join(tmpdir, '%s_user_data' % self.name) + + with open(user_data_file, "w") as ud_file: + ud_file.write(self.user_data) + + c_util.subp(['cloud-localds', seed_file, user_data_file]) + + return seed_file + + def get_free_port(self): + """Get a free port assigned by the kernel.""" + s = socket.socket() + s.bind(('', 0)) + num = s.getsockname()[1] + s.close() + return num + + def push_file(self, local_path, remote_path): + """Copy file at 'local_path' to instance at 'remote_path'. + + If we have a pid then SSH is up, otherwise, use + mount-image-callback. 
+ + @param local_path: path on local instance + @param remote_path: path on remote instance + """ + if self.pid: + super(NoCloudKVMInstance, self).push_file() + else: + local_file = open(local_path) + p = subprocess.Popen(['sudo', 'mount-image-callback', + '--system-mounts', '--system-resolvconf', + self.name, '--', 'chroot', '_MOUNTPOINT_', + '/bin/sh', '-c', 'cat - > %s' % remote_path], + stdin=local_file, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + p.wait() + + def sftp_put(self, path, data): + """SFTP put a file.""" + client = self._ssh_connect() + sftp = client.open_sftp() + + with sftp.open(path, 'w') as f: + f.write(data) + + client.close() + + def ssh(self, command): + """Run a command via SSH.""" + client = self._ssh_connect() + + try: + _, out, err = client.exec_command(util.shell_pack(command)) + except paramiko.SSHException: + raise util.InTargetExecuteError('', '', -1, command, self.name) + + exit = out.channel.recv_exit_status() + out = ''.join(out.readlines()) + err = ''.join(err.readlines()) + client.close() + + return out, err, exit + + def _ssh_connect(self, hostname='localhost', username='ubuntu', + banner_timeout=120, retry_attempts=30): + """Connect via SSH.""" + private_key = paramiko.RSAKey.from_private_key_file(self.ssh_key_file) + client = paramiko.SSHClient() + client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + while retry_attempts: + try: + client.connect(hostname=hostname, username=username, + port=self.ssh_port, pkey=private_key, + banner_timeout=banner_timeout) + return client + except (paramiko.SSHException, TypeError): + time.sleep(1) + retry_attempts = retry_attempts - 1 + + error_desc = 'Failed command to: %s@%s:%s' % (username, hostname, + self.ssh_port) + raise util.InTargetExecuteError('', '', -1, 'ssh connect', + self.name, error_desc) + + def start(self, wait=True, wait_for_cloud_init=False): + """Start instance.""" + tmpdir = self.platform.config['data_dir'] + seed = self.generate_seed(tmpdir) + self.pid_file = os.path.join(tmpdir, '%s.pid' % self.name) + self.ssh_port = self.get_free_port() + + subprocess.Popen(['./tools/xkvm', + '--disk', '%s,cache=unsafe' % self.name, + '--disk', '%s,cache=unsafe' % seed, + '--netdev', + 'user,hostfwd=tcp::%s-:22' % self.ssh_port, + '--', '-pidfile', self.pid_file, '-vnc', 'none', + '-m', '2G', '-smp', '2'], + close_fds=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + while not os.path.exists(self.pid_file): + time.sleep(1) + + with open(self.pid_file, 'r') as pid_f: + self.pid = pid_f.readlines()[0].strip() + + if wait: + self._wait_for_system(wait_for_cloud_init) + + def write_data(self, remote_path, data): + """Write data to instance filesystem. 
+ + @param remote_path: path in instance + @param data: data to write, either str or bytes + """ + self.sftp_put(remote_path, data) + +# vi: ts=4 expandtab diff --git a/tests/cloud_tests/platforms.yaml b/tests/cloud_tests/platforms.yaml index b91834ab..fa4f845e 100644 --- a/tests/cloud_tests/platforms.yaml +++ b/tests/cloud_tests/platforms.yaml @@ -59,6 +59,10 @@ platforms: {{ config_get("user.user-data", properties.default) }} cloud-init-vendor.tpl: | {{ config_get("user.vendor-data", properties.default) }} + nocloud-kvm: + enabled: true + private_key: id_rsa + public_key: id_rsa.pub ec2: {} azure: {} diff --git a/tests/cloud_tests/platforms/__init__.py b/tests/cloud_tests/platforms/__init__.py index 443f6d44..3490fe87 100644 --- a/tests/cloud_tests/platforms/__init__.py +++ b/tests/cloud_tests/platforms/__init__.py @@ -3,8 +3,10 @@ """Main init.""" from tests.cloud_tests.platforms import lxd +from tests.cloud_tests.platforms import nocloudkvm PLATFORMS = { + 'nocloud-kvm': nocloudkvm.NoCloudKVMPlatform, 'lxd': lxd.LXDPlatform, } diff --git a/tests/cloud_tests/platforms/nocloudkvm.py b/tests/cloud_tests/platforms/nocloudkvm.py new file mode 100644 index 00000000..f1f81877 --- /dev/null +++ b/tests/cloud_tests/platforms/nocloudkvm.py @@ -0,0 +1,90 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""Base NoCloud KVM platform.""" +import glob +import os + +from simplestreams import filters +from simplestreams import mirrors +from simplestreams import objectstores +from simplestreams import util as s_util + +from cloudinit import util as c_util +from tests.cloud_tests.images import nocloudkvm as nocloud_kvm_image +from tests.cloud_tests.instances import nocloudkvm as nocloud_kvm_instance +from tests.cloud_tests.platforms import base +from tests.cloud_tests import util + + +class NoCloudKVMPlatform(base.Platform): + """NoCloud KVM test platform.""" + + platform_name = 'nocloud-kvm' + + def get_image(self, img_conf): + """Get image using specified image configuration. 
+ + @param img_conf: configuration for image + @return_value: cloud_tests.images instance + """ + (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None) + + filter = filters.get_filters(['arch=%s' % c_util.get_architecture(), + 'release=%s' % img_conf['release'], + 'ftype=disk1.img']) + mirror_config = {'filters': filter, + 'keep_items': False, + 'max_items': 1, + 'checksumming_reader': True, + 'item_download': True + } + + def policy(content, path): + return s_util.read_signed(content, keyring=img_conf['keyring']) + + smirror = mirrors.UrlMirrorReader(url, policy=policy) + tstore = objectstores.FileStore(img_conf['mirror_dir']) + tmirror = mirrors.ObjectFilterMirror(config=mirror_config, + objectstore=tstore) + tmirror.sync(smirror, path) + + search_d = os.path.join(img_conf['mirror_dir'], '**', + img_conf['release'], '**', '*.img') + + images = [] + for fname in glob.iglob(search_d, recursive=True): + images.append(fname) + + if len(images) != 1: + raise Exception('No unique images found') + + image = nocloud_kvm_image.NoCloudKVMImage(self, img_conf, images[0]) + if img_conf.get('override_templates', False): + image.update_templates(self.config.get('template_overrides', {}), + self.config.get('template_files', {})) + return image + + def create_image(self, properties, config, features, + src_img_path, image_desc=None, use_desc=None, + user_data=None, meta_data=None): + """Create an image + + @param src_img_path: image path to launch from + @param properties: image properties + @param config: image configuration + @param features: image features + @param image_desc: description of image being launched + @param use_desc: description of container's use + @return_value: cloud_tests.instances instance + """ + name = util.gen_instance_name(image_desc=image_desc, use_desc=use_desc) + img_path = os.path.join(self.config['data_dir'], name + '.qcow2') + c_util.subp(['qemu-img', 'create', '-f', 'qcow2', + '-b', src_img_path, img_path]) + + return nocloud_kvm_instance.NoCloudKVMInstance(self, img_path, + properties, config, + features, user_data, + meta_data) + +# vi: ts=4 expandtab diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml index c8dd1427..ec7e2d5b 100644 --- a/tests/cloud_tests/releases.yaml +++ b/tests/cloud_tests/releases.yaml @@ -27,7 +27,12 @@ default_release_config: # features groups and additional feature settings feature_groups: [] features: {} - + nocloud-kvm: + mirror_url: https://cloud-images.ubuntu.com/daily + mirror_dir: '/srv/citest/nocloud-kvm' + keyring: /usr/share/keyrings/ubuntu-cloudimage-keyring.gpg + setup_overrides: null + override_templates: false # lxd specific default configuration options lxd: # default sstreams server to use for lxd image retrieval @@ -121,6 +126,9 @@ releases: # EOL: Jul 2018 default: enabled: true + release: artful + version: 17.10 + family: ubuntu feature_groups: - base - debian_base @@ -134,6 +142,9 @@ releases: # EOL: Jan 2018 default: enabled: true + release: zesty + version: 17.04 + family: ubuntu feature_groups: - base - debian_base @@ -147,6 +158,9 @@ releases: # EOL: Apr 2021 default: enabled: true + release: xenial + version: 16.04 + family: ubuntu feature_groups: - base - debian_base @@ -160,6 +174,9 @@ releases: # EOL: Apr 2019 default: enabled: true + release: trusty + version: 14.04 + family: ubuntu feature_groups: - base - debian_base diff --git a/tests/cloud_tests/setup_image.py b/tests/cloud_tests/setup_image.py index 8053a093..6672ffb3 100644 --- a/tests/cloud_tests/setup_image.py +++ 
b/tests/cloud_tests/setup_image.py @@ -5,6 +5,7 @@ from functools import partial import os +from cloudinit import util as c_util from tests.cloud_tests import LOG from tests.cloud_tests import stage, util @@ -19,7 +20,7 @@ def installed_package_version(image, package, ensure_installed=True): """ os_family = util.get_os_family(image.properties['os']) if os_family == 'debian': - cmd = ['dpkg-query', '-W', "--showformat='${Version}'", package] + cmd = ['dpkg-query', '-W', "--showformat=${Version}", package] elif os_family == 'redhat': cmd = ['rpm', '-q', '--queryformat', "'%{VERSION}'", package] else: @@ -49,11 +50,11 @@ def install_deb(args, image): LOG.debug(msg) remote_path = os.path.join('/tmp', os.path.basename(args.deb)) image.push_file(args.deb, remote_path) - cmd = 'dpkg -i {} || apt-get install --yes -f'.format(remote_path) - image.execute(['/bin/sh', '-c', cmd], description=msg) + cmd = 'dpkg -i {}; apt-get install --yes -f'.format(remote_path) + image.execute(cmd, description=msg) # check installed deb version matches package - fmt = ['-W', "--showformat='${Version}'"] + fmt = ['-W', "--showformat=${Version}"] (out, err, exit) = image.execute(['dpkg-deb'] + fmt + [remote_path]) expected_version = out.strip() found_version = installed_package_version(image, 'cloud-init') @@ -113,7 +114,7 @@ def upgrade(args, image): msg = 'upgrading cloud-init' LOG.debug(msg) - image.execute(['/bin/sh', '-c', cmd], description=msg) + image.execute(cmd, description=msg) def upgrade_full(args, image): @@ -134,7 +135,7 @@ def upgrade_full(args, image): msg = 'full system upgrade' LOG.debug(msg) - image.execute(['/bin/sh', '-c', cmd], description=msg) + image.execute(cmd, description=msg) def run_script(args, image): @@ -165,7 +166,7 @@ def enable_ppa(args, image): msg = 'enable ppa: "{}" in target'.format(ppa) LOG.debug(msg) cmd = 'add-apt-repository --yes {} && apt-get update'.format(ppa) - image.execute(['/bin/sh', '-c', cmd], description=msg) + image.execute(cmd, description=msg) def enable_repo(args, image): @@ -188,7 +189,21 @@ def enable_repo(args, image): msg = 'enable repo: "{}" in target'.format(args.repo) LOG.debug(msg) - image.execute(['/bin/sh', '-c', cmd], description=msg) + image.execute(cmd, description=msg) + + +def generate_ssh_keys(data_dir): + """Generate SSH keys to be used with image.""" + LOG.info('generating SSH keys') + filename = os.path.join(data_dir, 'id_rsa') + + if os.path.exists(filename): + c_util.del_file(filename) + + c_util.subp(['ssh-keygen', '-t', 'rsa', '-b', '4096', + '-f', filename, '-P', '', + '-C', 'ubuntu@cloud_test'], + capture=True) def setup_image(args, image): @@ -226,6 +241,7 @@ def setup_image(args, image): 'set up for {}'.format(image), calls, continue_after_error=False) LOG.debug('after setup complete, installed cloud-init version is: %s', installed_package_version(image, 'cloud-init')) + generate_ssh_keys(args.data_dir) return res # vi: ts=4 expandtab diff --git a/tests/cloud_tests/snapshots/nocloudkvm.py b/tests/cloud_tests/snapshots/nocloudkvm.py new file mode 100644 index 00000000..09998349 --- /dev/null +++ b/tests/cloud_tests/snapshots/nocloudkvm.py @@ -0,0 +1,74 @@ +# This file is part of cloud-init. See LICENSE file for license information. 
+ +"""Base NoCloud KVM snapshot.""" +import os + +from tests.cloud_tests.snapshots import base + + +class NoCloudKVMSnapshot(base.Snapshot): + """NoCloud KVM image copy backed snapshot.""" + + platform_name = "nocloud-kvm" + + def __init__(self, platform, properties, config, features, + instance): + """Set up snapshot. + + @param platform: platform object + @param properties: image properties + @param config: image config + @param features: supported feature flags + """ + self.instance = instance + + super(NoCloudKVMSnapshot, self).__init__( + platform, properties, config, features) + + def launch(self, user_data, meta_data=None, block=True, start=True, + use_desc=None): + """Launch instance. + + @param user_data: user-data for the instance + @param instance_id: instance-id for the instance + @param block: wait until instance is created + @param start: start instance and wait until fully started + @param use_desc: description of snapshot instance use + @return_value: an Instance + """ + key_file = os.path.join(self.platform.config['data_dir'], + self.platform.config['public_key']) + user_data = self.inject_ssh_key(user_data, key_file) + + instance = self.platform.create_image( + self.properties, self.config, self.features, + self.instance.name, image_desc=str(self), use_desc=use_desc, + user_data=user_data, meta_data=meta_data) + + if start: + instance.start() + + return instance + + def inject_ssh_key(self, user_data, key_file): + """Inject the authorized key into the user_data.""" + with open(key_file) as f: + value = f.read() + + key = 'ssh_authorized_keys:' + value = ' - %s' % value.strip() + user_data = user_data.split('\n') + if key in user_data: + user_data.insert(user_data.index(key) + 1, '%s' % value) + else: + user_data.insert(-1, '%s' % key) + user_data.insert(-1, '%s' % value) + + return '\n'.join(user_data) + + def destroy(self): + """Clean up snapshot data.""" + self.instance.destroy() + super(NoCloudKVMSnapshot, self).destroy() + +# vi: ts=4 expandtab diff --git a/tests/cloud_tests/configs/bugs/README.md b/tests/cloud_tests/testcases/bugs/README.md index 09ce0765..09ce0765 100644 --- a/tests/cloud_tests/configs/bugs/README.md +++ b/tests/cloud_tests/testcases/bugs/README.md diff --git a/tests/cloud_tests/configs/bugs/lp1511485.yaml b/tests/cloud_tests/testcases/bugs/lp1511485.yaml index ebf9763f..ebf9763f 100644 --- a/tests/cloud_tests/configs/bugs/lp1511485.yaml +++ b/tests/cloud_tests/testcases/bugs/lp1511485.yaml diff --git a/tests/cloud_tests/configs/bugs/lp1611074.yaml b/tests/cloud_tests/testcases/bugs/lp1611074.yaml index 960679d5..960679d5 100644 --- a/tests/cloud_tests/configs/bugs/lp1611074.yaml +++ b/tests/cloud_tests/testcases/bugs/lp1611074.yaml diff --git a/tests/cloud_tests/configs/bugs/lp1628337.yaml b/tests/cloud_tests/testcases/bugs/lp1628337.yaml index e39b3cd8..e39b3cd8 100644 --- a/tests/cloud_tests/configs/bugs/lp1628337.yaml +++ b/tests/cloud_tests/testcases/bugs/lp1628337.yaml diff --git a/tests/cloud_tests/configs/examples/README.md b/tests/cloud_tests/testcases/examples/README.md index 110a223b..110a223b 100644 --- a/tests/cloud_tests/configs/examples/README.md +++ b/tests/cloud_tests/testcases/examples/README.md diff --git a/tests/cloud_tests/configs/examples/TODO.md b/tests/cloud_tests/testcases/examples/TODO.md index 8db0e98e..8db0e98e 100644 --- a/tests/cloud_tests/configs/examples/TODO.md +++ b/tests/cloud_tests/testcases/examples/TODO.md diff --git a/tests/cloud_tests/configs/examples/add_apt_repositories.yaml 
b/tests/cloud_tests/testcases/examples/add_apt_repositories.yaml
index 4b8575f7..4b8575f7 100644
--- a/tests/cloud_tests/configs/examples/add_apt_repositories.yaml
+++ b/tests/cloud_tests/testcases/examples/add_apt_repositories.yaml
diff --git a/tests/cloud_tests/configs/examples/alter_completion_message.yaml b/tests/cloud_tests/testcases/examples/alter_completion_message.yaml
index 9e154f80..9e154f80 100644
--- a/tests/cloud_tests/configs/examples/alter_completion_message.yaml
+++ b/tests/cloud_tests/testcases/examples/alter_completion_message.yaml
diff --git a/tests/cloud_tests/configs/examples/configure_instance_trusted_ca_certificates.yaml b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml
index ad32b088..ad32b088 100644
--- a/tests/cloud_tests/configs/examples/configure_instance_trusted_ca_certificates.yaml
+++ b/tests/cloud_tests/testcases/examples/configure_instance_trusted_ca_certificates.yaml
diff --git a/tests/cloud_tests/configs/examples/configure_instances_ssh_keys.yaml b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml
index f3eaf3ce..f3eaf3ce 100644
--- a/tests/cloud_tests/configs/examples/configure_instances_ssh_keys.yaml
+++ b/tests/cloud_tests/testcases/examples/configure_instances_ssh_keys.yaml
diff --git a/tests/cloud_tests/configs/examples/including_user_groups.yaml b/tests/cloud_tests/testcases/examples/including_user_groups.yaml
index 0aa7ad21..0aa7ad21 100644
--- a/tests/cloud_tests/configs/examples/including_user_groups.yaml
+++ b/tests/cloud_tests/testcases/examples/including_user_groups.yaml
diff --git a/tests/cloud_tests/configs/examples/install_arbitrary_packages.yaml b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml
index d3980228..d3980228 100644
--- a/tests/cloud_tests/configs/examples/install_arbitrary_packages.yaml
+++ b/tests/cloud_tests/testcases/examples/install_arbitrary_packages.yaml
diff --git a/tests/cloud_tests/configs/examples/install_run_chef_recipes.yaml b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml
index 0bec305e..0bec305e 100644
--- a/tests/cloud_tests/configs/examples/install_run_chef_recipes.yaml
+++ b/tests/cloud_tests/testcases/examples/install_run_chef_recipes.yaml
diff --git a/tests/cloud_tests/configs/examples/run_apt_upgrade.yaml b/tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml
index 2b7eae4c..2b7eae4c 100644
--- a/tests/cloud_tests/configs/examples/run_apt_upgrade.yaml
+++ b/tests/cloud_tests/testcases/examples/run_apt_upgrade.yaml
diff --git a/tests/cloud_tests/configs/examples/run_commands.yaml b/tests/cloud_tests/testcases/examples/run_commands.yaml
index b0e311ba..b0e311ba 100644
--- a/tests/cloud_tests/configs/examples/run_commands.yaml
+++ b/tests/cloud_tests/testcases/examples/run_commands.yaml
diff --git a/tests/cloud_tests/configs/examples/run_commands_first_boot.yaml b/tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml
index 7bd803db..7bd803db 100644
--- a/tests/cloud_tests/configs/examples/run_commands_first_boot.yaml
+++ b/tests/cloud_tests/testcases/examples/run_commands_first_boot.yaml
diff --git a/tests/cloud_tests/configs/examples/setup_run_puppet.yaml b/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml
index e366c042..e366c042 100644
--- a/tests/cloud_tests/configs/examples/setup_run_puppet.yaml
+++ b/tests/cloud_tests/testcases/examples/setup_run_puppet.yaml
diff --git a/tests/cloud_tests/configs/examples/writing_out_arbitrary_files.yaml
b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml index 6f78f994..6f78f994 100644 --- a/tests/cloud_tests/configs/examples/writing_out_arbitrary_files.yaml +++ b/tests/cloud_tests/testcases/examples/writing_out_arbitrary_files.yaml diff --git a/tests/cloud_tests/configs/main/README.md b/tests/cloud_tests/testcases/main/README.md index 60346063..60346063 100644 --- a/tests/cloud_tests/configs/main/README.md +++ b/tests/cloud_tests/testcases/main/README.md diff --git a/tests/cloud_tests/configs/main/command_output_simple.yaml b/tests/cloud_tests/testcases/main/command_output_simple.yaml index 08ca8940..08ca8940 100644 --- a/tests/cloud_tests/configs/main/command_output_simple.yaml +++ b/tests/cloud_tests/testcases/main/command_output_simple.yaml diff --git a/tests/cloud_tests/configs/modules/README.md b/tests/cloud_tests/testcases/modules/README.md index d66101f2..d66101f2 100644 --- a/tests/cloud_tests/configs/modules/README.md +++ b/tests/cloud_tests/testcases/modules/README.md diff --git a/tests/cloud_tests/configs/modules/TODO.md b/tests/cloud_tests/testcases/modules/TODO.md index d496da95..0b933b3b 100644 --- a/tests/cloud_tests/configs/modules/TODO.md +++ b/tests/cloud_tests/testcases/modules/TODO.md @@ -89,8 +89,6 @@ Not applicable to write a test for this as it specifies when something should be ## ssh authkey fingerprints The authkey_hash key does not appear to work. In fact the default claims to be md5, however syslog only shows sha256 -## ubuntu init switch - ## update etc hosts 2016-11-17: Issues with changing /etc/hosts and lxc backend. diff --git a/tests/cloud_tests/configs/modules/apt_configure_conf.yaml b/tests/cloud_tests/testcases/modules/apt_configure_conf.yaml index de453000..de453000 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_conf.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_conf.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml index 98800673..98800673 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_disable_suites.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_disable_suites.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_primary.yaml b/tests/cloud_tests/testcases/modules/apt_configure_primary.yaml index 41bcf2fd..41bcf2fd 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_primary.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_primary.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml b/tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml index be6c6f81..be6c6f81 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_proxy.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_proxy.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_security.yaml b/tests/cloud_tests/testcases/modules/apt_configure_security.yaml index 83dd51df..83dd51df 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_security.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_security.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml index bde9398a..bde9398a 100644 --- a/tests/cloud_tests/configs/modules/apt_configure_sources_key.yaml +++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_key.yaml diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml 
b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml
index 25088135..25088135 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_keyserver.yaml
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_keyserver.yaml
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml
index 143cb080..143cb080 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_list.yaml
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_list.yaml
diff --git a/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml
index 9efdae52..9efdae52 100644
--- a/tests/cloud_tests/configs/modules/apt_configure_sources_ppa.yaml
+++ b/tests/cloud_tests/testcases/modules/apt_configure_sources_ppa.yaml
diff --git a/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml
index bd9b5d08..bd9b5d08 100644
--- a/tests/cloud_tests/configs/modules/apt_pipelining_disable.yaml
+++ b/tests/cloud_tests/testcases/modules/apt_pipelining_disable.yaml
diff --git a/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml b/tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml
index cbed3ba3..cbed3ba3 100644
--- a/tests/cloud_tests/configs/modules/apt_pipelining_os.yaml
+++ b/tests/cloud_tests/testcases/modules/apt_pipelining_os.yaml
diff --git a/tests/cloud_tests/configs/modules/bootcmd.yaml b/tests/cloud_tests/testcases/modules/bootcmd.yaml
index 3a73994e..3a73994e 100644
--- a/tests/cloud_tests/configs/modules/bootcmd.yaml
+++ b/tests/cloud_tests/testcases/modules/bootcmd.yaml
diff --git a/tests/cloud_tests/configs/modules/byobu.yaml b/tests/cloud_tests/testcases/modules/byobu.yaml
index a9aa1f3f..a9aa1f3f 100644
--- a/tests/cloud_tests/configs/modules/byobu.yaml
+++ b/tests/cloud_tests/testcases/modules/byobu.yaml
diff --git a/tests/cloud_tests/configs/modules/ca_certs.yaml b/tests/cloud_tests/testcases/modules/ca_certs.yaml
index d939f435..d939f435 100644
--- a/tests/cloud_tests/configs/modules/ca_certs.yaml
+++ b/tests/cloud_tests/testcases/modules/ca_certs.yaml
diff --git a/tests/cloud_tests/configs/modules/debug_disable.yaml b/tests/cloud_tests/testcases/modules/debug_disable.yaml
index 63218b18..63218b18 100644
--- a/tests/cloud_tests/configs/modules/debug_disable.yaml
+++ b/tests/cloud_tests/testcases/modules/debug_disable.yaml
diff --git a/tests/cloud_tests/configs/modules/debug_enable.yaml b/tests/cloud_tests/testcases/modules/debug_enable.yaml
index d44147db..d44147db 100644
--- a/tests/cloud_tests/configs/modules/debug_enable.yaml
+++ b/tests/cloud_tests/testcases/modules/debug_enable.yaml
diff --git a/tests/cloud_tests/configs/modules/final_message.yaml b/tests/cloud_tests/testcases/modules/final_message.yaml
index c9ed6118..c9ed6118 100644
--- a/tests/cloud_tests/configs/modules/final_message.yaml
+++ b/tests/cloud_tests/testcases/modules/final_message.yaml
diff --git a/tests/cloud_tests/configs/modules/keys_to_console.yaml b/tests/cloud_tests/testcases/modules/keys_to_console.yaml
index 5d86e739..5d86e739 100644
--- a/tests/cloud_tests/configs/modules/keys_to_console.yaml
+++ b/tests/cloud_tests/testcases/modules/keys_to_console.yaml
diff --git a/tests/cloud_tests/configs/modules/landscape.yaml b/tests/cloud_tests/testcases/modules/landscape.yaml
index ed2c37c4..ed2c37c4 100644
--- a/tests/cloud_tests/configs/modules/landscape.yaml
+++ b/tests/cloud_tests/testcases/modules/landscape.yaml
diff --git a/tests/cloud_tests/configs/modules/locale.yaml b/tests/cloud_tests/testcases/modules/locale.yaml
index e01518a1..e01518a1 100644
--- a/tests/cloud_tests/configs/modules/locale.yaml
+++ b/tests/cloud_tests/testcases/modules/locale.yaml
diff --git a/tests/cloud_tests/configs/modules/lxd_bridge.yaml b/tests/cloud_tests/testcases/modules/lxd_bridge.yaml
index e6b7e76a..e6b7e76a 100644
--- a/tests/cloud_tests/configs/modules/lxd_bridge.yaml
+++ b/tests/cloud_tests/testcases/modules/lxd_bridge.yaml
diff --git a/tests/cloud_tests/configs/modules/lxd_dir.yaml b/tests/cloud_tests/testcases/modules/lxd_dir.yaml
index f93a3fa7..f93a3fa7 100644
--- a/tests/cloud_tests/configs/modules/lxd_dir.yaml
+++ b/tests/cloud_tests/testcases/modules/lxd_dir.yaml
diff --git a/tests/cloud_tests/configs/modules/ntp.yaml b/tests/cloud_tests/testcases/modules/ntp.yaml
index fbef431b..fbef431b 100644
--- a/tests/cloud_tests/configs/modules/ntp.yaml
+++ b/tests/cloud_tests/testcases/modules/ntp.yaml
diff --git a/tests/cloud_tests/configs/modules/ntp_pools.yaml b/tests/cloud_tests/testcases/modules/ntp_pools.yaml
index 3a93faa2..3a93faa2 100644
--- a/tests/cloud_tests/configs/modules/ntp_pools.yaml
+++ b/tests/cloud_tests/testcases/modules/ntp_pools.yaml
diff --git a/tests/cloud_tests/configs/modules/ntp_servers.yaml b/tests/cloud_tests/testcases/modules/ntp_servers.yaml
index d59d45a8..d59d45a8 100644
--- a/tests/cloud_tests/configs/modules/ntp_servers.yaml
+++ b/tests/cloud_tests/testcases/modules/ntp_servers.yaml
diff --git a/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml
index 71d24b83..71d24b83 100644
--- a/tests/cloud_tests/configs/modules/package_update_upgrade_install.yaml
+++ b/tests/cloud_tests/testcases/modules/package_update_upgrade_install.yaml
diff --git a/tests/cloud_tests/configs/modules/runcmd.yaml b/tests/cloud_tests/testcases/modules/runcmd.yaml
index 04e5a050..04e5a050 100644
--- a/tests/cloud_tests/configs/modules/runcmd.yaml
+++ b/tests/cloud_tests/testcases/modules/runcmd.yaml
diff --git a/tests/cloud_tests/configs/modules/salt_minion.yaml b/tests/cloud_tests/testcases/modules/salt_minion.yaml
index f20d24f0..f20d24f0 100644
--- a/tests/cloud_tests/configs/modules/salt_minion.yaml
+++ b/tests/cloud_tests/testcases/modules/salt_minion.yaml
diff --git a/tests/cloud_tests/configs/modules/seed_random_command.yaml b/tests/cloud_tests/testcases/modules/seed_random_command.yaml
index 6a9157eb..6a9157eb 100644
--- a/tests/cloud_tests/configs/modules/seed_random_command.yaml
+++ b/tests/cloud_tests/testcases/modules/seed_random_command.yaml
diff --git a/tests/cloud_tests/configs/modules/seed_random_data.yaml b/tests/cloud_tests/testcases/modules/seed_random_data.yaml
index a9b2c885..a9b2c885 100644
--- a/tests/cloud_tests/configs/modules/seed_random_data.yaml
+++ b/tests/cloud_tests/testcases/modules/seed_random_data.yaml
diff --git a/tests/cloud_tests/configs/modules/set_hostname.yaml b/tests/cloud_tests/testcases/modules/set_hostname.yaml
index c96344cf..c96344cf 100644
--- a/tests/cloud_tests/configs/modules/set_hostname.yaml
+++ b/tests/cloud_tests/testcases/modules/set_hostname.yaml
diff --git a/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml
index daf75931..daf75931 100644
--- a/tests/cloud_tests/configs/modules/set_hostname_fqdn.yaml
+++ b/tests/cloud_tests/testcases/modules/set_hostname_fqdn.yaml
diff --git a/tests/cloud_tests/configs/modules/set_password.yaml b/tests/cloud_tests/testcases/modules/set_password.yaml
index 04d7c58a..04d7c58a 100644
--- a/tests/cloud_tests/configs/modules/set_password.yaml
+++ b/tests/cloud_tests/testcases/modules/set_password.yaml
diff --git a/tests/cloud_tests/configs/modules/set_password_expire.yaml b/tests/cloud_tests/testcases/modules/set_password_expire.yaml
index 789604b0..789604b0 100644
--- a/tests/cloud_tests/configs/modules/set_password_expire.yaml
+++ b/tests/cloud_tests/testcases/modules/set_password_expire.yaml
diff --git a/tests/cloud_tests/configs/modules/set_password_list.yaml b/tests/cloud_tests/testcases/modules/set_password_list.yaml
index a2a89c9d..a2a89c9d 100644
--- a/tests/cloud_tests/configs/modules/set_password_list.yaml
+++ b/tests/cloud_tests/testcases/modules/set_password_list.yaml
diff --git a/tests/cloud_tests/configs/modules/set_password_list_string.yaml b/tests/cloud_tests/testcases/modules/set_password_list_string.yaml
index c2a0f631..c2a0f631 100644
--- a/tests/cloud_tests/configs/modules/set_password_list_string.yaml
+++ b/tests/cloud_tests/testcases/modules/set_password_list_string.yaml
diff --git a/tests/cloud_tests/configs/modules/snappy.yaml b/tests/cloud_tests/testcases/modules/snappy.yaml
index 43f93295..43f93295 100644
--- a/tests/cloud_tests/configs/modules/snappy.yaml
+++ b/tests/cloud_tests/testcases/modules/snappy.yaml
diff --git a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml
index 746653ec..746653ec 100644
--- a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_disable.yaml
+++ b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_disable.yaml
diff --git a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml
index 9f5dc34a..9f5dc34a 100644
--- a/tests/cloud_tests/configs/modules/ssh_auth_key_fingerprints_enable.yaml
+++ b/tests/cloud_tests/testcases/modules/ssh_auth_key_fingerprints_enable.yaml
diff --git a/tests/cloud_tests/configs/modules/ssh_import_id.yaml b/tests/cloud_tests/testcases/modules/ssh_import_id.yaml
index b62d3f69..b62d3f69 100644
--- a/tests/cloud_tests/configs/modules/ssh_import_id.yaml
+++ b/tests/cloud_tests/testcases/modules/ssh_import_id.yaml
diff --git a/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml b/tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml
index 659fd939..659fd939 100644
--- a/tests/cloud_tests/configs/modules/ssh_keys_generate.yaml
+++ b/tests/cloud_tests/testcases/modules/ssh_keys_generate.yaml
diff --git a/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml b/tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml
index 5ceb3623..5ceb3623 100644
--- a/tests/cloud_tests/configs/modules/ssh_keys_provided.yaml
+++ b/tests/cloud_tests/testcases/modules/ssh_keys_provided.yaml
diff --git a/tests/cloud_tests/configs/modules/timezone.yaml b/tests/cloud_tests/testcases/modules/timezone.yaml
index 5112aa9f..5112aa9f 100644
--- a/tests/cloud_tests/configs/modules/timezone.yaml
+++ b/tests/cloud_tests/testcases/modules/timezone.yaml
diff --git a/tests/cloud_tests/configs/modules/user_groups.yaml b/tests/cloud_tests/testcases/modules/user_groups.yaml
index 71cc9da3..71cc9da3 100644
---
a/tests/cloud_tests/configs/modules/user_groups.yaml +++ b/tests/cloud_tests/testcases/modules/user_groups.yaml diff --git a/tests/cloud_tests/configs/modules/write_files.yaml b/tests/cloud_tests/testcases/modules/write_files.yaml index ce936b7b..ce936b7b 100644 --- a/tests/cloud_tests/configs/modules/write_files.yaml +++ b/tests/cloud_tests/testcases/modules/write_files.yaml diff --git a/tests/cloud_tests/util.py b/tests/cloud_tests/util.py index 2bbe21c7..4357fbb0 100644 --- a/tests/cloud_tests/util.py +++ b/tests/cloud_tests/util.py @@ -2,12 +2,14 @@ """Utilities for re-use across integration tests.""" +import base64 import copy import glob import os import random import shutil import string +import subprocess import tempfile import yaml @@ -242,6 +244,47 @@ def update_user_data(user_data, updates, dump_to_yaml=True): if dump_to_yaml else user_data) +def shell_safe(cmd): + """Produce string safe shell string. + + Create a string that can be passed to: + set -- <string> + to produce the same array that cmd represents. + + Internally we utilize 'getopt's ability/knowledge on how to quote + strings to be safe for shell. This implementation could be changed + to be pure python. It is just a matter of correctly escaping + or quoting characters like: ' " ^ & $ ; ( ) ... + + @param cmd: command as a list + """ + out = subprocess.check_output( + ["getopt", "--shell", "sh", "--options", "", "--", "--"] + list(cmd)) + # out contains ' -- <data>\n'. drop the ' -- ' and the '\n' + return out[4:-1].decode() + + +def shell_pack(cmd): + """Return a string that can shuffled through 'sh' and execute cmd. + + In Python subprocess terms: + check_output(cmd) == check_output(shell_pack(cmd), shell=True) + + @param cmd: list or string of command to pack up + """ + + if isinstance(cmd, str): + cmd = [cmd] + else: + cmd = list(cmd) + + stuffed = shell_safe(cmd) + # for whatever reason b64encode returns bytes when it is clearly + # representable as a string by nature of being base64 encoded. + b64 = base64.b64encode(stuffed.encode()).decode() + return 'eval set -- "$(echo %s | base64 --decode)" && exec "$@"' % b64 + + class InTargetExecuteError(c_util.ProcessExecutionError): """Error type for in target commands that fail.""" diff --git a/tests/unittests/helpers.py b/tests/unittests/helpers.py deleted file mode 100644 index 08c5c469..00000000 --- a/tests/unittests/helpers.py +++ /dev/null @@ -1,391 +0,0 @@ -# This file is part of cloud-init. See LICENSE file for license information. 
- -from __future__ import print_function - -import functools -import json -import logging -import os -import shutil -import sys -import tempfile -import unittest - -import mock -import six -import unittest2 - -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - -from cloudinit import helpers as ch -from cloudinit import util - -# Used for skipping tests -SkipTest = unittest2.SkipTest - -# Used for detecting different python versions -PY2 = False -PY26 = False -PY27 = False -PY3 = False - -_PY_VER = sys.version_info -_PY_MAJOR, _PY_MINOR, _PY_MICRO = _PY_VER[0:3] -if (_PY_MAJOR, _PY_MINOR) <= (2, 6): - if (_PY_MAJOR, _PY_MINOR) == (2, 6): - PY26 = True - if (_PY_MAJOR, _PY_MINOR) >= (2, 0): - PY2 = True -else: - if (_PY_MAJOR, _PY_MINOR) == (2, 7): - PY27 = True - PY2 = True - if (_PY_MAJOR, _PY_MINOR) >= (3, 0): - PY3 = True - - -# Makes the old path start -# with new base instead of whatever -# it previously had -def rebase_path(old_path, new_base): - if old_path.startswith(new_base): - # Already handled... - return old_path - # Retarget the base of that path - # to the new base instead of the - # old one... - path = os.path.join(new_base, old_path.lstrip("/")) - path = os.path.abspath(path) - return path - - -# Can work on anything that takes a path as arguments -def retarget_many_wrapper(new_base, am, old_func): - def wrapper(*args, **kwds): - n_args = list(args) - nam = am - if am == -1: - nam = len(n_args) - for i in range(0, nam): - path = args[i] - # patchOS() wraps various os and os.path functions, however in - # Python 3 some of these now accept file-descriptors (integers). - # That breaks rebase_path() so in lieu of a better solution, just - # don't rebase if we get a fd. - if isinstance(path, six.string_types): - n_args[i] = rebase_path(path, new_base) - return old_func(*n_args, **kwds) - return wrapper - - -class TestCase(unittest2.TestCase): - def reset_global_state(self): - """Reset any global state to its original settings. - - cloudinit caches some values in cloudinit.util. Unit tests that - involved those cached paths were then subject to failure if the order - of invocation changed (LP: #1703697). - - This function resets any of these global state variables to their - initial state. - - In the future this should really be done with some registry that - can then be cleaned in a more obvious way. - """ - util.PROC_CMDLINE = None - util._DNS_REDIRECT_IP = None - util._LSB_RELEASE = {} - - def setUp(self): - super(unittest2.TestCase, self).setUp() - self.reset_global_state() - - -class CiTestCase(TestCase): - """This is the preferred test case base class unless user - needs other test case classes below.""" - - # Subclass overrides for specific test behavior - # Whether or not a unit test needs logfile setup - with_logs = False - - def setUp(self): - super(CiTestCase, self).setUp() - if self.with_logs: - # Create a log handler so unit tests can search expected logs. - self.logger = logging.getLogger() - self.logs = six.StringIO() - formatter = logging.Formatter('%(levelname)s: %(message)s') - handler = logging.StreamHandler(self.logs) - handler.setFormatter(formatter) - self.old_handlers = self.logger.handlers - self.logger.handlers = [handler] - - def tearDown(self): - if self.with_logs: - # Remove the handler we setup - logging.getLogger().handlers = self.old_handlers - super(CiTestCase, self).tearDown() - - def tmp_dir(self, dir=None, cleanup=True): - # return a full path to a temporary directory that will be cleaned up. 
- if dir is None: - tmpd = tempfile.mkdtemp( - prefix="ci-%s." % self.__class__.__name__) - else: - tmpd = tempfile.mkdtemp(dir=dir) - self.addCleanup(functools.partial(shutil.rmtree, tmpd)) - return tmpd - - def tmp_path(self, path, dir=None): - # return an absolute path to 'path' under dir. - # if dir is None, one will be created with tmp_dir() - # the file is not created or modified. - if dir is None: - dir = self.tmp_dir() - return os.path.normpath(os.path.abspath(os.path.join(dir, path))) - - -class ResourceUsingTestCase(CiTestCase): - def setUp(self): - super(ResourceUsingTestCase, self).setUp() - self.resource_path = None - - def resourceLocation(self, subname=None): - if self.resource_path is None: - paths = [ - os.path.join('tests', 'data'), - os.path.join('data'), - os.path.join(os.pardir, 'tests', 'data'), - os.path.join(os.pardir, 'data'), - ] - for p in paths: - if os.path.isdir(p): - self.resource_path = p - break - self.assertTrue((self.resource_path and - os.path.isdir(self.resource_path)), - msg="Unable to locate test resource data path!") - if not subname: - return self.resource_path - return os.path.join(self.resource_path, subname) - - def readResource(self, name): - where = self.resourceLocation(name) - with open(where, 'r') as fh: - return fh.read() - - def getCloudPaths(self, ds=None): - tmpdir = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, tmpdir) - cp = ch.Paths({'cloud_dir': tmpdir, - 'templates_dir': self.resourceLocation()}, - ds=ds) - return cp - - -class FilesystemMockingTestCase(ResourceUsingTestCase): - def setUp(self): - super(FilesystemMockingTestCase, self).setUp() - self.patched_funcs = ExitStack() - - def tearDown(self): - self.patched_funcs.close() - ResourceUsingTestCase.tearDown(self) - - def replicateTestRoot(self, example_root, target_root): - real_root = self.resourceLocation() - real_root = os.path.join(real_root, 'roots', example_root) - for (dir_path, _dirnames, filenames) in os.walk(real_root): - real_path = dir_path - make_path = rebase_path(real_path[len(real_root):], target_root) - util.ensure_dir(make_path) - for f in filenames: - real_path = util.abs_join(real_path, f) - make_path = util.abs_join(make_path, f) - shutil.copy(real_path, make_path) - - def patchUtils(self, new_root): - patch_funcs = { - util: [('write_file', 1), - ('append_file', 1), - ('load_file', 1), - ('ensure_dir', 1), - ('chmod', 1), - ('delete_dir_contents', 1), - ('del_file', 1), - ('sym_link', -1), - ('copy', -1)], - } - for (mod, funcs) in patch_funcs.items(): - for (f, am) in funcs: - func = getattr(mod, f) - trap_func = retarget_many_wrapper(new_root, am, func) - self.patched_funcs.enter_context( - mock.patch.object(mod, f, trap_func)) - - # Handle subprocess calls - func = getattr(util, 'subp') - - def nsubp(*_args, **_kwargs): - return ('', '') - - self.patched_funcs.enter_context( - mock.patch.object(util, 'subp', nsubp)) - - def null_func(*_args, **_kwargs): - return None - - for f in ['chownbyid', 'chownbyname']: - self.patched_funcs.enter_context( - mock.patch.object(util, f, null_func)) - - def patchOS(self, new_root): - patch_funcs = { - os.path: [('isfile', 1), ('exists', 1), - ('islink', 1), ('isdir', 1)], - os: [('listdir', 1), ('mkdir', 1), - ('lstat', 1), ('symlink', 2)], - } - for (mod, funcs) in patch_funcs.items(): - for f, nargs in funcs: - func = getattr(mod, f) - trap_func = retarget_many_wrapper(new_root, nargs, func) - self.patched_funcs.enter_context( - mock.patch.object(mod, f, trap_func)) - - def patchOpen(self, new_root): - 
trap_func = retarget_many_wrapper(new_root, 1, open) - name = 'builtins.open' if PY3 else '__builtin__.open' - self.patched_funcs.enter_context(mock.patch(name, trap_func)) - - def patchStdoutAndStderr(self, stdout=None, stderr=None): - if stdout is not None: - self.patched_funcs.enter_context( - mock.patch.object(sys, 'stdout', stdout)) - if stderr is not None: - self.patched_funcs.enter_context( - mock.patch.object(sys, 'stderr', stderr)) - - def reRoot(self, root=None): - if root is None: - root = self.tmp_dir() - self.patchUtils(root) - self.patchOS(root) - return root - - -class HttprettyTestCase(TestCase): - # necessary as http_proxy gets in the way of httpretty - # https://github.com/gabrielfalcao/HTTPretty/issues/122 - def setUp(self): - self.restore_proxy = os.environ.get('http_proxy') - if self.restore_proxy is not None: - del os.environ['http_proxy'] - super(HttprettyTestCase, self).setUp() - - def tearDown(self): - if self.restore_proxy: - os.environ['http_proxy'] = self.restore_proxy - super(HttprettyTestCase, self).tearDown() - - -def populate_dir(path, files): - if not os.path.exists(path): - os.makedirs(path) - ret = [] - for (name, content) in files.items(): - p = os.path.sep.join([path, name]) - util.ensure_dir(os.path.dirname(p)) - with open(p, "wb") as fp: - if isinstance(content, six.binary_type): - fp.write(content) - else: - fp.write(content.encode('utf-8')) - fp.close() - ret.append(p) - - return ret - - -def dir2dict(startdir, prefix=None): - flist = {} - if prefix is None: - prefix = startdir - for root, dirs, files in os.walk(startdir): - for fname in files: - fpath = os.path.join(root, fname) - key = fpath[len(prefix):] - flist[key] = util.load_file(fpath) - return flist - - -def json_dumps(data): - # print data in nicely formatted json. - return json.dumps(data, indent=1, sort_keys=True, - separators=(',', ': ')) - - -def wrap_and_call(prefix, mocks, func, *args, **kwargs): - """ - call func(args, **kwargs) with mocks applied, then unapplies mocks - nicer to read than repeating dectorators on each function - - prefix: prefix for mock names (e.g. 'cloudinit.stages.util') or None - mocks: dictionary of names (under 'prefix') to mock and either - a return value or a dictionary to pass to the mock.patch call - func: function to call with mocks applied - *args,**kwargs: arguments for 'func' - - return_value: return from 'func' - """ - delim = '.' - if prefix is None: - prefix = '' - prefix = prefix.rstrip(delim) - unwraps = [] - for fname, kw in mocks.items(): - if prefix: - fname = delim.join((prefix, fname)) - if not isinstance(kw, dict): - kw = {'return_value': kw} - p = mock.patch(fname, **kw) - p.start() - unwraps.append(p) - try: - return func(*args, **kwargs) - finally: - for p in unwraps: - p.stop() - - -try: - skipIf = unittest.skipIf -except AttributeError: - # Python 2.6. Doesn't have to be high fidelity. - def skipIf(condition, reason): - def decorator(func): - def wrapper(*args, **kws): - if condition: - return func(*args, **kws) - else: - print(reason, file=sys.stderr) - return wrapper - return decorator - - -# older versions of mock do not have the useful 'assert_not_called' -if not hasattr(mock.Mock, 'assert_not_called'): - def __mock_assert_not_called(mmock): - if mmock.call_count != 0: - msg = ("[citest] Expected '%s' to not have been called. " - "Called %s times." 
% - (mmock._mock_name or 'mock', mmock.call_count)) - raise AssertionError(msg) - mock.Mock.assert_not_called = __mock_assert_not_called - - -# vi: ts=4 expandtab diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py index 781f6d54..25878d7a 100644 --- a/tests/unittests/test__init__.py +++ b/tests/unittests/test__init__.py @@ -12,7 +12,7 @@ from cloudinit import settings from cloudinit import url_helper from cloudinit import util -from .helpers import TestCase, CiTestCase, ExitStack, mock +from cloudinit.tests.helpers import TestCase, CiTestCase, ExitStack, mock class FakeModule(handlers.Handler): diff --git a/tests/unittests/test_atomic_helper.py b/tests/unittests/test_atomic_helper.py index 515919d8..0101b0e3 100644 --- a/tests/unittests/test_atomic_helper.py +++ b/tests/unittests/test_atomic_helper.py @@ -6,7 +6,7 @@ import stat from cloudinit import atomic_helper -from .helpers import CiTestCase +from cloudinit.tests.helpers import CiTestCase class TestAtomicHelper(CiTestCase): diff --git a/tests/unittests/test_builtin_handlers.py b/tests/unittests/test_builtin_handlers.py index dd9d0357..9751ed95 100644 --- a/tests/unittests/test_builtin_handlers.py +++ b/tests/unittests/test_builtin_handlers.py @@ -11,7 +11,7 @@ try: except ImportError: import mock -from . import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from cloudinit import handlers from cloudinit import helpers diff --git a/tests/unittests/test_cli.py b/tests/unittests/test_cli.py index 06f366b2..fccbbd23 100644 --- a/tests/unittests/test_cli.py +++ b/tests/unittests/test_cli.py @@ -2,7 +2,7 @@ import six -from . import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from cloudinit.cmd import main as cli @@ -31,9 +31,151 @@ class TestCLI(test_helpers.FilesystemMockingTestCase): def test_no_arguments_shows_error_message(self): exit_code = self._call_main() - self.assertIn('cloud-init: error: too few arguments', - self.stderr.getvalue()) + missing_subcommand_message = [ + 'too few arguments', # python2.7 msg + 'the following arguments are required: subcommand' # python3 msg + ] + error = self.stderr.getvalue() + matches = ([msg in error for msg in missing_subcommand_message]) + self.assertTrue( + any(matches), 'Did not find error message for missing subcommand') self.assertEqual(2, exit_code) + def test_all_subcommands_represented_in_help(self): + """All known subparsers are represented in the cloud-int help doc.""" + self._call_main() + error = self.stderr.getvalue() + expected_subcommands = ['analyze', 'init', 'modules', 'single', + 'dhclient-hook', 'features', 'devel'] + for subcommand in expected_subcommands: + self.assertIn(subcommand, error) -# vi: ts=4 expandtab + @mock.patch('cloudinit.cmd.main.status_wrapper') + def test_init_subcommand_parser(self, m_status_wrapper): + """The subcommand 'init' calls status_wrapper passing init.""" + self._call_main(['cloud-init', 'init']) + (name, parseargs) = m_status_wrapper.call_args_list[0][0] + self.assertEqual('init', name) + self.assertEqual('init', parseargs.subcommand) + self.assertEqual('init', parseargs.action[0]) + self.assertEqual('main_init', parseargs.action[1].__name__) + + @mock.patch('cloudinit.cmd.main.status_wrapper') + def test_modules_subcommand_parser(self, m_status_wrapper): + """The subcommand 'modules' calls status_wrapper passing modules.""" + self._call_main(['cloud-init', 'modules']) + (name, parseargs) = m_status_wrapper.call_args_list[0][0] + self.assertEqual('modules', 
name) + self.assertEqual('modules', parseargs.subcommand) + self.assertEqual('modules', parseargs.action[0]) + self.assertEqual('main_modules', parseargs.action[1].__name__) + + def test_conditional_subcommands_from_entry_point_sys_argv(self): + """Subcommands from entry-point are properly parsed from sys.argv.""" + stdout = six.StringIO() + self.patchStdoutAndStderr(stdout=stdout) + + expected_errors = [ + 'usage: cloud-init analyze', 'usage: cloud-init collect-logs', + 'usage: cloud-init devel'] + conditional_subcommands = ['analyze', 'collect-logs', 'devel'] + # The cloud-init entrypoint calls main without passing sys_argv + for subcommand in conditional_subcommands: + with mock.patch('sys.argv', ['cloud-init', subcommand, '-h']): + try: + cli.main() + except SystemExit as e: + self.assertEqual(0, e.code) # exit 2 on proper -h usage + for error_message in expected_errors: + self.assertIn(error_message, stdout.getvalue()) + + def test_analyze_subcommand_parser(self): + """The subcommand cloud-init analyze calls the correct subparser.""" + self._call_main(['cloud-init', 'analyze']) + # These subcommands only valid for cloud-init analyze script + expected_subcommands = ['blame', 'show', 'dump'] + error = self.stderr.getvalue() + for subcommand in expected_subcommands: + self.assertIn(subcommand, error) + + def test_collect_logs_subcommand_parser(self): + """The subcommand cloud-init collect-logs calls the subparser.""" + # Provide -h param to collect-logs to avoid having to mock behavior. + stdout = six.StringIO() + self.patchStdoutAndStderr(stdout=stdout) + self._call_main(['cloud-init', 'collect-logs', '-h']) + self.assertIn('usage: cloud-init collect-log', stdout.getvalue()) + + def test_devel_subcommand_parser(self): + """The subcommand cloud-init devel calls the correct subparser.""" + self._call_main(['cloud-init', 'devel']) + # These subcommands only valid for cloud-init schema script + expected_subcommands = ['schema'] + error = self.stderr.getvalue() + for subcommand in expected_subcommands: + self.assertIn(subcommand, error) + + @mock.patch('cloudinit.config.schema.handle_schema_args') + def test_wb_devel_schema_subcommand_parser(self, m_schema): + """The subcommand cloud-init schema calls the correct subparser.""" + exit_code = self._call_main(['cloud-init', 'devel', 'schema']) + self.assertEqual(1, exit_code) + # Known whitebox output from schema subcommand + self.assertEqual( + 'Expected either --config-file argument or --doc\n', + self.stderr.getvalue()) + + def test_wb_devel_schema_subcommand_doc_content(self): + """Validate that doc content is sane from known examples.""" + stdout = six.StringIO() + self.patchStdoutAndStderr(stdout=stdout) + self._call_main(['cloud-init', 'devel', 'schema', '--doc']) + expected_doc_sections = [ + '**Supported distros:** all', + '**Supported distros:** centos, debian, fedora', + '**Config schema**:\n **resize_rootfs:** (true/false/noblock)', + '**Examples**::\n\n runcmd:\n - [ ls, -l, / ]\n' + ] + stdout = stdout.getvalue() + for expected in expected_doc_sections: + self.assertIn(expected, stdout) + + @mock.patch('cloudinit.cmd.main.main_single') + def test_single_subcommand(self, m_main_single): + """The subcommand 'single' calls main_single with valid args.""" + self._call_main(['cloud-init', 'single', '--name', 'cc_ntp']) + (name, parseargs) = m_main_single.call_args_list[0][0] + self.assertEqual('single', name) + self.assertEqual('single', parseargs.subcommand) + self.assertEqual('single', parseargs.action[0]) + 
self.assertFalse(parseargs.debug) + self.assertFalse(parseargs.force) + self.assertIsNone(parseargs.frequency) + self.assertEqual('cc_ntp', parseargs.name) + self.assertFalse(parseargs.report) + + @mock.patch('cloudinit.cmd.main.dhclient_hook') + def test_dhclient_hook_subcommand(self, m_dhclient_hook): + """The subcommand 'dhclient-hook' calls dhclient_hook with args.""" + self._call_main(['cloud-init', 'dhclient-hook', 'net_action', 'eth0']) + (name, parseargs) = m_dhclient_hook.call_args_list[0][0] + self.assertEqual('dhclient_hook', name) + self.assertEqual('dhclient-hook', parseargs.subcommand) + self.assertEqual('dhclient_hook', parseargs.action[0]) + self.assertFalse(parseargs.debug) + self.assertFalse(parseargs.force) + self.assertEqual('net_action', parseargs.net_action) + self.assertEqual('eth0', parseargs.net_interface) + + @mock.patch('cloudinit.cmd.main.main_features') + def test_features_hook_subcommand(self, m_features): + """The subcommand 'features' calls main_features with args.""" + self._call_main(['cloud-init', 'features']) + (name, parseargs) = m_features.call_args_list[0][0] + self.assertEqual('features', name) + self.assertEqual('features', parseargs.subcommand) + self.assertEqual('features', parseargs.action[0]) + self.assertFalse(parseargs.debug) + self.assertFalse(parseargs.force) + +# vi: ts=4 expandtab diff --git a/tests/unittests/test_cs_util.py b/tests/unittests/test_cs_util.py index b8f5031c..ee88520d 100644 --- a/tests/unittests/test_cs_util.py +++ b/tests/unittests/test_cs_util.py @@ -2,7 +2,7 @@ from __future__ import print_function -from . import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from cloudinit.cs_utils import Cepko diff --git a/tests/unittests/test_data.py b/tests/unittests/test_data.py index 4ad86bb6..6d621d26 100644 --- a/tests/unittests/test_data.py +++ b/tests/unittests/test_data.py @@ -27,7 +27,7 @@ from cloudinit import stages from cloudinit import user_data as ud from cloudinit import util -from . import helpers +from cloudinit.tests import helpers INSTANCE_ID = "i-testing" diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py index 990bff2c..82ee9714 100644 --- a/tests/unittests/test_datasource/test_aliyun.py +++ b/tests/unittests/test_datasource/test_aliyun.py @@ -5,9 +5,9 @@ import httpretty import mock import os -from .. 
import helpers as test_helpers from cloudinit import helpers from cloudinit.sources import DataSourceAliYun as ay +from cloudinit.tests import helpers as test_helpers DEFAULT_METADATA = { 'instance-id': 'aliyun-test-vm-00', @@ -70,7 +70,6 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase): paths = helpers.Paths({}) self.ds = ay.DataSourceAliYun(cfg, distro, paths) self.metadata_address = self.ds.metadata_urls[0] - self.api_ver = self.ds.api_ver @property def default_metadata(self): @@ -82,13 +81,15 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase): @property def metadata_url(self): - return os.path.join(self.metadata_address, - self.api_ver, 'meta-data') + '/' + return os.path.join( + self.metadata_address, + self.ds.min_metadata_version, 'meta-data') + '/' @property def userdata_url(self): - return os.path.join(self.metadata_address, - self.api_ver, 'user-data') + return os.path.join( + self.metadata_address, + self.ds.min_metadata_version, 'user-data') def regist_default_server(self): register_mock_metaserver(self.metadata_url, self.default_metadata) diff --git a/tests/unittests/test_datasource/test_altcloud.py b/tests/unittests/test_datasource/test_altcloud.py index 9c46abc1..a4dfb540 100644 --- a/tests/unittests/test_datasource/test_altcloud.py +++ b/tests/unittests/test_datasource/test_altcloud.py @@ -18,7 +18,7 @@ import tempfile from cloudinit import helpers from cloudinit import util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import cloudinit.sources.DataSourceAltCloud as dsac @@ -280,8 +280,8 @@ class TestUserDataRhevm(TestCase): pass dsac.CLOUD_INFO_FILE = '/etc/sysconfig/cloud-info' - dsac.CMD_PROBE_FLOPPY = ['/sbin/modprobe', 'floppy'] - dsac.CMD_UDEVADM_SETTLE = ['/sbin/udevadm', 'settle', + dsac.CMD_PROBE_FLOPPY = ['modprobe', 'floppy'] + dsac.CMD_UDEVADM_SETTLE = ['udevadm', 'settle', '--quiet', '--timeout=5'] def test_mount_cb_fails(self): diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 20e70fb7..0a117771 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -6,8 +6,8 @@ from cloudinit.sources import DataSourceAzure as dsaz from cloudinit.util import find_freebsd_part from cloudinit.util import get_path_dev_freebsd -from ..helpers import (CiTestCase, TestCase, populate_dir, mock, - ExitStack, PY26, SkipTest) +from cloudinit.tests.helpers import (CiTestCase, TestCase, populate_dir, mock, + ExitStack, PY26, SkipTest) import crypt import os @@ -871,6 +871,7 @@ class TestLoadAzureDsDir(CiTestCase): class TestReadAzureOvf(TestCase): + def test_invalid_xml_raises_non_azure_ds(self): invalid_xml = "<foo>" + construct_valid_ovf_env(data={}) self.assertRaises(dsaz.BrokenAzureDataSource, @@ -1079,6 +1080,7 @@ class TestCanDevBeReformatted(CiTestCase): class TestAzureNetExists(CiTestCase): + def test_azure_net_must_exist_for_legacy_objpkl(self): """DataSourceAzureNet must exist for old obj.pkl files that reference it.""" diff --git a/tests/unittests/test_datasource/test_azure_helper.py b/tests/unittests/test_datasource/test_azure_helper.py index b2d2971b..b42b073f 100644 --- a/tests/unittests/test_datasource/test_azure_helper.py +++ b/tests/unittests/test_datasource/test_azure_helper.py @@ -1,10 +1,12 @@ # This file is part of cloud-init. See LICENSE file for license information. 
import os +from textwrap import dedent from cloudinit.sources.helpers import azure as azure_helper -from ..helpers import ExitStack, mock, TestCase +from cloudinit.tests.helpers import CiTestCase, ExitStack, mock, populate_dir +from cloudinit.sources.helpers.azure import WALinuxAgentShim as wa_shim GOAL_STATE_TEMPLATE = """\ <?xml version="1.0" encoding="utf-8"?> @@ -45,7 +47,7 @@ GOAL_STATE_TEMPLATE = """\ """ -class TestFindEndpoint(TestCase): +class TestFindEndpoint(CiTestCase): def setUp(self): super(TestFindEndpoint, self).setUp() @@ -56,18 +58,19 @@ class TestFindEndpoint(TestCase): mock.patch.object(azure_helper.util, 'load_file')) self.dhcp_options = patches.enter_context( - mock.patch.object(azure_helper.WALinuxAgentShim, - '_load_dhclient_json')) + mock.patch.object(wa_shim, '_load_dhclient_json')) + + self.networkd_leases = patches.enter_context( + mock.patch.object(wa_shim, '_networkd_get_value_from_leases')) + self.networkd_leases.return_value = None def test_missing_file(self): - self.assertRaises(ValueError, - azure_helper.WALinuxAgentShim.find_endpoint) + self.assertRaises(ValueError, wa_shim.find_endpoint) def test_missing_special_azure_line(self): self.load_file.return_value = '' self.dhcp_options.return_value = {'eth0': {'key': 'value'}} - self.assertRaises(ValueError, - azure_helper.WALinuxAgentShim.find_endpoint) + self.assertRaises(ValueError, wa_shim.find_endpoint) @staticmethod def _build_lease_content(encoded_address): @@ -80,8 +83,7 @@ class TestFindEndpoint(TestCase): def test_from_dhcp_client(self): self.dhcp_options.return_value = {"eth0": {"unknown_245": "5:4:3:2"}} - self.assertEqual('5.4.3.2', - azure_helper.WALinuxAgentShim.find_endpoint(None)) + self.assertEqual('5.4.3.2', wa_shim.find_endpoint(None)) def test_latest_lease_used(self): encoded_addresses = ['5:4:3:2', '4:3:2:1'] @@ -89,53 +91,38 @@ class TestFindEndpoint(TestCase): for encoded_address in encoded_addresses]) self.load_file.return_value = file_content self.assertEqual(encoded_addresses[-1].replace(':', '.'), - azure_helper.WALinuxAgentShim.find_endpoint("foobar")) + wa_shim.find_endpoint("foobar")) -class TestExtractIpAddressFromLeaseValue(TestCase): +class TestExtractIpAddressFromLeaseValue(CiTestCase): def test_hex_string(self): ip_address, encoded_address = '98.76.54.32', '62:4c:36:20' self.assertEqual( - ip_address, - azure_helper.WALinuxAgentShim.get_ip_from_lease_value( - encoded_address - )) + ip_address, wa_shim.get_ip_from_lease_value(encoded_address)) def test_hex_string_with_single_character_part(self): ip_address, encoded_address = '4.3.2.1', '4:3:2:1' self.assertEqual( - ip_address, - azure_helper.WALinuxAgentShim.get_ip_from_lease_value( - encoded_address - )) + ip_address, wa_shim.get_ip_from_lease_value(encoded_address)) def test_packed_string(self): ip_address, encoded_address = '98.76.54.32', 'bL6 ' self.assertEqual( - ip_address, - azure_helper.WALinuxAgentShim.get_ip_from_lease_value( - encoded_address - )) + ip_address, wa_shim.get_ip_from_lease_value(encoded_address)) def test_packed_string_with_escaped_quote(self): ip_address, encoded_address = '100.72.34.108', 'dH\\"l' self.assertEqual( - ip_address, - azure_helper.WALinuxAgentShim.get_ip_from_lease_value( - encoded_address - )) + ip_address, wa_shim.get_ip_from_lease_value(encoded_address)) def test_packed_string_containing_a_colon(self): ip_address, encoded_address = '100.72.58.108', 'dH:l' self.assertEqual( - ip_address, - azure_helper.WALinuxAgentShim.get_ip_from_lease_value( - encoded_address - )) + ip_address, 
wa_shim.get_ip_from_lease_value(encoded_address)) -class TestGoalStateParsing(TestCase): +class TestGoalStateParsing(CiTestCase): default_parameters = { 'incarnation': 1, @@ -195,7 +182,7 @@ class TestGoalStateParsing(TestCase): self.assertIsNone(certificates_xml) -class TestAzureEndpointHttpClient(TestCase): +class TestAzureEndpointHttpClient(CiTestCase): regular_headers = { 'x-ms-agent-name': 'WALinuxAgent', @@ -258,7 +245,7 @@ class TestAzureEndpointHttpClient(TestCase): self.read_file_or_url.call_args) -class TestOpenSSLManager(TestCase): +class TestOpenSSLManager(CiTestCase): def setUp(self): super(TestOpenSSLManager, self).setUp() @@ -275,7 +262,7 @@ class TestOpenSSLManager(TestCase): mock.patch('builtins.open')) @mock.patch.object(azure_helper, 'cd', mock.MagicMock()) - @mock.patch.object(azure_helper.tempfile, 'mkdtemp') + @mock.patch.object(azure_helper.temp_utils, 'mkdtemp') def test_openssl_manager_creates_a_tmpdir(self, mkdtemp): manager = azure_helper.OpenSSLManager() self.assertEqual(mkdtemp.return_value, manager.tmpdir) @@ -292,7 +279,7 @@ class TestOpenSSLManager(TestCase): manager.clean_up() @mock.patch.object(azure_helper, 'cd', mock.MagicMock()) - @mock.patch.object(azure_helper.tempfile, 'mkdtemp', mock.MagicMock()) + @mock.patch.object(azure_helper.temp_utils, 'mkdtemp', mock.MagicMock()) @mock.patch.object(azure_helper.util, 'del_dir') def test_clean_up(self, del_dir): manager = azure_helper.OpenSSLManager() @@ -300,7 +287,7 @@ class TestOpenSSLManager(TestCase): self.assertEqual([mock.call(manager.tmpdir)], del_dir.call_args_list) -class TestWALinuxAgentShim(TestCase): +class TestWALinuxAgentShim(CiTestCase): def setUp(self): super(TestWALinuxAgentShim, self).setUp() @@ -310,8 +297,7 @@ class TestWALinuxAgentShim(TestCase): self.AzureEndpointHttpClient = patches.enter_context( mock.patch.object(azure_helper, 'AzureEndpointHttpClient')) self.find_endpoint = patches.enter_context( - mock.patch.object( - azure_helper.WALinuxAgentShim, 'find_endpoint')) + mock.patch.object(wa_shim, 'find_endpoint')) self.GoalState = patches.enter_context( mock.patch.object(azure_helper, 'GoalState')) self.OpenSSLManager = patches.enter_context( @@ -320,7 +306,7 @@ class TestWALinuxAgentShim(TestCase): mock.patch.object(azure_helper.time, 'sleep', mock.MagicMock())) def test_http_client_uses_certificate(self): - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.register_with_azure_and_fetch_data() self.assertEqual( [mock.call(self.OpenSSLManager.return_value.certificate)], @@ -328,7 +314,7 @@ class TestWALinuxAgentShim(TestCase): def test_correct_url_used_for_goalstate(self): self.find_endpoint.return_value = 'test_endpoint' - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.register_with_azure_and_fetch_data() get = self.AzureEndpointHttpClient.return_value.get self.assertEqual( @@ -340,7 +326,7 @@ class TestWALinuxAgentShim(TestCase): self.GoalState.call_args_list) def test_certificates_used_to_determine_public_keys(self): - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() data = shim.register_with_azure_and_fetch_data() self.assertEqual( [mock.call(self.GoalState.return_value.certificates_xml)], @@ -351,13 +337,13 @@ class TestWALinuxAgentShim(TestCase): def test_absent_certificates_produces_empty_public_keys(self): self.GoalState.return_value.certificates_xml = None - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() data = shim.register_with_azure_and_fetch_data() self.assertEqual([], data['public-keys']) def 
test_correct_url_used_for_report_ready(self): self.find_endpoint.return_value = 'test_endpoint' - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.register_with_azure_and_fetch_data() expected_url = 'http://test_endpoint/machine?comp=health' self.assertEqual( @@ -368,7 +354,7 @@ class TestWALinuxAgentShim(TestCase): self.GoalState.return_value.incarnation = 'TestIncarnation' self.GoalState.return_value.container_id = 'TestContainerId' self.GoalState.return_value.instance_id = 'TestInstanceId' - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.register_with_azure_and_fetch_data() posted_document = ( self.AzureEndpointHttpClient.return_value.post.call_args[1]['data'] @@ -378,11 +364,11 @@ class TestWALinuxAgentShim(TestCase): self.assertIn('TestInstanceId', posted_document) def test_clean_up_can_be_called_at_any_time(self): - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.clean_up() def test_clean_up_will_clean_up_openssl_manager_if_instantiated(self): - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() shim.register_with_azure_and_fetch_data() shim.clean_up() self.assertEqual( @@ -393,12 +379,12 @@ class TestWALinuxAgentShim(TestCase): pass self.AzureEndpointHttpClient.return_value.get.side_effect = ( SentinelException) - shim = azure_helper.WALinuxAgentShim() + shim = wa_shim() self.assertRaises(SentinelException, shim.register_with_azure_and_fetch_data) -class TestGetMetadataFromFabric(TestCase): +class TestGetMetadataFromFabric(CiTestCase): @mock.patch.object(azure_helper, 'WALinuxAgentShim') def test_data_from_shim_returned(self, shim): @@ -422,4 +408,65 @@ class TestGetMetadataFromFabric(TestCase): azure_helper.get_metadata_from_fabric) self.assertEqual(1, shim.return_value.clean_up.call_count) + +class TestExtractIpAddressFromNetworkd(CiTestCase): + + azure_lease = dedent("""\ + # This is private data. Do not parse. + ADDRESS=10.132.0.5 + NETMASK=255.255.255.255 + ROUTER=10.132.0.1 + SERVER_ADDRESS=169.254.169.254 + NEXT_SERVER=10.132.0.1 + MTU=1460 + T1=43200 + T2=75600 + LIFETIME=86400 + DNS=169.254.169.254 + NTP=169.254.169.254 + DOMAINNAME=c.ubuntu-foundations.internal + DOMAIN_SEARCH_LIST=c.ubuntu-foundations.internal google.internal + HOSTNAME=tribaal-test-171002-1349.c.ubuntu-foundations.internal + ROUTES=10.132.0.1/32,0.0.0.0 0.0.0.0/0,10.132.0.1 + CLIENTID=ff405663a200020000ab11332859494d7a8b4c + OPTION_245=624c3620 + """) + + def setUp(self): + super(TestExtractIpAddressFromNetworkd, self).setUp() + self.lease_d = self.tmp_dir() + + def test_no_valid_leases_is_none(self): + """No valid leases should return None.""" + self.assertIsNone( + wa_shim._networkd_get_value_from_leases(self.lease_d)) + + def test_option_245_is_found_in_single(self): + """A single valid lease with 245 option should return it.""" + populate_dir(self.lease_d, {'9': self.azure_lease}) + self.assertEqual( + '624c3620', wa_shim._networkd_get_value_from_leases(self.lease_d)) + + def test_option_245_not_found_returns_None(self): + """A valid lease, but no option 245 should return None.""" + populate_dir( + self.lease_d, + {'9': self.azure_lease.replace("OPTION_245", "OPTION_999")}) + self.assertIsNone( + wa_shim._networkd_get_value_from_leases(self.lease_d)) + + def test_multiple_returns_first(self): + """Somewhat arbitrarily return the first address when multiple. 
+ + Most important at the moment is that this is consistent behavior + rather than changing randomly as in order of a dictionary.""" + myval = "624c3601" + populate_dir( + self.lease_d, + {'9': self.azure_lease, + '2': self.azure_lease.replace("624c3620", myval)}) + self.assertEqual( + myval, wa_shim._networkd_get_value_from_leases(self.lease_d)) + + # vi: ts=4 expandtab diff --git a/tests/unittests/test_datasource/test_cloudsigma.py b/tests/unittests/test_datasource/test_cloudsigma.py index 5997102c..e4c59907 100644 --- a/tests/unittests/test_datasource/test_cloudsigma.py +++ b/tests/unittests/test_datasource/test_cloudsigma.py @@ -6,7 +6,7 @@ from cloudinit.cs_utils import Cepko from cloudinit import sources from cloudinit.sources import DataSourceCloudSigma -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers SERVER_CONTEXT = { "cpu": 1000, diff --git a/tests/unittests/test_datasource/test_cloudstack.py b/tests/unittests/test_datasource/test_cloudstack.py index e94aad61..96144b64 100644 --- a/tests/unittests/test_datasource/test_cloudstack.py +++ b/tests/unittests/test_datasource/test_cloudstack.py @@ -1,12 +1,17 @@ # This file is part of cloud-init. See LICENSE file for license information. from cloudinit import helpers -from cloudinit.sources.DataSourceCloudStack import DataSourceCloudStack +from cloudinit import util +from cloudinit.sources.DataSourceCloudStack import ( + DataSourceCloudStack, get_latest_lease) -from ..helpers import TestCase, mock, ExitStack +from cloudinit.tests.helpers import CiTestCase, ExitStack, mock +import os +import time -class TestCloudStackPasswordFetching(TestCase): + +class TestCloudStackPasswordFetching(CiTestCase): def setUp(self): super(TestCloudStackPasswordFetching, self).setUp() @@ -18,13 +23,16 @@ class TestCloudStackPasswordFetching(TestCase): default_gw = "192.201.20.0" get_latest_lease = mock.MagicMock(return_value=None) self.patches.enter_context(mock.patch( - 'cloudinit.sources.DataSourceCloudStack.get_latest_lease', - get_latest_lease)) + mod_name + '.get_latest_lease', get_latest_lease)) get_default_gw = mock.MagicMock(return_value=default_gw) self.patches.enter_context(mock.patch( - 'cloudinit.sources.DataSourceCloudStack.get_default_gateway', - get_default_gw)) + mod_name + '.get_default_gateway', get_default_gw)) + + get_networkd_server_address = mock.MagicMock(return_value=None) + self.patches.enter_context(mock.patch( + mod_name + '.dhcp.networkd_get_option_from_leases', + get_networkd_server_address)) def _set_password_server_response(self, response_string): subp = mock.MagicMock(return_value=(response_string, '')) @@ -89,4 +97,72 @@ class TestCloudStackPasswordFetching(TestCase): def test_password_not_saved_if_bad_request(self): self._check_password_not_saved_for('bad_request') + +class TestGetLatestLease(CiTestCase): + + def _populate_dir_list(self, bdir, files): + """populate_dir_list([(name, data), (name, data)]) + + writes files to bdir, and updates timestamps to ensure + that their mtime increases with each file.""" + + start = int(time.time()) + for num, fname in enumerate(reversed(files)): + fpath = os.path.sep.join((bdir, fname)) + util.write_file(fpath, fname.encode()) + os.utime(fpath, (start - num, start - num)) + + def _pop_and_test(self, files, expected): + lease_d = self.tmp_dir() + self._populate_dir_list(lease_d, files) + self.assertEqual(self.tmp_path(expected, lease_d), + get_latest_lease(lease_d)) + + def test_skips_dhcpv6_files(self): + """files started with dhclient6 should 
be skipped.""" + expected = "dhclient.lease" + self._pop_and_test([expected, "dhclient6.lease"], expected) + + def test_selects_dhclient_dot_files(self): + """files named dhclient.lease or dhclient.leases should be used. + + Ubuntu names files dhclient.eth0.leases dhclient6.leases and + sometimes dhclient.leases.""" + self._pop_and_test(["dhclient.lease"], "dhclient.lease") + self._pop_and_test(["dhclient.leases"], "dhclient.leases") + + def test_selects_dhclient_dash_files(self): + """files named dhclient-lease or dhclient-leases should be used. + + Redhat/Centos names files with dhclient--eth0.lease (centos 7) or + dhclient-eth0.leases (centos 6). + """ + self._pop_and_test(["dhclient-eth0.lease"], "dhclient-eth0.lease") + self._pop_and_test(["dhclient--eth0.lease"], "dhclient--eth0.lease") + + def test_ignores_by_extension(self): + """only .lease or .leases file should be considered.""" + + self._pop_and_test(["dhclient.lease", "dhclient.lease.bk", + "dhclient.lease-old", "dhclient.leaselease"], + "dhclient.lease") + + def test_selects_newest_matching(self): + """If multiple files match, the newest written should be used.""" + lease_d = self.tmp_dir() + valid_1 = "dhclient.leases" + valid_2 = "dhclient.lease" + valid_1_path = self.tmp_path(valid_1, lease_d) + valid_2_path = self.tmp_path(valid_2, lease_d) + + self._populate_dir_list(lease_d, [valid_1, valid_2]) + self.assertEqual(valid_2_path, get_latest_lease(lease_d)) + + # now update mtime on valid_2 to be older than valid_1 and re-check. + mtime = int(os.path.getmtime(valid_1_path)) - 1 + os.utime(valid_2_path, (mtime, mtime)) + + self.assertEqual(valid_1_path, get_latest_lease(lease_d)) + + # vi: ts=4 expandtab diff --git a/tests/unittests/test_datasource/test_common.py b/tests/unittests/test_datasource/test_common.py index 413e87ac..80b9c650 100644 --- a/tests/unittests/test_datasource/test_common.py +++ b/tests/unittests/test_datasource/test_common.py @@ -24,7 +24,7 @@ from cloudinit.sources import ( ) from cloudinit.sources import DataSourceNone as DSNone -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers DEFAULT_LOCAL = [ Azure.DataSourceAzure, @@ -35,6 +35,7 @@ DEFAULT_LOCAL = [ OpenNebula.DataSourceOpenNebula, OVF.DataSourceOVF, SmartOS.DataSourceSmartOS, + Ec2.DataSourceEc2Local, ] DEFAULT_NETWORK = [ diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py index 337be667..237c189b 100644 --- a/tests/unittests/test_datasource/test_configdrive.py +++ b/tests/unittests/test_datasource/test_configdrive.py @@ -15,7 +15,7 @@ from cloudinit.sources import DataSourceConfigDrive as ds from cloudinit.sources.helpers import openstack from cloudinit import util -from ..helpers import TestCase, ExitStack, mock +from cloudinit.tests.helpers import TestCase, ExitStack, mock PUBKEY = u'ssh-rsa AAAAB3NzaC1....sIkJhq8wdX+4I3A4cYbYP ubuntu@server-460\n' diff --git a/tests/unittests/test_datasource/test_digitalocean.py b/tests/unittests/test_datasource/test_digitalocean.py index e97a679a..f264f361 100644 --- a/tests/unittests/test_datasource/test_digitalocean.py +++ b/tests/unittests/test_datasource/test_digitalocean.py @@ -13,7 +13,7 @@ from cloudinit import settings from cloudinit.sources import DataSourceDigitalOcean from cloudinit.sources.helpers import digitalocean -from ..helpers import mock, TestCase +from cloudinit.tests.helpers import mock, TestCase DO_MULTIPLE_KEYS = ["ssh-rsa AAAAB3NzaC1yc2EAAAA... 
test1@do.co", "ssh-rsa AAAAB3NzaC1yc2EAAAA... test2@do.co"] diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py index 12230ae2..a7301dbf 100644 --- a/tests/unittests/test_datasource/test_ec2.py +++ b/tests/unittests/test_datasource/test_ec2.py @@ -1,42 +1,75 @@ # This file is part of cloud-init. See LICENSE file for license information. +import copy import httpretty import mock -from .. import helpers as test_helpers from cloudinit import helpers from cloudinit.sources import DataSourceEc2 as ec2 +from cloudinit.tests import helpers as test_helpers -# collected from api version 2009-04-04/ with +# collected from api version 2016-09-02/ with # python3 -c 'import json # from cloudinit.ec2_utils import get_instance_metadata as gm -# print(json.dumps(gm("2009-04-04"), indent=1, sort_keys=True))' +# print(json.dumps(gm("2016-09-02"), indent=1, sort_keys=True))' DEFAULT_METADATA = { - "ami-id": "ami-80861296", + "ami-id": "ami-8b92b4ee", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": {"ami": "/dev/sda1", "root": "/dev/sda1"}, - "hostname": "ip-10-0-0-149", + "hostname": "ip-172-31-31-158.us-east-2.compute.internal", "instance-action": "none", - "instance-id": "i-0052913950685138c", - "instance-type": "t2.micro", - "local-hostname": "ip-10-0-0-149", - "local-ipv4": "10.0.0.149", - "placement": {"availability-zone": "us-east-1b"}, + "instance-id": "i-0a33f80f09c96477f", + "instance-type": "t2.small", + "local-hostname": "ip-172-3-3-15.us-east-2.compute.internal", + "local-ipv4": "172.3.3.15", + "mac": "06:17:04:d7:26:09", + "metrics": {"vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"}, + "network": { + "interfaces": { + "macs": { + "06:17:04:d7:26:09": { + "device-number": "0", + "interface-id": "eni-e44ef49e", + "ipv4-associations": {"13.59.77.202": "172.3.3.15"}, + "ipv6s": "2600:1f16:aeb:b20b:9d87:a4af:5cc9:73dc", + "local-hostname": ("ip-172-3-3-15.us-east-2." + "compute.internal"), + "local-ipv4s": "172.3.3.15", + "mac": "06:17:04:d7:26:09", + "owner-id": "950047163771", + "public-hostname": ("ec2-13-59-77-202.us-east-2." + "compute.amazonaws.com"), + "public-ipv4s": "13.59.77.202", + "security-group-ids": "sg-5a61d333", + "security-groups": "wide-open", + "subnet-id": "subnet-20b8565b", + "subnet-ipv4-cidr-block": "172.31.16.0/20", + "subnet-ipv6-cidr-blocks": "2600:1f16:aeb:b20b::/64", + "vpc-id": "vpc-87e72bee", + "vpc-ipv4-cidr-block": "172.31.0.0/16", + "vpc-ipv4-cidr-blocks": "172.31.0.0/16", + "vpc-ipv6-cidr-blocks": "2600:1f16:aeb:b200::/56" + } + } + } + }, + "placement": {"availability-zone": "us-east-2b"}, "profile": "default-hvm", - "public-hostname": "", - "public-ipv4": "107.23.188.247", + "public-hostname": "ec2-13-59-77-202.us-east-2.compute.amazonaws.com", + "public-ipv4": "13.59.77.202", "public-keys": {"brickies": ["ssh-rsa AAAAB3Nz....w== brickies"]}, - "reservation-id": "r-00a2c173fb5782a08", - "security-groups": "wide-open" + "reservation-id": "r-01efbc9996bac1bd6", + "security-groups": "my-wide-open", + "services": {"domain": "amazonaws.com", "partition": "aws"} } def _register_ssh_keys(rfunc, base_url, keys_data): """handle ssh key inconsistencies. - public-keys in the ec2 metadata is inconsistently formatted compared + public-keys in the ec2 metadata is inconsistently formated compared to other entries. 
Given keys_data of {name1: pubkey1, name2: pubkey2} @@ -83,6 +116,9 @@ def register_mock_metaserver(base_url, data): In the index, references to lists or dictionaries have a trailing /. """ def register_helper(register, base_url, body): + if not isinstance(base_url, str): + register(base_url, body) + return base_url = base_url.rstrip("/") if isinstance(body, str): register(base_url, body) @@ -105,7 +141,7 @@ def register_mock_metaserver(base_url, data): register(base_url, '\n'.join(vals) + '\n') register(base_url + '/', '\n'.join(vals) + '\n') elif body is None: - register(base_url, 'not found', status_code=404) + register(base_url, 'not found', status=404) def myreg(*argc, **kwargs): # print("register_url(%s, %s)" % (argc, kwargs)) @@ -115,6 +151,8 @@ def register_mock_metaserver(base_url, data): class TestEc2(test_helpers.HttprettyTestCase): + with_logs = True + valid_platform_data = { 'uuid': 'ec212f79-87d1-2f1d-588f-d86dc0fd5412', 'uuid_source': 'dmi', @@ -123,48 +161,91 @@ class TestEc2(test_helpers.HttprettyTestCase): def setUp(self): super(TestEc2, self).setUp() - self.metadata_addr = ec2.DataSourceEc2.metadata_urls[0] - self.api_ver = '2009-04-04' - - @property - def metadata_url(self): - return '/'.join([self.metadata_addr, self.api_ver, 'meta-data', '']) + self.datasource = ec2.DataSourceEc2 + self.metadata_addr = self.datasource.metadata_urls[0] - @property - def userdata_url(self): - return '/'.join([self.metadata_addr, self.api_ver, 'user-data']) + def data_url(self, version): + """Return a metadata url based on the version provided.""" + return '/'.join([self.metadata_addr, version, 'meta-data', '']) def _patch_add_cleanup(self, mpath, *args, **kwargs): p = mock.patch(mpath, *args, **kwargs) p.start() self.addCleanup(p.stop) - def _setup_ds(self, sys_cfg, platform_data, md, ud=None): + def _setup_ds(self, sys_cfg, platform_data, md, md_version=None): + self.uris = [] distro = {} paths = helpers.Paths({}) if sys_cfg is None: sys_cfg = {} - ds = ec2.DataSourceEc2(sys_cfg=sys_cfg, distro=distro, paths=paths) + ds = self.datasource(sys_cfg=sys_cfg, distro=distro, paths=paths) + if not md_version: + md_version = ds.min_metadata_version if platform_data is not None: self._patch_add_cleanup( "cloudinit.sources.DataSourceEc2._collect_platform_data", return_value=platform_data) if md: - register_mock_metaserver(self.metadata_url, md) - register_mock_metaserver(self.userdata_url, ud) - + httpretty.HTTPretty.allow_net_connect = False + all_versions = ( + [ds.min_metadata_version] + ds.extended_metadata_versions) + for version in all_versions: + metadata_url = self.data_url(version) + if version == md_version: + # Register all metadata for desired version + register_mock_metaserver(metadata_url, md) + else: + instance_id_url = metadata_url + 'instance-id' + if version == ds.min_metadata_version: + # Add min_metadata_version service availability check + register_mock_metaserver( + instance_id_url, DEFAULT_METADATA['instance-id']) + else: + # Register 404s for all unrequested extended versions + register_mock_metaserver(instance_id_url, None) return ds @httpretty.activate - def test_valid_platform_with_strict_true(self): + def test_network_config_property_returns_version_1_network_data(self): + """network_config property returns network version 1 for metadata.""" + ds = self._setup_ds( + platform_data=self.valid_platform_data, + sys_cfg={'datasource': {'Ec2': {'strict_id': True}}}, + md=DEFAULT_METADATA) + ds.get_data() + mac1 = '06:17:04:d7:26:09' # Defined in DEFAULT_METADATA + expected 
= {'version': 1, 'config': [ + {'mac_address': '06:17:04:d7:26:09', 'name': 'eth9', + 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}], + 'type': 'physical'}]} + patch_path = ( + 'cloudinit.sources.DataSourceEc2.net.get_interfaces_by_mac') + with mock.patch(patch_path) as m_get_interfaces_by_mac: + m_get_interfaces_by_mac.return_value = {mac1: 'eth9'} + self.assertEqual(expected, ds.network_config) + + def test_network_config_property_is_cached_in_datasource(self): + """network_config property is cached in DataSourceEc2.""" + ds = self._setup_ds( + platform_data=self.valid_platform_data, + sys_cfg={'datasource': {'Ec2': {'strict_id': True}}}, + md=DEFAULT_METADATA) + ds._network_config = {'cached': 'data'} + self.assertEqual({'cached': 'data'}, ds.network_config) + + @httpretty.activate + @mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery') + def test_valid_platform_with_strict_true(self, m_dhcp): """Valid platform data should return true with strict_id true.""" ds = self._setup_ds( platform_data=self.valid_platform_data, sys_cfg={'datasource': {'Ec2': {'strict_id': True}}}, md=DEFAULT_METADATA) ret = ds.get_data() - self.assertEqual(True, ret) + self.assertTrue(ret) + self.assertEqual(0, m_dhcp.call_count) @httpretty.activate def test_valid_platform_with_strict_false(self): @@ -174,7 +255,7 @@ class TestEc2(test_helpers.HttprettyTestCase): sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, md=DEFAULT_METADATA) ret = ds.get_data() - self.assertEqual(True, ret) + self.assertTrue(ret) @httpretty.activate def test_unknown_platform_with_strict_true(self): @@ -185,7 +266,7 @@ class TestEc2(test_helpers.HttprettyTestCase): sys_cfg={'datasource': {'Ec2': {'strict_id': True}}}, md=DEFAULT_METADATA) ret = ds.get_data() - self.assertEqual(False, ret) + self.assertFalse(ret) @httpretty.activate def test_unknown_platform_with_strict_false(self): @@ -196,7 +277,146 @@ class TestEc2(test_helpers.HttprettyTestCase): sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, md=DEFAULT_METADATA) ret = ds.get_data() - self.assertEqual(True, ret) + self.assertTrue(ret) + + def test_ec2_local_returns_false_on_non_aws(self): + """DataSourceEc2Local returns False when platform is not AWS.""" + self.datasource = ec2.DataSourceEc2Local + ds = self._setup_ds( + platform_data=self.valid_platform_data, + sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, + md=DEFAULT_METADATA) + platform_attrs = [ + attr for attr in ec2.Platforms.__dict__.keys() + if not attr.startswith('__')] + for attr_name in platform_attrs: + platform_name = getattr(ec2.Platforms, attr_name) + if platform_name != 'AWS': + ds._cloud_platform = platform_name + ret = ds.get_data() + self.assertFalse(ret) + message = ( + "Local Ec2 mode only supported on ('AWS',)," + ' not {0}'.format(platform_name)) + self.assertIn(message, self.logs.getvalue()) + + @httpretty.activate + @mock.patch('cloudinit.sources.DataSourceEc2.util.is_FreeBSD') + def test_ec2_local_returns_false_on_bsd(self, m_is_freebsd): + """DataSourceEc2Local returns False on BSD. + + FreeBSD dhclient doesn't support dhclient -sf to run in a sandbox. 
+ """ + m_is_freebsd.return_value = True + self.datasource = ec2.DataSourceEc2Local + ds = self._setup_ds( + platform_data=self.valid_platform_data, + sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, + md=DEFAULT_METADATA) + ret = ds.get_data() + self.assertFalse(ret) + self.assertIn( + "FreeBSD doesn't support running dhclient with -sf", + self.logs.getvalue()) + + @httpretty.activate + @mock.patch('cloudinit.net.EphemeralIPv4Network') + @mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery') + @mock.patch('cloudinit.sources.DataSourceEc2.util.is_FreeBSD') + def test_ec2_local_performs_dhcp_on_non_bsd(self, m_is_bsd, m_dhcp, m_net): + """Ec2Local returns True for valid platform data on non-BSD with dhcp. + + DataSourceEc2Local will setup initial IPv4 network via dhcp discovery. + Then the metadata services is crawled for more network config info. + When the platform data is valid, return True. + """ + + m_is_bsd.return_value = False + m_dhcp.return_value = [{ + 'interface': 'eth9', 'fixed-address': '192.168.2.9', + 'routers': '192.168.2.1', 'subnet-mask': '255.255.255.0', + 'broadcast-address': '192.168.2.255'}] + self.datasource = ec2.DataSourceEc2Local + ds = self._setup_ds( + platform_data=self.valid_platform_data, + sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, + md=DEFAULT_METADATA) + + ret = ds.get_data() + self.assertTrue(ret) + m_dhcp.assert_called_once_with() + m_net.assert_called_once_with( + broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9', + prefix_or_mask='255.255.255.0', router='192.168.2.1') + self.assertIn('Crawl of metadata service took', self.logs.getvalue()) + + +class TestConvertEc2MetadataNetworkConfig(test_helpers.CiTestCase): + + def setUp(self): + super(TestConvertEc2MetadataNetworkConfig, self).setUp() + self.mac1 = '06:17:04:d7:26:09' + self.network_metadata = { + 'interfaces': {'macs': { + self.mac1: {'public-ipv4s': '172.31.2.16'}}}} + + def test_convert_ec2_metadata_network_config_skips_absent_macs(self): + """Any mac absent from metadata is skipped by network config.""" + macs_to_nics = {self.mac1: 'eth9', 'DE:AD:BE:EF:FF:FF': 'vitualnic2'} + + # DE:AD:BE:EF:FF:FF represented by OS but not in metadata + expected = {'version': 1, 'config': [ + {'mac_address': self.mac1, 'type': 'physical', + 'name': 'eth9', 'subnets': [{'type': 'dhcp4'}]}]} + self.assertEqual( + expected, + ec2.convert_ec2_metadata_network_config( + self.network_metadata, macs_to_nics)) + + def test_convert_ec2_metadata_network_config_handles_only_dhcp6(self): + """Config dhcp6 when ipv6s is in metadata for a mac.""" + macs_to_nics = {self.mac1: 'eth9'} + network_metadata_ipv6 = copy.deepcopy(self.network_metadata) + nic1_metadata = ( + network_metadata_ipv6['interfaces']['macs'][self.mac1]) + nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64' + nic1_metadata.pop('public-ipv4s') + expected = {'version': 1, 'config': [ + {'mac_address': self.mac1, 'type': 'physical', + 'name': 'eth9', 'subnets': [{'type': 'dhcp6'}]}]} + self.assertEqual( + expected, + ec2.convert_ec2_metadata_network_config( + network_metadata_ipv6, macs_to_nics)) + + def test_convert_ec2_metadata_network_config_handles_dhcp4_and_dhcp6(self): + """Config both dhcp4 and dhcp6 when both vpc-ipv6 and ipv4 exists.""" + macs_to_nics = {self.mac1: 'eth9'} + network_metadata_both = copy.deepcopy(self.network_metadata) + nic1_metadata = ( + network_metadata_both['interfaces']['macs'][self.mac1]) + nic1_metadata['ipv6s'] = '2620:0:1009:fd00:e442:c88d:c04d:dc85/64' + expected = 
{'version': 1, 'config': [ + {'mac_address': self.mac1, 'type': 'physical', + 'name': 'eth9', + 'subnets': [{'type': 'dhcp4'}, {'type': 'dhcp6'}]}]} + self.assertEqual( + expected, + ec2.convert_ec2_metadata_network_config( + network_metadata_both, macs_to_nics)) + def test_convert_ec2_metadata_gets_macs_from_get_interfaces_by_mac(self): + """Convert Ec2 Metadata calls get_interfaces_by_mac by default.""" + expected = {'version': 1, 'config': [ + {'mac_address': self.mac1, 'type': 'physical', + 'name': 'eth9', + 'subnets': [{'type': 'dhcp4'}]}]} + patch_path = ( + 'cloudinit.sources.DataSourceEc2.net.get_interfaces_by_mac') + with mock.patch(patch_path) as m_get_interfaces_by_mac: + m_get_interfaces_by_mac.return_value = {self.mac1: 'eth9'} + self.assertEqual( + expected, + ec2.convert_ec2_metadata_network_config(self.network_metadata)) # vi: ts=4 expandtab diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py index ad608bec..d399ae7a 100644 --- a/tests/unittests/test_datasource/test_gce.py +++ b/tests/unittests/test_datasource/test_gce.py @@ -15,7 +15,7 @@ from cloudinit import helpers from cloudinit import settings from cloudinit.sources import DataSourceGCE -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers GCE_META = { @@ -23,7 +23,8 @@ GCE_META = { 'instance/zone': 'foo/bar', 'project/attributes/sshKeys': 'user:ssh-rsa AA2..+aRD0fyVw== root@server', 'instance/hostname': 'server.project-foo.local', - 'instance/attributes/user-data': b'/bin/echo foo\n', + # UnicodeDecodeError below if set to ds.userdata instead of userdata_raw + 'instance/attributes/user-data': b'/bin/echo \xff\n', } GCE_META_PARTIAL = { diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py index c1911bf4..289c6a40 100644 --- a/tests/unittests/test_datasource/test_maas.py +++ b/tests/unittests/test_datasource/test_maas.py @@ -8,7 +8,7 @@ import yaml from cloudinit.sources import DataSourceMAAS from cloudinit import url_helper -from ..helpers import TestCase, populate_dir +from cloudinit.tests.helpers import TestCase, populate_dir try: from unittest import mock diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py index ff294395..fea9156b 100644 --- a/tests/unittests/test_datasource/test_nocloud.py +++ b/tests/unittests/test_datasource/test_nocloud.py @@ -3,7 +3,7 @@ from cloudinit import helpers from cloudinit.sources import DataSourceNoCloud from cloudinit import util -from ..helpers import TestCase, populate_dir, mock, ExitStack +from cloudinit.tests.helpers import TestCase, populate_dir, mock, ExitStack import os import shutil diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py index b0f8e435..e7d55692 100644 --- a/tests/unittests/test_datasource/test_opennebula.py +++ b/tests/unittests/test_datasource/test_opennebula.py @@ -3,7 +3,7 @@ from cloudinit import helpers from cloudinit.sources import DataSourceOpenNebula as ds from cloudinit import util -from ..helpers import mock, populate_dir, TestCase +from cloudinit.tests.helpers import mock, populate_dir, TestCase import os import pwd diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py index c2905d1a..ed367e05 100644 --- a/tests/unittests/test_datasource/test_openstack.py +++ b/tests/unittests/test_datasource/test_openstack.py @@ -9,7 +9,7 @@ 
import httpretty as hp import json import re -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from six.moves.urllib.parse import urlparse from six import StringIO @@ -57,6 +57,8 @@ OS_FILES = { 'openstack/content/0000': CONTENT_0, 'openstack/content/0001': CONTENT_1, 'openstack/latest/meta_data.json': json.dumps(OSTACK_META), + 'openstack/latest/network_data.json': json.dumps( + {'links': [], 'networks': [], 'services': []}), 'openstack/latest/user_data': USER_DATA, 'openstack/latest/vendor_data.json': json.dumps(VENDOR_DATA), } @@ -68,6 +70,7 @@ EC2_VERSIONS = [ ] +# TODO _register_uris should leverage test_ec2.register_mock_metaserver. def _register_uris(version, ec2_files, ec2_meta, os_files): """Registers a set of url patterns into httpretty that will mimic the same data returned by the openstack metadata service (and ec2 service).""" diff --git a/tests/unittests/test_datasource/test_ovf.py b/tests/unittests/test_datasource/test_ovf.py index 477cf8ed..700da86c 100644 --- a/tests/unittests/test_datasource/test_ovf.py +++ b/tests/unittests/test_datasource/test_ovf.py @@ -5,8 +5,9 @@ # This file is part of cloud-init. See LICENSE file for license information. import base64 +from collections import OrderedDict -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from cloudinit.sources import DataSourceOVF as dsovf @@ -70,4 +71,167 @@ class TestReadOvfEnv(test_helpers.TestCase): self.assertEqual({'password': "passw0rd"}, cfg) self.assertIsNone(ud) + +class TestTransportIso9660(test_helpers.CiTestCase): + + def setUp(self): + super(TestTransportIso9660, self).setUp() + self.add_patch('cloudinit.util.find_devs_with', + 'm_find_devs_with') + self.add_patch('cloudinit.util.mounts', 'm_mounts') + self.add_patch('cloudinit.util.mount_cb', 'm_mount_cb') + self.add_patch('cloudinit.sources.DataSourceOVF.get_ovf_env', + 'm_get_ovf_env') + self.m_get_ovf_env.return_value = ('myfile', 'mycontent') + + def test_find_already_mounted(self): + """Check we call get_ovf_env from on matching mounted devices""" + mounts = { + '/dev/sr9': { + 'fstype': 'iso9660', + 'mountpoint': 'wark/media/sr9', + 'opts': 'ro', + } + } + self.m_mounts.return_value = mounts + + (contents, fullp, fname) = dsovf.transport_iso9660() + self.assertEqual("mycontent", contents) + self.assertEqual("/dev/sr9", fullp) + self.assertEqual("myfile", fname) + + def test_find_already_mounted_skips_non_iso9660(self): + """Check we call get_ovf_env ignoring non iso9660""" + mounts = { + '/dev/xvdb': { + 'fstype': 'vfat', + 'mountpoint': 'wark/foobar', + 'opts': 'defaults,noatime', + }, + '/dev/xvdc': { + 'fstype': 'iso9660', + 'mountpoint': 'wark/media/sr9', + 'opts': 'ro', + } + } + # We use an OrderedDict here to ensure we check xvdb before xvdc + # as we're not mocking the regex matching, however, if we place + # an entry in the results then we can be reasonably sure that + # we're skipping an entry which fails to match. 
+ self.m_mounts.return_value = ( + OrderedDict(sorted(mounts.items(), key=lambda t: t[0]))) + + (contents, fullp, fname) = dsovf.transport_iso9660() + self.assertEqual("mycontent", contents) + self.assertEqual("/dev/xvdc", fullp) + self.assertEqual("myfile", fname) + + def test_find_already_mounted_matches_kname(self): + """Check we dont regex match on basename of the device""" + mounts = { + '/dev/foo/bar/xvdc': { + 'fstype': 'iso9660', + 'mountpoint': 'wark/media/sr9', + 'opts': 'ro', + } + } + # we're skipping an entry which fails to match. + self.m_mounts.return_value = mounts + + (contents, fullp, fname) = dsovf.transport_iso9660() + self.assertEqual(False, contents) + self.assertIsNone(fullp) + self.assertIsNone(fname) + + def test_mount_cb_called_on_blkdevs_with_iso9660(self): + """Check we call mount_cb on blockdevs with iso9660 only""" + self.m_mounts.return_value = {} + self.m_find_devs_with.return_value = ['/dev/sr0'] + self.m_mount_cb.return_value = ("myfile", "mycontent") + + (contents, fullp, fname) = dsovf.transport_iso9660() + + self.m_mount_cb.assert_called_with( + "/dev/sr0", dsovf.get_ovf_env, mtype="iso9660") + self.assertEqual("mycontent", contents) + self.assertEqual("/dev/sr0", fullp) + self.assertEqual("myfile", fname) + + def test_mount_cb_called_on_blkdevs_with_iso9660_check_regex(self): + """Check we call mount_cb on blockdevs with iso9660 and match regex""" + self.m_mounts.return_value = {} + self.m_find_devs_with.return_value = [ + '/dev/abc', '/dev/my-cdrom', '/dev/sr0'] + self.m_mount_cb.return_value = ("myfile", "mycontent") + + (contents, fullp, fname) = dsovf.transport_iso9660() + + self.m_mount_cb.assert_called_with( + "/dev/sr0", dsovf.get_ovf_env, mtype="iso9660") + self.assertEqual("mycontent", contents) + self.assertEqual("/dev/sr0", fullp) + self.assertEqual("myfile", fname) + + def test_mount_cb_not_called_no_matches(self): + """Check we don't call mount_cb if nothing matches""" + self.m_mounts.return_value = {} + self.m_find_devs_with.return_value = ['/dev/vg/myovf'] + + (contents, fullp, fname) = dsovf.transport_iso9660() + + self.assertEqual(0, self.m_mount_cb.call_count) + self.assertEqual(False, contents) + self.assertIsNone(fullp) + self.assertIsNone(fname) + + def test_mount_cb_called_require_iso_false(self): + """Check we call mount_cb on blockdevs with require_iso=False""" + self.m_mounts.return_value = {} + self.m_find_devs_with.return_value = ['/dev/xvdz'] + self.m_mount_cb.return_value = ("myfile", "mycontent") + + (contents, fullp, fname) = dsovf.transport_iso9660(require_iso=False) + + self.m_mount_cb.assert_called_with( + "/dev/xvdz", dsovf.get_ovf_env, mtype=None) + self.assertEqual("mycontent", contents) + self.assertEqual("/dev/xvdz", fullp) + self.assertEqual("myfile", fname) + + def test_maybe_cdrom_device_none(self): + """Test maybe_cdrom_device returns False for none/empty input""" + self.assertFalse(dsovf.maybe_cdrom_device(None)) + self.assertFalse(dsovf.maybe_cdrom_device('')) + + def test_maybe_cdrom_device_non_string_exception(self): + """Test maybe_cdrom_device raises ValueError on non-string types""" + with self.assertRaises(ValueError): + dsovf.maybe_cdrom_device({'a': 'eleven'}) + + def test_maybe_cdrom_device_false_on_multi_dir_paths(self): + """Test maybe_cdrom_device is false on /dev[/.*]/* paths""" + self.assertFalse(dsovf.maybe_cdrom_device('/dev/foo/sr0')) + self.assertFalse(dsovf.maybe_cdrom_device('foo/sr0')) + self.assertFalse(dsovf.maybe_cdrom_device('../foo/sr0')) + 
+ def test_maybe_cdrom_device_true_on_hd_partitions(self): + """Test maybe_cdrom_device is true on /dev/hd[a-z][0-9]+ paths""" + self.assertTrue(dsovf.maybe_cdrom_device('/dev/hda1')) + self.assertTrue(dsovf.maybe_cdrom_device('hdz9')) + + def test_maybe_cdrom_device_true_on_valid_relative_paths(self): + """Test maybe_cdrom_device normalizes paths""" + self.assertTrue(dsovf.maybe_cdrom_device('/dev/wark/../sr9')) + self.assertTrue(dsovf.maybe_cdrom_device('///sr0')) + self.assertTrue(dsovf.maybe_cdrom_device('/sr0')) + self.assertTrue(dsovf.maybe_cdrom_device('//dev//hda')) + + def test_maybe_cdrom_device_true_on_xvd_partitions(self): + """Test maybe_cdrom_device returns true on xvd*""" + self.assertTrue(dsovf.maybe_cdrom_device('/dev/xvda')) + self.assertTrue(dsovf.maybe_cdrom_device('/dev/xvda1')) + self.assertTrue(dsovf.maybe_cdrom_device('xvdza1')) + +# # vi: ts=4 expandtab diff --git a/tests/unittests/test_datasource/test_scaleway.py b/tests/unittests/test_datasource/test_scaleway.py index 65d83ad7..436df9ee 100644 --- a/tests/unittests/test_datasource/test_scaleway.py +++ b/tests/unittests/test_datasource/test_scaleway.py @@ -9,7 +9,7 @@ from cloudinit import helpers from cloudinit import settings from cloudinit.sources import DataSourceScaleway -from ..helpers import mock, HttprettyTestCase, TestCase +from cloudinit.tests.helpers import mock, HttprettyTestCase, TestCase class DataResponses(object): diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py index e3c99bbb..933d5b63 100644 --- a/tests/unittests/test_datasource/test_smartos.py +++ b/tests/unittests/test_datasource/test_smartos.py @@ -33,7 +33,7 @@ import six from cloudinit import helpers as c_helpers from cloudinit.util import b64e -from ..helpers import mock, FilesystemMockingTestCase, TestCase +from cloudinit.tests.helpers import mock, FilesystemMockingTestCase, TestCase SDC_NICS = json.loads(""" [ diff --git a/tests/unittests/test_distros/__init__.py b/tests/unittests/test_distros/__init__.py index e69de29b..5394aa56 100644 --- a/tests/unittests/test_distros/__init__.py +++ b/tests/unittests/test_distros/__init__.py @@ -0,0 +1,21 @@ +# This file is part of cloud-init. See LICENSE file for license information. +import copy + +from cloudinit import distros +from cloudinit import helpers +from cloudinit import settings + + +def _get_distro(dtype, system_info=None): + """Return a Distro instance for distro 'dtype'. + + system_info is in the format of CFG_BUILTIN['system_info']. + + example: _get_distro("debian") + """ + if system_info is None: + system_info = copy.deepcopy(settings.CFG_BUILTIN['system_info']) + system_info['distro'] = dtype + paths = helpers.Paths(system_info['paths']) + distro_cls = distros.fetch(dtype) + return distro_cls(dtype, system_info, paths) diff --git a/tests/unittests/test_distros/test_arch.py b/tests/unittests/test_distros/test_arch.py new file mode 100644 index 00000000..a95ba3b5 --- /dev/null +++ b/tests/unittests/test_distros/test_arch.py @@ -0,0 +1,45 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +from cloudinit.distros.arch import _render_network +from cloudinit import util + +from cloudinit.tests.helpers import (CiTestCase, dir2dict) + +from . 
import _get_distro + + +class TestArch(CiTestCase): + + def test_get_distro(self): + distro = _get_distro("arch") + hostname = "myhostname" + hostfile = self.tmp_path("hostfile") + distro._write_hostname(hostname, hostfile) + self.assertEqual(hostname + "\n", util.load_file(hostfile)) + + +class TestRenderNetwork(CiTestCase): + def test_basic_static(self): + """Just the most basic static config. + + note 'lo' should not be rendered as an interface.""" + entries = {'eth0': {'auto': True, + 'dns-nameservers': ['8.8.8.8'], + 'bootproto': 'static', + 'address': '10.0.0.2', + 'gateway': '10.0.0.1', + 'netmask': '255.255.255.0'}, + 'lo': {'auto': True}} + target = self.tmp_dir() + devs = _render_network(entries, target=target) + files = dir2dict(target, prefix=target) + self.assertEqual(['eth0'], devs) + self.assertEqual( + {'/etc/netctl/eth0': '\n'.join([ + "Address=10.0.0.2/255.255.255.0", + "Connection=ethernet", + "DNS=('8.8.8.8')", + "Gateway=10.0.0.1", + "IP=static", + "Interface=eth0", ""]), + '/etc/resolv.conf': 'nameserver 8.8.8.8\n'}, files) diff --git a/tests/unittests/test_distros/test_create_users.py b/tests/unittests/test_distros/test_create_users.py index 1d02f7bd..aa13670a 100644 --- a/tests/unittests/test_distros/test_create_users.py +++ b/tests/unittests/test_distros/test_create_users.py @@ -1,7 +1,7 @@ # This file is part of cloud-init. See LICENSE file for license information. from cloudinit import distros -from ..helpers import (TestCase, mock) +from cloudinit.tests.helpers import (TestCase, mock) class MyBaseDistro(distros.Distro): diff --git a/tests/unittests/test_distros/test_debian.py b/tests/unittests/test_distros/test_debian.py index 2330ad52..da16a797 100644 --- a/tests/unittests/test_distros/test_debian.py +++ b/tests/unittests/test_distros/test_debian.py @@ -1,67 +1,85 @@ # This file is part of cloud-init. See LICENSE file for license information. 
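(The arch TestRenderNetwork case above fixes the shape of a netctl profile: one KEY=value file per interface, loopback skipped, plus a generated resolv.conf. Below is a rough, illustrative renderer that reproduces the same eth0 profile; it is not the real cloudinit.distros.arch code and the function name is invented.)

def render_netctl_profile(name, entry):
    """Build a netctl profile string matching the expectation in TestRenderNetwork."""
    if name == 'lo':  # loopback needs no netctl profile
        return None
    conf = {
        'Connection': 'ethernet',
        'Interface': name,
        'IP': entry['bootproto'],
        'Address': '%s/%s' % (entry['address'], entry['netmask']),
        'Gateway': entry['gateway'],
        'DNS': "('%s')" % "', '".join(entry.get('dns-nameservers', [])),
    }
    # netctl profiles are plain KEY=value lines; sorting keeps the file stable
    return '\n'.join('%s=%s' % item for item in sorted(conf.items())) + '\n'

Note that the expected Address keeps the dotted netmask ('10.0.0.2/255.255.255.0') exactly as given in the entries dict; nothing converts it to a prefix length.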
-from ..helpers import (CiTestCase, mock) - -from cloudinit.distros.debian import apply_locale +from cloudinit import distros from cloudinit import util +from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock) @mock.patch("cloudinit.distros.debian.util.subp") -class TestDebianApplyLocale(CiTestCase): +class TestDebianApplyLocale(FilesystemMockingTestCase): + + def setUp(self): + super(TestDebianApplyLocale, self).setUp() + self.new_root = self.tmp_dir() + self.patchOS(self.new_root) + self.patchUtils(self.new_root) + self.spath = self.tmp_path('etc/default/locale', self.new_root) + cls = distros.fetch("debian") + self.distro = cls("debian", {}, None) + def test_no_rerun(self, m_subp): """If system has defined locale, no re-run is expected.""" - spath = self.tmp_path("default-locale") m_subp.return_value = (None, None) locale = 'en_US.UTF-8' - util.write_file(spath, 'LANG=%s\n' % locale, omode="w") - apply_locale(locale, sys_path=spath) + util.write_file(self.spath, 'LANG=%s\n' % locale, omode="w") + self.distro.apply_locale(locale, out_fn=self.spath) m_subp.assert_not_called() + def test_no_regen_on_c_utf8(self, m_subp): + """If locale is set to C.UTF8, do not attempt to call locale-gen""" + m_subp.return_value = (None, None) + locale = 'C.UTF-8' + util.write_file(self.spath, 'LANG=%s\n' % 'en_US.UTF-8', omode="w") + self.distro.apply_locale(locale, out_fn=self.spath) + self.assertEqual( + [['update-locale', '--locale-file=' + self.spath, + 'LANG=%s' % locale]], + [p[0][0] for p in m_subp.call_args_list]) + def test_rerun_if_different(self, m_subp): """If system has different locale, locale-gen should be called.""" - spath = self.tmp_path("default-locale") m_subp.return_value = (None, None) locale = 'en_US.UTF-8' - util.write_file(spath, 'LANG=fr_FR.UTF-8', omode="w") - apply_locale(locale, sys_path=spath) + util.write_file(self.spath, 'LANG=fr_FR.UTF-8', omode="w") + self.distro.apply_locale(locale, out_fn=self.spath) self.assertEqual( [['locale-gen', locale], - ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]], + ['update-locale', '--locale-file=' + self.spath, + 'LANG=%s' % locale]], [p[0][0] for p in m_subp.call_args_list]) def test_rerun_if_no_file(self, m_subp): """If system has no locale file, locale-gen should be called.""" - spath = self.tmp_path("default-locale") m_subp.return_value = (None, None) locale = 'en_US.UTF-8' - apply_locale(locale, sys_path=spath) + self.distro.apply_locale(locale, out_fn=self.spath) self.assertEqual( [['locale-gen', locale], - ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]], + ['update-locale', '--locale-file=' + self.spath, + 'LANG=%s' % locale]], [p[0][0] for p in m_subp.call_args_list]) def test_rerun_on_unset_system_locale(self, m_subp): """If system has unset locale, locale-gen should be called.""" m_subp.return_value = (None, None) - spath = self.tmp_path("default-locale") locale = 'en_US.UTF-8' - util.write_file(spath, 'LANG=', omode="w") - apply_locale(locale, sys_path=spath) + util.write_file(self.spath, 'LANG=', omode="w") + self.distro.apply_locale(locale, out_fn=self.spath) self.assertEqual( [['locale-gen', locale], - ['update-locale', '--locale-file=' + spath, 'LANG=%s' % locale]], + ['update-locale', '--locale-file=' + self.spath, + 'LANG=%s' % locale]], [p[0][0] for p in m_subp.call_args_list]) def test_rerun_on_mismatched_keys(self, m_subp): """If key is LC_ALL and system has only LANG, rerun is expected.""" m_subp.return_value = (None, None) - spath = self.tmp_path("default-locale") locale 
= 'en_US.UTF-8' - util.write_file(spath, 'LANG=', omode="w") - apply_locale(locale, sys_path=spath, keyname='LC_ALL') + util.write_file(self.spath, 'LANG=', omode="w") + self.distro.apply_locale(locale, out_fn=self.spath, keyname='LC_ALL') self.assertEqual( [['locale-gen', locale], - ['update-locale', '--locale-file=' + spath, + ['update-locale', '--locale-file=' + self.spath, 'LC_ALL=%s' % locale]], [p[0][0] for p in m_subp.call_args_list]) @@ -69,14 +87,14 @@ class TestDebianApplyLocale(CiTestCase): """locale as None or "" is invalid and should raise ValueError.""" with self.assertRaises(ValueError) as ctext_m: - apply_locale(None) + self.distro.apply_locale(None) m_subp.assert_not_called() self.assertEqual( 'Failed to provide locale value.', str(ctext_m.exception)) with self.assertRaises(ValueError) as ctext_m: - apply_locale("") + self.distro.apply_locale("") m_subp.assert_not_called() self.assertEqual( 'Failed to provide locale value.', str(ctext_m.exception)) diff --git a/tests/unittests/test_distros/test_generic.py b/tests/unittests/test_distros/test_generic.py index c9be277e..791fe612 100644 --- a/tests/unittests/test_distros/test_generic.py +++ b/tests/unittests/test_distros/test_generic.py @@ -3,7 +3,7 @@ from cloudinit import distros from cloudinit import util -from .. import helpers +from cloudinit.tests import helpers import os import shutil @@ -228,5 +228,21 @@ class TestGenericDistro(helpers.FilesystemMockingTestCase): os.symlink('/', '/run/systemd/system') self.assertFalse(d.uses_systemd()) + @mock.patch('cloudinit.distros.debian.read_system_locale') + def test_get_locale_ubuntu(self, m_locale): + """Test ubuntu distro returns locale set to C.UTF-8""" + m_locale.return_value = 'C.UTF-8' + cls = distros.fetch("ubuntu") + d = cls("ubuntu", {}, None) + locale = d.get_locale() + self.assertEqual('C.UTF-8', locale) + + def test_get_locale_rhel(self): + """Test rhel distro returns NotImplementedError exception""" + cls = distros.fetch("rhel") + d = cls("rhel", {}, None) + with self.assertRaises(NotImplementedError): + d.get_locale() + # vi: ts=4 expandtab diff --git a/tests/unittests/test_distros/test_netconfig.py b/tests/unittests/test_distros/test_netconfig.py index 2f505d93..c4bd11bc 100644 --- a/tests/unittests/test_distros/test_netconfig.py +++ b/tests/unittests/test_distros/test_netconfig.py @@ -12,7 +12,7 @@ try: except ImportError: from contextlib2 import ExitStack -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase from cloudinit import distros from cloudinit.distros.parsers.sys_conf import SysConf @@ -135,7 +135,7 @@ network: V2_NET_CFG = { 'ethernets': { 'eth7': { - 'addresses': ['192.168.1.5/255.255.255.0'], + 'addresses': ['192.168.1.5/24'], 'gateway4': '192.168.1.254'}, 'eth9': { 'dhcp4': True} @@ -151,7 +151,6 @@ V2_TO_V2_NET_CFG_OUTPUT = """ # /etc/cloud/cloud.cfg.d/99-disable-network-config.cfg with the following: # network: {config: disabled} network: - version: 2 ethernets: eth7: addresses: @@ -159,6 +158,7 @@ network: gateway4: 192.168.1.254 eth9: dhcp4: true + version: 2 """ diff --git a/tests/unittests/test_distros/test_opensuse.py b/tests/unittests/test_distros/test_opensuse.py new file mode 100644 index 00000000..b9bb9b3e --- /dev/null +++ b/tests/unittests/test_distros/test_opensuse.py @@ -0,0 +1,12 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +from cloudinit.tests.helpers import CiTestCase + +from . 
import _get_distro + + +class TestopenSUSE(CiTestCase): + + def test_get_distro(self): + distro = _get_distro("opensuse") + self.assertEqual(distro.osfamily, 'suse') diff --git a/tests/unittests/test_distros/test_resolv.py b/tests/unittests/test_distros/test_resolv.py index 97168cf9..68ea0083 100644 --- a/tests/unittests/test_distros/test_resolv.py +++ b/tests/unittests/test_distros/test_resolv.py @@ -3,7 +3,7 @@ from cloudinit.distros.parsers import resolv_conf from cloudinit.distros import rhel_util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import re import tempfile diff --git a/tests/unittests/test_distros/test_sles.py b/tests/unittests/test_distros/test_sles.py new file mode 100644 index 00000000..33e3c457 --- /dev/null +++ b/tests/unittests/test_distros/test_sles.py @@ -0,0 +1,12 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +from cloudinit.tests.helpers import CiTestCase + +from . import _get_distro + + +class TestSLES(CiTestCase): + + def test_get_distro(self): + distro = _get_distro("sles") + self.assertEqual(distro.osfamily, 'suse') diff --git a/tests/unittests/test_distros/test_sysconfig.py b/tests/unittests/test_distros/test_sysconfig.py index 235ecebb..c1d5b693 100644 --- a/tests/unittests/test_distros/test_sysconfig.py +++ b/tests/unittests/test_distros/test_sysconfig.py @@ -4,7 +4,7 @@ import re from cloudinit.distros.parsers.sys_conf import SysConf -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase # Lots of good examples @ diff --git a/tests/unittests/test_distros/test_user_data_normalize.py b/tests/unittests/test_distros/test_user_data_normalize.py index 88746e0a..0fa9cdb5 100644 --- a/tests/unittests/test_distros/test_user_data_normalize.py +++ b/tests/unittests/test_distros/test_user_data_normalize.py @@ -5,7 +5,7 @@ from cloudinit.distros import ug_util from cloudinit import helpers from cloudinit import settings -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import mock diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py index 8ccfe55c..1284e755 100644 --- a/tests/unittests/test_ds_identify.py +++ b/tests/unittests/test_ds_identify.py @@ -6,10 +6,15 @@ from uuid import uuid4 from cloudinit import safeyaml from cloudinit import util -from .helpers import CiTestCase, dir2dict, json_dumps, populate_dir +from cloudinit.tests.helpers import ( + CiTestCase, dir2dict, json_dumps, populate_dir) UNAME_MYSYS = ("Linux bart 4.4.0-62-generic #83-Ubuntu " "SMP Wed Jan 18 14:10:15 UTC 2017 x86_64 GNU/Linux") +UNAME_PPC64EL = ("Linux diamond 4.4.0-83-generic #106-Ubuntu SMP " + "Mon Jun 26 17:53:54 UTC 2017 " + "ppc64le ppc64le ppc64le GNU/Linux") + BLKID_EFI_ROOT = """ DEVNAME=/dev/sda1 UUID=8B36-5390 @@ -22,8 +27,11 @@ TYPE=ext4 PARTUUID=30c65c77-e07d-4039-b2fb-88b1fb5fa1fc """ +POLICY_FOUND_ONLY = "search,found=all,maybe=none,notfound=disabled" +POLICY_FOUND_OR_MAYBE = "search,found=all,maybe=all,notfound=disabled" DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=enabled" DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=disabled" +DI_EC2_STRICT_ID_DEFAULT = "true" SHELL_MOCK_TMPL = """\ %(name)s() { @@ -47,6 +55,7 @@ P_SEED_DIR = "var/lib/cloud/seed" P_DSID_CFG = "etc/cloud/ds-identify.cfg" MOCK_VIRT_IS_KVM = {'name': 'detect_virt', 'RET': 'kvm', 'ret': 0} +MOCK_UNAME_IS_PPC64 = {'name': 'uname', 'out': UNAME_PPC64EL, 'ret': 0} class TestDsIdentify(CiTestCase): @@ -54,7 +63,8 @@ class 
TestDsIdentify(CiTestCase): def call(self, rootd=None, mocks=None, args=None, files=None, policy_dmi=DI_DEFAULT_POLICY, - policy_nodmi=DI_DEFAULT_POLICY_NO_DMI): + policy_no_dmi=DI_DEFAULT_POLICY_NO_DMI, + ec2_strict_id=DI_EC2_STRICT_ID_DEFAULT): if args is None: args = [] if mocks is None: @@ -80,7 +90,8 @@ class TestDsIdentify(CiTestCase): "PATH_ROOT='%s'" % rootd, ". " + self.dsid_path, 'DI_DEFAULT_POLICY="%s"' % policy_dmi, - 'DI_DEFAULT_POLICY_NO_DMI="%s"' % policy_nodmi, + 'DI_DEFAULT_POLICY_NO_DMI="%s"' % policy_no_dmi, + 'DI_EC2_STRICT_ID_DEFAULT="%s"' % ec2_strict_id, "" ] @@ -136,7 +147,7 @@ class TestDsIdentify(CiTestCase): def _call_via_dict(self, data, rootd=None, **kwargs): # return output of self.call with a dict input like VALID_CFG[item] xwargs = {'rootd': rootd} - for k in ('mocks', 'args', 'policy_dmi', 'policy_nodmi', 'files'): + for k in ('mocks', 'args', 'policy_dmi', 'policy_no_dmi', 'files'): if k in data: xwargs[k] = data[k] if k in kwargs: @@ -260,6 +271,31 @@ class TestDsIdentify(CiTestCase): self._check_via_dict(mydata, rc=RC_FOUND, dslist=['AliYun', DS_NONE], policy_dmi=policy) + def test_default_openstack_intel_is_found(self): + """On Intel, openstack must be identified.""" + self._test_ds_found('OpenStack') + + def test_openstack_on_non_intel_is_maybe(self): + """On non-Intel, openstack without dmi info is maybe. + + nova does not identify itself on platforms other than intel. + https://bugs.launchpad.net/cloud-init/+bugs?field.tag=dsid-nova""" + + data = VALID_CFG['OpenStack'].copy() + del data['files'][P_PRODUCT_NAME] + data.update({'policy_dmi': POLICY_FOUND_OR_MAYBE, + 'policy_no_dmi': POLICY_FOUND_OR_MAYBE}) + + # this should show not found as default uname in tests is intel. + # and intel openstack requires positive identification. + self._check_via_dict(data, RC_NOT_FOUND, dslist=None) + + # updating the uname to ppc64 though should get a maybe. + data.update({'mocks': [MOCK_VIRT_IS_KVM, MOCK_UNAME_IS_PPC64]}) + (_, _, err, _, _) = self._check_via_dict( + data, RC_FOUND, dslist=['OpenStack', 'None']) + self.assertIn("check for 'OpenStack' returned maybe", err) + def blkid_out(disks=None): """Convert a list of disk dictionaries into blkid content.""" @@ -340,6 +376,13 @@ VALID_CFG = { 'files': {P_PRODUCT_SERIAL: 'GoogleCloud-8f2e88f\n'}, 'mocks': [MOCK_VIRT_IS_KVM], }, + 'OpenStack': { + 'ds': 'OpenStack', + 'files': {P_PRODUCT_NAME: 'OpenStack Nova\n'}, + 'mocks': [MOCK_VIRT_IS_KVM], + 'policy_dmi': POLICY_FOUND_ONLY, + 'policy_no_dmi': POLICY_FOUND_ONLY, + }, 'ConfigDrive': { 'ds': 'ConfigDrive', 'mocks': [ diff --git a/tests/unittests/test_ec2_util.py b/tests/unittests/test_ec2_util.py index 65fdb519..af78997f 100644 --- a/tests/unittests/test_ec2_util.py +++ b/tests/unittests/test_ec2_util.py @@ -2,7 +2,7 @@ import httpretty as hp -from . import helpers +from cloudinit.tests import helpers from cloudinit import ec2_utils as eu from cloudinit import url_helper as uh diff --git a/tests/unittests/test_filters/test_launch_index.py b/tests/unittests/test_filters/test_launch_index.py index 13137f6d..6364d38e 100644 --- a/tests/unittests/test_filters/test_launch_index.py +++ b/tests/unittests/test_filters/test_launch_index.py @@ -2,7 +2,7 @@ import copy -from .. 
import helpers +from cloudinit.tests import helpers from six.moves import filterfalse diff --git a/tests/unittests/test_handler/test_handler_apt_conf_v1.py b/tests/unittests/test_handler/test_handler_apt_conf_v1.py index 554277ff..83f962a9 100644 --- a/tests/unittests/test_handler/test_handler_apt_conf_v1.py +++ b/tests/unittests/test_handler/test_handler_apt_conf_v1.py @@ -3,7 +3,7 @@ from cloudinit.config import cc_apt_configure from cloudinit import util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import copy import os diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py index f53ddbb2..d2b96f0b 100644 --- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py +++ b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v1.py @@ -24,7 +24,7 @@ from cloudinit.sources import DataSourceNone from cloudinit.distros.debian import Distro -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help LOG = logging.getLogger(__name__) diff --git a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py index 1ca915b4..f7608c28 100644 --- a/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py +++ b/tests/unittests/test_handler/test_handler_apt_configure_sources_list_v3.py @@ -24,7 +24,7 @@ from cloudinit.sources import DataSourceNone from cloudinit.distros.debian import Distro -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help LOG = logging.getLogger(__name__) diff --git a/tests/unittests/test_handler/test_handler_apt_source_v1.py b/tests/unittests/test_handler/test_handler_apt_source_v1.py index 12502d05..3a3f95ca 100644 --- a/tests/unittests/test_handler/test_handler_apt_source_v1.py +++ b/tests/unittests/test_handler/test_handler_apt_source_v1.py @@ -20,7 +20,7 @@ from cloudinit.config import cc_apt_configure from cloudinit import gpg from cloudinit import util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase EXPECTEDKEY = """-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 diff --git a/tests/unittests/test_handler/test_handler_apt_source_v3.py b/tests/unittests/test_handler/test_handler_apt_source_v3.py index 292d3f59..7bb1b7c4 100644 --- a/tests/unittests/test_handler/test_handler_apt_source_v3.py +++ b/tests/unittests/test_handler/test_handler_apt_source_v3.py @@ -28,7 +28,7 @@ from cloudinit import util from cloudinit.config import cc_apt_configure from cloudinit.sources import DataSourceNone -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help EXPECTEDKEY = u"""-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 diff --git a/tests/unittests/test_handler/test_handler_bootcmd.py b/tests/unittests/test_handler/test_handler_bootcmd.py new file mode 100644 index 00000000..dbf43e0d --- /dev/null +++ b/tests/unittests/test_handler/test_handler_bootcmd.py @@ -0,0 +1,146 @@ +# This file is part of cloud-init. See LICENSE file for license information. 
+ +from cloudinit.config import cc_bootcmd +from cloudinit.sources import DataSourceNone +from cloudinit import (distros, helpers, cloud, util) +from cloudinit.tests.helpers import CiTestCase, mock, skipIf + +import logging +import tempfile + +try: + import jsonschema + assert jsonschema # avoid pyflakes error F401: import unused + _missing_jsonschema_dep = False +except ImportError: + _missing_jsonschema_dep = True + +LOG = logging.getLogger(__name__) + + +class FakeExtendedTempFile(object): + def __init__(self, suffix): + self.suffix = suffix + self.handle = tempfile.NamedTemporaryFile( + prefix="ci-%s." % self.__class__.__name__, delete=False) + + def __enter__(self): + return self.handle + + def __exit__(self, exc_type, exc_value, traceback): + self.handle.close() + util.del_file(self.handle.name) + + +class TestBootcmd(CiTestCase): + + with_logs = True + + _etmpfile_path = ('cloudinit.config.cc_bootcmd.temp_utils.' + 'ExtendedTemporaryFile') + + def setUp(self): + super(TestBootcmd, self).setUp() + self.subp = util.subp + self.new_root = self.tmp_dir() + + def _get_cloud(self, distro): + paths = helpers.Paths({}) + cls = distros.fetch(distro) + mydist = cls(distro, {}, paths) + myds = DataSourceNone.DataSourceNone({}, mydist, paths) + paths.datasource = myds + return cloud.Cloud(myds, paths, {}, mydist, None) + + def test_handler_skip_if_no_bootcmd(self): + """When the provided config doesn't contain bootcmd, skip it.""" + cfg = {} + mycloud = self._get_cloud('ubuntu') + cc_bootcmd.handle('notimportant', cfg, mycloud, LOG, None) + self.assertIn( + "Skipping module named notimportant, no 'bootcmd' key", + self.logs.getvalue()) + + def test_handler_invalid_command_set(self): + """Commands which can't be converted to shell will raise errors.""" + invalid_config = {'bootcmd': 1} + cc = self._get_cloud('ubuntu') + with self.assertRaises(TypeError) as context_manager: + cc_bootcmd.handle('cc_bootcmd', invalid_config, cc, LOG, []) + self.assertIn('Failed to shellify bootcmd', self.logs.getvalue()) + self.assertEqual( + "'int' object is not iterable", + str(context_manager.exception)) + + @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") + def test_handler_schema_validation_warns_non_array_type(self): + """Schema validation warns of non-array type for bootcmd key. + + Schema validation is not strict, so bootcmd attempts to shellify the + invalid content. + """ + invalid_config = {'bootcmd': 1} + cc = self._get_cloud('ubuntu') + with self.assertRaises(TypeError): + cc_bootcmd.handle('cc_bootcmd', invalid_config, cc, LOG, []) + self.assertIn( + 'Invalid config:\nbootcmd: 1 is not of type \'array\'', + self.logs.getvalue()) + self.assertIn('Failed to shellify', self.logs.getvalue()) + + @skipIf(_missing_jsonschema_dep, 'No python-jsonschema dependency') + def test_handler_schema_validation_warns_non_array_item_type(self): + """Schema validation warns of non-array or string bootcmd items. + + Schema validation is not strict, so bootcmd attempts to shellify the + invalid content. 
+ """ + invalid_config = { + 'bootcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]} + cc = self._get_cloud('ubuntu') + with self.assertRaises(RuntimeError) as context_manager: + cc_bootcmd.handle('cc_bootcmd', invalid_config, cc, LOG, []) + expected_warnings = [ + 'bootcmd.1: 20 is not valid under any of the given schemas', + 'bootcmd.3: {\'a\': \'n\'} is not valid under any of the given' + ' schema' + ] + logs = self.logs.getvalue() + for warning in expected_warnings: + self.assertIn(warning, logs) + self.assertIn('Failed to shellify', logs) + self.assertEqual( + 'Unable to shellify type int which is not a list or string', + str(context_manager.exception)) + + def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self): + """Valid schema runs a bootcmd script with INSTANCE_ID in the env.""" + cc = self._get_cloud('ubuntu') + out_file = self.tmp_path('bootcmd.out', self.new_root) + my_id = "b6ea0f59-e27d-49c6-9f87-79f19765a425" + valid_config = {'bootcmd': [ + 'echo {0} $INSTANCE_ID > {1}'.format(my_id, out_file)]} + + with mock.patch(self._etmpfile_path, FakeExtendedTempFile): + cc_bootcmd.handle('cc_bootcmd', valid_config, cc, LOG, []) + self.assertEqual(my_id + ' iid-datasource-none\n', + util.load_file(out_file)) + + def test_handler_runs_bootcmd_script_with_error(self): + """When a valid script generates an error, that error is raised.""" + cc = self._get_cloud('ubuntu') + valid_config = {'bootcmd': ['exit 1']} # Script with error + + with mock.patch(self._etmpfile_path, FakeExtendedTempFile): + with self.assertRaises(util.ProcessExecutionError) as ctxt_manager: + cc_bootcmd.handle('does-not-matter', valid_config, cc, LOG, []) + self.assertIn( + 'Unexpected error while running command.\n' + "Command: ['/bin/sh',", + str(ctxt_manager.exception)) + self.assertIn( + 'Failed to run bootcmd module does-not-matter', + self.logs.getvalue()) + + +# vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_ca_certs.py b/tests/unittests/test_handler/test_handler_ca_certs.py index 7cee2c3f..06e14db0 100644 --- a/tests/unittests/test_handler/test_handler_ca_certs.py +++ b/tests/unittests/test_handler/test_handler_ca_certs.py @@ -5,7 +5,7 @@ from cloudinit.config import cc_ca_certs from cloudinit import helpers from cloudinit import util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import logging import shutil diff --git a/tests/unittests/test_handler/test_handler_chef.py b/tests/unittests/test_handler/test_handler_chef.py index 6a152ea2..0136a93d 100644 --- a/tests/unittests/test_handler/test_handler_chef.py +++ b/tests/unittests/test_handler/test_handler_chef.py @@ -1,11 +1,10 @@ # This file is part of cloud-init. See LICENSE file for license information. +import httpretty import json import logging import os -import shutil import six -import tempfile from cloudinit import cloud from cloudinit.config import cc_chef @@ -14,18 +13,83 @@ from cloudinit import helpers from cloudinit.sources import DataSourceNone from cloudinit import util -from .. 
import helpers as t_help +from cloudinit.tests.helpers import ( + CiTestCase, FilesystemMockingTestCase, mock, skipIf) LOG = logging.getLogger(__name__) CLIENT_TEMPL = os.path.sep.join(["templates", "chef_client.rb.tmpl"]) -class TestChef(t_help.FilesystemMockingTestCase): +class TestInstallChefOmnibus(CiTestCase): + + def setUp(self): + self.new_root = self.tmp_dir() + + @httpretty.activate + def test_install_chef_from_omnibus_runs_chef_url_content(self): + """install_chef_from_omnibus runs downloaded OMNIBUS_URL as script.""" + chef_outfile = self.tmp_path('chef.out', self.new_root) + response = '#!/bin/bash\necho "Hi Mom" > {0}'.format(chef_outfile) + httpretty.register_uri( + httpretty.GET, cc_chef.OMNIBUS_URL, body=response, status=200) + cc_chef.install_chef_from_omnibus() + self.assertEqual('Hi Mom\n', util.load_file(chef_outfile)) + + @mock.patch('cloudinit.config.cc_chef.url_helper.readurl') + @mock.patch('cloudinit.config.cc_chef.util.subp_blob_in_tempfile') + def test_install_chef_from_omnibus_retries_url(self, m_subp_blob, m_rdurl): + """install_chef_from_omnibus retries OMNIBUS_URL upon failure.""" + + class FakeURLResponse(object): + contents = '#!/bin/bash\necho "Hi Mom" > {0}/chef.out'.format( + self.new_root) + + m_rdurl.return_value = FakeURLResponse() + + cc_chef.install_chef_from_omnibus() + expected_kwargs = {'retries': cc_chef.OMNIBUS_URL_RETRIES, + 'url': cc_chef.OMNIBUS_URL} + self.assertItemsEqual(expected_kwargs, m_rdurl.call_args_list[0][1]) + cc_chef.install_chef_from_omnibus(retries=10) + expected_kwargs = {'retries': 10, + 'url': cc_chef.OMNIBUS_URL} + self.assertItemsEqual(expected_kwargs, m_rdurl.call_args_list[1][1]) + expected_subp_kwargs = { + 'args': ['-v', '2.0'], + 'basename': 'chef-omnibus-install', + 'blob': m_rdurl.return_value.contents, + 'capture': False + } + self.assertItemsEqual( + expected_subp_kwargs, + m_subp_blob.call_args_list[0][1]) + + @httpretty.activate + @mock.patch('cloudinit.config.cc_chef.util.subp_blob_in_tempfile') + def test_install_chef_from_omnibus_has_omnibus_version(self, m_subp_blob): + """install_chef_from_omnibus provides version arg to OMNIBUS_URL.""" + chef_outfile = self.tmp_path('chef.out', self.new_root) + response = '#!/bin/bash\necho "Hi Mom" > {0}'.format(chef_outfile) + httpretty.register_uri( + httpretty.GET, cc_chef.OMNIBUS_URL, body=response) + cc_chef.install_chef_from_omnibus(omnibus_version='2.0') + + called_kwargs = m_subp_blob.call_args_list[0][1] + expected_kwargs = { + 'args': ['-v', '2.0'], + 'basename': 'chef-omnibus-install', + 'blob': response, + 'capture': False + } + self.assertItemsEqual(expected_kwargs, called_kwargs) + + +class TestChef(FilesystemMockingTestCase): + def setUp(self): super(TestChef, self).setUp() - self.tmp = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, self.tmp) + self.tmp = self.tmp_dir() def fetch_cloud(self, distro_kind): cls = distros.fetch(distro_kind) @@ -43,8 +107,8 @@ class TestChef(t_help.FilesystemMockingTestCase): for d in cc_chef.CHEF_DIRS: self.assertFalse(os.path.isdir(d)) - @t_help.skipIf(not os.path.isfile(CLIENT_TEMPL), - CLIENT_TEMPL + " is not available") + @skipIf(not os.path.isfile(CLIENT_TEMPL), + CLIENT_TEMPL + " is not available") def test_basic_config(self): """ test basic config looks sane @@ -122,8 +186,8 @@ class TestChef(t_help.FilesystemMockingTestCase): 'c': 'd', }, json.loads(c)) - @t_help.skipIf(not os.path.isfile(CLIENT_TEMPL), - CLIENT_TEMPL + " is not available") + @skipIf(not os.path.isfile(CLIENT_TEMPL), + CLIENT_TEMPL + " is not 
available") def test_template_deletes(self): tpl_file = util.load_file('templates/chef_client.rb.tmpl') self.patchUtils(self.tmp) @@ -143,8 +207,8 @@ class TestChef(t_help.FilesystemMockingTestCase): self.assertNotIn('json_attribs', c) self.assertNotIn('Formatter.show_time', c) - @t_help.skipIf(not os.path.isfile(CLIENT_TEMPL), - CLIENT_TEMPL + " is not available") + @skipIf(not os.path.isfile(CLIENT_TEMPL), + CLIENT_TEMPL + " is not available") def test_validation_cert_and_validation_key(self): # test validation_cert content is written to validation_key path tpl_file = util.load_file('templates/chef_client.rb.tmpl') diff --git a/tests/unittests/test_handler/test_handler_debug.py b/tests/unittests/test_handler/test_handler_debug.py index 929f786e..787ba350 100644 --- a/tests/unittests/test_handler/test_handler_debug.py +++ b/tests/unittests/test_handler/test_handler_debug.py @@ -11,7 +11,7 @@ from cloudinit import util from cloudinit.sources import DataSourceNone -from .. import helpers as t_help +from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock) import logging import shutil @@ -20,7 +20,8 @@ import tempfile LOG = logging.getLogger(__name__) -class TestDebug(t_help.FilesystemMockingTestCase): +@mock.patch('cloudinit.distros.debian.read_system_locale') +class TestDebug(FilesystemMockingTestCase): def setUp(self): super(TestDebug, self).setUp() self.new_root = tempfile.mkdtemp() @@ -36,7 +37,8 @@ class TestDebug(t_help.FilesystemMockingTestCase): ds.metadata.update(metadata) return cloud.Cloud(ds, paths, {}, d, None) - def test_debug_write(self): + def test_debug_write(self, m_locale): + m_locale.return_value = 'en_US.UTF-8' cfg = { 'abc': '123', 'c': u'\u20a0', @@ -54,7 +56,8 @@ class TestDebug(t_help.FilesystemMockingTestCase): for k in cfg.keys(): self.assertIn(k, contents) - def test_debug_no_write(self): + def test_debug_no_write(self, m_locale): + m_locale.return_value = 'en_US.UTF-8' cfg = { 'abc': '123', 'debug': { diff --git a/tests/unittests/test_handler/test_handler_disk_setup.py b/tests/unittests/test_handler/test_handler_disk_setup.py index 8a6d49ed..5afcacaf 100644 --- a/tests/unittests/test_handler/test_handler_disk_setup.py +++ b/tests/unittests/test_handler/test_handler_disk_setup.py @@ -3,7 +3,7 @@ import random from cloudinit.config import cc_disk_setup -from ..helpers import CiTestCase, ExitStack, mock, TestCase +from cloudinit.tests.helpers import CiTestCase, ExitStack, mock, TestCase class TestIsDiskUsed(TestCase): diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py index c5fc8c9b..a3e46351 100644 --- a/tests/unittests/test_handler/test_handler_growpart.py +++ b/tests/unittests/test_handler/test_handler_growpart.py @@ -4,7 +4,7 @@ from cloudinit import cloud from cloudinit.config import cc_growpart from cloudinit import util -from ..helpers import TestCase +from cloudinit.tests.helpers import TestCase import errno import logging diff --git a/tests/unittests/test_handler/test_handler_landscape.py b/tests/unittests/test_handler/test_handler_landscape.py new file mode 100644 index 00000000..db92a7e2 --- /dev/null +++ b/tests/unittests/test_handler/test_handler_landscape.py @@ -0,0 +1,130 @@ +# This file is part of cloud-init. See LICENSE file for license information. 
+ +from cloudinit.config import cc_landscape +from cloudinit import (distros, helpers, cloud, util) +from cloudinit.sources import DataSourceNone +from cloudinit.tests.helpers import (FilesystemMockingTestCase, mock, + wrap_and_call) + +from configobj import ConfigObj +import logging + + +LOG = logging.getLogger(__name__) + + +class TestLandscape(FilesystemMockingTestCase): + + with_logs = True + + def setUp(self): + super(TestLandscape, self).setUp() + self.new_root = self.tmp_dir() + self.conf = self.tmp_path('client.conf', self.new_root) + self.default_file = self.tmp_path('default_landscape', self.new_root) + + def _get_cloud(self, distro): + self.patchUtils(self.new_root) + paths = helpers.Paths({'templates_dir': self.new_root}) + cls = distros.fetch(distro) + mydist = cls(distro, {}, paths) + myds = DataSourceNone.DataSourceNone({}, mydist, paths) + return cloud.Cloud(myds, paths, {}, mydist, None) + + def test_handler_skips_empty_landscape_cloudconfig(self): + """Empty landscape cloud-config section does no work.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'landscape': {}} + cc_landscape.handle('notimportant', cfg, mycloud, LOG, None) + self.assertFalse(mycloud.distro.install_packages.called) + + def test_handler_error_on_invalid_landscape_type(self): + """Raise an error when landscape configuraiton option is invalid.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'landscape': 'wrongtype'} + with self.assertRaises(RuntimeError) as context_manager: + cc_landscape.handle('notimportant', cfg, mycloud, LOG, None) + self.assertIn( + "'landscape' key existed in config, but not a dict", + str(context_manager.exception)) + + @mock.patch('cloudinit.config.cc_landscape.util') + def test_handler_restarts_landscape_client(self, m_util): + """handler restarts lansdscape-client after install.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'landscape': {'client': {}}} + wrap_and_call( + 'cloudinit.config.cc_landscape', + {'LSC_CLIENT_CFG_FILE': {'new': self.conf}}, + cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None) + self.assertEqual( + [mock.call(['service', 'landscape-client', 'restart'])], + m_util.subp.call_args_list) + + def test_handler_installs_client_and_creates_config_file(self): + """Write landscape client.conf and install landscape-client.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'landscape': {'client': {}}} + expected = {'client': { + 'log_level': 'info', + 'url': 'https://landscape.canonical.com/message-system', + 'ping_url': 'http://landscape.canonical.com/ping', + 'data_path': '/var/lib/landscape/client'}} + mycloud.distro = mock.MagicMock() + wrap_and_call( + 'cloudinit.config.cc_landscape', + {'LSC_CLIENT_CFG_FILE': {'new': self.conf}, + 'LS_DEFAULT_FILE': {'new': self.default_file}}, + cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None) + self.assertEqual( + [mock.call('landscape-client')], + mycloud.distro.install_packages.call_args) + self.assertEqual(expected, dict(ConfigObj(self.conf))) + self.assertIn( + 'Wrote landscape config file to {0}'.format(self.conf), + self.logs.getvalue()) + default_content = util.load_file(self.default_file) + self.assertEqual('RUN=1\n', default_content) + + def test_handler_writes_merged_client_config_file_with_defaults(self): + """Merge and write options from LSC_CLIENT_CFG_FILE with defaults.""" + # Write existing sparse client.conf file + util.write_file(self.conf, '[client]\ncomputer_title = My PC\n') + mycloud = self._get_cloud('ubuntu') + cfg = {'landscape': 
{'client': {}}} + expected = {'client': { + 'log_level': 'info', + 'url': 'https://landscape.canonical.com/message-system', + 'ping_url': 'http://landscape.canonical.com/ping', + 'data_path': '/var/lib/landscape/client', + 'computer_title': 'My PC'}} + wrap_and_call( + 'cloudinit.config.cc_landscape', + {'LSC_CLIENT_CFG_FILE': {'new': self.conf}}, + cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None) + self.assertEqual(expected, dict(ConfigObj(self.conf))) + self.assertIn( + 'Wrote landscape config file to {0}'.format(self.conf), + self.logs.getvalue()) + + def test_handler_writes_merged_provided_cloudconfig_with_defaults(self): + """Merge and write options from cloud-config options with defaults.""" + # Write empty sparse client.conf file + util.write_file(self.conf, '') + mycloud = self._get_cloud('ubuntu') + cfg = {'landscape': {'client': {'computer_title': 'My PC'}}} + expected = {'client': { + 'log_level': 'info', + 'url': 'https://landscape.canonical.com/message-system', + 'ping_url': 'http://landscape.canonical.com/ping', + 'data_path': '/var/lib/landscape/client', + 'computer_title': 'My PC'}} + wrap_and_call( + 'cloudinit.config.cc_landscape', + {'LSC_CLIENT_CFG_FILE': {'new': self.conf}}, + cc_landscape.handle, 'notimportant', cfg, mycloud, LOG, None) + self.assertEqual(expected, dict(ConfigObj(self.conf))) + self.assertIn( + 'Wrote landscape config file to {0}'.format(self.conf), + self.logs.getvalue()) diff --git a/tests/unittests/test_handler/test_handler_locale.py b/tests/unittests/test_handler/test_handler_locale.py index e9a810c5..e29a06f9 100644 --- a/tests/unittests/test_handler/test_handler_locale.py +++ b/tests/unittests/test_handler/test_handler_locale.py @@ -13,13 +13,15 @@ from cloudinit import util from cloudinit.sources import DataSourceNoCloud -from .. 
import helpers as t_help +from cloudinit.tests import helpers as t_help from configobj import ConfigObj from six import BytesIO import logging +import mock +import os import shutil import tempfile @@ -27,6 +29,9 @@ LOG = logging.getLogger(__name__) class TestLocale(t_help.FilesystemMockingTestCase): + + with_logs = True + def setUp(self): super(TestLocale, self).setUp() self.new_root = tempfile.mkdtemp() @@ -49,9 +54,58 @@ class TestLocale(t_help.FilesystemMockingTestCase): } cc = self._get_cloud('sles') cc_locale.handle('cc_locale', cfg, cc, LOG, []) + if cc.distro.uses_systemd(): + locale_conf = cc.distro.systemd_locale_conf_fn + else: + locale_conf = cc.distro.locale_conf_fn + contents = util.load_file(locale_conf, decode=False) + n_cfg = ConfigObj(BytesIO(contents)) + if cc.distro.uses_systemd(): + self.assertEqual({'LANG': cfg['locale']}, dict(n_cfg)) + else: + self.assertEqual({'RC_LANG': cfg['locale']}, dict(n_cfg)) + + def test_set_locale_sles_default(self): + cfg = {} + cc = self._get_cloud('sles') + cc_locale.handle('cc_locale', cfg, cc, LOG, []) - contents = util.load_file('/etc/sysconfig/language', decode=False) + if cc.distro.uses_systemd(): + locale_conf = cc.distro.systemd_locale_conf_fn + keyname = 'LANG' + else: + locale_conf = cc.distro.locale_conf_fn + keyname = 'RC_LANG' + + contents = util.load_file(locale_conf, decode=False) n_cfg = ConfigObj(BytesIO(contents)) - self.assertEqual({'RC_LANG': cfg['locale']}, dict(n_cfg)) + self.assertEqual({keyname: 'en_US.UTF-8'}, dict(n_cfg)) + + def test_locale_update_config_if_different_than_default(self): + """Test cc_locale writes updates conf if different than default""" + locale_conf = os.path.join(self.new_root, "etc/default/locale") + util.write_file(locale_conf, 'LANG="en_US.UTF-8"\n') + cfg = {'locale': 'C.UTF-8'} + cc = self._get_cloud('ubuntu') + with mock.patch('cloudinit.distros.debian.util.subp') as m_subp: + with mock.patch('cloudinit.distros.debian.LOCALE_CONF_FN', + locale_conf): + cc_locale.handle('cc_locale', cfg, cc, LOG, []) + m_subp.assert_called_with(['update-locale', + '--locale-file=%s' % locale_conf, + 'LANG=C.UTF-8'], capture=False) + + def test_locale_rhel_defaults_en_us_utf8(self): + """Test cc_locale gets en_US.UTF-8 from distro get_locale fallback""" + cfg = {} + cc = self._get_cloud('rhel') + update_sysconfig = 'cloudinit.distros.rhel_util.update_sysconfig_file' + with mock.patch.object(cc.distro, 'uses_systemd') as m_use_sd: + m_use_sd.return_value = True + with mock.patch(update_sysconfig) as m_update_syscfg: + cc_locale.handle('cc_locale', cfg, cc, LOG, []) + m_update_syscfg.assert_called_with('/etc/locale.conf', + {'LANG': 'en_US.UTF-8'}) + # vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_lxd.py b/tests/unittests/test_handler/test_handler_lxd.py index 351226bf..f132a778 100644 --- a/tests/unittests/test_handler/test_handler_lxd.py +++ b/tests/unittests/test_handler/test_handler_lxd.py @@ -3,7 +3,7 @@ from cloudinit.config import cc_lxd from cloudinit.sources import DataSourceNoCloud from cloudinit import (distros, helpers, cloud) -from .. 
import helpers as t_help +from cloudinit.tests import helpers as t_help import logging diff --git a/tests/unittests/test_handler/test_handler_mcollective.py b/tests/unittests/test_handler/test_handler_mcollective.py index 2a9f3823..7eec7352 100644 --- a/tests/unittests/test_handler/test_handler_mcollective.py +++ b/tests/unittests/test_handler/test_handler_mcollective.py @@ -4,7 +4,7 @@ from cloudinit import (cloud, distros, helpers, util) from cloudinit.config import cc_mcollective from cloudinit.sources import DataSourceNoCloud -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help import configobj import logging diff --git a/tests/unittests/test_handler/test_handler_mounts.py b/tests/unittests/test_handler/test_handler_mounts.py index 650ca0ec..fe492d4b 100644 --- a/tests/unittests/test_handler/test_handler_mounts.py +++ b/tests/unittests/test_handler/test_handler_mounts.py @@ -6,7 +6,7 @@ import tempfile from cloudinit.config import cc_mounts -from .. import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers try: from unittest import mock diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py index 7f278646..4f291248 100644 --- a/tests/unittests/test_handler/test_handler_ntp.py +++ b/tests/unittests/test_handler/test_handler_ntp.py @@ -3,7 +3,7 @@ from cloudinit.config import cc_ntp from cloudinit.sources import DataSourceNone from cloudinit import (distros, helpers, cloud, util) -from ..helpers import FilesystemMockingTestCase, mock, skipIf +from cloudinit.tests.helpers import FilesystemMockingTestCase, mock, skipIf import os @@ -16,6 +16,14 @@ servers {{servers}} pools {{pools}} """ +TIMESYNCD_TEMPLATE = b"""\ +## template:jinja +[Time] +{% if servers or pools -%} +NTP={% for host in servers|list + pools|list %}{{ host }} {% endfor -%} +{% endif -%} +""" + try: import jsonschema assert jsonschema # avoid pyflakes error F401: import unused @@ -59,6 +67,14 @@ class TestNtp(FilesystemMockingTestCase): cc_ntp.install_ntp(install_func, packages=['ntp'], check_exe='ntpd') install_func.assert_not_called() + @mock.patch("cloudinit.config.cc_ntp.util") + def test_ntp_install_no_op_with_empty_pkg_list(self, mock_util): + """ntp_install calls install_func with empty list""" + mock_util.which.return_value = None # check_exe not found + install_func = mock.MagicMock() + cc_ntp.install_ntp(install_func, packages=[], check_exe='timesyncd') + install_func.assert_called_once_with([]) + def test_ntp_rename_ntp_conf(self): """When NTP_CONF exists, rename_ntp moves it.""" ntpconf = self.tmp_path("ntp.conf", self.new_root) @@ -68,6 +84,30 @@ class TestNtp(FilesystemMockingTestCase): self.assertFalse(os.path.exists(ntpconf)) self.assertTrue(os.path.exists("{0}.dist".format(ntpconf))) + @mock.patch("cloudinit.config.cc_ntp.util") + def test_reload_ntp_defaults(self, mock_util): + """Test service is restarted/reloaded (defaults)""" + service = 'ntp' + cmd = ['service', service, 'restart'] + cc_ntp.reload_ntp(service) + mock_util.subp.assert_called_with(cmd, capture=True) + + @mock.patch("cloudinit.config.cc_ntp.util") + def test_reload_ntp_systemd(self, mock_util): + """Test service is restarted/reloaded (systemd)""" + service = 'ntp' + cmd = ['systemctl', 'reload-or-restart', service] + cc_ntp.reload_ntp(service, systemd=True) + mock_util.subp.assert_called_with(cmd, capture=True) + + @mock.patch("cloudinit.config.cc_ntp.util") + def test_reload_ntp_systemd_timesycnd(self, mock_util): + """Test 
service is restarted/reloaded (systemd/timesyncd)""" + service = 'systemd-timesycnd' + cmd = ['systemctl', 'reload-or-restart', service] + cc_ntp.reload_ntp(service, systemd=True) + mock_util.subp.assert_called_with(cmd, capture=True) + def test_ntp_rename_ntp_conf_skip_missing(self): """When NTP_CONF doesn't exist rename_ntp doesn't create a file.""" ntpconf = self.tmp_path("ntp.conf", self.new_root) @@ -94,7 +134,7 @@ class TestNtp(FilesystemMockingTestCase): with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream: stream.write(NTP_TEMPLATE) with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf): - cc_ntp.write_ntp_config_template(cfg, mycloud) + cc_ntp.write_ntp_config_template(cfg, mycloud, ntp_conf) content = util.read_file_or_url('file://' + ntp_conf).contents self.assertEqual( "servers ['192.168.2.1', '192.168.2.2']\npools []\n", @@ -120,7 +160,7 @@ class TestNtp(FilesystemMockingTestCase): with open('{0}.{1}.tmpl'.format(ntp_conf, distro), 'wb') as stream: stream.write(NTP_TEMPLATE) with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf): - cc_ntp.write_ntp_config_template(cfg, mycloud) + cc_ntp.write_ntp_config_template(cfg, mycloud, ntp_conf) content = util.read_file_or_url('file://' + ntp_conf).contents self.assertEqual( "servers []\npools ['10.0.0.1', '10.0.0.2']\n", @@ -139,7 +179,7 @@ class TestNtp(FilesystemMockingTestCase): with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream: stream.write(NTP_TEMPLATE) with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf): - cc_ntp.write_ntp_config_template({}, mycloud) + cc_ntp.write_ntp_config_template({}, mycloud, ntp_conf) content = util.read_file_or_url('file://' + ntp_conf).contents default_pools = [ "{0}.{1}.pool.ntp.org".format(x, distro) @@ -152,7 +192,8 @@ class TestNtp(FilesystemMockingTestCase): ",".join(default_pools)), self.logs.getvalue()) - def test_ntp_handler_mocked_template(self): + @mock.patch("cloudinit.config.cc_ntp.ntp_installable") + def test_ntp_handler_mocked_template(self, m_ntp_install): """Test ntp handler renders ubuntu ntp.conf template.""" pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org'] servers = ['192.168.23.3', '192.168.23.4'] @@ -164,6 +205,8 @@ class TestNtp(FilesystemMockingTestCase): } mycloud = self._get_cloud('ubuntu') ntp_conf = self.tmp_path('ntp.conf', self.new_root) # Doesn't exist + m_ntp_install.return_value = True + # Create ntp.conf.tmpl with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream: stream.write(NTP_TEMPLATE) @@ -176,6 +219,34 @@ class TestNtp(FilesystemMockingTestCase): 'servers {0}\npools {1}\n'.format(servers, pools), content.decode()) + @mock.patch("cloudinit.config.cc_ntp.util") + def test_ntp_handler_mocked_template_snappy(self, m_util): + """Test ntp handler renders timesycnd.conf template on snappy.""" + pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org'] + servers = ['192.168.23.3', '192.168.23.4'] + cfg = { + 'ntp': { + 'pools': pools, + 'servers': servers + } + } + mycloud = self._get_cloud('ubuntu') + m_util.system_is_snappy.return_value = True + + # Create timesyncd.conf.tmpl + tsyncd_conf = self.tmp_path("timesyncd.conf", self.new_root) + template = '{0}.tmpl'.format(tsyncd_conf) + with open(template, 'wb') as stream: + stream.write(TIMESYNCD_TEMPLATE) + + with mock.patch('cloudinit.config.cc_ntp.TIMESYNCD_CONF', tsyncd_conf): + cc_ntp.handle('notimportant', cfg, mycloud, None, None) + + content = util.read_file_or_url('file://' + tsyncd_conf).contents + self.assertEqual( + "[Time]\nNTP=%s %s \n" % (" ".join(servers), 
" ".join(pools)), + content.decode()) + def test_ntp_handler_real_distro_templates(self): """Test ntp handler renders the shipped distro ntp.conf templates.""" pools = ['0.mycompany.pool.ntp.org', '3.mycompany.pool.ntp.org'] @@ -333,4 +404,30 @@ class TestNtp(FilesystemMockingTestCase): "pools ['0.mypool.org', '0.mypool.org']\n", content) + @mock.patch("cloudinit.config.cc_ntp.ntp_installable") + def test_ntp_handler_timesyncd(self, m_ntp_install): + """Test ntp handler configures timesyncd""" + m_ntp_install.return_value = False + distro = 'ubuntu' + cfg = { + 'servers': ['192.168.2.1', '192.168.2.2'], + 'pools': ['0.mypool.org'], + } + mycloud = self._get_cloud(distro) + tsyncd_conf = self.tmp_path("timesyncd.conf", self.new_root) + # Create timesyncd.conf.tmpl + template = '{0}.tmpl'.format(tsyncd_conf) + print(template) + with open(template, 'wb') as stream: + stream.write(TIMESYNCD_TEMPLATE) + with mock.patch('cloudinit.config.cc_ntp.TIMESYNCD_CONF', tsyncd_conf): + cc_ntp.write_ntp_config_template(cfg, mycloud, tsyncd_conf, + template='timesyncd.conf') + + content = util.read_file_or_url('file://' + tsyncd_conf).contents + self.assertEqual( + "[Time]\nNTP=192.168.2.1 192.168.2.2 0.mypool.org \n", + content.decode()) + + # vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_power_state.py b/tests/unittests/test_handler/test_handler_power_state.py index e382210d..85a0fe0a 100644 --- a/tests/unittests/test_handler/test_handler_power_state.py +++ b/tests/unittests/test_handler/test_handler_power_state.py @@ -4,8 +4,8 @@ import sys from cloudinit.config import cc_power_state_change as psc -from .. import helpers as t_help -from ..helpers import mock +from cloudinit.tests import helpers as t_help +from cloudinit.tests.helpers import mock class TestLoadPowerState(t_help.TestCase): diff --git a/tests/unittests/test_handler/test_handler_puppet.py b/tests/unittests/test_handler/test_handler_puppet.py new file mode 100644 index 00000000..0b6e3b58 --- /dev/null +++ b/tests/unittests/test_handler/test_handler_puppet.py @@ -0,0 +1,142 @@ +# This file is part of cloud-init. See LICENSE file for license information. 
+ +from cloudinit.config import cc_puppet +from cloudinit.sources import DataSourceNone +from cloudinit import (distros, helpers, cloud, util) +from cloudinit.tests.helpers import CiTestCase, mock + +import logging + + +LOG = logging.getLogger(__name__) + + +@mock.patch('cloudinit.config.cc_puppet.util') +@mock.patch('cloudinit.config.cc_puppet.os') +class TestAutostartPuppet(CiTestCase): + + with_logs = True + + def test_wb_autostart_puppet_updates_puppet_default(self, m_os, m_util): + """Update /etc/default/puppet to autostart if it exists.""" + + def _fake_exists(path): + return path == '/etc/default/puppet' + + m_os.path.exists.side_effect = _fake_exists + cc_puppet._autostart_puppet(LOG) + self.assertEqual( + [mock.call(['sed', '-i', '-e', 's/^START=.*/START=yes/', + '/etc/default/puppet'], capture=False)], + m_util.subp.call_args_list) + + def test_wb_autostart_pupppet_enables_puppet_systemctl(self, m_os, m_util): + """If systemctl is present, enable puppet via systemctl.""" + + def _fake_exists(path): + return path == '/bin/systemctl' + + m_os.path.exists.side_effect = _fake_exists + cc_puppet._autostart_puppet(LOG) + expected_calls = [mock.call( + ['/bin/systemctl', 'enable', 'puppet.service'], capture=False)] + self.assertEqual(expected_calls, m_util.subp.call_args_list) + + def test_wb_autostart_pupppet_enables_puppet_chkconfig(self, m_os, m_util): + """If chkconfig is present, enable puppet via checkcfg.""" + + def _fake_exists(path): + return path == '/sbin/chkconfig' + + m_os.path.exists.side_effect = _fake_exists + cc_puppet._autostart_puppet(LOG) + expected_calls = [mock.call( + ['/sbin/chkconfig', 'puppet', 'on'], capture=False)] + self.assertEqual(expected_calls, m_util.subp.call_args_list) + + +@mock.patch('cloudinit.config.cc_puppet._autostart_puppet') +class TestPuppetHandle(CiTestCase): + + with_logs = True + + def setUp(self): + super(TestPuppetHandle, self).setUp() + self.new_root = self.tmp_dir() + self.conf = self.tmp_path('puppet.conf') + + def _get_cloud(self, distro): + paths = helpers.Paths({'templates_dir': self.new_root}) + cls = distros.fetch(distro) + mydist = cls(distro, {}, paths) + myds = DataSourceNone.DataSourceNone({}, mydist, paths) + return cloud.Cloud(myds, paths, {}, mydist, None) + + def test_handler_skips_missing_puppet_key_in_cloudconfig(self, m_auto): + """Cloud-config containing no 'puppet' key is skipped.""" + mycloud = self._get_cloud('ubuntu') + cfg = {} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertIn( + "no 'puppet' configuration found", self.logs.getvalue()) + self.assertEqual(0, m_auto.call_count) + + @mock.patch('cloudinit.config.cc_puppet.util.subp') + def test_handler_puppet_config_starts_puppet_service(self, m_subp, m_auto): + """Cloud-config 'puppet' configuration starts puppet.""" + mycloud = self._get_cloud('ubuntu') + cfg = {'puppet': {'install': False}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual(1, m_auto.call_count) + self.assertEqual( + [mock.call(['service', 'puppet', 'start'], capture=False)], + m_subp.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.util.subp') + def test_handler_empty_puppet_config_installs_puppet(self, m_subp, m_auto): + """Cloud-config empty 'puppet' configuration installs latest puppet.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual( + [mock.call(('puppet', None))], + 
mycloud.distro.install_packages.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.util.subp') + def test_handler_puppet_config_installs_puppet_on_true(self, m_subp, _): + """Cloud-config with 'puppet' key installs when 'install' is True.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'install': True}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual( + [mock.call(('puppet', None))], + mycloud.distro.install_packages.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.util.subp') + def test_handler_puppet_config_installs_puppet_version(self, m_subp, _): + """Cloud-config 'puppet' configuration can specify a version.""" + mycloud = self._get_cloud('ubuntu') + mycloud.distro = mock.MagicMock() + cfg = {'puppet': {'version': '3.8'}} + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + self.assertEqual( + [mock.call(('puppet', '3.8'))], + mycloud.distro.install_packages.call_args_list) + + @mock.patch('cloudinit.config.cc_puppet.util.subp') + def test_handler_puppet_config_updates_puppet_conf(self, m_subp, m_auto): + """When 'conf' is provided update values in PUPPET_CONF_PATH.""" + mycloud = self._get_cloud('ubuntu') + cfg = { + 'puppet': { + 'conf': {'agent': {'server': 'puppetmaster.example.org'}}}} + util.write_file(self.conf, '[agent]\nserver = origpuppet\nother = 3') + puppet_conf_path = 'cloudinit.config.cc_puppet.PUPPET_CONF_PATH' + mycloud.distro = mock.MagicMock() + with mock.patch(puppet_conf_path, self.conf): + cc_puppet.handle('notimportant', cfg, mycloud, LOG, None) + content = util.load_file(self.conf) + expected = '[agent]\nserver = puppetmaster.example.org\nother = 3\n\n' + self.assertEqual(expected, content) diff --git a/tests/unittests/test_handler/test_handler_resizefs.py b/tests/unittests/test_handler/test_handler_resizefs.py index 52591b8b..3e5d436c 100644 --- a/tests/unittests/test_handler/test_handler_resizefs.py +++ b/tests/unittests/test_handler/test_handler_resizefs.py @@ -1,17 +1,30 @@ # This file is part of cloud-init. See LICENSE file for license information. 
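(The last puppet case above expects an in-place merge of the 'puppet: conf:' cloud-config into an existing puppet.conf: untouched keys such as 'other = 3' survive and only the provided ones are overwritten. A hedged equivalent using configobj is shown below; cc_puppet's real parser and helper names may differ.)

from configobj import ConfigObj


def update_puppet_conf(path, conf_updates):
    """Overlay cloud-config puppet conf sections onto an existing file."""
    puppet_config = ConfigObj(path)
    for section, options in conf_updates.items():
        if section not in puppet_config:
            puppet_config[section] = {}
        for key, value in options.items():
            puppet_config[section][key] = value
    puppet_config.write()  # ConfigObj rewrites the file it was loaded from

Called as update_puppet_conf(path, {'agent': {'server': 'puppetmaster.example.org'}}) against the fixture above, this yields the '[agent]' section the test expects.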
-from cloudinit.config import cc_resizefs +from cloudinit.config.cc_resizefs import ( + can_skip_resize, handle, is_device_path_writable_block, + rootdev_from_cmdline) +import logging import textwrap -import unittest + +from cloudinit.tests.helpers import (CiTestCase, mock, skipIf, util, + wrap_and_call) + + +LOG = logging.getLogger(__name__) + try: - from unittest import mock + import jsonschema + assert jsonschema # avoid pyflakes error F401: import unused + _missing_jsonschema_dep = False except ImportError: - import mock + _missing_jsonschema_dep = True + +class TestResizefs(CiTestCase): + with_logs = True -class TestResizefs(unittest.TestCase): def setUp(self): super(TestResizefs, self).setUp() self.name = "resizefs" @@ -34,7 +47,7 @@ class TestResizefs(unittest.TestCase): 58720296 3145728 3 freebsd-swap (1.5G) 61866024 1048496 - free - (512M) """) - res = cc_resizefs.can_skip_resize(fs_type, resize_what, devpth) + res = can_skip_resize(fs_type, resize_what, devpth) self.assertTrue(res) @mock.patch('cloudinit.config.cc_resizefs._get_dumpfs_output') @@ -52,8 +65,210 @@ class TestResizefs(unittest.TestCase): => 34 297086 da0 GPT (145M) 34 297086 1 freebsd-ufs (145M) """) - res = cc_resizefs.can_skip_resize(fs_type, resize_what, devpth) + res = can_skip_resize(fs_type, resize_what, devpth) self.assertTrue(res) + def test_handle_noops_on_disabled(self): + """The handle function logs when the configuration disables resize.""" + cfg = {'resize_rootfs': False} + handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[]) + self.assertIn( + 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n', + self.logs.getvalue()) + + @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") + def test_handle_schema_validation_logs_invalid_resize_rootfs_value(self): + """The handle reports json schema violations as a warning. + + Invalid values for resize_rootfs result in disabling the module. 
+ """ + cfg = {'resize_rootfs': 'junk'} + handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[]) + logs = self.logs.getvalue() + self.assertIn( + "WARNING: Invalid config:\nresize_rootfs: 'junk' is not one of" + " [True, False, 'noblock']", + logs) + self.assertIn( + 'DEBUG: Skipping module named cc_resizefs, resizing disabled\n', + logs) + + @mock.patch('cloudinit.config.cc_resizefs.util.get_mount_info') + def test_handle_warns_on_unknown_mount_info(self, m_get_mount_info): + """handle warns when get_mount_info sees unknown filesystem for /.""" + m_get_mount_info.return_value = None + cfg = {'resize_rootfs': True} + handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[]) + logs = self.logs.getvalue() + self.assertNotIn("WARNING: Invalid config:\nresize_rootfs:", logs) + self.assertIn( + 'WARNING: Could not determine filesystem type of /\n', + logs) + self.assertEqual( + [mock.call('/', LOG)], + m_get_mount_info.call_args_list) + + def test_handle_warns_on_undiscoverable_root_path_in_commandline(self): + """handle noops when the root path is not found on the commandline.""" + cfg = {'resize_rootfs': True} + exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists' + + def fake_mount_info(path, log): + self.assertEqual('/', path) + self.assertEqual(LOG, log) + return ('/dev/root', 'ext4', '/') + + with mock.patch(exists_mock_path) as m_exists: + m_exists.return_value = False + wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': False}, + 'get_mount_info': {'side_effect': fake_mount_info}, + 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}}, + handle, 'cc_resizefs', cfg, _cloud=None, log=LOG, + args=[]) + logs = self.logs.getvalue() + self.assertIn("WARNING: Unable to find device '/dev/root'", logs) + + +class TestRootDevFromCmdline(CiTestCase): + + def test_rootdev_from_cmdline_with_no_root(self): + """Return None from rootdev_from_cmdline when root is not present.""" + invalid_cases = [ + 'BOOT_IMAGE=/adsf asdfa werasef root adf', 'BOOT_IMAGE=/adsf', ''] + for case in invalid_cases: + self.assertIsNone(rootdev_from_cmdline(case)) + + def test_rootdev_from_cmdline_with_root_startswith_dev(self): + """Return the cmdline root when the path starts with /dev.""" + self.assertEqual( + '/dev/this', rootdev_from_cmdline('asdf root=/dev/this')) + + def test_rootdev_from_cmdline_with_root_without_dev_prefix(self): + """Add /dev prefix to cmdline root when the path lacks the prefix.""" + self.assertEqual('/dev/this', rootdev_from_cmdline('asdf root=this')) + + def test_rootdev_from_cmdline_with_root_with_label(self): + """When cmdline root contains a LABEL, our root is disk/by-label.""" + self.assertEqual( + '/dev/disk/by-label/unique', + rootdev_from_cmdline('asdf root=LABEL=unique')) + + def test_rootdev_from_cmdline_with_root_with_uuid(self): + """When cmdline root contains a UUID, our root is disk/by-uuid.""" + self.assertEqual( + '/dev/disk/by-uuid/adsfdsaf-adsf', + rootdev_from_cmdline('asdf root=UUID=adsfdsaf-adsf')) + + +class TestIsDevicePathWritableBlock(CiTestCase): + + with_logs = True + + def test_is_device_path_writable_block_false_on_overlayroot(self): + """When devpath is overlayroot (on MAAS), is_dev_writable is False.""" + info = 'does not matter' + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': False}}, + is_device_path_writable_block, 'overlayroot', info, LOG) + self.assertFalse(is_writable) + self.assertIn( + "Not attempting to resize devpath 'overlayroot'", + 
self.logs.getvalue()) + + def test_is_device_path_writable_block_warns_missing_cmdline_root(self): + """When root does not exist isn't in the cmdline, log warning.""" + info = 'does not matter' + + def fake_mount_info(path, log): + self.assertEqual('/', path) + self.assertEqual(LOG, log) + return ('/dev/root', 'ext4', '/') + + exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists' + with mock.patch(exists_mock_path) as m_exists: + m_exists.return_value = False + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': False}, + 'get_mount_info': {'side_effect': fake_mount_info}, + 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}}, + is_device_path_writable_block, '/dev/root', info, LOG) + self.assertFalse(is_writable) + logs = self.logs.getvalue() + self.assertIn("WARNING: Unable to find device '/dev/root'", logs) + + def test_is_device_path_writable_block_does_not_exist(self): + """When devpath does not exist, a warning is logged.""" + info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': False}}, + is_device_path_writable_block, '/I/dont/exist', info, LOG) + self.assertFalse(is_writable) + self.assertIn( + "WARNING: Device '/I/dont/exist' did not exist." + ' cannot resize: %s' % info, + self.logs.getvalue()) + + def test_is_device_path_writable_block_does_not_exist_in_container(self): + """When devpath does not exist in a container, log a debug message.""" + info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': True}}, + is_device_path_writable_block, '/I/dont/exist', info, LOG) + self.assertFalse(is_writable) + self.assertIn( + "DEBUG: Device '/I/dont/exist' did not exist in container." + ' cannot resize: %s' % info, + self.logs.getvalue()) + + def test_is_device_path_writable_block_raises_oserror(self): + """When unexpected OSError is raises by os.stat it is reraised.""" + info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' + with self.assertRaises(OSError) as context_manager: + wrap_and_call( + 'cloudinit.config.cc_resizefs', + {'util.is_container': {'return_value': True}, + 'os.stat': {'side_effect': OSError('Something unexpected')}}, + is_device_path_writable_block, '/I/dont/exist', info, LOG) + self.assertEqual( + 'Something unexpected', str(context_manager.exception)) + + def test_is_device_path_writable_block_non_block(self): + """When device is not a block device, emit warning return False.""" + fake_devpath = self.tmp_path('dev/readwrite') + util.write_file(fake_devpath, '', mode=0o600) # read-write + info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) + + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': False}}, + is_device_path_writable_block, fake_devpath, info, LOG) + self.assertFalse(is_writable) + self.assertIn( + "WARNING: device '{0}' not a block device. 
cannot resize".format( + fake_devpath), + self.logs.getvalue()) + + def test_is_device_path_writable_block_non_block_on_container(self): + """When device is non-block device in container, emit debug log.""" + fake_devpath = self.tmp_path('dev/readwrite') + util.write_file(fake_devpath, '', mode=0o600) # read-write + info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) + + is_writable = wrap_and_call( + 'cloudinit.config.cc_resizefs.util', + {'is_container': {'return_value': True}}, + is_device_path_writable_block, fake_devpath, info, LOG) + self.assertFalse(is_writable) + self.assertIn( + "DEBUG: device '{0}' not a block device in container." + ' cannot resize'.format(fake_devpath), + self.logs.getvalue()) + # vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_rsyslog.py b/tests/unittests/test_handler/test_handler_rsyslog.py index cca06678..8c8e2838 100644 --- a/tests/unittests/test_handler/test_handler_rsyslog.py +++ b/tests/unittests/test_handler/test_handler_rsyslog.py @@ -9,7 +9,7 @@ from cloudinit.config.cc_rsyslog import ( parse_remotes_line, remotes_to_rsyslog_cfg) from cloudinit import util -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help class TestLoadConfig(t_help.TestCase): diff --git a/tests/unittests/test_handler/test_handler_runcmd.py b/tests/unittests/test_handler/test_handler_runcmd.py new file mode 100644 index 00000000..374c1d31 --- /dev/null +++ b/tests/unittests/test_handler/test_handler_runcmd.py @@ -0,0 +1,108 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +from cloudinit.config import cc_runcmd +from cloudinit.sources import DataSourceNone +from cloudinit import (distros, helpers, cloud, util) +from cloudinit.tests.helpers import FilesystemMockingTestCase, skipIf + +import logging +import os +import stat + +try: + import jsonschema + assert jsonschema # avoid pyflakes error F401: import unused + _missing_jsonschema_dep = False +except ImportError: + _missing_jsonschema_dep = True + +LOG = logging.getLogger(__name__) + + +class TestRuncmd(FilesystemMockingTestCase): + + with_logs = True + + def setUp(self): + super(TestRuncmd, self).setUp() + self.subp = util.subp + self.new_root = self.tmp_dir() + + def _get_cloud(self, distro): + self.patchUtils(self.new_root) + paths = helpers.Paths({'scripts': self.new_root}) + cls = distros.fetch(distro) + mydist = cls(distro, {}, paths) + myds = DataSourceNone.DataSourceNone({}, mydist, paths) + paths.datasource = myds + return cloud.Cloud(myds, paths, {}, mydist, None) + + def test_handler_skip_if_no_runcmd(self): + """When the provided config doesn't contain runcmd, skip it.""" + cfg = {} + mycloud = self._get_cloud('ubuntu') + cc_runcmd.handle('notimportant', cfg, mycloud, LOG, None) + self.assertIn( + "Skipping module named notimportant, no 'runcmd' key", + self.logs.getvalue()) + + def test_handler_invalid_command_set(self): + """Commands which can't be converted to shell will raise errors.""" + invalid_config = {'runcmd': 1} + cc = self._get_cloud('ubuntu') + cc_runcmd.handle('cc_runcmd', invalid_config, cc, LOG, []) + self.assertIn( + 'Failed to shellify 1 into file' + ' /var/lib/cloud/instances/iid-datasource-none/scripts/runcmd', + self.logs.getvalue()) + + @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") + def test_handler_schema_validation_warns_non_array_type(self): + """Schema validation warns of non-array type for runcmd key. 
+ + Schema validation is not strict, so runcmd attempts to shellify the + invalid content. + """ + invalid_config = {'runcmd': 1} + cc = self._get_cloud('ubuntu') + cc_runcmd.handle('cc_runcmd', invalid_config, cc, LOG, []) + self.assertIn( + 'Invalid config:\nruncmd: 1 is not of type \'array\'', + self.logs.getvalue()) + self.assertIn('Failed to shellify', self.logs.getvalue()) + + @skipIf(_missing_jsonschema_dep, 'No python-jsonschema dependency') + def test_handler_schema_validation_warns_non_array_item_type(self): + """Schema validation warns of non-array or string runcmd items. + + Schema validation is not strict, so runcmd attempts to shellify the + invalid content. + """ + invalid_config = { + 'runcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]} + cc = self._get_cloud('ubuntu') + cc_runcmd.handle('cc_runcmd', invalid_config, cc, LOG, []) + expected_warnings = [ + 'runcmd.1: 20 is not valid under any of the given schemas', + 'runcmd.3: {\'a\': \'n\'} is not valid under any of the given' + ' schema' + ] + logs = self.logs.getvalue() + for warning in expected_warnings: + self.assertIn(warning, logs) + self.assertIn('Failed to shellify', logs) + + def test_handler_write_valid_runcmd_schema_to_file(self): + """Valid runcmd schema is written to a runcmd shell script.""" + valid_config = {'runcmd': [['ls', '/']]} + cc = self._get_cloud('ubuntu') + cc_runcmd.handle('cc_runcmd', valid_config, cc, LOG, []) + runcmd_file = os.path.join( + self.new_root, + 'var/lib/cloud/instances/iid-datasource-none/scripts/runcmd') + self.assertEqual("#!/bin/sh\n'ls' '/'\n", util.load_file(runcmd_file)) + file_stat = os.stat(runcmd_file) + self.assertEqual(0o700, stat.S_IMODE(file_stat.st_mode)) + + +# vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_seed_random.py b/tests/unittests/test_handler/test_handler_seed_random.py index e5e607fb..f60dedc2 100644 --- a/tests/unittests/test_handler/test_handler_seed_random.py +++ b/tests/unittests/test_handler/test_handler_seed_random.py @@ -22,7 +22,7 @@ from cloudinit import util from cloudinit.sources import DataSourceNone -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help import logging diff --git a/tests/unittests/test_handler/test_handler_set_hostname.py b/tests/unittests/test_handler/test_handler_set_hostname.py index 4b18de75..abdc17e7 100644 --- a/tests/unittests/test_handler/test_handler_set_hostname.py +++ b/tests/unittests/test_handler/test_handler_set_hostname.py @@ -7,7 +7,7 @@ from cloudinit import distros from cloudinit import helpers from cloudinit import util -from .. 
import helpers as t_help +from cloudinit.tests import helpers as t_help from configobj import ConfigObj import logging @@ -70,7 +70,8 @@ class TestHostname(t_help.FilesystemMockingTestCase): cc = cloud.Cloud(ds, paths, {}, distro, None) self.patchUtils(self.tmp) cc_set_hostname.handle('cc_set_hostname', cfg, cc, LOG, []) - contents = util.load_file("/etc/HOSTNAME") - self.assertEqual('blah', contents.strip()) + if not distro.uses_systemd(): + contents = util.load_file(distro.hostname_conf_fn) + self.assertEqual('blah', contents.strip()) # vi: ts=4 expandtab diff --git a/tests/unittests/test_handler/test_handler_snappy.py b/tests/unittests/test_handler/test_handler_snappy.py index e4d07622..76b79c29 100644 --- a/tests/unittests/test_handler/test_handler_snappy.py +++ b/tests/unittests/test_handler/test_handler_snappy.py @@ -7,9 +7,9 @@ from cloudinit.config.cc_snap_config import ( from cloudinit import (distros, helpers, cloud, util) from cloudinit.config.cc_snap_config import handle as snap_handle from cloudinit.sources import DataSourceNone -from ..helpers import FilesystemMockingTestCase, mock +from cloudinit.tests.helpers import FilesystemMockingTestCase, mock -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help import logging import os diff --git a/tests/unittests/test_handler/test_handler_spacewalk.py b/tests/unittests/test_handler/test_handler_spacewalk.py index 28b5892a..ddbf4a79 100644 --- a/tests/unittests/test_handler/test_handler_spacewalk.py +++ b/tests/unittests/test_handler/test_handler_spacewalk.py @@ -3,7 +3,7 @@ from cloudinit.config import cc_spacewalk from cloudinit import util -from .. import helpers +from cloudinit.tests import helpers import logging diff --git a/tests/unittests/test_handler/test_handler_timezone.py b/tests/unittests/test_handler/test_handler_timezone.py index c30fbdfe..27eedded 100644 --- a/tests/unittests/test_handler/test_handler_timezone.py +++ b/tests/unittests/test_handler/test_handler_timezone.py @@ -13,7 +13,7 @@ from cloudinit import util from cloudinit.sources import DataSourceNoCloud -from .. import helpers as t_help +from cloudinit.tests import helpers as t_help from configobj import ConfigObj import logging diff --git a/tests/unittests/test_handler/test_handler_write_files.py b/tests/unittests/test_handler/test_handler_write_files.py index 1129e77d..7fa8fd21 100644 --- a/tests/unittests/test_handler/test_handler_write_files.py +++ b/tests/unittests/test_handler/test_handler_write_files.py @@ -4,7 +4,7 @@ from cloudinit.config.cc_write_files import write_files, decode_perms from cloudinit import log as logging from cloudinit import util -from ..helpers import CiTestCase, FilesystemMockingTestCase +from cloudinit.tests.helpers import CiTestCase, FilesystemMockingTestCase import base64 import gzip diff --git a/tests/unittests/test_handler/test_handler_yum_add_repo.py b/tests/unittests/test_handler/test_handler_yum_add_repo.py index c4396df5..b7adbe50 100644 --- a/tests/unittests/test_handler/test_handler_yum_add_repo.py +++ b/tests/unittests/test_handler/test_handler_yum_add_repo.py @@ -3,7 +3,7 @@ from cloudinit.config import cc_yum_add_repo from cloudinit import util -from .. 
import helpers +from cloudinit.tests import helpers try: from configparser import ConfigParser diff --git a/tests/unittests/test_handler/test_handler_zypper_add_repo.py b/tests/unittests/test_handler/test_handler_zypper_add_repo.py new file mode 100644 index 00000000..315c2a5e --- /dev/null +++ b/tests/unittests/test_handler/test_handler_zypper_add_repo.py @@ -0,0 +1,237 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +import glob +import os + +from cloudinit.config import cc_zypper_add_repo +from cloudinit import util + +from cloudinit.tests import helpers +from cloudinit.tests.helpers import mock + +try: + from configparser import ConfigParser +except ImportError: + from ConfigParser import ConfigParser +import logging +from six import StringIO + +LOG = logging.getLogger(__name__) + + +class TestConfig(helpers.FilesystemMockingTestCase): + def setUp(self): + super(TestConfig, self).setUp() + self.tmp = self.tmp_dir() + self.zypp_conf = 'etc/zypp/zypp.conf' + + def test_bad_repo_config(self): + """Config has no baseurl, no file should be written""" + cfg = { + 'repos': [ + { + 'id': 'foo', + 'name': 'suse-test', + 'enabled': '1' + }, + ] + } + self.patchUtils(self.tmp) + cc_zypper_add_repo._write_repos(cfg['repos'], '/etc/zypp/repos.d') + self.assertRaises(IOError, util.load_file, + "/etc/zypp/repos.d/foo.repo") + + def test_write_repos(self): + """Verify valid repos get written""" + cfg = self._get_base_config_repos() + root_d = self.tmp_dir() + cc_zypper_add_repo._write_repos(cfg['zypper']['repos'], root_d) + repos = glob.glob('%s/*.repo' % root_d) + expected_repos = ['testing-foo.repo', 'testing-bar.repo'] + if len(repos) != 2: + assert 'Number of repos written is "%d" expected 2' % len(repos) + for repo in repos: + repo_name = os.path.basename(repo) + if repo_name not in expected_repos: + assert 'Found repo with name "%s"; unexpected' % repo_name + # Validation that the content gets properly written is in another test + + def test_write_repo(self): + """Verify the content of a repo file""" + cfg = { + 'repos': [ + { + 'baseurl': 'http://foo', + 'name': 'test-foo', + 'id': 'testing-foo' + }, + ] + } + root_d = self.tmp_dir() + cc_zypper_add_repo._write_repos(cfg['repos'], root_d) + contents = util.load_file("%s/testing-foo.repo" % root_d) + parser = ConfigParser() + parser.readfp(StringIO(contents)) + expected = { + 'testing-foo': { + 'name': 'test-foo', + 'baseurl': 'http://foo', + 'enabled': '1', + 'autorefresh': '1' + } + } + for section in expected: + self.assertTrue(parser.has_section(section), + "Contains section {0}".format(section)) + for k, v in expected[section].items(): + self.assertEqual(parser.get(section, k), v) + + def test_config_write(self): + """Write valid configuration data""" + cfg = { + 'config': { + 'download.deltarpm': 'False', + 'reposdir': 'foo' + } + } + root_d = self.tmp_dir() + helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'}) + self.reRoot(root_d) + cc_zypper_add_repo._write_zypp_config(cfg['config']) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + expected = [ + '# Zypp config', + '# Added via cloud.cfg', + 'download.deltarpm=False', + 'reposdir=foo' + ] + for item in contents.split('\n'): + if item not in expected: + self.assertIsNone(item) + + @mock.patch('cloudinit.log.logging') + def test_config_write_skip_configdir(self, mock_logging): + """Write configuration but skip writing 'configdir' setting""" + cfg = { + 'config': { + 'download.deltarpm': 'False', + 
'reposdir': 'foo', + 'configdir': 'bar' + } + } + root_d = self.tmp_dir() + helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'}) + self.reRoot(root_d) + cc_zypper_add_repo._write_zypp_config(cfg['config']) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + expected = [ + '# Zypp config', + '# Added via cloud.cfg', + 'download.deltarpm=False', + 'reposdir=foo' + ] + for item in contents.split('\n'): + if item not in expected: + self.assertIsNone(item) + # Not finding teh right path for mocking :( + # assert mock_logging.warning.called + + def test_empty_config_section_no_new_data(self): + """When the config section is empty no new data should be written to + zypp.conf""" + cfg = self._get_base_config_repos() + cfg['zypper']['config'] = None + root_d = self.tmp_dir() + helpers.populate_dir(root_d, {self.zypp_conf: '# No data'}) + self.reRoot(root_d) + cc_zypper_add_repo._write_zypp_config(cfg.get('config', {})) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + self.assertEqual(contents, '# No data') + + def test_empty_config_value_no_new_data(self): + """When the config section is not empty but there are no values + no new data should be written to zypp.conf""" + cfg = self._get_base_config_repos() + cfg['zypper']['config'] = { + 'download.deltarpm': None + } + root_d = self.tmp_dir() + helpers.populate_dir(root_d, {self.zypp_conf: '# No data'}) + self.reRoot(root_d) + cc_zypper_add_repo._write_zypp_config(cfg.get('config', {})) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + self.assertEqual(contents, '# No data') + + def test_handler_full_setup(self): + """Test that the handler ends up calling the renderers""" + cfg = self._get_base_config_repos() + cfg['zypper']['config'] = { + 'download.deltarpm': 'False', + } + root_d = self.tmp_dir() + os.makedirs('%s/etc/zypp/repos.d' % root_d) + helpers.populate_dir(root_d, {self.zypp_conf: '# Zypp config\n'}) + self.reRoot(root_d) + cc_zypper_add_repo.handle('zypper_add_repo', cfg, None, LOG, []) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + expected = [ + '# Zypp config', + '# Added via cloud.cfg', + 'download.deltarpm=False', + ] + for item in contents.split('\n'): + if item not in expected: + self.assertIsNone(item) + repos = glob.glob('%s/etc/zypp/repos.d/*.repo' % root_d) + expected_repos = ['testing-foo.repo', 'testing-bar.repo'] + if len(repos) != 2: + assert 'Number of repos written is "%d" expected 2' % len(repos) + for repo in repos: + repo_name = os.path.basename(repo) + if repo_name not in expected_repos: + assert 'Found repo with name "%s"; unexpected' % repo_name + + def test_no_config_section_no_new_data(self): + """When there is no config section no new data should be written to + zypp.conf""" + cfg = self._get_base_config_repos() + root_d = self.tmp_dir() + helpers.populate_dir(root_d, {self.zypp_conf: '# No data'}) + self.reRoot(root_d) + cc_zypper_add_repo._write_zypp_config(cfg.get('config', {})) + cfg_out = os.path.join(root_d, self.zypp_conf) + contents = util.load_file(cfg_out) + self.assertEqual(contents, '# No data') + + def test_no_repo_data(self): + """When there is no repo data nothing should happen""" + root_d = self.tmp_dir() + self.reRoot(root_d) + cc_zypper_add_repo._write_repos(None, root_d) + content = glob.glob('%s/*' % root_d) + self.assertEqual(len(content), 0) + + def _get_base_config_repos(self): + """Basic valid repo configuration""" + cfg = { 
+ 'zypper': { + 'repos': [ + { + 'baseurl': 'http://foo', + 'name': 'test-foo', + 'id': 'testing-foo' + }, + { + 'baseurl': 'http://bar', + 'name': 'test-bar', + 'id': 'testing-bar' + } + ] + } + } + return cfg diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py index eda4802a..b8fc8930 100644 --- a/tests/unittests/test_handler/test_schema.py +++ b/tests/unittests/test_handler/test_schema.py @@ -1,16 +1,17 @@ # This file is part of cloud-init. See LICENSE file for license information. from cloudinit.config.schema import ( - CLOUD_CONFIG_HEADER, SchemaValidationError, get_schema_doc, - validate_cloudconfig_file, validate_cloudconfig_schema, - main) + CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file, + get_schema_doc, get_schema, validate_cloudconfig_file, + validate_cloudconfig_schema, main) from cloudinit.util import write_file -from ..helpers import CiTestCase, mock, skipIf +from cloudinit.tests.helpers import CiTestCase, mock, skipIf from copy import copy from six import StringIO from textwrap import dedent +from yaml import safe_load try: import jsonschema @@ -20,6 +21,35 @@ except ImportError: _missing_jsonschema_dep = True +class GetSchemaTest(CiTestCase): + + def test_get_schema_coalesces_known_schema(self): + """Every cloudconfig module with schema is listed in allOf keyword.""" + schema = get_schema() + self.assertItemsEqual( + [ + 'cc_bootcmd', + 'cc_ntp', + 'cc_resizefs', + 'cc_runcmd', + 'cc_zypper_add_repo' + ], + [subschema['id'] for subschema in schema['allOf']]) + self.assertEqual('cloud-config-schema', schema['id']) + self.assertEqual( + 'http://json-schema.org/draft-04/schema#', + schema['$schema']) + # FULL_SCHEMA is updated by the get_schema call + from cloudinit.config.schema import FULL_SCHEMA + self.assertItemsEqual(['id', '$schema', 'allOf'], FULL_SCHEMA.keys()) + + def test_get_schema_returns_global_when_set(self): + """When FULL_SCHEMA global is already set, get_schema returns it.""" + m_schema_path = 'cloudinit.config.schema.FULL_SCHEMA' + with mock.patch(m_schema_path, {'here': 'iam'}): + self.assertEqual({'here': 'iam'}, get_schema()) + + class SchemaValidationErrorTest(CiTestCase): """Test validate_cloudconfig_schema""" @@ -151,11 +181,11 @@ class GetSchemaDocTest(CiTestCase): full_schema.update( {'properties': { 'prop1': {'type': 'array', 'description': 'prop-description', - 'items': {'type': 'int'}}}}) + 'items': {'type': 'integer'}}}}) self.assertEqual( dedent(""" name - --- + ---- **Summary:** title description @@ -167,25 +197,65 @@ class GetSchemaDocTest(CiTestCase): **Supported distros:** debian, rhel **Config schema**: - **prop1:** (array of int) prop-description\n\n"""), + **prop1:** (array of integer) prop-description\n\n"""), + get_schema_doc(full_schema)) + + def test_get_schema_doc_handles_multiple_types(self): + """get_schema_doc delimits multiple property types with a '/'.""" + full_schema = copy(self.required_schema) + full_schema.update( + {'properties': { + 'prop1': {'type': ['string', 'integer'], + 'description': 'prop-description'}}}) + self.assertIn( + '**prop1:** (string/integer) prop-description', + get_schema_doc(full_schema)) + + def test_get_schema_doc_handles_enum_types(self): + """get_schema_doc converts enum types to yaml and delimits with '/'.""" + full_schema = copy(self.required_schema) + full_schema.update( + {'properties': { + 'prop1': {'enum': [True, False, 'stuff'], + 'description': 'prop-description'}}}) + self.assertIn( + '**prop1:** (true/false/stuff) 
prop-description', + get_schema_doc(full_schema)) + + def test_get_schema_doc_handles_nested_oneof_property_types(self): + """get_schema_doc describes array items oneOf declarations in type.""" + full_schema = copy(self.required_schema) + full_schema.update( + {'properties': { + 'prop1': {'type': 'array', + 'items': { + 'oneOf': [{'type': 'string'}, + {'type': 'integer'}]}, + 'description': 'prop-description'}}}) + self.assertIn( + '**prop1:** (array of (string)/(integer)) prop-description', get_schema_doc(full_schema)) - def test_get_schema_doc_returns_restructured_text_with_examples(self): - """get_schema_doc returns indented examples when present in schema.""" + def test_get_schema_doc_handles_string_examples(self): + """get_schema_doc properly indented examples as a list of strings.""" full_schema = copy(self.required_schema) full_schema.update( - {'examples': {'ex1': [1, 2, 3]}, + {'examples': ['ex1:\n [don\'t, expand, "this"]', 'ex2: true'], 'properties': { 'prop1': {'type': 'array', 'description': 'prop-description', - 'items': {'type': 'int'}}}}) + 'items': {'type': 'integer'}}}}) self.assertIn( dedent(""" **Config schema**: - **prop1:** (array of int) prop-description + **prop1:** (array of integer) prop-description **Examples**:: - ex1"""), + ex1: + [don't, expand, "this"] + # --- Example2 --- + ex2: true + """), get_schema_doc(full_schema)) def test_get_schema_doc_raises_key_errors(self): @@ -198,13 +268,78 @@ class GetSchemaDocTest(CiTestCase): self.assertIn(key, str(context_mgr.exception)) +class AnnotatedCloudconfigFileTest(CiTestCase): + maxDiff = None + + def test_annotated_cloudconfig_file_no_schema_errors(self): + """With no schema_errors, print the original content.""" + content = b'ntp:\n pools: [ntp1.pools.com]\n' + self.assertEqual( + content, + annotated_cloudconfig_file({}, content, schema_errors=[])) + + def test_annotated_cloudconfig_file_schema_annotates_and_adds_footer(self): + """With schema_errors, error lines are annotated and a footer added.""" + content = dedent("""\ + #cloud-config + # comment + ntp: + pools: [-99, 75] + """).encode() + expected = dedent("""\ + #cloud-config + # comment + ntp: # E1 + pools: [-99, 75] # E2,E3 + + # Errors: ------------- + # E1: Some type error + # E2: -99 is not a string + # E3: 75 is not a string + + """) + parsed_config = safe_load(content[13:]) + schema_errors = [ + ('ntp', 'Some type error'), ('ntp.pools.0', '-99 is not a string'), + ('ntp.pools.1', '75 is not a string')] + self.assertEqual( + expected, + annotated_cloudconfig_file(parsed_config, content, schema_errors)) + + def test_annotated_cloudconfig_file_annotates_separate_line_items(self): + """Errors are annotated for lists with items on separate lines.""" + content = dedent("""\ + #cloud-config + # comment + ntp: + pools: + - -99 + - 75 + """).encode() + expected = dedent("""\ + ntp: + pools: + - -99 # E1 + - 75 # E2 + """) + parsed_config = safe_load(content[13:]) + schema_errors = [ + ('ntp.pools.0', '-99 is not a string'), + ('ntp.pools.1', '75 is not a string')] + self.assertIn( + expected, + annotated_cloudconfig_file(parsed_config, content, schema_errors)) + + class MainTest(CiTestCase): def test_main_missing_args(self): """Main exits non-zero and reports an error on missing parameters.""" with mock.patch('sys.argv', ['mycmd']): with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: - self.assertEqual(1, main(), 'Expected non-zero exit code') + with self.assertRaises(SystemExit) as context_manager: + main() + self.assertEqual('1', 
str(context_manager.exception)) self.assertEqual( 'Expected either --config-file argument or --doc\n', m_stderr.getvalue()) @@ -216,13 +351,13 @@ class MainTest(CiTestCase): with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: self.assertEqual(0, main(), 'Expected 0 exit code') self.assertIn('\nNTP\n---\n', m_stdout.getvalue()) + self.assertIn('\nRuncmd\n------\n', m_stdout.getvalue()) def test_main_validates_config_file(self): """When --config-file parameter is provided, main validates schema.""" myyaml = self.tmp_path('my.yaml') myargs = ['mycmd', '--config-file', myyaml] - with open(myyaml, 'wb') as stream: - stream.write(b'#cloud-config\nntp:') # shortest ntp schema + write_file(myyaml, b'#cloud-config\nntp:') # shortest ntp schema with mock.patch('sys.argv', myargs): with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: self.assertEqual(0, main(), 'Expected 0 exit code') diff --git a/tests/unittests/test_helpers.py b/tests/unittests/test_helpers.py index f1979e89..2e4582a0 100644 --- a/tests/unittests/test_helpers.py +++ b/tests/unittests/test_helpers.py @@ -4,7 +4,7 @@ import os -from . import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers from cloudinit import sources diff --git a/tests/unittests/test_log.py b/tests/unittests/test_log.py new file mode 100644 index 00000000..cd6296d6 --- /dev/null +++ b/tests/unittests/test_log.py @@ -0,0 +1,58 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""Tests for cloudinit.log """ + +from cloudinit.analyze.dump import CLOUD_INIT_ASCTIME_FMT +from cloudinit import log as ci_logging +from cloudinit.tests.helpers import CiTestCase +import datetime +import logging +import six +import time + + +class TestCloudInitLogger(CiTestCase): + + def setUp(self): + # set up a logger like cloud-init does in setupLogging, but instead + # of sys.stderr, we'll plug in a StringIO() object so we can see + # what gets logged + logging.Formatter.converter = time.gmtime + self.ci_logs = six.StringIO() + self.ci_root = logging.getLogger() + console = logging.StreamHandler(self.ci_logs) + console.setFormatter(logging.Formatter(ci_logging.DEF_CON_FORMAT)) + console.setLevel(ci_logging.DEBUG) + self.ci_root.addHandler(console) + self.ci_root.setLevel(ci_logging.DEBUG) + self.LOG = logging.getLogger('test_cloudinit_logger') + + def test_logger_uses_gmtime(self): + """Test that log message have timestamp in UTC (gmtime)""" + + # Log a message, extract the timestamp from the log entry + # convert to datetime, and compare to a utc timestamp before + # and after the logged message. 
+ + # Due to loss of precision in the LOG timestamp, subtract and add + # time to the utc stamps for comparison + # + # utc_before: 2017-08-23 14:19:42.569299 + # parsed dt : 2017-08-23 14:19:43.069000 + # utc_after : 2017-08-23 14:19:43.570064 + + utc_before = datetime.datetime.utcnow() - datetime.timedelta(0, 0.5) + self.LOG.error('Test message') + utc_after = datetime.datetime.utcnow() + datetime.timedelta(0, 0.5) + + # extract timestamp from log: + # 2017-08-23 14:19:43,069 - test_log.py[ERROR]: Test message + logstr = self.ci_logs.getvalue().splitlines()[0] + timestampstr = logstr.split(' - ')[0] + parsed_dt = datetime.datetime.strptime(timestampstr, + CLOUD_INIT_ASCTIME_FMT) + + self.assertLess(utc_before, parsed_dt) + self.assertLess(parsed_dt, utc_after) + self.assertLess(utc_before, utc_after) + self.assertGreater(utc_after, parsed_dt) diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py index 0658b6b4..f51358da 100644 --- a/tests/unittests/test_merging.py +++ b/tests/unittests/test_merging.py @@ -1,6 +1,6 @@ # This file is part of cloud-init. See LICENSE file for license information. -from . import helpers +from cloudinit.tests import helpers from cloudinit.handlers import cloud_config from cloudinit.handlers import (CONTENT_START, CONTENT_END) diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py index e49abcc4..bbb63cb3 100644 --- a/tests/unittests/test_net.py +++ b/tests/unittests/test_net.py @@ -9,12 +9,13 @@ from cloudinit.net import network_state from cloudinit.net import renderers from cloudinit.net import sysconfig from cloudinit.sources.helpers import openstack +from cloudinit import temp_utils from cloudinit import util -from .helpers import CiTestCase -from .helpers import dir2dict -from .helpers import mock -from .helpers import populate_dir +from cloudinit.tests.helpers import CiTestCase +from cloudinit.tests.helpers import dir2dict +from cloudinit.tests.helpers import mock +from cloudinit.tests.helpers import populate_dir import base64 import copy @@ -755,6 +756,7 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true eth3: 50 eth4: 75 priority: 22 + stp: false routes: - to: ::/0 via: 2001:4800:78ff:1b::1 @@ -819,7 +821,7 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true NM_CONTROLLED=no ONBOOT=yes PRIO=22 - STP=off + STP=no TYPE=Bridge USERCTL=no"""), 'ifcfg-eth0': textwrap.dedent("""\ @@ -1059,6 +1061,100 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true - type: static address: 2001:1::1/92 """), + 'expected_netplan': textwrap.dedent(""" + network: + version: 2 + ethernets: + bond0s0: + match: + macaddress: aa:bb:cc:dd:e8:00 + set-name: bond0s0 + bond0s1: + match: + macaddress: aa:bb:cc:dd:e8:01 + set-name: bond0s1 + bonds: + bond0: + addresses: + - 192.168.0.2/24 + - 192.168.1.2/24 + - 2001:1::1/92 + gateway4: 192.168.0.1 + interfaces: + - bond0s0 + - bond0s1 + parameters: + mii-monitor-interval: 100 + mode: active-backup + transmit-hash-policy: layer3+4 + routes: + - to: 10.1.3.0/24 + via: 192.168.0.3 + """), + 'yaml-v2': textwrap.dedent(""" + version: 2 + ethernets: + eth0: + match: + driver: "virtio_net" + macaddress: "aa:bb:cc:dd:e8:00" + vf0: + set-name: vf0 + match: + driver: "e1000" + macaddress: "aa:bb:cc:dd:e8:01" + bonds: + bond0: + addresses: + - 192.168.0.2/24 + - 192.168.1.2/24 + - 2001:1::1/92 + gateway4: 192.168.0.1 + interfaces: + - eth0 + - vf0 + parameters: + mii-monitor-interval: 100 + mode: active-backup + 
primary: vf0 + transmit-hash-policy: "layer3+4" + routes: + - to: 10.1.3.0/24 + via: 192.168.0.3 + """), + 'expected_netplan-v2': textwrap.dedent(""" + network: + bonds: + bond0: + addresses: + - 192.168.0.2/24 + - 192.168.1.2/24 + - 2001:1::1/92 + gateway4: 192.168.0.1 + interfaces: + - eth0 + - vf0 + parameters: + mii-monitor-interval: 100 + mode: active-backup + primary: vf0 + transmit-hash-policy: layer3+4 + routes: + - to: 10.1.3.0/24 + via: 192.168.0.3 + ethernets: + eth0: + match: + driver: virtio_net + macaddress: aa:bb:cc:dd:e8:00 + vf0: + match: + driver: e1000 + macaddress: aa:bb:cc:dd:e8:01 + set-name: vf0 + version: 2 + """), + 'expected_sysconfig': { 'ifcfg-bond0': textwrap.dedent("""\ BONDING_MASTER=yes @@ -1187,7 +1283,7 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true - eth0 - eth1 params: - bridge_stp: 'off' + bridge_stp: 0 bridge_bridgeprio: 22 subnets: - type: static @@ -1201,7 +1297,7 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true NM_CONTROLLED=no ONBOOT=yes PRIO=22 - STP=off + STP=no TYPE=Bridge USERCTL=no """), @@ -1683,6 +1779,9 @@ USERCTL=no ns = network_state.parse_net_config_data(network_cfg, skip_broken=False) renderer = sysconfig.Renderer() + # render a multiple times to simulate reboots + renderer.render_network_state(ns, render_dir) + renderer.render_network_state(ns, render_dir) renderer.render_network_state(ns, render_dir) for fn, expected_content in os_sample.get('out_sysconfig', []): with open(os.path.join(render_dir, fn)) as fh: @@ -2053,7 +2152,7 @@ class TestCmdlineConfigParsing(CiTestCase): static['mac_address'] = macs['eth1'] expected = {'version': 1, 'config': [dhcp, static]} - with util.tempdir() as tmpd: + with temp_utils.tempdir() as tmpd: for fname, content in pairs: fp = os.path.join(tmpd, fname) files.append(fp) @@ -2156,6 +2255,27 @@ class TestNetplanRoundTrip(CiTestCase): renderer.render_network_state(ns, target) return dir2dict(target) + def testsimple_render_bond_netplan(self): + entry = NETWORK_CONFIGS['bond'] + files = self._render_and_read(network_config=yaml.load(entry['yaml'])) + print(entry['expected_netplan']) + print('-- expected ^ | v rendered --') + print(files['/etc/netplan/50-cloud-init.yaml']) + self.assertEqual( + entry['expected_netplan'].splitlines(), + files['/etc/netplan/50-cloud-init.yaml'].splitlines()) + + def testsimple_render_bond_v2_input_netplan(self): + entry = NETWORK_CONFIGS['bond'] + files = self._render_and_read( + network_config=yaml.load(entry['yaml-v2'])) + print(entry['expected_netplan-v2']) + print('-- expected ^ | v rendered --') + print(files['/etc/netplan/50-cloud-init.yaml']) + self.assertEqual( + entry['expected_netplan-v2'].splitlines(), + files['/etc/netplan/50-cloud-init.yaml'].splitlines()) + def testsimple_render_small_netplan(self): entry = NETWORK_CONFIGS['small'] files = self._render_and_read(network_config=yaml.load(entry['yaml'])) diff --git a/tests/unittests/test_pathprefix2dict.py b/tests/unittests/test_pathprefix2dict.py index a4ae284f..abbb29b8 100644 --- a/tests/unittests/test_pathprefix2dict.py +++ b/tests/unittests/test_pathprefix2dict.py @@ -2,7 +2,7 @@ from cloudinit import util -from .helpers import TestCase, populate_dir +from cloudinit.tests.helpers import TestCase, populate_dir import shutil import tempfile diff --git a/tests/unittests/test_registry.py b/tests/unittests/test_registry.py index acf0bf4f..2b625026 100644 --- a/tests/unittests/test_registry.py +++ b/tests/unittests/test_registry.py @@ -2,7 +2,7 
@@ from cloudinit.registry import DictRegistry -from .helpers import (mock, TestCase) +from cloudinit.tests.helpers import (mock, TestCase) class TestDictRegistry(TestCase): diff --git a/tests/unittests/test_reporting.py b/tests/unittests/test_reporting.py index f3b8f992..571420ed 100644 --- a/tests/unittests/test_reporting.py +++ b/tests/unittests/test_reporting.py @@ -8,7 +8,7 @@ from cloudinit.reporting import handlers import mock -from .helpers import TestCase +from cloudinit.tests.helpers import TestCase def _fake_registry(): diff --git a/tests/unittests/test_rh_subscription.py b/tests/unittests/test_rh_subscription.py index ca14cd46..e9d5702a 100644 --- a/tests/unittests/test_rh_subscription.py +++ b/tests/unittests/test_rh_subscription.py @@ -7,7 +7,7 @@ import logging from cloudinit.config import cc_rh_subscription from cloudinit import util -from .helpers import TestCase, mock +from cloudinit.tests.helpers import TestCase, mock class GoodTests(TestCase): diff --git a/tests/unittests/test_runs/test_merge_run.py b/tests/unittests/test_runs/test_merge_run.py index 65895273..add93653 100644 --- a/tests/unittests/test_runs/test_merge_run.py +++ b/tests/unittests/test_runs/test_merge_run.py @@ -4,7 +4,7 @@ import os import shutil import tempfile -from .. import helpers +from cloudinit.tests import helpers from cloudinit.settings import PER_INSTANCE from cloudinit import stages diff --git a/tests/unittests/test_runs/test_simple_run.py b/tests/unittests/test_runs/test_simple_run.py index 55f15b55..b8fb4794 100644 --- a/tests/unittests/test_runs/test_simple_run.py +++ b/tests/unittests/test_runs/test_simple_run.py @@ -1,10 +1,8 @@ # This file is part of cloud-init. See LICENSE file for license information. import os -import shutil -import tempfile -from .. 
import helpers +from cloudinit.tests import helpers from cloudinit.settings import PER_INSTANCE from cloudinit import stages @@ -12,16 +10,19 @@ from cloudinit import util class TestSimpleRun(helpers.FilesystemMockingTestCase): - def _patchIn(self, root): - self.patchOS(root) - self.patchUtils(root) - - def test_none_ds(self): - new_root = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, new_root) - self.replicateTestRoot('simple_ubuntu', new_root) - cfg = { + + with_logs = True + + def setUp(self): + super(TestSimpleRun, self).setUp() + self.new_root = self.tmp_dir() + self.replicateTestRoot('simple_ubuntu', self.new_root) + + # Seed cloud.cfg file for our tests + self.cfg = { 'datasource_list': ['None'], + 'runcmd': ['ls /etc'], # test ALL_DISTROS + 'spacewalk': {}, # test non-ubuntu distros module definition 'write_files': [ { 'path': '/etc/blah.ini', @@ -29,14 +30,17 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase): 'permissions': 0o755, }, ], - 'cloud_init_modules': ['write-files'], + 'cloud_init_modules': ['write-files', 'spacewalk', 'runcmd'], } - cloud_cfg = util.yaml_dumps(cfg) - util.ensure_dir(os.path.join(new_root, 'etc', 'cloud')) - util.write_file(os.path.join(new_root, 'etc', + cloud_cfg = util.yaml_dumps(self.cfg) + util.ensure_dir(os.path.join(self.new_root, 'etc', 'cloud')) + util.write_file(os.path.join(self.new_root, 'etc', 'cloud', 'cloud.cfg'), cloud_cfg) - self._patchIn(new_root) + self.patchOS(self.new_root) + self.patchUtils(self.new_root) + def test_none_ds_populates_var_lib_cloud(self): + """Init and run_section default behavior creates appropriate dirs.""" # Now start verifying whats created initer = stages.Init() initer.read_cfg() @@ -51,10 +55,16 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase): initer.update() self.assertTrue(os.path.islink("var/lib/cloud/instance")) - initer.cloudify().run('consume_data', - initer.consume_data, - args=[PER_INSTANCE], - freq=PER_INSTANCE) + def test_none_ds_runs_modules_which_do_not_define_distros(self): + """Any modules which do not define a distros attribute are run.""" + initer = stages.Init() + initer.read_cfg() + initer.initialize() + initer.fetch() + initer.instancify() + initer.update() + initer.cloudify().run('consume_data', initer.consume_data, + args=[PER_INSTANCE], freq=PER_INSTANCE) mods = stages.Modules(initer) (which_ran, failures) = mods.run_section('cloud_init_modules') @@ -63,5 +73,80 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase): self.assertIn('write-files', which_ran) contents = util.load_file('/etc/blah.ini') self.assertEqual(contents, 'blah') + self.assertNotIn( + "Skipping modules ['write-files'] because they are not verified on" + " distro 'ubuntu'", + self.logs.getvalue()) + + def test_none_ds_skips_modules_which_define_unmatched_distros(self): + """Skip modules which define distros which don't match the current.""" + initer = stages.Init() + initer.read_cfg() + initer.initialize() + initer.fetch() + initer.instancify() + initer.update() + initer.cloudify().run('consume_data', initer.consume_data, + args=[PER_INSTANCE], freq=PER_INSTANCE) + + mods = stages.Modules(initer) + (which_ran, failures) = mods.run_section('cloud_init_modules') + self.assertTrue(len(failures) == 0) + self.assertIn( + "Skipping modules 'spacewalk' because they are not verified on" + " distro 'ubuntu'", + self.logs.getvalue()) + self.assertNotIn('spacewalk', which_ran) + + def test_none_ds_runs_modules_which_distros_all(self): + """Skip modules which define distros attribute as supporting 'all'. 
+ + This is done in the module with the declaration: + distros = [ALL_DISTROS]. runcmd is an example. + """ + initer = stages.Init() + initer.read_cfg() + initer.initialize() + initer.fetch() + initer.instancify() + initer.update() + initer.cloudify().run('consume_data', initer.consume_data, + args=[PER_INSTANCE], freq=PER_INSTANCE) + + mods = stages.Modules(initer) + (which_ran, failures) = mods.run_section('cloud_init_modules') + self.assertTrue(len(failures) == 0) + self.assertIn('runcmd', which_ran) + self.assertNotIn( + "Skipping modules 'runcmd' because they are not verified on" + " distro 'ubuntu'", + self.logs.getvalue()) + + def test_none_ds_forces_run_via_unverified_modules(self): + """run_section forced skipped modules by using unverified_modules.""" + + # re-write cloud.cfg with unverified_modules override + self.cfg['unverified_modules'] = ['spacewalk'] # Would have skipped + cloud_cfg = util.yaml_dumps(self.cfg) + util.ensure_dir(os.path.join(self.new_root, 'etc', 'cloud')) + util.write_file(os.path.join(self.new_root, 'etc', + 'cloud', 'cloud.cfg'), cloud_cfg) + + initer = stages.Init() + initer.read_cfg() + initer.initialize() + initer.fetch() + initer.instancify() + initer.update() + initer.cloudify().run('consume_data', initer.consume_data, + args=[PER_INSTANCE], freq=PER_INSTANCE) + + mods = stages.Modules(initer) + (which_ran, failures) = mods.run_section('cloud_init_modules') + self.assertTrue(len(failures) == 0) + self.assertIn('spacewalk', which_ran) + self.assertIn( + "running unverified_modules: 'spacewalk'", + self.logs.getvalue()) # vi: ts=4 expandtab diff --git a/tests/unittests/test_sshutil.py b/tests/unittests/test_sshutil.py index 991f45a6..2a8e6abe 100644 --- a/tests/unittests/test_sshutil.py +++ b/tests/unittests/test_sshutil.py @@ -2,8 +2,8 @@ from mock import patch -from . import helpers as test_helpers from cloudinit import ssh_util +from cloudinit.tests import helpers as test_helpers VALID_CONTENT = { @@ -57,6 +57,7 @@ TEST_OPTIONS = ( class TestAuthKeyLineParser(test_helpers.TestCase): + def test_simple_parse(self): # test key line with common 3 fields (keytype, base64, comment) parser = ssh_util.AuthKeyLineParser() diff --git a/tests/unittests/test_templating.py b/tests/unittests/test_templating.py index 4e627826..b911d929 100644 --- a/tests/unittests/test_templating.py +++ b/tests/unittests/test_templating.py @@ -6,7 +6,7 @@ from __future__ import print_function -from . import helpers as test_helpers +from cloudinit.tests import helpers as test_helpers import textwrap from cloudinit import templater diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py index f38a664c..3e4154ca 100644 --- a/tests/unittests/test_util.py +++ b/tests/unittests/test_util.py @@ -12,7 +12,7 @@ import six import yaml from cloudinit import importer, util -from . import helpers +from cloudinit.tests import helpers try: from unittest import mock @@ -568,7 +568,8 @@ class TestReadSeeded(helpers.TestCase): self.assertEqual(found_ud, ud) -class TestSubp(helpers.TestCase): +class TestSubp(helpers.CiTestCase): + with_logs = True stdin2err = [BASH, '-c', 'cat >&2'] stdin2out = ['cat'] @@ -650,6 +651,16 @@ class TestSubp(helpers.TestCase): self.assertEqual( ['FOO=BAR', 'HOME=/myhome', 'K1=V1', 'K2=V2'], out.splitlines()) + def test_subp_warn_missing_shebang(self): + """Warn on no #! 
in script""" + noshebang = self.tmp_path('noshebang') + util.write_file(noshebang, 'true\n') + + os.chmod(noshebang, os.stat(noshebang).st_mode | stat.S_IEXEC) + self.assertRaisesRegexp(util.ProcessExecutionError, + 'Missing #! in script\?', + util.subp, (noshebang,)) + def test_returns_none_if_no_capture(self): (out, err) = util.subp(self.stdin2out, data=b'', capture=False) self.assertIsNone(err) diff --git a/tests/unittests/test_version.py b/tests/unittests/test_version.py index 1662ce09..d012f69d 100644 --- a/tests/unittests/test_version.py +++ b/tests/unittests/test_version.py @@ -1,6 +1,6 @@ # This file is part of cloud-init. See LICENSE file for license information. -from .helpers import CiTestCase +from cloudinit.tests.helpers import CiTestCase from cloudinit import version diff --git a/tests/unittests/test_vmware_config_file.py b/tests/unittests/test_vmware_config_file.py index 18475f10..808d303a 100644 --- a/tests/unittests/test_vmware_config_file.py +++ b/tests/unittests/test_vmware_config_file.py @@ -7,19 +7,24 @@ import logging import sys -import unittest +from cloudinit.sources.DataSourceOVF import get_network_config_from_conf +from cloudinit.sources.DataSourceOVF import read_vmware_imc from cloudinit.sources.helpers.vmware.imc.boot_proto import BootProtoEnum from cloudinit.sources.helpers.vmware.imc.config import Config from cloudinit.sources.helpers.vmware.imc.config_file import ConfigFile +from cloudinit.sources.helpers.vmware.imc.config_nic import gen_subnet +from cloudinit.sources.helpers.vmware.imc.config_nic import NicConfigurator +from cloudinit.tests.helpers import CiTestCase logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) logger = logging.getLogger(__name__) -class TestVmwareConfigFile(unittest.TestCase): +class TestVmwareConfigFile(CiTestCase): def test_utility_methods(self): + """Tests basic utility methods of ConfigFile class""" cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") cf.clear() @@ -43,7 +48,26 @@ class TestVmwareConfigFile(unittest.TestCase): self.assertFalse(cf.should_keep_current_value("BAR"), "keepBar") self.assertTrue(cf.should_remove_current_value("BAR"), "removeBar") + def test_datasource_instance_id(self): + """Tests instance id for the DatasourceOVF""" + cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") + + instance_id_prefix = 'iid-vmware-' + + conf = Config(cf) + + (md1, _, _) = read_vmware_imc(conf) + self.assertIn(instance_id_prefix, md1["instance-id"]) + self.assertEqual(len(md1["instance-id"]), len(instance_id_prefix) + 8) + + (md2, _, _) = read_vmware_imc(conf) + self.assertIn(instance_id_prefix, md2["instance-id"]) + self.assertEqual(len(md2["instance-id"]), len(instance_id_prefix) + 8) + + self.assertNotEqual(md1["instance-id"], md2["instance-id"]) + def test_configfile_static_2nics(self): + """Tests Config class for a configuration with two static NICs.""" cf = ConfigFile("tests/data/vmware/cust-static-2nic.cfg") conf = Config(cf) @@ -81,6 +105,7 @@ class TestVmwareConfigFile(unittest.TestCase): self.assertTrue(not nics[1].staticIpv6, "ipv61 dhcp") def test_config_file_dhcp_2nics(self): + """Tests Config class for a configuration with two DHCP NICs.""" cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") conf = Config(cf) @@ -90,4 +115,224 @@ class TestVmwareConfigFile(unittest.TestCase): self.assertEqual('00:50:56:a6:8c:08', nics[0].mac, "mac0") self.assertEqual(BootProtoEnum.DHCP, nics[0].bootProto, "bootproto0") + def test_config_password(self): + cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") + + 
cf._insertKey("PASSWORD|-PASS", "test-password") + cf._insertKey("PASSWORD|RESET", "no") + + conf = Config(cf) + self.assertEqual('test-password', conf.admin_password, "password") + self.assertFalse(conf.reset_password, "do not reset password") + + def test_config_reset_passwd(self): + cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") + + cf._insertKey("PASSWORD|-PASS", "test-password") + cf._insertKey("PASSWORD|RESET", "random") + + conf = Config(cf) + with self.assertRaises(ValueError): + conf.reset_password() + + cf.clear() + cf._insertKey("PASSWORD|RESET", "yes") + self.assertEqual(1, len(cf), "insert size") + + conf = Config(cf) + self.assertTrue(conf.reset_password, "reset password") + + def test_get_config_nameservers(self): + """Tests DNS and nameserver settings in a configuration.""" + cf = ConfigFile("tests/data/vmware/cust-static-2nic.cfg") + + config = Config(cf) + + network_config = get_network_config_from_conf(config, False) + + self.assertEqual(1, network_config.get('version')) + + config_types = network_config.get('config') + name_servers = None + dns_suffixes = None + + for type in config_types: + if type.get('type') == 'nameserver': + name_servers = type.get('address') + dns_suffixes = type.get('search') + break + + self.assertEqual(['10.20.145.1', '10.20.145.2'], + name_servers, + "dns") + self.assertEqual(['eng.vmware.com', 'proxy.vmware.com'], + dns_suffixes, + "suffixes") + + def test_gen_subnet(self): + """Tests if gen_subnet properly calculates network subnet from + IPv4 address and netmask""" + ip_subnet_list = [['10.20.87.253', '255.255.252.0', '10.20.84.0'], + ['10.20.92.105', '255.255.252.0', '10.20.92.0'], + ['192.168.0.10', '255.255.0.0', '192.168.0.0']] + for entry in ip_subnet_list: + self.assertEqual(entry[2], gen_subnet(entry[0], entry[1]), + "Subnet for a specified ip and netmask") + + def test_get_config_dns_suffixes(self): + """Tests if get_network_config_from_conf properly + generates nameservers and dns settings from a + specified configuration""" + cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") + + config = Config(cf) + + network_config = get_network_config_from_conf(config, False) + + self.assertEqual(1, network_config.get('version')) + + config_types = network_config.get('config') + name_servers = None + dns_suffixes = None + + for type in config_types: + if type.get('type') == 'nameserver': + name_servers = type.get('address') + dns_suffixes = type.get('search') + break + + self.assertEqual([], + name_servers, + "dns") + self.assertEqual(['eng.vmware.com'], + dns_suffixes, + "suffixes") + + def test_get_nics_list_dhcp(self): + """Tests if NicConfigurator properly calculates network subnets + for a configuration with a list of DHCP NICs""" + cf = ConfigFile("tests/data/vmware/cust-dhcp-2nic.cfg") + + config = Config(cf) + + nicConfigurator = NicConfigurator(config.nics, False) + nics_cfg_list = nicConfigurator.generate() + + self.assertEqual(2, len(nics_cfg_list), "number of config elements") + + nic1 = {'name': 'NIC1'} + nic2 = {'name': 'NIC2'} + for cfg in nics_cfg_list: + if cfg.get('name') == nic1.get('name'): + nic1.update(cfg) + elif cfg.get('name') == nic2.get('name'): + nic2.update(cfg) + + self.assertEqual('physical', nic1.get('type'), 'type of NIC1') + self.assertEqual('NIC1', nic1.get('name'), 'name of NIC1') + self.assertEqual('00:50:56:a6:8c:08', nic1.get('mac_address'), + 'mac address of NIC1') + subnets = nic1.get('subnets') + self.assertEqual(1, len(subnets), 'number of subnets for NIC1') + subnet = subnets[0] + 
self.assertEqual('dhcp', subnet.get('type'), 'DHCP type for NIC1') + self.assertEqual('auto', subnet.get('control'), 'NIC1 Control type') + + self.assertEqual('physical', nic2.get('type'), 'type of NIC2') + self.assertEqual('NIC2', nic2.get('name'), 'name of NIC2') + self.assertEqual('00:50:56:a6:5a:de', nic2.get('mac_address'), + 'mac address of NIC2') + subnets = nic2.get('subnets') + self.assertEqual(1, len(subnets), 'number of subnets for NIC2') + subnet = subnets[0] + self.assertEqual('dhcp', subnet.get('type'), 'DHCP type for NIC2') + self.assertEqual('auto', subnet.get('control'), 'NIC2 Control type') + + def test_get_nics_list_static(self): + """Tests if NicConfigurator properly calculates network subnets + for a configuration with 2 static NICs""" + cf = ConfigFile("tests/data/vmware/cust-static-2nic.cfg") + + config = Config(cf) + + nicConfigurator = NicConfigurator(config.nics, False) + nics_cfg_list = nicConfigurator.generate() + + self.assertEqual(5, len(nics_cfg_list), "number of elements") + + nic1 = {'name': 'NIC1'} + nic2 = {'name': 'NIC2'} + route_list = [] + for cfg in nics_cfg_list: + cfg_type = cfg.get('type') + if cfg_type == 'physical': + if cfg.get('name') == nic1.get('name'): + nic1.update(cfg) + elif cfg.get('name') == nic2.get('name'): + nic2.update(cfg) + elif cfg_type == 'route': + route_list.append(cfg) + + self.assertEqual('physical', nic1.get('type'), 'type of NIC1') + self.assertEqual('NIC1', nic1.get('name'), 'name of NIC1') + self.assertEqual('00:50:56:a6:8c:08', nic1.get('mac_address'), + 'mac address of NIC1') + + subnets = nic1.get('subnets') + self.assertEqual(2, len(subnets), 'Number of subnets') + + static_subnet = [] + static6_subnet = [] + + for subnet in subnets: + subnet_type = subnet.get('type') + if subnet_type == 'static': + static_subnet.append(subnet) + elif subnet_type == 'static6': + static6_subnet.append(subnet) + else: + self.assertEqual(True, False, 'Unknown type') + + self.assertEqual(1, len(static_subnet), 'Number of static subnet') + self.assertEqual(1, len(static6_subnet), 'Number of static6 subnet') + + subnet = static_subnet[0] + self.assertEqual('10.20.87.154', subnet.get('address'), + 'IPv4 address of static subnet') + self.assertEqual('255.255.252.0', subnet.get('netmask'), + 'NetMask of static subnet') + self.assertEqual('auto', subnet.get('control'), + 'control for static subnet') + + subnet = static6_subnet[0] + self.assertEqual('fc00:10:20:87::154', subnet.get('address'), + 'IPv6 address of static subnet') + self.assertEqual('64', subnet.get('netmask'), + 'NetMask of static6 subnet') + + route_set = set(['10.20.87.253', '10.20.87.105', '192.168.0.10']) + for route in route_list: + self.assertEqual(10000, route.get('metric'), 'metric of route') + gateway = route.get('gateway') + if gateway in route_set: + route_set.discard(gateway) + else: + self.assertEqual(True, False, 'invalid gateway %s' % (gateway)) + + self.assertEqual('physical', nic2.get('type'), 'type of NIC2') + self.assertEqual('NIC2', nic2.get('name'), 'name of NIC2') + self.assertEqual('00:50:56:a6:ef:7d', nic2.get('mac_address'), + 'mac address of NIC2') + + subnets = nic2.get('subnets') + self.assertEqual(1, len(subnets), 'Number of subnets for NIC2') + + subnet = subnets[0] + self.assertEqual('static', subnet.get('type'), 'Subnet type') + self.assertEqual('192.168.6.102', subnet.get('address'), + 'Subnet address') + self.assertEqual('255.255.0.0', subnet.get('netmask'), + 'Subnet netmask') + + # vi: ts=4 expandtab |
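
Editor's note: several of the cc_resizefs tests in this diff patch a handful of cloudinit.util attributes at once through wrap_and_call from cloudinit.tests.helpers. The helper's implementation is not part of this diff, so the following is only a minimal sketch of an equivalent helper, written to illustrate the calling convention those tests rely on (a module-path prefix, a dict mapping attribute names to mock.patch keyword arguments, then the callable and its arguments). The name wrap_and_call_sketch and the __main__ probe are hypothetical; the real helper may differ.

# Hypothetical sketch of a wrap_and_call-style helper; the real one lives in
# cloudinit.tests.helpers and may differ in details.
from unittest import mock


def wrap_and_call_sketch(prefix, mocks, func, *args, **kwargs):
    """Patch '<prefix>.<name>' for every entry in mocks, then call func.

    Each value in 'mocks' is a dict of mock.patch keyword arguments, e.g.
    {'return_value': False} or {'side_effect': some_callable}.
    """
    patchers = [mock.patch('%s.%s' % (prefix, name), **kw)
                for name, kw in mocks.items()]
    for patcher in patchers:
        patcher.start()
    try:
        return func(*args, **kwargs)
    finally:
        for patcher in patchers:
            patcher.stop()


if __name__ == '__main__':
    import os.path

    def probe(path):
        # Looks os.path.exists up at call time, so the patch is visible here.
        return os.path.exists(path)

    # With 'exists' patched to return False, probe reports False even for
    # a path that really exists.
    print(wrap_and_call_sketch(
        'os.path', {'exists': {'return_value': False}},
        probe, '/etc/hosts'))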
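
Editor's note: the TestAutostartPuppet cases near the top of this section assert only on util.subp calls, so the behaviour of cc_puppet._autostart_puppet has to be read off those expectations. The sketch below is inferred from the assertions (one branch per service-management path), not copied from cc_puppet; the function name is reused purely for illustration and the real module may order or phrase things differently.

# Sketch inferred from the TestAutostartPuppet expectations above; the real
# cc_puppet._autostart_puppet may differ.
import os

from cloudinit import util


def _autostart_puppet_sketch(log):
    """Enable the puppet agent via whichever mechanism the image provides."""
    if os.path.exists('/etc/default/puppet'):
        # Debian/Ubuntu sysvinit default file: flip START to yes.
        util.subp(['sed', '-i', '-e', 's/^START=.*/START=yes/',
                   '/etc/default/puppet'], capture=False)
    elif os.path.exists('/bin/systemctl'):
        # systemd images: enable the unit.
        util.subp(['/bin/systemctl', 'enable', 'puppet.service'],
                  capture=False)
    elif os.path.exists('/sbin/chkconfig'):
        # chkconfig-based images: turn the service on.
        util.subp(['/sbin/chkconfig', 'puppet', 'on'], capture=False)
    else:
        log.warning('Cannot enable puppet: no known service manager found')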