author     Chad Smith <chad.smith@canonical.com>    2018-03-28 12:29:04 -0600
committer  Chad Smith <chad.smith@canonical.com>    2018-03-28 12:29:04 -0600
commit     cf3eaed2e01062f9b5d47042d7a76b092970e0cf (patch)
tree       53f7c52c5a76bb586da0483699fd6d188e72f457 /tests
parent     9f159f3a55a7bba7868e03d9cccd898678381f03 (diff)
parent     8caa3bcf8f2c5b3a448b9d892d4cf53ed8db9be9 (diff)
merge from master at 18.2
Diffstat (limited to 'tests')
-rw-r--r--  tests/cloud_tests/bddeb.py | 2
-rw-r--r--  tests/cloud_tests/platforms/ec2/__init__.py | 0
-rw-r--r--  tests/cloud_tests/platforms/lxd/__init__.py | 0
-rw-r--r--  tests/cloud_tests/platforms/lxd/platform.py | 4
-rw-r--r--  tests/cloud_tests/platforms/nocloudkvm/__init__.py | 0
-rw-r--r--  tests/cloud_tests/platforms/nocloudkvm/instance.py | 2
-rw-r--r--  tests/cloud_tests/platforms/nocloudkvm/platform.py | 4
-rw-r--r--  tests/cloud_tests/platforms/platforms.py | 14
-rw-r--r--  tests/cloud_tests/releases.yaml | 3
-rw-r--r--  tests/cloud_tests/testcases.yaml | 3
-rw-r--r--  tests/cloud_tests/testcases/__init__.py | 3
-rw-r--r--  tests/cloud_tests/testcases/base.py | 173
-rw-r--r--  tests/cloud_tests/testcases/main/command_output_simple.py | 17
-rw-r--r--  tests/cloud_tests/testcases/modules/salt_minion.py | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/salt_minion.yaml | 10
-rw-r--r--  tests/cloud_tests/testcases/modules/snap.py | 16
-rw-r--r--  tests/cloud_tests/testcases/modules/snap.yaml | 18
-rw-r--r--  tests/cloud_tests/testcases/modules/snappy.py | 2
-rw-r--r--  tests/cloud_tests/util.py | 6
-rw-r--r--  tests/cloud_tests/verify.py | 11
-rw-r--r--  tests/data/mount_parse_ext.txt | 19
-rw-r--r--  tests/data/mount_parse_zfs.txt | 21
-rw-r--r--  tests/data/zpool_status_simple.txt | 10
-rw-r--r--  tests/unittests/test_datasource/test_azure.py | 37
-rw-r--r--  tests/unittests/test_datasource/test_common.py | 4
-rw-r--r--  tests/unittests/test_datasource/test_gce.py | 20
-rw-r--r--  tests/unittests/test_datasource/test_hetzner.py | 117
-rw-r--r--  tests/unittests/test_datasource/test_ibmcloud.py | 262
-rw-r--r--  tests/unittests/test_datasource/test_opennebula.py | 266
-rw-r--r--  tests/unittests/test_ds_identify.py | 167
-rw-r--r--  tests/unittests/test_handler/test_handler_apt_source_v1.py | 3
-rw-r--r--  tests/unittests/test_handler/test_handler_bootcmd.py | 19
-rw-r--r--  tests/unittests/test_handler/test_handler_ntp.py | 18
-rw-r--r--  tests/unittests/test_handler/test_handler_resizefs.py | 72
-rw-r--r--  tests/unittests/test_handler/test_handler_runcmd.py | 14
-rw-r--r--  tests/unittests/test_handler/test_handler_set_hostname.py | 57
-rw-r--r--  tests/unittests/test_handler/test_schema.py | 35
-rw-r--r--  tests/unittests/test_net.py | 139
-rw-r--r--  tests/unittests/test_util.py | 135
39 files changed, 1413 insertions(+), 300 deletions(-)
diff --git a/tests/cloud_tests/bddeb.py b/tests/cloud_tests/bddeb.py
index a6d5069f..b9cfcfa6 100644
--- a/tests/cloud_tests/bddeb.py
+++ b/tests/cloud_tests/bddeb.py
@@ -16,7 +16,7 @@ pre_reqs = ['devscripts', 'equivs', 'git', 'tar']
def _out(cmd_res):
"""Get clean output from cmd result."""
- return cmd_res[0].strip()
+ return cmd_res[0].decode("utf-8").strip()
def build_deb(args, instance):
diff --git a/tests/cloud_tests/platforms/ec2/__init__.py b/tests/cloud_tests/platforms/ec2/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/cloud_tests/platforms/ec2/__init__.py
diff --git a/tests/cloud_tests/platforms/lxd/__init__.py b/tests/cloud_tests/platforms/lxd/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/cloud_tests/platforms/lxd/__init__.py
diff --git a/tests/cloud_tests/platforms/lxd/platform.py b/tests/cloud_tests/platforms/lxd/platform.py
index 6a016929..f7251a07 100644
--- a/tests/cloud_tests/platforms/lxd/platform.py
+++ b/tests/cloud_tests/platforms/lxd/platform.py
@@ -101,8 +101,4 @@ class LXDPlatform(Platform):
"""
return self.client.images.get_by_alias(alias)
- def destroy(self):
- """Clean up platform data."""
- super(LXDPlatform, self).destroy()
-
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/platforms/nocloudkvm/__init__.py b/tests/cloud_tests/platforms/nocloudkvm/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/cloud_tests/platforms/nocloudkvm/__init__.py
diff --git a/tests/cloud_tests/platforms/nocloudkvm/instance.py b/tests/cloud_tests/platforms/nocloudkvm/instance.py
index 932dc0fa..33ff3f24 100644
--- a/tests/cloud_tests/platforms/nocloudkvm/instance.py
+++ b/tests/cloud_tests/platforms/nocloudkvm/instance.py
@@ -109,7 +109,7 @@ class NoCloudKVMInstance(Instance):
if self.pid:
try:
c_util.subp(['kill', '-9', self.pid])
- except util.ProcessExectuionError:
+ except c_util.ProcessExecutionError:
pass
if self.pid_file:
diff --git a/tests/cloud_tests/platforms/nocloudkvm/platform.py b/tests/cloud_tests/platforms/nocloudkvm/platform.py
index a7e6f5de..85933463 100644
--- a/tests/cloud_tests/platforms/nocloudkvm/platform.py
+++ b/tests/cloud_tests/platforms/nocloudkvm/platform.py
@@ -21,10 +21,6 @@ class NoCloudKVMPlatform(Platform):
platform_name = 'nocloud-kvm'
- def __init__(self, config):
- """Set up platform."""
- super(NoCloudKVMPlatform, self).__init__(config)
-
def get_image(self, img_conf):
"""Get image using specified image configuration.
diff --git a/tests/cloud_tests/platforms/platforms.py b/tests/cloud_tests/platforms/platforms.py
index 1542b3be..abbfebba 100644
--- a/tests/cloud_tests/platforms/platforms.py
+++ b/tests/cloud_tests/platforms/platforms.py
@@ -2,12 +2,15 @@
"""Base platform class."""
import os
+import shutil
from simplestreams import filters, mirrors
from simplestreams import util as s_util
from cloudinit import util as c_util
+from tests.cloud_tests import util
+
class Platform(object):
"""Base class for platforms."""
@@ -17,7 +20,14 @@ class Platform(object):
def __init__(self, config):
"""Set up platform."""
self.config = config
- self._generate_ssh_keys(config['data_dir'])
+ self.tmpdir = util.mkdtemp()
+ if 'data_dir' in config:
+ self.data_dir = config['data_dir']
+ else:
+ self.data_dir = os.path.join(self.tmpdir, "data_dir")
+ os.mkdir(self.data_dir)
+
+ self._generate_ssh_keys(self.data_dir)
def get_image(self, img_conf):
"""Get image using specified image configuration.
@@ -29,7 +39,7 @@ class Platform(object):
def destroy(self):
"""Clean up platform data."""
- pass
+ shutil.rmtree(self.tmpdir)
def _generate_ssh_keys(self, data_dir):
"""Generate SSH keys to be used with image."""
diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml
index d8bc170f..c7dcbe83 100644
--- a/tests/cloud_tests/releases.yaml
+++ b/tests/cloud_tests/releases.yaml
@@ -30,6 +30,9 @@ default_release_config:
mirror_url: https://cloud-images.ubuntu.com/daily
mirror_dir: '/srv/citest/images'
keyring: /usr/share/keyrings/ubuntu-cloudimage-keyring.gpg
+ # The OS version formatted as Major.Minor is used to compare releases
+ version: null # Each release needs to define this, for example 16.04
+
ec2:
# Choose from: [ebs, instance-store]
root-store: ebs
diff --git a/tests/cloud_tests/testcases.yaml b/tests/cloud_tests/testcases.yaml
index 8e0fb62f..a3e29900 100644
--- a/tests/cloud_tests/testcases.yaml
+++ b/tests/cloud_tests/testcases.yaml
@@ -15,6 +15,9 @@ base_test_data:
instance-id: |
#!/bin/sh
cat /run/cloud-init/.instance-id
+ instance-data.json: |
+ #!/bin/sh
+ cat /run/cloud-init/instance-data.json
result.json: |
#!/bin/sh
cat /run/cloud-init/result.json
diff --git a/tests/cloud_tests/testcases/__init__.py b/tests/cloud_tests/testcases/__init__.py
index a29a0928..bd548f5a 100644
--- a/tests/cloud_tests/testcases/__init__.py
+++ b/tests/cloud_tests/testcases/__init__.py
@@ -7,6 +7,8 @@ import inspect
import unittest
from unittest.util import strclass
+from cloudinit.util import read_conf
+
from tests.cloud_tests import config
from tests.cloud_tests.testcases.base import CloudTestCase as base_test
@@ -48,6 +50,7 @@ def get_suite(test_name, data, conf):
def setUpClass(cls):
cls.data = data
cls.conf = conf
+ cls.release_conf = read_conf(config.RELEASES_CONF)['releases']
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(tmp))
diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py
index 20e95955..324c7c91 100644
--- a/tests/cloud_tests/testcases/base.py
+++ b/tests/cloud_tests/testcases/base.py
@@ -4,10 +4,14 @@
import crypt
import json
+import re
import unittest
+
from cloudinit import util as c_util
+SkipTest = unittest.SkipTest
+
class CloudTestCase(unittest.TestCase):
"""Base test class for verifiers."""
@@ -16,6 +20,43 @@ class CloudTestCase(unittest.TestCase):
data = {}
conf = None
_cloud_config = None
+ release_conf = {} # The platform's os release configuration
+
+ expected_warnings = () # Subclasses set to ignore expected WARN logs
+
+ @property
+ def os_cfg(self):
+ return self.release_conf[self.os_name]['default']
+
+ def is_distro(self, distro_name):
+ return self.os_cfg['os'] == distro_name
+
+ def os_version_cmp(self, cmp_version):
+ """Compare the version of the test to comparison_version.
+
+ @param: cmp_version: Either a float or a string representing
+ a release os from releases.yaml (e.g. centos66)
+
+ @return: -1 when version < cmp_version, 0 when version=cmp_version and
+ 1 when version > cmp_version.
+ """
+ version = self.release_conf[self.os_name]['default']['version']
+ if isinstance(cmp_version, str):
+ cmp_version = self.release_conf[cmp_version]['default']['version']
+ if version < cmp_version:
+ return -1
+ elif version == cmp_version:
+ return 0
+ else:
+ return 1
+
+ @property
+ def os_name(self):
+ return self.data.get('os_name', 'UNKNOWN')
+
+ @property
+ def platform(self):
+ return self.data.get('platform', 'UNKNOWN')
@property
def cloud_config(self):
@@ -72,12 +113,134 @@ class CloudTestCase(unittest.TestCase):
self.assertEqual(len(result['errors']), 0)
def test_no_warnings_in_log(self):
- """Warnings should not be found in the log."""
+ """Unexpected warnings should not be found in the log."""
+ warnings = [
+ l for l in self.get_data_file('cloud-init.log').splitlines()
+ if 'WARN' in l]
+ joined_warnings = '\n'.join(warnings)
+ for expected_warning in self.expected_warnings:
+ self.assertIn(
+ expected_warning, joined_warnings,
+ msg="Did not find %s in cloud-init.log" % expected_warning)
+ # Prune expected from discovered warnings
+ warnings = [w for w in warnings if expected_warning not in w]
+ self.assertEqual(
+ [], warnings, msg="'WARN' found inside cloud-init.log")
+
+ def test_instance_data_json_ec2(self):
+ """Validate instance-data.json content by ec2 platform.
+
+ This content is sourced by snapd when determining snapstore endpoints.
+ We validate expected values per cloud type to ensure we don't break
+ snapd.
+ """
+ if self.platform != 'ec2':
+ raise SkipTest(
+ 'Skipping ec2 instance-data.json on %s' % self.platform)
+ out = self.get_data_file('instance-data.json')
+ if not out:
+ if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0:
+ raise AssertionError(
+ 'No instance-data.json found on %s' % self.os_name)
+ raise SkipTest(
+ 'Skipping instance-data.json test.'
+ ' OS: %s not bionic or newer' % self.os_name)
+ instance_data = json.loads(out)
+ self.assertEqual(
+ ['ds/user-data'], instance_data['base64-encoded-keys'])
+ ds = instance_data.get('ds', {})
+ macs = ds.get('network', {}).get('interfaces', {}).get('macs', {})
+ if not macs:
+ raise AssertionError('No network data from EC2 meta-data')
+ # Check meta-data items we depend on
+ expected_net_keys = [
+ 'public-ipv4s', 'ipv4-associations', 'local-hostname',
+ 'public-hostname']
+ for mac, mac_data in macs.items():
+ for key in expected_net_keys:
+ self.assertIn(key, mac_data)
+ self.assertIsNotNone(
+ ds.get('placement', {}).get('availability-zone'),
+ 'Could not determine EC2 Availability zone placement')
+ ds = instance_data.get('ds', {})
+ v1_data = instance_data.get('v1', {})
+ self.assertIsNotNone(
+ v1_data['availability-zone'], 'expected ec2 availability-zone')
+ self.assertEqual('aws', v1_data['cloud-name'])
+ self.assertIn('i-', v1_data['instance-id'])
+ self.assertIn('ip-', v1_data['local-hostname'])
+ self.assertIsNotNone(v1_data['region'], 'expected ec2 region')
+
+ def test_instance_data_json_lxd(self):
+ """Validate instance-data.json content by lxd platform.
+
+ This content is sourced by snapd when determining snapstore endpoints.
+ We validate expected values per cloud type to ensure we don't break
+ snapd.
+ """
+ if self.platform != 'lxd':
+ raise SkipTest(
+ 'Skipping lxd instance-data.json on %s' % self.platform)
+ out = self.get_data_file('instance-data.json')
+ if not out:
+ if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0:
+ raise AssertionError(
+ 'No instance-data.json found on %s' % self.os_name)
+ raise SkipTest(
+ 'Skipping instance-data.json test.'
+ ' OS: %s not bionic or newer' % self.os_name)
+ instance_data = json.loads(out)
+ v1_data = instance_data.get('v1', {})
+ self.assertEqual(
+ ['ds/user-data', 'ds/vendor-data'],
+ sorted(instance_data['base64-encoded-keys']))
+ self.assertEqual('nocloud', v1_data['cloud-name'])
+ self.assertIsNone(
+ v1_data['availability-zone'],
+ 'found unexpected lxd availability-zone %s' %
+ v1_data['availability-zone'])
+ self.assertIn('cloud-test', v1_data['instance-id'])
+ self.assertIn('cloud-test', v1_data['local-hostname'])
+ self.assertIsNone(
+ v1_data['region'],
+ 'found unexpected lxd region %s' % v1_data['region'])
+
+ def test_instance_data_json_kvm(self):
+ """Validate instance-data.json content by nocloud-kvm platform.
+
+ This content is sourced by snapd when determining snapstore endpoints.
+ We validate expected values per cloud type to ensure we don't break
+ snapd.
+ """
+ if self.platform != 'nocloud-kvm':
+ raise SkipTest(
+ 'Skipping nocloud-kvm instance-data.json on %s' %
+ self.platform)
+ out = self.get_data_file('instance-data.json')
+ if not out:
+ if self.is_distro('ubuntu') and self.os_version_cmp('bionic') >= 0:
+ raise AssertionError(
+ 'No instance-data.json found on %s' % self.os_name)
+ raise SkipTest(
+ 'Skipping instance-data.json test.'
+ ' OS: %s not bionic or newer' % self.os_name)
+ instance_data = json.loads(out)
+ v1_data = instance_data.get('v1', {})
self.assertEqual(
- [],
- [l for l in self.get_data_file('cloud-init.log').splitlines()
- if 'WARN' in l],
- msg="'WARN' found inside cloud-init.log")
+ ['ds/user-data'], instance_data['base64-encoded-keys'])
+ self.assertEqual('nocloud', v1_data['cloud-name'])
+ self.assertIsNone(
+ v1_data['availability-zone'],
+ 'found unexpected kvm availability-zone %s' %
+ v1_data['availability-zone'])
+ self.assertIsNotNone(
+ re.match('[\da-f]{8}(-[\da-f]{4}){3}-[\da-f]{12}',
+ v1_data['instance-id']),
+ 'kvm instance-id is not a UUID: %s' % v1_data['instance-id'])
+ self.assertIn('ubuntu', v1_data['local-hostname'])
+ self.assertIsNone(
+ v1_data['region'],
+ 'found unexpected lxd region %s' % v1_data['region'])
class PasswordListTest(CloudTestCase):
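The CloudTestCase helpers added above (is_distro, os_version_cmp, os_name, platform) let verifiers gate assertions on the release under test. A rough sketch of a verifier using them; the class name and the 'zfs_status' collect script are hypothetical and not part of this commit:

from tests.cloud_tests.testcases import base


class TestExample(base.CloudTestCase):
    """Sketch only: gate a check on distro and release version."""

    def test_zfs_status_collected(self):
        # Hypothetical example using the new base-class helpers.
        if not self.is_distro('ubuntu'):
            raise base.SkipTest('only checked on Ubuntu images')
        if self.os_version_cmp('xenial') < 0:
            raise base.SkipTest('release older than xenial (16.04)')
        out = self.get_data_file('zfs_status')
        self.assertIn('ONLINE', out)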
diff --git a/tests/cloud_tests/testcases/main/command_output_simple.py b/tests/cloud_tests/testcases/main/command_output_simple.py
index 857881cb..80a2c8d7 100644
--- a/tests/cloud_tests/testcases/main/command_output_simple.py
+++ b/tests/cloud_tests/testcases/main/command_output_simple.py
@@ -7,6 +7,8 @@ from tests.cloud_tests.testcases import base
class TestCommandOutputSimple(base.CloudTestCase):
"""Test functionality of simple output redirection."""
+ expected_warnings = ('Stdout, stderr changing to',)
+
def test_output_file(self):
"""Ensure that the output file is not empty and has all stages."""
data = self.get_data_file('cloud-init-test-output')
@@ -15,20 +17,5 @@ class TestCommandOutputSimple(base.CloudTestCase):
data.splitlines()[-1].strip())
# TODO: need to test that all stages redirected here
- def test_no_warnings_in_log(self):
- """Warnings should not be found in the log.
-
- This class redirected stderr and stdout, so it expects to find
- a warning in cloud-init.log to that effect."""
- redirect_msg = 'Stdout, stderr changing to'
- warnings = [
- l for l in self.get_data_file('cloud-init.log').splitlines()
- if 'WARN' in l]
- self.assertEqual(
- [], [w for w in warnings if redirect_msg not in w],
- msg="'WARN' found inside cloud-init.log")
- self.assertEqual(
- 1, len(warnings),
- msg="Did not find %s in cloud-init.log" % redirect_msg)
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/salt_minion.py b/tests/cloud_tests/testcases/modules/salt_minion.py
index c697db2d..70917a4c 100644
--- a/tests/cloud_tests/testcases/modules/salt_minion.py
+++ b/tests/cloud_tests/testcases/modules/salt_minion.py
@@ -26,4 +26,14 @@ class Test(base.CloudTestCase):
self.assertIn('<key data>', out)
self.assertIn('------END PUBLIC KEY-------', out)
+ def test_grains(self):
+ """Test master value in config."""
+ out = self.get_data_file('grains')
+ self.assertIn('role: web', out)
+
+ def test_minion_installed(self):
+ """Test if the salt-minion package is installed"""
+ out = self.get_data_file('minion_installed')
+ self.assertEqual(1, int(out))
+
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/salt_minion.yaml b/tests/cloud_tests/testcases/modules/salt_minion.yaml
index f20d24f0..f20b9765 100644
--- a/tests/cloud_tests/testcases/modules/salt_minion.yaml
+++ b/tests/cloud_tests/testcases/modules/salt_minion.yaml
@@ -3,7 +3,7 @@
#
# 2016-11-17: Currently takes >60 seconds results in test failure
#
-enabled: False
+enabled: True
cloud_config: |
#cloud-config
salt_minion:
@@ -17,6 +17,8 @@ cloud_config: |
------BEGIN PRIVATE KEY------
<key data>
------END PRIVATE KEY-------
+ grains:
+ role: web
collect_scripts:
minion: |
#!/bin/bash
@@ -30,5 +32,11 @@ collect_scripts:
minion.pub: |
#!/bin/bash
cat /etc/salt/pki/minion/minion.pub
+ grains: |
+ #!/bin/bash
+ cat /etc/salt/grains
+ minion_installed: |
+ #!/bin/bash
+ dpkg -l | grep salt-minion | grep ii | wc -l
# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/snap.py b/tests/cloud_tests/testcases/modules/snap.py
new file mode 100644
index 00000000..ff68abbe
--- /dev/null
+++ b/tests/cloud_tests/testcases/modules/snap.py
@@ -0,0 +1,16 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""cloud-init Integration Test Verify Script"""
+from tests.cloud_tests.testcases import base
+
+
+class TestSnap(base.CloudTestCase):
+ """Test snap module"""
+
+ def test_snappy_version(self):
+ """Expect hello-world and core snaps are installed."""
+ out = self.get_data_file('snaplist')
+ self.assertIn('core', out)
+ self.assertIn('hello-world', out)
+
+# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/snap.yaml b/tests/cloud_tests/testcases/modules/snap.yaml
new file mode 100644
index 00000000..44043f31
--- /dev/null
+++ b/tests/cloud_tests/testcases/modules/snap.yaml
@@ -0,0 +1,18 @@
+#
+# Install snappy
+#
+required_features:
+ - snap
+cloud_config: |
+ #cloud-config
+ package_update: true
+ snap:
+ squashfuse_in_container: true
+ commands:
+ - snap install hello-world
+collect_scripts:
+ snaplist: |
+ #!/bin/bash
+ snap list
+
+# vi: ts=4 expandtab
diff --git a/tests/cloud_tests/testcases/modules/snappy.py b/tests/cloud_tests/testcases/modules/snappy.py
index b92271c1..7d17fc5b 100644
--- a/tests/cloud_tests/testcases/modules/snappy.py
+++ b/tests/cloud_tests/testcases/modules/snappy.py
@@ -7,6 +7,8 @@ from tests.cloud_tests.testcases import base
class TestSnappy(base.CloudTestCase):
"""Test snappy module"""
+ expected_warnings = ('DEPRECATION',)
+
def test_snappy_version(self):
"""Test snappy version output"""
out = self.get_data_file('snapd')
diff --git a/tests/cloud_tests/util.py b/tests/cloud_tests/util.py
index 6ff285e7..3dd4996d 100644
--- a/tests/cloud_tests/util.py
+++ b/tests/cloud_tests/util.py
@@ -460,6 +460,10 @@ class PlatformError(IOError):
IOError.__init__(self, message)
+def mkdtemp(prefix='cloud_test_data'):
+ return tempfile.mkdtemp(prefix=prefix)
+
+
class TempDir(object):
"""Configurable temporary directory like tempfile.TemporaryDirectory."""
@@ -480,7 +484,7 @@ class TempDir(object):
@return_value: tempdir path
"""
if not self.tmpdir:
- self.tmpdir = tempfile.mkdtemp(prefix=self.prefix)
+ self.tmpdir = mkdtemp(prefix=self.prefix)
LOG.debug('using tmpdir: %s', self.tmpdir)
return self.tmpdir
diff --git a/tests/cloud_tests/verify.py b/tests/cloud_tests/verify.py
index 2a9fd520..5a68a484 100644
--- a/tests/cloud_tests/verify.py
+++ b/tests/cloud_tests/verify.py
@@ -8,13 +8,16 @@ import unittest
from tests.cloud_tests import (config, LOG, util, testcases)
-def verify_data(base_dir, tests):
+def verify_data(data_dir, platform, os_name, tests):
"""Verify test data is correct.
- @param base_dir: base directory for data
+ @param data_dir: top level directory for all tests
+ @param platform: The platform name we for this test data (e.g. lxd)
+ @param os_name: The operating system under test (xenial, artful, etc.).
@param tests: list of test names
@return_value: {<test_name>: {passed: True/False, failures: []}}
"""
+ base_dir = os.sep.join((data_dir, platform, os_name))
runner = unittest.TextTestRunner(verbosity=util.current_verbosity())
res = {}
for test_name in tests:
@@ -26,7 +29,7 @@ def verify_data(base_dir, tests):
cloud_conf = test_conf['cloud_config']
# load script outputs
- data = {}
+ data = {'platform': platform, 'os_name': os_name}
test_dir = os.path.join(base_dir, test_name)
for script_name in os.listdir(test_dir):
with open(os.path.join(test_dir, script_name), 'rb') as fp:
@@ -73,7 +76,7 @@ def verify(args):
# run test
res[platform][os_name] = verify_data(
- os.sep.join((args.data_dir, platform, os_name)),
+ args.data_dir, platform, os_name,
tests[platform][os_name])
# handle results
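With the reworked signature above, verify_data now takes the collect data directory plus the platform and OS name separately, and records both in each test's data dict. A hedged usage sketch (directory and test names are illustrative only):

from tests.cloud_tests import verify

# res maps test name -> {'passed': True/False, 'failures': [...]}
res = verify.verify_data(
    '/tmp/citest/data',      # data_dir: top-level collect output directory
    'lxd',                   # platform: sub-directory under data_dir
    'xenial',                # os_name: sub-directory under the platform
    ['modules/snap'])        # tests: list of test names to verify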
diff --git a/tests/data/mount_parse_ext.txt b/tests/data/mount_parse_ext.txt
new file mode 100644
index 00000000..da0c870d
--- /dev/null
+++ b/tests/data/mount_parse_ext.txt
@@ -0,0 +1,19 @@
+/dev/mapper/vg00-lv_root on / type ext4 (rw,errors=remount-ro)
+proc on /proc type proc (rw,noexec,nosuid,nodev)
+sysfs on /sys type sysfs (rw,noexec,nosuid,nodev)
+none on /sys/fs/cgroup type tmpfs (rw)
+none on /sys/fs/fuse/connections type fusectl (rw)
+none on /sys/kernel/debug type debugfs (rw)
+none on /sys/kernel/security type securityfs (rw)
+udev on /dev type devtmpfs (rw,mode=0755)
+devpts on /dev/pts type devpts (rw,noexec,nosuid,gid=5,mode=0620)
+none on /tmp type tmpfs (rw)
+tmpfs on /run type tmpfs (rw,noexec,nosuid,size=10%,mode=0755)
+none on /run/lock type tmpfs (rw,noexec,nosuid,nodev,size=5242880)
+none on /run/shm type tmpfs (rw,nosuid,nodev)
+none on /run/user type tmpfs (rw,noexec,nosuid,nodev,size=104857600,mode=0755)
+none on /sys/fs/pstore type pstore (rw)
+/dev/mapper/vg00-lv_var on /var type ext4 (rw)
+rpc_pipefs on /run/rpc_pipefs type rpc_pipefs (rw)
+systemd on /sys/fs/cgroup/systemd type cgroup (rw,noexec,nosuid,nodev,none,name=systemd)
+10.0.1.1:/backup on /backup type nfs (rw,noexec,nosuid,nodev,bg,nolock,tcp,nfsvers=3,hard,addr=10.0.1.1) \ No newline at end of file
diff --git a/tests/data/mount_parse_zfs.txt b/tests/data/mount_parse_zfs.txt
new file mode 100644
index 00000000..08af04fc
--- /dev/null
+++ b/tests/data/mount_parse_zfs.txt
@@ -0,0 +1,21 @@
+vmzroot/ROOT/freebsd on / (zfs, local, nfsv4acls)
+devfs on /dev (devfs, local, multilabel)
+fdescfs on /dev/fd (fdescfs)
+vmzroot/root on /root (zfs, local, nfsv4acls)
+vmzroot/tmp on /tmp (zfs, local, nosuid, nfsv4acls)
+vmzroot/ROOT/freebsd/usr on /usr (zfs, local, nfsv4acls)
+vmzroot/ROOT/freebsd/usr/local on /usr/local (zfs, local, nfsv4acls)
+vmzroot/ROOT/freebsd/var on /var (zfs, local, nfsv4acls)
+vmzroot/ROOT/freebsd/var/cache on /var/cache (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/ROOT/freebsd/var/crash on /var/crash (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/var/cron on /var/cron (zfs, local, nosuid, nfsv4acls)
+vmzroot/ROOT/freebsd/var/db on /var/db (zfs, local, noatime, noexec, nosuid, nfsv4acls)
+vmzroot/ROOT/freebsd/var/empty on /var/empty (zfs, local, noexec, nosuid, read-only, nfsv4acls)
+vmzroot/var/log on /var/log (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/var/log/pf on /var/log/pf (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/var/mail on /var/mail (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/ROOT/freebsd/var/run on /var/run (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/var/spool on /var/spool (zfs, local, noexec, nosuid, nfsv4acls)
+vmzroot/var/tmp on /var/tmp (zfs, local, nosuid, nfsv4acls)
+10.0.0.1:/vol/test on /mnt/test (nfs, read-only)
+10.0.0.2:/vol/tes2 on /mnt/test2 (nfs, nosuid) \ No newline at end of file
diff --git a/tests/data/zpool_status_simple.txt b/tests/data/zpool_status_simple.txt
new file mode 100644
index 00000000..a2c573a3
--- /dev/null
+++ b/tests/data/zpool_status_simple.txt
@@ -0,0 +1,10 @@
+ pool: vmzroot
+ state: ONLINE
+ scan: none requested
+config:
+
+ NAME STATE READ WRITE CKSUM
+ vmzroot ONLINE 0 0 0
+ gpt/system ONLINE 0 0 0
+
+errors: No known data errors \ No newline at end of file
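The two mount fixtures and the zpool status fixture above back new parsing tests in tests/unittests/test_util.py. As a standalone illustration (not cloud-init's own parser), Linux-style lines such as those in mount_parse_ext.txt split cleanly into device, mount point, filesystem type and options:

# Standalone sketch: parse "DEV on MNT type FS (opt1,opt2)" lines as found in
# tests/data/mount_parse_ext.txt (mount_parse_zfs.txt uses the BSD
# "DEV on MNT (fs, opts)" form and would need a different split).
def parse_mount_line(line):
    dev, _, rest = line.partition(' on ')
    mnt, _, rest = rest.partition(' type ')
    fstype, _, opts = rest.partition(' ')
    return dev, mnt, fstype, opts.strip('()').split(',')

with open('tests/data/mount_parse_ext.txt') as fp:
    for line in fp:
        print(parse_mount_line(line.strip()))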
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 254e9876..3e8b7913 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -643,6 +643,21 @@ fdescfs /dev/fd fdescfs rw 0 0
expected_config['config'].append(blacklist_config)
self.assertEqual(netconfig, expected_config)
+ @mock.patch("cloudinit.sources.DataSourceAzure.util.subp")
+ def test_get_hostname_with_no_args(self, subp):
+ dsaz.get_hostname()
+ subp.assert_called_once_with(("hostname",), capture=True)
+
+ @mock.patch("cloudinit.sources.DataSourceAzure.util.subp")
+ def test_get_hostname_with_string_arg(self, subp):
+ dsaz.get_hostname(hostname_command="hostname")
+ subp.assert_called_once_with(("hostname",), capture=True)
+
+ @mock.patch("cloudinit.sources.DataSourceAzure.util.subp")
+ def test_get_hostname_with_iterable_arg(self, subp):
+ dsaz.get_hostname(hostname_command=("hostname",))
+ subp.assert_called_once_with(("hostname",), capture=True)
+
class TestAzureBounce(CiTestCase):
@@ -1162,7 +1177,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
url = 'http://{0}/metadata/reprovisiondata?api-version=2017-04-02'
host = "169.254.169.254"
full_url = url.format(host)
- fake_resp.return_value = mock.MagicMock(status_code=200, text="ovf")
+ fake_resp.return_value = mock.MagicMock(status_code=200, text="ovf",
+ content="ovf")
dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
self.assertTrue(len(dsa._poll_imds()) > 0)
self.assertEqual(fake_resp.call_args_list,
@@ -1170,13 +1186,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
headers={'Metadata': 'true',
'User-Agent':
'Cloud-Init/%s' % vs()
- }, method='GET', timeout=60.0,
- url=full_url),
- mock.call(allow_redirects=True,
- headers={'Metadata': 'true',
- 'User-Agent':
- 'Cloud-Init/%s' % vs()
- }, method='GET', url=full_url)])
+ }, method='GET', timeout=1,
+ url=full_url)])
self.assertEqual(m_dhcp.call_count, 1)
m_net.assert_any_call(
broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9',
@@ -1202,7 +1213,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
username = "myuser"
odata = {'HostName': hostname, 'UserName': username}
content = construct_valid_ovf_env(data=odata)
- fake_resp.return_value = mock.MagicMock(status_code=200, text=content)
+ fake_resp.return_value = mock.MagicMock(status_code=200, text=content,
+ content=content)
dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
md, ud, cfg, d = dsa._reprovision()
self.assertEqual(md['local-hostname'], hostname)
@@ -1212,12 +1224,7 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
headers={'Metadata': 'true',
'User-Agent':
'Cloud-Init/%s' % vs()},
- method='GET', timeout=60.0, url=full_url),
- mock.call(allow_redirects=True,
- headers={'Metadata': 'true',
- 'User-Agent':
- 'Cloud-Init/%s' % vs()},
- method='GET', url=full_url)])
+ method='GET', timeout=1, url=full_url)])
self.assertEqual(m_dhcp.call_count, 1)
m_net.assert_any_call(
broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9',
diff --git a/tests/unittests/test_datasource/test_common.py b/tests/unittests/test_datasource/test_common.py
index 80b9c650..ec333888 100644
--- a/tests/unittests/test_datasource/test_common.py
+++ b/tests/unittests/test_datasource/test_common.py
@@ -14,6 +14,8 @@ from cloudinit.sources import (
DataSourceDigitalOcean as DigitalOcean,
DataSourceEc2 as Ec2,
DataSourceGCE as GCE,
+ DataSourceHetzner as Hetzner,
+ DataSourceIBMCloud as IBMCloud,
DataSourceMAAS as MAAS,
DataSourceNoCloud as NoCloud,
DataSourceOpenNebula as OpenNebula,
@@ -31,6 +33,8 @@ DEFAULT_LOCAL = [
CloudSigma.DataSourceCloudSigma,
ConfigDrive.DataSourceConfigDrive,
DigitalOcean.DataSourceDigitalOcean,
+ Hetzner.DataSourceHetzner,
+ IBMCloud.DataSourceIBMCloud,
NoCloud.DataSourceNoCloud,
OpenNebula.DataSourceOpenNebula,
OVF.DataSourceOVF,
diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py
index f77c2c40..eb3cec42 100644
--- a/tests/unittests/test_datasource/test_gce.py
+++ b/tests/unittests/test_datasource/test_gce.py
@@ -38,11 +38,20 @@ GCE_META_ENCODING = {
'instance/hostname': 'server.project-baz.local',
'instance/zone': 'baz/bang',
'instance/attributes': {
- 'user-data': b64encode(b'/bin/echo baz\n').decode('utf-8'),
+ 'user-data': b64encode(b'#!/bin/echo baz\n').decode('utf-8'),
'user-data-encoding': 'base64',
}
}
+GCE_USER_DATA_TEXT = {
+ 'instance/id': '12345',
+ 'instance/hostname': 'server.project-baz.local',
+ 'instance/zone': 'baz/bang',
+ 'instance/attributes': {
+ 'user-data': '#!/bin/sh\necho hi mom\ntouch /run/up-now\n',
+ }
+}
+
HEADERS = {'Metadata-Flavor': 'Google'}
MD_URL_RE = re.compile(
r'http://metadata.google.internal/computeMetadata/v1/.*')
@@ -135,7 +144,16 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase):
shostname = GCE_META_PARTIAL.get('instance/hostname').split('.')[0]
self.assertEqual(shostname, self.ds.get_hostname())
+ def test_userdata_no_encoding(self):
+ """check that user-data is read."""
+ _set_mock_metadata(GCE_USER_DATA_TEXT)
+ self.ds.get_data()
+ self.assertEqual(
+ GCE_USER_DATA_TEXT['instance/attributes']['user-data'].encode(),
+ self.ds.get_userdata_raw())
+
def test_metadata_encoding(self):
+ """user-data is base64 encoded if user-data-encoding is 'base64'."""
_set_mock_metadata(GCE_META_ENCODING)
self.ds.get_data()
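The two GCE tests above pin down how user-data is handled: it is returned as raw bytes unless instance/attributes carries user-data-encoding set to 'base64', in which case it is decoded first. A minimal sketch of that decision, assuming the attributes dict shape used in the fixtures (illustrative, not the datasource's actual code):

from base64 import b64decode


def decode_user_data(attributes):
    # Sketch of the behaviour asserted above, not DataSourceGCE itself.
    ud = attributes.get('user-data')
    if ud is None:
        return None
    if attributes.get('user-data-encoding') == 'base64':
        return b64decode(ud)
    return ud.encode()

assert decode_user_data(
    {'user-data': '#!/bin/sh\necho hi mom\n'}) == b'#!/bin/sh\necho hi mom\n'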
diff --git a/tests/unittests/test_datasource/test_hetzner.py b/tests/unittests/test_datasource/test_hetzner.py
new file mode 100644
index 00000000..a9c12597
--- /dev/null
+++ b/tests/unittests/test_datasource/test_hetzner.py
@@ -0,0 +1,117 @@
+# Copyright (C) 2018 Jonas Keidel
+#
+# Author: Jonas Keidel <jonas.keidel@hetzner.com>
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.sources import DataSourceHetzner
+from cloudinit import util, settings, helpers
+
+from cloudinit.tests.helpers import mock, CiTestCase
+
+METADATA = util.load_yaml("""
+hostname: cloudinit-test
+instance-id: 123456
+local-ipv4: ''
+network-config:
+ config:
+ - mac_address: 96:00:00:08:19:da
+ name: eth0
+ subnets:
+ - dns_nameservers:
+ - 213.133.99.99
+ - 213.133.100.100
+ - 213.133.98.98
+ ipv4: true
+ type: dhcp
+ type: physical
+ - name: eth0:0
+ subnets:
+ - address: 2a01:4f8:beef:beef::1/64
+ gateway: fe80::1
+ ipv6: true
+ routes:
+ - gateway: fe80::1%eth0
+ netmask: 0
+ network: '::'
+ type: static
+ type: physical
+ version: 1
+network-sysconfig: "DEVICE='eth0'\nTYPE=Ethernet\nBOOTPROTO=dhcp\n\
+ ONBOOT='yes'\nHWADDR=96:00:00:08:19:da\n\
+ IPV6INIT=yes\nIPV6ADDR=2a01:4f8:beef:beef::1/64\n\
+ IPV6_DEFAULTGW=fe80::1%eth0\nIPV6_AUTOCONF=no\n\
+ DNS1=213.133.99.99\nDNS2=213.133.100.100\n"
+public-ipv4: 192.168.0.1
+public-keys:
+- ssh-ed25519 \
+ AAAAC3Nzac1lZdI1NTE5AaaAIaFrcac0yVITsmRrmueq6MD0qYNKlEvW8O1Ib4nkhmWh \
+ test-key@workstation
+vendor_data: "test"
+""")
+
+USERDATA = b"""#cloud-config
+runcmd:
+- [touch, /root/cloud-init-worked ]
+"""
+
+
+class TestDataSourceHetzner(CiTestCase):
+ """
+ Test reading the meta-data
+ """
+ def setUp(self):
+ super(TestDataSourceHetzner, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ def get_ds(self):
+ ds = DataSourceHetzner.DataSourceHetzner(
+ settings.CFG_BUILTIN, None, helpers.Paths({'run_dir': self.tmp}))
+ return ds
+
+ @mock.patch('cloudinit.net.EphemeralIPv4Network')
+ @mock.patch('cloudinit.net.find_fallback_nic')
+ @mock.patch('cloudinit.sources.helpers.hetzner.read_metadata')
+ @mock.patch('cloudinit.sources.helpers.hetzner.read_userdata')
+ @mock.patch('cloudinit.sources.DataSourceHetzner.on_hetzner')
+ def test_read_data(self, m_on_hetzner, m_usermd, m_readmd, m_fallback_nic,
+ m_net):
+ m_on_hetzner.return_value = True
+ m_readmd.return_value = METADATA.copy()
+ m_usermd.return_value = USERDATA
+ m_fallback_nic.return_value = 'eth0'
+
+ ds = self.get_ds()
+ ret = ds.get_data()
+ self.assertTrue(ret)
+
+ m_net.assert_called_once_with(
+ 'eth0', '169.254.0.1',
+ 16, '169.254.255.255'
+ )
+
+ self.assertTrue(m_readmd.called)
+
+ self.assertEqual(METADATA.get('hostname'), ds.get_hostname())
+
+ self.assertEqual(METADATA.get('public-keys'),
+ ds.get_public_ssh_keys())
+
+ self.assertIsInstance(ds.get_public_ssh_keys(), list)
+ self.assertEqual(ds.get_userdata_raw(), USERDATA)
+ self.assertEqual(ds.get_vendordata_raw(), METADATA.get('vendor_data'))
+
+ @mock.patch('cloudinit.sources.helpers.hetzner.read_metadata')
+ @mock.patch('cloudinit.net.find_fallback_nic')
+ @mock.patch('cloudinit.sources.DataSourceHetzner.on_hetzner')
+ def test_not_on_hetzner_returns_false(self, m_on_hetzner, m_find_fallback,
+ m_read_md):
+ """If helper 'on_hetzner' returns False, return False from get_data."""
+ m_on_hetzner.return_value = False
+ ds = self.get_ds()
+ ret = ds.get_data()
+
+ self.assertFalse(ret)
+ # These are a white box attempt to ensure it did not search.
+ m_find_fallback.assert_not_called()
+ m_read_md.assert_not_called()
diff --git a/tests/unittests/test_datasource/test_ibmcloud.py b/tests/unittests/test_datasource/test_ibmcloud.py
new file mode 100644
index 00000000..621cfe49
--- /dev/null
+++ b/tests/unittests/test_datasource/test_ibmcloud.py
@@ -0,0 +1,262 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.sources import DataSourceIBMCloud as ibm
+from cloudinit.tests import helpers as test_helpers
+
+import base64
+import copy
+import json
+import mock
+from textwrap import dedent
+
+D_PATH = "cloudinit.sources.DataSourceIBMCloud."
+
+
+class TestIBMCloud(test_helpers.CiTestCase):
+ """Test the datasource."""
+ def setUp(self):
+ super(TestIBMCloud, self).setUp()
+ pass
+
+
+@mock.patch(D_PATH + "_is_xen", return_value=True)
+@mock.patch(D_PATH + "_is_ibm_provisioning")
+@mock.patch(D_PATH + "util.blkid")
+class TestGetIBMPlatform(test_helpers.CiTestCase):
+ """Test the get_ibm_platform helper."""
+
+ blkid_base = {
+ "/dev/xvda1": {
+ "DEVNAME": "/dev/xvda1", "LABEL": "cloudimg-bootfs",
+ "TYPE": "ext3"},
+ "/dev/xvda2": {
+ "DEVNAME": "/dev/xvda2", "LABEL": "cloudimg-rootfs",
+ "TYPE": "ext4"},
+ }
+
+ blkid_metadata_disk = {
+ "/dev/xvdh1": {
+ "DEVNAME": "/dev/xvdh1", "LABEL": "METADATA", "TYPE": "vfat",
+ "SEC_TYPE": "msdos", "UUID": "681B-8C5D",
+ "PARTUUID": "3d631e09-01"},
+ }
+
+ blkid_oscode_disk = {
+ "/dev/xvdh": {
+ "DEVNAME": "/dev/xvdh", "LABEL": "config-2", "TYPE": "vfat",
+ "SEC_TYPE": "msdos", "UUID": ibm.IBM_CONFIG_UUID}
+ }
+
+ def setUp(self):
+ self.blkid_metadata = copy.deepcopy(self.blkid_base)
+ self.blkid_metadata.update(copy.deepcopy(self.blkid_metadata_disk))
+
+ self.blkid_oscode = copy.deepcopy(self.blkid_base)
+ self.blkid_oscode.update(copy.deepcopy(self.blkid_oscode_disk))
+
+ def test_id_template_live_metadata(self, m_blkid, m_is_prov, _m_xen):
+ """identify TEMPLATE_LIVE_METADATA."""
+ m_blkid.return_value = self.blkid_metadata
+ m_is_prov.return_value = False
+ self.assertEqual(
+ (ibm.Platforms.TEMPLATE_LIVE_METADATA, "/dev/xvdh1"),
+ ibm.get_ibm_platform())
+
+ def test_id_template_prov_metadata(self, m_blkid, m_is_prov, _m_xen):
+ """identify TEMPLATE_PROVISIONING_METADATA."""
+ m_blkid.return_value = self.blkid_metadata
+ m_is_prov.return_value = True
+ self.assertEqual(
+ (ibm.Platforms.TEMPLATE_PROVISIONING_METADATA, "/dev/xvdh1"),
+ ibm.get_ibm_platform())
+
+ def test_id_template_prov_nodata(self, m_blkid, m_is_prov, _m_xen):
+ """identify TEMPLATE_PROVISIONING_NODATA."""
+ m_blkid.return_value = self.blkid_base
+ m_is_prov.return_value = True
+ self.assertEqual(
+ (ibm.Platforms.TEMPLATE_PROVISIONING_NODATA, None),
+ ibm.get_ibm_platform())
+
+ def test_id_os_code(self, m_blkid, m_is_prov, _m_xen):
+ """Identify OS_CODE."""
+ m_blkid.return_value = self.blkid_oscode
+ m_is_prov.return_value = False
+ self.assertEqual((ibm.Platforms.OS_CODE, "/dev/xvdh"),
+ ibm.get_ibm_platform())
+
+ def test_id_os_code_must_match_uuid(self, m_blkid, m_is_prov, _m_xen):
+ """Test against false positive on openstack with non-ibm UUID."""
+ blkid = self.blkid_oscode
+ blkid["/dev/xvdh"]["UUID"] = "9999-9999"
+ m_blkid.return_value = blkid
+ m_is_prov.return_value = False
+ self.assertEqual((None, None), ibm.get_ibm_platform())
+
+
+@mock.patch(D_PATH + "_read_system_uuid", return_value=None)
+@mock.patch(D_PATH + "get_ibm_platform")
+class TestReadMD(test_helpers.CiTestCase):
+ """Test the read_datasource helper."""
+
+ template_md = {
+ "files": [],
+ "network_config": {"content_path": "/content/interfaces"},
+ "hostname": "ci-fond-ram",
+ "name": "ci-fond-ram",
+ "domain": "testing.ci.cloud-init.org",
+ "meta": {"dsmode": "net"},
+ "uuid": "8e636730-9f5d-c4a5-327c-d7123c46e82f",
+ "public_keys": {"1091307": "ssh-rsa AAAAB3NzaC1...Hw== ci-pubkey"},
+ }
+
+ oscode_md = {
+ "hostname": "ci-grand-gannet.testing.ci.cloud-init.org",
+ "name": "ci-grand-gannet",
+ "uuid": "2f266908-8e6c-4818-9b5c-42e9cc66a785",
+ "random_seed": "bm90LXJhbmRvbQo=",
+ "crypt_key": "ssh-rsa AAAAB3NzaC1yc2..n6z/",
+ "configuration_token": "eyJhbGciOi..M3ZA",
+ "public_keys": {"1091307": "ssh-rsa AAAAB3N..Hw== ci-pubkey"},
+ }
+
+ content_interfaces = dedent("""\
+ auto lo
+ iface lo inet loopback
+
+ auto eth0
+ allow-hotplug eth0
+ iface eth0 inet static
+ address 10.82.43.5
+ netmask 255.255.255.192
+ """)
+
+ userdata = b"#!/bin/sh\necho hi mom\n"
+ # meta.js file gets json encoded userdata as a list.
+ meta_js = '["#!/bin/sh\necho hi mom\n"]'
+ vendor_data = {
+ "cloud-init": "#!/bin/bash\necho 'root:$6$5ab01p1m1' | chpasswd -e"}
+
+ network_data = {
+ "links": [
+ {"id": "interface_29402281", "name": "eth0", "mtu": None,
+ "type": "phy", "ethernet_mac_address": "06:00:f1:bd:da:25"},
+ {"id": "interface_29402279", "name": "eth1", "mtu": None,
+ "type": "phy", "ethernet_mac_address": "06:98:5e:d0:7f:86"}
+ ],
+ "networks": [
+ {"id": "network_109887563", "link": "interface_29402281",
+ "type": "ipv4", "ip_address": "10.82.43.2",
+ "netmask": "255.255.255.192",
+ "routes": [
+ {"network": "10.0.0.0", "netmask": "255.0.0.0",
+ "gateway": "10.82.43.1"},
+ {"network": "161.26.0.0", "netmask": "255.255.0.0",
+ "gateway": "10.82.43.1"}]},
+ {"id": "network_109887551", "link": "interface_29402279",
+ "type": "ipv4", "ip_address": "108.168.194.252",
+ "netmask": "255.255.255.248",
+ "routes": [
+ {"network": "0.0.0.0", "netmask": "0.0.0.0",
+ "gateway": "108.168.194.249"}]}
+ ],
+ "services": [
+ {"type": "dns", "address": "10.0.80.11"},
+ {"type": "dns", "address": "10.0.80.12"}
+ ],
+ }
+
+ sysuuid = '7f79ebf5-d791-43c3-a723-854e8389d59f'
+
+ def _get_expected_metadata(self, os_md):
+ """return expected 'metadata' for data loaded from meta_data.json."""
+ os_md = copy.deepcopy(os_md)
+ renames = (
+ ('hostname', 'local-hostname'),
+ ('uuid', 'instance-id'),
+ ('public_keys', 'public-keys'))
+ ret = {}
+ for osname, mdname in renames:
+ if osname in os_md:
+ ret[mdname] = os_md[osname]
+ if 'random_seed' in os_md:
+ ret['random_seed'] = base64.b64decode(os_md['random_seed'])
+
+ return ret
+
+ def test_provisioning_md(self, m_platform, m_sysuuid):
+ """Provisioning env with a metadata disk should return None."""
+ m_platform.return_value = (
+ ibm.Platforms.TEMPLATE_PROVISIONING_METADATA, "/dev/xvdh")
+ self.assertIsNone(ibm.read_md())
+
+ def test_provisioning_no_metadata(self, m_platform, m_sysuuid):
+ """Provisioning env with no metadata disk should return None."""
+ m_platform.return_value = (
+ ibm.Platforms.TEMPLATE_PROVISIONING_NODATA, None)
+ self.assertIsNone(ibm.read_md())
+
+ def test_provisioning_not_ibm(self, m_platform, m_sysuuid):
+ """Provisioning env but not identified as IBM should return None."""
+ m_platform.return_value = (None, None)
+ self.assertIsNone(ibm.read_md())
+
+ def test_template_live(self, m_platform, m_sysuuid):
+ """Template live environment should be identified."""
+ tmpdir = self.tmp_dir()
+ m_platform.return_value = (
+ ibm.Platforms.TEMPLATE_LIVE_METADATA, tmpdir)
+ m_sysuuid.return_value = self.sysuuid
+
+ test_helpers.populate_dir(tmpdir, {
+ 'openstack/latest/meta_data.json': json.dumps(self.template_md),
+ 'openstack/latest/user_data': self.userdata,
+ 'openstack/content/interfaces': self.content_interfaces,
+ 'meta.js': self.meta_js})
+
+ ret = ibm.read_md()
+ self.assertEqual(ibm.Platforms.TEMPLATE_LIVE_METADATA,
+ ret['platform'])
+ self.assertEqual(tmpdir, ret['source'])
+ self.assertEqual(self.userdata, ret['userdata'])
+ self.assertEqual(self._get_expected_metadata(self.template_md),
+ ret['metadata'])
+ self.assertEqual(self.sysuuid, ret['system-uuid'])
+
+ def test_os_code_live(self, m_platform, m_sysuuid):
+ """Verify an os_code metadata path."""
+ tmpdir = self.tmp_dir()
+ m_platform.return_value = (ibm.Platforms.OS_CODE, tmpdir)
+ netdata = json.dumps(self.network_data)
+ test_helpers.populate_dir(tmpdir, {
+ 'openstack/latest/meta_data.json': json.dumps(self.oscode_md),
+ 'openstack/latest/user_data': self.userdata,
+ 'openstack/latest/vendor_data.json': json.dumps(self.vendor_data),
+ 'openstack/latest/network_data.json': netdata,
+ })
+
+ ret = ibm.read_md()
+ self.assertEqual(ibm.Platforms.OS_CODE, ret['platform'])
+ self.assertEqual(tmpdir, ret['source'])
+ self.assertEqual(self.userdata, ret['userdata'])
+ self.assertEqual(self._get_expected_metadata(self.oscode_md),
+ ret['metadata'])
+
+ def test_os_code_live_no_userdata(self, m_platform, m_sysuuid):
+ """Verify os_code without user-data."""
+ tmpdir = self.tmp_dir()
+ m_platform.return_value = (ibm.Platforms.OS_CODE, tmpdir)
+ test_helpers.populate_dir(tmpdir, {
+ 'openstack/latest/meta_data.json': json.dumps(self.oscode_md),
+ 'openstack/latest/vendor_data.json': json.dumps(self.vendor_data),
+ })
+
+ ret = ibm.read_md()
+ self.assertEqual(ibm.Platforms.OS_CODE, ret['platform'])
+ self.assertEqual(tmpdir, ret['source'])
+ self.assertIsNone(ret['userdata'])
+ self.assertEqual(self._get_expected_metadata(self.oscode_md),
+ ret['metadata'])
+
+
+# vi: ts=4 expandtab
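The TestGetIBMPlatform cases above encode the detection rules on a Xen guest: a vfat disk labelled METADATA marks a template image (provisioning vs. live depending on _is_ibm_provisioning), while a config-2 disk only counts as OS_CODE when its UUID is 9796-932E. A compact sketch of that decision table (illustrative only; ibm.get_ibm_platform() is the code actually under test):

def classify(blkid, provisioning):
    # Sketch of the classification asserted above, keyed on blkid output.
    by_label = {v.get('LABEL'): dev for dev, v in blkid.items()}
    if 'METADATA' in by_label:
        plat = ('TEMPLATE_PROVISIONING_METADATA' if provisioning
                else 'TEMPLATE_LIVE_METADATA')
        return plat, by_label['METADATA']
    dev = by_label.get('config-2')
    if dev and blkid[dev].get('UUID', '').upper() == '9796-932E':
        return 'OS_CODE', dev
    if provisioning:
        return 'TEMPLATE_PROVISIONING_NODATA', None
    return None, None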
diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py
index 5c3ba012..ab42f344 100644
--- a/tests/unittests/test_datasource/test_opennebula.py
+++ b/tests/unittests/test_datasource/test_opennebula.py
@@ -4,7 +4,6 @@ from cloudinit import helpers
from cloudinit.sources import DataSourceOpenNebula as ds
from cloudinit import util
from cloudinit.tests.helpers import mock, populate_dir, CiTestCase
-from textwrap import dedent
import os
import pwd
@@ -33,6 +32,11 @@ HOSTNAME = 'foo.example.com'
PUBLIC_IP = '10.0.0.3'
MACADDR = '02:00:0a:12:01:01'
IP_BY_MACADDR = '10.18.1.1'
+IP4_PREFIX = '24'
+IP6_GLOBAL = '2001:db8:1:0:400:c0ff:fea8:1ba'
+IP6_ULA = 'fd01:dead:beaf:0:400:c0ff:fea8:1ba'
+IP6_GW = '2001:db8:1::ffff'
+IP6_PREFIX = '48'
DS_PATH = "cloudinit.sources.DataSourceOpenNebula"
@@ -221,7 +225,9 @@ class TestOpenNebulaDataSource(CiTestCase):
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue(IP_BY_MACADDR in results['network-interfaces'])
+ self.assertTrue(
+ IP_BY_MACADDR + '/' + IP4_PREFIX in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
# ETH0_IP and ETH0_MAC
populate_context_dir(
@@ -229,7 +235,9 @@ class TestOpenNebulaDataSource(CiTestCase):
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue(IP_BY_MACADDR in results['network-interfaces'])
+ self.assertTrue(
+ IP_BY_MACADDR + '/' + IP4_PREFIX in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
# ETH0_IP with empty string and ETH0_MAC
# in the case of using Virtual Network contains
@@ -239,55 +247,91 @@ class TestOpenNebulaDataSource(CiTestCase):
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue(IP_BY_MACADDR in results['network-interfaces'])
+ self.assertTrue(
+ IP_BY_MACADDR + '/' + IP4_PREFIX in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
- # ETH0_NETWORK
+ # ETH0_MASK
populate_context_dir(
self.seed_dir, {
'ETH0_IP': IP_BY_MACADDR,
'ETH0_MAC': MACADDR,
- 'ETH0_NETWORK': '10.18.0.0'
+ 'ETH0_MASK': '255.255.0.0'
})
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue('10.18.0.0' in results['network-interfaces'])
+ self.assertTrue(
+ IP_BY_MACADDR + '/16' in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
- # ETH0_NETWORK with empty string
+ # ETH0_MASK with empty string
populate_context_dir(
self.seed_dir, {
'ETH0_IP': IP_BY_MACADDR,
'ETH0_MAC': MACADDR,
- 'ETH0_NETWORK': ''
+ 'ETH0_MASK': ''
})
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue('10.18.1.0' in results['network-interfaces'])
+ self.assertTrue(
+ IP_BY_MACADDR + '/' + IP4_PREFIX in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
- # ETH0_MASK
+ # ETH0_IP6
populate_context_dir(
self.seed_dir, {
- 'ETH0_IP': IP_BY_MACADDR,
+ 'ETH0_IP6': IP6_GLOBAL,
'ETH0_MAC': MACADDR,
- 'ETH0_MASK': '255.255.0.0'
})
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue('255.255.0.0' in results['network-interfaces'])
+ self.assertTrue(
+ IP6_GLOBAL + '/64' in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
- # ETH0_MASK with empty string
+ # ETH0_IP6_ULA
populate_context_dir(
self.seed_dir, {
- 'ETH0_IP': IP_BY_MACADDR,
+ 'ETH0_IP6_ULA': IP6_ULA,
+ 'ETH0_MAC': MACADDR,
+ })
+ results = ds.read_context_disk_dir(self.seed_dir)
+
+ self.assertTrue('network-interfaces' in results)
+ self.assertTrue(
+ IP6_ULA + '/64' in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
+
+ # ETH0_IP6 and ETH0_IP6_PREFIX_LENGTH
+ populate_context_dir(
+ self.seed_dir, {
+ 'ETH0_IP6': IP6_GLOBAL,
+ 'ETH0_IP6_PREFIX_LENGTH': IP6_PREFIX,
+ 'ETH0_MAC': MACADDR,
+ })
+ results = ds.read_context_disk_dir(self.seed_dir)
+
+ self.assertTrue('network-interfaces' in results)
+ self.assertTrue(
+ IP6_GLOBAL + '/' + IP6_PREFIX in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
+
+ # ETH0_IP6 and ETH0_IP6_PREFIX_LENGTH with empty string
+ populate_context_dir(
+ self.seed_dir, {
+ 'ETH0_IP6': IP6_GLOBAL,
+ 'ETH0_IP6_PREFIX_LENGTH': '',
'ETH0_MAC': MACADDR,
- 'ETH0_MASK': ''
})
results = ds.read_context_disk_dir(self.seed_dir)
self.assertTrue('network-interfaces' in results)
- self.assertTrue('255.255.255.0' in results['network-interfaces'])
+ self.assertTrue(
+ IP6_GLOBAL + '/64' in
+ results['network-interfaces']['ethernets'][dev]['addresses'])
def test_find_candidates(self):
def my_devs_with(criteria):
@@ -310,108 +354,152 @@ class TestOpenNebulaNetwork(unittest.TestCase):
system_nics = ('eth0', 'ens3')
- def test_lo(self):
- net = ds.OpenNebulaNetwork(context={}, system_nics_by_mac={})
- self.assertEqual(net.gen_conf(), u'''\
-auto lo
-iface lo inet loopback
-''')
-
@mock.patch(DS_PATH + ".get_physical_nics_by_mac")
def test_eth0(self, m_get_phys_by_mac):
for nic in self.system_nics:
m_get_phys_by_mac.return_value = {MACADDR: nic}
net = ds.OpenNebulaNetwork({})
- self.assertEqual(net.gen_conf(), dedent("""\
- auto lo
- iface lo inet loopback
-
- auto {dev}
- iface {dev} inet static
- #hwaddress {macaddr}
- address 10.18.1.1
- network 10.18.1.0
- netmask 255.255.255.0
- """.format(dev=nic, macaddr=MACADDR)))
+ expected = {
+ 'version': 2,
+ 'ethernets': {
+ nic: {
+ 'match': {'macaddress': MACADDR},
+ 'addresses': [IP_BY_MACADDR + '/' + IP4_PREFIX]}}}
+
+ self.assertEqual(net.gen_conf(), expected)
def test_eth0_override(self):
+ self.maxDiff = None
context = {
'DNS': '1.2.3.8',
- 'ETH0_IP': '10.18.1.1',
- 'ETH0_NETWORK': '10.18.0.0',
- 'ETH0_MASK': '255.255.0.0',
+ 'ETH0_DNS': '1.2.3.6 1.2.3.7',
'ETH0_GATEWAY': '1.2.3.5',
- 'ETH0_DOMAIN': 'example.com',
+ 'ETH0_GATEWAY6': '',
+ 'ETH0_IP': IP_BY_MACADDR,
+ 'ETH0_IP6': '',
+ 'ETH0_IP6_PREFIX_LENGTH': '',
+ 'ETH0_IP6_ULA': '',
+ 'ETH0_MAC': '02:00:0a:12:01:01',
+ 'ETH0_MASK': '255.255.0.0',
+ 'ETH0_MTU': '',
+ 'ETH0_NETWORK': '10.18.0.0',
+ 'ETH0_SEARCH_DOMAIN': '',
+ }
+ for nic in self.system_nics:
+ net = ds.OpenNebulaNetwork(context,
+ system_nics_by_mac={MACADDR: nic})
+ expected = {
+ 'version': 2,
+ 'ethernets': {
+ nic: {
+ 'match': {'macaddress': MACADDR},
+ 'addresses': [IP_BY_MACADDR + '/16'],
+ 'gateway4': '1.2.3.5',
+ 'gateway6': None,
+ 'nameservers': {
+ 'addresses': ['1.2.3.6', '1.2.3.7', '1.2.3.8']}}}}
+
+ self.assertEqual(expected, net.gen_conf())
+
+ def test_eth0_v4v6_override(self):
+ self.maxDiff = None
+ context = {
+ 'DNS': '1.2.3.8',
'ETH0_DNS': '1.2.3.6 1.2.3.7',
- 'ETH0_MAC': '02:00:0a:12:01:01'
+ 'ETH0_GATEWAY': '1.2.3.5',
+ 'ETH0_GATEWAY6': IP6_GW,
+ 'ETH0_IP': IP_BY_MACADDR,
+ 'ETH0_IP6': IP6_GLOBAL,
+ 'ETH0_IP6_PREFIX_LENGTH': IP6_PREFIX,
+ 'ETH0_IP6_ULA': IP6_ULA,
+ 'ETH0_MAC': '02:00:0a:12:01:01',
+ 'ETH0_MASK': '255.255.0.0',
+ 'ETH0_MTU': '1280',
+ 'ETH0_NETWORK': '10.18.0.0',
+ 'ETH0_SEARCH_DOMAIN': 'example.com example.org',
}
for nic in self.system_nics:
- expected = dedent("""\
- auto lo
- iface lo inet loopback
-
- auto {dev}
- iface {dev} inet static
- #hwaddress {macaddr}
- address 10.18.1.1
- network 10.18.0.0
- netmask 255.255.0.0
- gateway 1.2.3.5
- dns-search example.com
- dns-nameservers 1.2.3.8 1.2.3.6 1.2.3.7
- """).format(dev=nic, macaddr=MACADDR)
net = ds.OpenNebulaNetwork(context,
system_nics_by_mac={MACADDR: nic})
+
+ expected = {
+ 'version': 2,
+ 'ethernets': {
+ nic: {
+ 'match': {'macaddress': MACADDR},
+ 'addresses': [
+ IP_BY_MACADDR + '/16',
+ IP6_GLOBAL + '/' + IP6_PREFIX,
+ IP6_ULA + '/' + IP6_PREFIX],
+ 'gateway4': '1.2.3.5',
+ 'gateway6': IP6_GW,
+ 'nameservers': {
+ 'addresses': ['1.2.3.6', '1.2.3.7', '1.2.3.8'],
+ 'search': ['example.com', 'example.org']},
+ 'mtu': '1280'}}}
+
self.assertEqual(expected, net.gen_conf())
def test_multiple_nics(self):
"""Test rendering multiple nics with names that differ from context."""
+ self.maxDiff = None
MAC_1 = "02:00:0a:12:01:01"
MAC_2 = "02:00:0a:12:01:02"
context = {
'DNS': '1.2.3.8',
- 'ETH0_IP': '10.18.1.1',
- 'ETH0_NETWORK': '10.18.0.0',
- 'ETH0_MASK': '255.255.0.0',
- 'ETH0_GATEWAY': '1.2.3.5',
- 'ETH0_DOMAIN': 'example.com',
'ETH0_DNS': '1.2.3.6 1.2.3.7',
+ 'ETH0_GATEWAY': '1.2.3.5',
+ 'ETH0_GATEWAY6': IP6_GW,
+ 'ETH0_IP': '10.18.1.1',
+ 'ETH0_IP6': IP6_GLOBAL,
+ 'ETH0_IP6_PREFIX_LENGTH': '',
+ 'ETH0_IP6_ULA': IP6_ULA,
'ETH0_MAC': MAC_2,
- 'ETH3_IP': '10.3.1.3',
- 'ETH3_NETWORK': '10.3.0.0',
- 'ETH3_MASK': '255.255.0.0',
- 'ETH3_GATEWAY': '10.3.0.1',
- 'ETH3_DOMAIN': 'third.example.com',
+ 'ETH0_MASK': '255.255.0.0',
+ 'ETH0_MTU': '1280',
+ 'ETH0_NETWORK': '10.18.0.0',
+ 'ETH0_SEARCH_DOMAIN': 'example.com',
'ETH3_DNS': '10.3.1.2',
+ 'ETH3_GATEWAY': '10.3.0.1',
+ 'ETH3_GATEWAY6': '',
+ 'ETH3_IP': '10.3.1.3',
+ 'ETH3_IP6': '',
+ 'ETH3_IP6_PREFIX_LENGTH': '',
+ 'ETH3_IP6_ULA': '',
'ETH3_MAC': MAC_1,
+ 'ETH3_MASK': '255.255.0.0',
+ 'ETH3_MTU': '',
+ 'ETH3_NETWORK': '10.3.0.0',
+ 'ETH3_SEARCH_DOMAIN': 'third.example.com third.example.org',
}
net = ds.OpenNebulaNetwork(
context, system_nics_by_mac={MAC_1: 'enp0s25', MAC_2: 'enp1s2'})
- expected = dedent("""\
- auto lo
- iface lo inet loopback
-
- auto enp0s25
- iface enp0s25 inet static
- #hwaddress 02:00:0a:12:01:01
- address 10.3.1.3
- network 10.3.0.0
- netmask 255.255.0.0
- gateway 10.3.0.1
- dns-search third.example.com
- dns-nameservers 1.2.3.8 10.3.1.2
-
- auto enp1s2
- iface enp1s2 inet static
- #hwaddress 02:00:0a:12:01:02
- address 10.18.1.1
- network 10.18.0.0
- netmask 255.255.0.0
- gateway 1.2.3.5
- dns-search example.com
- dns-nameservers 1.2.3.8 1.2.3.6 1.2.3.7
- """)
+ expected = {
+ 'version': 2,
+ 'ethernets': {
+ 'enp1s2': {
+ 'match': {'macaddress': MAC_2},
+ 'addresses': [
+ '10.18.1.1/16',
+ IP6_GLOBAL + '/64',
+ IP6_ULA + '/64'],
+ 'gateway4': '1.2.3.5',
+ 'gateway6': IP6_GW,
+ 'nameservers': {
+ 'addresses': ['1.2.3.6', '1.2.3.7', '1.2.3.8'],
+ 'search': ['example.com']},
+ 'mtu': '1280'},
+ 'enp0s25': {
+ 'match': {'macaddress': MAC_1},
+ 'addresses': ['10.3.1.3/16'],
+ 'gateway4': '10.3.0.1',
+ 'gateway6': None,
+ 'nameservers': {
+ 'addresses': ['10.3.1.2', '1.2.3.8'],
+ 'search': [
+ 'third.example.com',
+ 'third.example.org']}}}}
self.assertEqual(expected, net.gen_conf())
diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py
index 31cc6223..53643989 100644
--- a/tests/unittests/test_ds_identify.py
+++ b/tests/unittests/test_ds_identify.py
@@ -9,6 +9,8 @@ from cloudinit import util
from cloudinit.tests.helpers import (
CiTestCase, dir2dict, populate_dir)
+from cloudinit.sources import DataSourceIBMCloud as dsibm
+
UNAME_MYSYS = ("Linux bart 4.4.0-62-generic #83-Ubuntu "
"SMP Wed Jan 18 14:10:15 UTC 2017 x86_64 GNU/Linux")
UNAME_PPC64EL = ("Linux diamond 4.4.0-83-generic #106-Ubuntu SMP "
@@ -37,8 +39,8 @@ BLKID_UEFI_UBUNTU = [
POLICY_FOUND_ONLY = "search,found=all,maybe=none,notfound=disabled"
POLICY_FOUND_OR_MAYBE = "search,found=all,maybe=all,notfound=disabled"
-DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=enabled"
-DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=disabled"
+DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=disabled"
+DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=enabled"
DI_EC2_STRICT_ID_DEFAULT = "true"
OVF_MATCH_STRING = 'http://schemas.dmtf.org/ovf/environment/1'
@@ -60,11 +62,16 @@ P_CHASSIS_ASSET_TAG = "sys/class/dmi/id/chassis_asset_tag"
P_PRODUCT_NAME = "sys/class/dmi/id/product_name"
P_PRODUCT_SERIAL = "sys/class/dmi/id/product_serial"
P_PRODUCT_UUID = "sys/class/dmi/id/product_uuid"
+P_SYS_VENDOR = "sys/class/dmi/id/sys_vendor"
P_SEED_DIR = "var/lib/cloud/seed"
P_DSID_CFG = "etc/cloud/ds-identify.cfg"
+IBM_PROVISIONING_CHECK_PATH = "/root/provisioningConfiguration.cfg"
+IBM_CONFIG_UUID = "9796-932E"
+
MOCK_VIRT_IS_KVM = {'name': 'detect_virt', 'RET': 'kvm', 'ret': 0}
MOCK_VIRT_IS_VMWARE = {'name': 'detect_virt', 'RET': 'vmware', 'ret': 0}
+MOCK_VIRT_IS_XEN = {'name': 'detect_virt', 'RET': 'xen', 'ret': 0}
MOCK_UNAME_IS_PPC64 = {'name': 'uname', 'out': UNAME_PPC64EL, 'ret': 0}
@@ -237,6 +244,57 @@ class TestDsIdentify(CiTestCase):
self._test_ds_found('ConfigDriveUpper')
return
+ def test_ibmcloud_template_userdata_in_provisioning(self):
+ """Template provisioned with user-data during provisioning stage.
+
+ Template provisioning with user-data has METADATA disk,
+ datasource should return not found."""
+ data = copy.deepcopy(VALID_CFG['IBMCloud-metadata'])
+ data['files'] = {IBM_PROVISIONING_CHECK_PATH: 'xxx'}
+ return self._check_via_dict(data, RC_NOT_FOUND)
+
+ def test_ibmcloud_template_userdata(self):
+ """Template provisioned with user-data first boot.
+
+ Template provisioning with user-data has METADATA disk.
+ datasource should return found."""
+ self._test_ds_found('IBMCloud-metadata')
+
+ def test_ibmcloud_template_no_userdata_in_provisioning(self):
+ """Template provisioned with no user-data during provisioning.
+
+ no disks attached. Datasource should return not found."""
+ data = copy.deepcopy(VALID_CFG['IBMCloud-nodisks'])
+ data['files'] = {IBM_PROVISIONING_CHECK_PATH: 'xxx'}
+ return self._check_via_dict(data, RC_NOT_FOUND)
+
+ def test_ibmcloud_template_no_userdata(self):
+ """Template provisioned with no user-data first boot.
+
+ no disks attached. Datasource should return found."""
+ self._check_via_dict(VALID_CFG['IBMCloud-nodisks'], RC_NOT_FOUND)
+
+ def test_ibmcloud_os_code(self):
+ """Launched by os code always has config-2 disk."""
+ self._test_ds_found('IBMCloud-config-2')
+
+ def test_ibmcloud_os_code_different_uuid(self):
+ """IBM cloud config-2 disks must be explicit match on UUID.
+
+ If the UUID is not 9796-932E then we actually expect ConfigDrive."""
+ data = copy.deepcopy(VALID_CFG['IBMCloud-config-2'])
+ offset = None
+ for m, d in enumerate(data['mocks']):
+ if d.get('name') == "blkid":
+ offset = m
+ break
+        if offset is None:
+ raise ValueError("Expected to find 'blkid' mock, but did not.")
+ data['mocks'][offset]['out'] = d['out'].replace(dsibm.IBM_CONFIG_UUID,
+ "DEAD-BEEF")
+ self._check_via_dict(
+ data, rc=RC_FOUND, dslist=['ConfigDrive', DS_NONE])
+
def test_policy_disabled(self):
"""A Builtin policy of 'disabled' should return not found.
@@ -290,6 +348,10 @@ class TestDsIdentify(CiTestCase):
"""On Intel, openstack must be identified."""
self._test_ds_found('OpenStack')
+ def test_openstack_open_telekom_cloud(self):
+        """Open Telekom Cloud identification."""
+ self._test_ds_found('OpenStack-OpenTelekom')
+
def test_openstack_on_non_intel_is_maybe(self):
"""On non-Intel, openstack without dmi info is maybe.
@@ -337,6 +399,16 @@ class TestDsIdentify(CiTestCase):
"""OVF is identified when vmware customization is enabled."""
self._test_ds_found('OVF-vmware-customization')
+ def test_ovf_on_vmware_iso_found_open_vm_tools_64(self):
+ """OVF is identified when open-vm-tools installed in /usr/lib64."""
+ cust64 = copy.deepcopy(VALID_CFG['OVF-vmware-customization'])
+ p32 = 'usr/lib/vmware-tools/plugins/vmsvc/libdeployPkgPlugin.so'
+ open64 = 'usr/lib64/open-vm-tools/plugins/vmsvc/libdeployPkgPlugin.so'
+ cust64['files'][open64] = cust64['files'][p32]
+ del cust64['files'][p32]
+ return self._check_via_dict(
+ cust64, RC_FOUND, dslist=[cust64.get('ds'), DS_NONE])
+
def test_ovf_on_vmware_iso_found_by_cdrom_with_matching_fs_label(self):
"""OVF is identified by well-known iso9660 labels."""
ovf_cdrom_by_label = copy.deepcopy(VALID_CFG['OVF'])
@@ -350,8 +422,10 @@ class TestDsIdentify(CiTestCase):
"OVFENV", "ovfenv"]
for valid_ovf_label in valid_ovf_labels:
ovf_cdrom_by_label['mocks'][0]['out'] = blkid_out([
+ {'DEVNAME': 'sda1', 'TYPE': 'ext4', 'LABEL': 'rootfs'},
{'DEVNAME': 'sr0', 'TYPE': 'iso9660',
- 'LABEL': valid_ovf_label}])
+ 'LABEL': valid_ovf_label},
+ {'DEVNAME': 'vda1', 'TYPE': 'ntfs', 'LABEL': 'data'}])
self._check_via_dict(
ovf_cdrom_by_label, rc=RC_FOUND, dslist=['OVF', DS_NONE])
@@ -359,6 +433,18 @@ class TestDsIdentify(CiTestCase):
"""NoCloud is found with iso9660 filesystem on non-cdrom disk."""
self._test_ds_found('NoCloud')
+ def test_nocloud_seed(self):
+        """NoCloud seed directory."""
+ self._test_ds_found('NoCloud-seed')
+
+ def test_nocloud_seed_ubuntu_core_writable(self):
+        """NoCloud seed directory on writable Ubuntu Core."""
+ self._test_ds_found('NoCloud-seed-ubuntu-core')
+
+ def test_hetzner_found(self):
+ """Hetzner cloud is identified in sys_vendor."""
+ self._test_ds_found('Hetzner')
+
def blkid_out(disks=None):
"""Convert a list of disk dictionaries into blkid content."""
@@ -422,7 +508,7 @@ VALID_CFG = {
},
'Ec2-xen': {
'ds': 'Ec2',
- 'mocks': [{'name': 'detect_virt', 'RET': 'xen', 'ret': 0}],
+ 'mocks': [MOCK_VIRT_IS_XEN],
'files': {
'sys/hypervisor/uuid': 'ec2c6e2f-5fac-4fc7-9c82-74127ec14bbb\n'
},
@@ -454,6 +540,22 @@ VALID_CFG = {
'dev/vdb': 'pretend iso content for cidata\n',
}
},
+ 'NoCloud-seed': {
+ 'ds': 'NoCloud',
+ 'files': {
+ os.path.join(P_SEED_DIR, 'nocloud', 'user-data'): 'ud\n',
+ os.path.join(P_SEED_DIR, 'nocloud', 'meta-data'): 'md\n',
+ }
+ },
+ 'NoCloud-seed-ubuntu-core': {
+ 'ds': 'NoCloud',
+ 'files': {
+ os.path.join('writable/system-data', P_SEED_DIR,
+ 'nocloud-net', 'user-data'): 'ud\n',
+ os.path.join('writable/system-data', P_SEED_DIR,
+ 'nocloud-net', 'meta-data'): 'md\n',
+ }
+ },
'OpenStack': {
'ds': 'OpenStack',
'files': {P_PRODUCT_NAME: 'OpenStack Nova\n'},
@@ -461,6 +563,12 @@ VALID_CFG = {
'policy_dmi': POLICY_FOUND_ONLY,
'policy_no_dmi': POLICY_FOUND_ONLY,
},
+ 'OpenStack-OpenTelekom': {
+ # OTC gen1 (Xen) hosts use OpenStack datasource, LP: #1756471
+ 'ds': 'OpenStack',
+ 'files': {P_CHASSIS_ASSET_TAG: 'OpenTelekomCloud\n'},
+ 'mocks': [MOCK_VIRT_IS_XEN],
+ },
'OVF-seed': {
'ds': 'OVF',
'files': {
@@ -489,8 +597,9 @@ VALID_CFG = {
'mocks': [
{'name': 'blkid', 'ret': 0,
'out': blkid_out(
- [{'DEVNAME': 'vda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()},
- {'DEVNAME': 'sr0', 'TYPE': 'iso9660', 'LABEL': ''}])
+ [{'DEVNAME': 'sr0', 'TYPE': 'iso9660', 'LABEL': ''},
+ {'DEVNAME': 'sr1', 'TYPE': 'iso9660', 'LABEL': 'ignoreme'},
+ {'DEVNAME': 'vda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()}]),
},
MOCK_VIRT_IS_VMWARE,
],
@@ -522,6 +631,52 @@ VALID_CFG = {
},
],
},
+ 'Hetzner': {
+ 'ds': 'Hetzner',
+ 'files': {P_SYS_VENDOR: 'Hetzner\n'},
+ },
+ 'IBMCloud-metadata': {
+ 'ds': 'IBMCloud',
+ 'mocks': [
+ MOCK_VIRT_IS_XEN,
+ {'name': 'blkid', 'ret': 0,
+ 'out': blkid_out(
+ [{'DEVNAME': 'xvda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()},
+ {'DEVNAME': 'xvda2', 'TYPE': 'ext4',
+ 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()},
+ {'DEVNAME': 'xvdb', 'TYPE': 'vfat', 'LABEL': 'METADATA'}]),
+ },
+ ],
+ },
+ 'IBMCloud-config-2': {
+ 'ds': 'IBMCloud',
+ 'mocks': [
+ MOCK_VIRT_IS_XEN,
+ {'name': 'blkid', 'ret': 0,
+ 'out': blkid_out(
+ [{'DEVNAME': 'xvda1', 'TYPE': 'ext3', 'PARTUUID': uuid4(),
+ 'UUID': uuid4(), 'LABEL': 'cloudimg-bootfs'},
+ {'DEVNAME': 'xvdb', 'TYPE': 'vfat', 'LABEL': 'config-2',
+ 'UUID': dsibm.IBM_CONFIG_UUID},
+ {'DEVNAME': 'xvda2', 'TYPE': 'ext4',
+ 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4(),
+ 'UUID': uuid4()},
+ ]),
+ },
+ ],
+ },
+ 'IBMCloud-nodisks': {
+ 'ds': 'IBMCloud',
+ 'mocks': [
+ MOCK_VIRT_IS_XEN,
+ {'name': 'blkid', 'ret': 0,
+ 'out': blkid_out(
+ [{'DEVNAME': 'xvda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()},
+ {'DEVNAME': 'xvda2', 'TYPE': 'ext4',
+ 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()}]),
+ },
+ ],
+ },
}
# vi: ts=4 expandtab
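
Note: the VALID_CFG entries above pass their disk dictionaries through blkid_out(), whose body lies outside this hunk; only its signature and docstring are visible. A minimal sketch of what such a helper could look like, assuming `blkid -o export` style key=value blocks separated by blank lines (illustrative only, not necessarily the module's real implementation):

    def blkid_out(disks=None):
        """Convert a list of disk dictionaries into blkid -o export content."""
        if disks is None:
            disks = []
        lines = []
        for disk in disks:
            devname = disk.get('DEVNAME', '')
            if devname and not devname.startswith('/dev/'):
                devname = '/dev/' + devname
            lines.append('DEVNAME=%s' % devname)
            for key, value in sorted(disk.items()):
                if key != 'DEVNAME':
                    lines.append('%s=%s' % (key, value))
            lines.append('')  # a blank line terminates each device block
        return '\n'.join(lines)

Under that assumption, the METADATA disk of the 'IBMCloud-metadata' entry would render roughly as DEVNAME=/dev/xvdb, LABEL=METADATA, TYPE=vfat.
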
diff --git a/tests/unittests/test_handler/test_handler_apt_source_v1.py b/tests/unittests/test_handler/test_handler_apt_source_v1.py
index 3a3f95ca..46ca4ce4 100644
--- a/tests/unittests/test_handler/test_handler_apt_source_v1.py
+++ b/tests/unittests/test_handler/test_handler_apt_source_v1.py
@@ -569,7 +569,8 @@ class TestAptSourceConfig(TestCase):
newcfg = cc_apt_configure.convert_to_v3_apt_format(cfg_3_only)
self.assertEqual(newcfg, cfg_3_only)
# collision (unequal)
- with self.assertRaises(ValueError):
+ match = "Old and New.*unequal.*apt_proxy"
+ with self.assertRaisesRegex(ValueError, match):
cc_apt_configure.convert_to_v3_apt_format(cfgconflict)
def test_convert_to_new_format_dict_collision(self):
diff --git a/tests/unittests/test_handler/test_handler_bootcmd.py b/tests/unittests/test_handler/test_handler_bootcmd.py
index dbf43e0d..29fc25e4 100644
--- a/tests/unittests/test_handler/test_handler_bootcmd.py
+++ b/tests/unittests/test_handler/test_handler_bootcmd.py
@@ -3,17 +3,11 @@
from cloudinit.config import cc_bootcmd
from cloudinit.sources import DataSourceNone
from cloudinit import (distros, helpers, cloud, util)
-from cloudinit.tests.helpers import CiTestCase, mock, skipIf
+from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema
import logging
import tempfile
-try:
- import jsonschema
- assert jsonschema # avoid pyflakes error F401: import unused
- _missing_jsonschema_dep = False
-except ImportError:
- _missing_jsonschema_dep = True
LOG = logging.getLogger(__name__)
@@ -69,10 +63,10 @@ class TestBootcmd(CiTestCase):
cc_bootcmd.handle('cc_bootcmd', invalid_config, cc, LOG, [])
self.assertIn('Failed to shellify bootcmd', self.logs.getvalue())
self.assertEqual(
- "'int' object is not iterable",
+ "Input to shellify was type 'int'. Expected list or tuple.",
str(context_manager.exception))
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_handler_schema_validation_warns_non_array_type(self):
"""Schema validation warns of non-array type for bootcmd key.
@@ -88,7 +82,7 @@ class TestBootcmd(CiTestCase):
self.logs.getvalue())
self.assertIn('Failed to shellify', self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, 'No python-jsonschema dependency')
+ @skipUnlessJsonSchema()
def test_handler_schema_validation_warns_non_array_item_type(self):
"""Schema validation warns of non-array or string bootcmd items.
@@ -98,7 +92,7 @@ class TestBootcmd(CiTestCase):
invalid_config = {
'bootcmd': ['ls /', 20, ['wget', 'http://stuff/blah'], {'a': 'n'}]}
cc = self._get_cloud('ubuntu')
- with self.assertRaises(RuntimeError) as context_manager:
+ with self.assertRaises(TypeError) as context_manager:
cc_bootcmd.handle('cc_bootcmd', invalid_config, cc, LOG, [])
expected_warnings = [
'bootcmd.1: 20 is not valid under any of the given schemas',
@@ -110,7 +104,8 @@ class TestBootcmd(CiTestCase):
self.assertIn(warning, logs)
self.assertIn('Failed to shellify', logs)
self.assertEqual(
- 'Unable to shellify type int which is not a list or string',
+ ("Unable to shellify type 'int'. Expected list, string, tuple. "
+ "Got: 20"),
str(context_manager.exception))
def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self):
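
Note: the reworded assertions above come from error messages raised by util.shellify, which turns a user-supplied list of commands into a shell script. A rough, simplified sketch of that behaviour, matching the messages these tests expect (the real helper quotes argv entries more carefully):

    def shellify(cmdlist, add_header=True):
        """Render a list of commands (strings or argv lists) as a shell script."""
        if not isinstance(cmdlist, (tuple, list)):
            raise TypeError(
                "Input to shellify was type '%s'. Expected list or tuple."
                % type(cmdlist).__name__)
        lines = ['#!/bin/sh'] if add_header else []
        for args in cmdlist:
            if isinstance(args, (list, tuple)):
                # argv-style entries get each argument quoted
                lines.append(' '.join("'%s'" % a for a in args))
            elif isinstance(args, str):
                lines.append(args)  # plain strings are taken as raw shell
            else:
                raise TypeError(
                    "Unable to shellify type '%s'. Expected list, string, "
                    "tuple. Got: %r" % (type(args).__name__, args))
        return '\n'.join(lines) + '\n'
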
diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py
index 28a8455d..695897c0 100644
--- a/tests/unittests/test_handler/test_handler_ntp.py
+++ b/tests/unittests/test_handler/test_handler_ntp.py
@@ -3,7 +3,8 @@
from cloudinit.config import cc_ntp
from cloudinit.sources import DataSourceNone
from cloudinit import (distros, helpers, cloud, util)
-from cloudinit.tests.helpers import FilesystemMockingTestCase, mock, skipIf
+from cloudinit.tests.helpers import (
+ FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
import os
@@ -24,13 +25,6 @@ NTP={% for host in servers|list + pools|list %}{{ host }} {% endfor -%}
{% endif -%}
"""
-try:
- import jsonschema
- assert jsonschema # avoid pyflakes error F401: import unused
- _missing_jsonschema_dep = False
-except ImportError:
- _missing_jsonschema_dep = True
-
class TestNtp(FilesystemMockingTestCase):
@@ -312,7 +306,7 @@ class TestNtp(FilesystemMockingTestCase):
content)
self.assertNotIn('Invalid config:', self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_ntp_handler_schema_validation_warns_non_string_item_type(self):
"""Ntp schema validation warns of non-strings in pools or servers.
@@ -333,7 +327,7 @@ class TestNtp(FilesystemMockingTestCase):
content = stream.read()
self.assertEqual("servers ['valid', None]\npools [123]\n", content)
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_ntp_handler_schema_validation_warns_of_non_array_type(self):
"""Ntp schema validation warns of non-array pools or servers types.
@@ -354,7 +348,7 @@ class TestNtp(FilesystemMockingTestCase):
content = stream.read()
self.assertEqual("servers non-array\npools 123\n", content)
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_ntp_handler_schema_validation_warns_invalid_key_present(self):
"""Ntp schema validation warns of invalid keys present in ntp config.
@@ -378,7 +372,7 @@ class TestNtp(FilesystemMockingTestCase):
"servers []\npools ['0.mycompany.pool.ntp.org']\n",
content)
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_ntp_handler_schema_validation_warns_of_duplicates(self):
"""Ntp schema validation warns of duplicates in servers or pools.
diff --git a/tests/unittests/test_handler/test_handler_resizefs.py b/tests/unittests/test_handler/test_handler_resizefs.py
index 5aa3c498..7a7ba1ff 100644
--- a/tests/unittests/test_handler/test_handler_resizefs.py
+++ b/tests/unittests/test_handler/test_handler_resizefs.py
@@ -1,27 +1,20 @@
# This file is part of cloud-init. See LICENSE file for license information.
from cloudinit.config.cc_resizefs import (
- can_skip_resize, handle, maybe_get_writable_device_path, _resize_btrfs)
+ can_skip_resize, handle, maybe_get_writable_device_path, _resize_btrfs,
+ _resize_zfs, _resize_xfs, _resize_ext, _resize_ufs)
from collections import namedtuple
import logging
import textwrap
-from cloudinit.tests.helpers import (CiTestCase, mock, skipIf, util,
- wrap_and_call)
+from cloudinit.tests.helpers import (
+ CiTestCase, mock, skipUnlessJsonSchema, util, wrap_and_call)
LOG = logging.getLogger(__name__)
-try:
- import jsonschema
- assert jsonschema # avoid pyflakes error F401: import unused
- _missing_jsonschema_dep = False
-except ImportError:
- _missing_jsonschema_dep = True
-
-
class TestResizefs(CiTestCase):
with_logs = True
@@ -68,6 +61,9 @@ class TestResizefs(CiTestCase):
res = can_skip_resize(fs_type, resize_what, devpth)
self.assertTrue(res)
+ def test_can_skip_resize_ext(self):
+ self.assertFalse(can_skip_resize('ext', '/', '/dev/sda1'))
+
def test_handle_noops_on_disabled(self):
"""The handle function logs when the configuration disables resize."""
cfg = {'resize_rootfs': False}
@@ -76,7 +72,7 @@ class TestResizefs(CiTestCase):
'DEBUG: Skipping module named cc_resizefs, resizing disabled\n',
self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_handle_schema_validation_logs_invalid_resize_rootfs_value(self):
"""The handle reports json schema violations as a warning.
@@ -130,6 +126,51 @@ class TestResizefs(CiTestCase):
logs = self.logs.getvalue()
self.assertIn("WARNING: Unable to find device '/dev/root'", logs)
+ def test_resize_zfs_cmd_return(self):
+ zpool = 'zroot'
+ devpth = 'gpt/system'
+ self.assertEqual(('zpool', 'online', '-e', zpool, devpth),
+ _resize_zfs(zpool, devpth))
+
+ def test_resize_xfs_cmd_return(self):
+ mount_point = '/mnt/test'
+ devpth = '/dev/sda1'
+ self.assertEqual(('xfs_growfs', mount_point),
+ _resize_xfs(mount_point, devpth))
+
+ def test_resize_ext_cmd_return(self):
+ mount_point = '/'
+ devpth = '/dev/sdb1'
+ self.assertEqual(('resize2fs', devpth),
+ _resize_ext(mount_point, devpth))
+
+ def test_resize_ufs_cmd_return(self):
+ mount_point = '/'
+ devpth = '/dev/sda2'
+ self.assertEqual(('growfs', devpth),
+ _resize_ufs(mount_point, devpth))
+
+ @mock.patch('cloudinit.util.get_mount_info')
+ @mock.patch('cloudinit.util.get_device_info_from_zpool')
+ @mock.patch('cloudinit.util.parse_mount')
+ def test_handle_zfs_root(self, mount_info, zpool_info, parse_mount):
+ devpth = 'vmzroot/ROOT/freebsd'
+ disk = 'gpt/system'
+ fs_type = 'zfs'
+ mount_point = '/'
+
+ mount_info.return_value = (devpth, fs_type, mount_point)
+ zpool_info.return_value = disk
+ parse_mount.return_value = (devpth, fs_type, mount_point)
+
+ cfg = {'resize_rootfs': True}
+
+ with mock.patch('cloudinit.config.cc_resizefs.do_resize') as dresize:
+ handle('cc_resizefs', cfg, _cloud=None, log=LOG, args=[])
+ ret = dresize.call_args[0][0]
+
+ self.assertEqual(('zpool', 'online', '-e', 'vmzroot', disk), ret)
+
class TestRootDevFromCmdline(CiTestCase):
@@ -313,5 +354,12 @@ class TestMaybeGetDevicePathAsWritableBlock(CiTestCase):
('btrfs', 'filesystem', 'resize', 'max', '/'),
_resize_btrfs("/", "/dev/sda1"))
+ @mock.patch('cloudinit.util.is_FreeBSD')
+ def test_maybe_get_writable_device_path_zfs_freebsd(self, freebsd):
+ freebsd.return_value = True
+ info = 'dev=gpt/system mnt_point=/ path=/'
+ devpth = maybe_get_writable_device_path('gpt/system', info, LOG)
+ self.assertEqual('gpt/system', devpth)
+
# vi: ts=4 expandtab
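
Note: the new _resize_* assertions pin down one command tuple per filesystem type. Collected in one place for reference (an illustrative table, not the module's structure; cc_resizefs defines these as separate functions):

    # filesystem type -> builder of the resize argv tuple, as asserted above
    RESIZE_COMMANDS = {
        'btrfs': lambda mnt, dev: ('btrfs', 'filesystem', 'resize', 'max', mnt),
        'ext': lambda mnt, dev: ('resize2fs', dev),
        'xfs': lambda mnt, dev: ('xfs_growfs', mnt),
        'ufs': lambda mnt, dev: ('growfs', dev),
        # for zfs the first argument is the pool and the second the vdev
        'zfs': lambda pool, dev: ('zpool', 'online', '-e', pool, dev),
    }

    assert RESIZE_COMMANDS['zfs']('zroot', 'gpt/system') == (
        'zpool', 'online', '-e', 'zroot', 'gpt/system')
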
diff --git a/tests/unittests/test_handler/test_handler_runcmd.py b/tests/unittests/test_handler/test_handler_runcmd.py
index 374c1d31..dbbb2717 100644
--- a/tests/unittests/test_handler/test_handler_runcmd.py
+++ b/tests/unittests/test_handler/test_handler_runcmd.py
@@ -3,19 +3,13 @@
from cloudinit.config import cc_runcmd
from cloudinit.sources import DataSourceNone
from cloudinit import (distros, helpers, cloud, util)
-from cloudinit.tests.helpers import FilesystemMockingTestCase, skipIf
+from cloudinit.tests.helpers import (
+ FilesystemMockingTestCase, skipUnlessJsonSchema)
import logging
import os
import stat
-try:
- import jsonschema
- assert jsonschema # avoid pyflakes error F401: import unused
- _missing_jsonschema_dep = False
-except ImportError:
- _missing_jsonschema_dep = True
-
LOG = logging.getLogger(__name__)
@@ -56,7 +50,7 @@ class TestRuncmd(FilesystemMockingTestCase):
' /var/lib/cloud/instances/iid-datasource-none/scripts/runcmd',
self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_handler_schema_validation_warns_non_array_type(self):
"""Schema validation warns of non-array type for runcmd key.
@@ -71,7 +65,7 @@ class TestRuncmd(FilesystemMockingTestCase):
self.logs.getvalue())
self.assertIn('Failed to shellify', self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, 'No python-jsonschema dependency')
+ @skipUnlessJsonSchema()
def test_handler_schema_validation_warns_non_array_item_type(self):
"""Schema validation warns of non-array or string runcmd items.
diff --git a/tests/unittests/test_handler/test_handler_set_hostname.py b/tests/unittests/test_handler/test_handler_set_hostname.py
index abdc17e7..d09ec23a 100644
--- a/tests/unittests/test_handler/test_handler_set_hostname.py
+++ b/tests/unittests/test_handler/test_handler_set_hostname.py
@@ -11,6 +11,7 @@ from cloudinit.tests import helpers as t_help
from configobj import ConfigObj
import logging
+import os
import shutil
from six import BytesIO
import tempfile
@@ -19,14 +20,18 @@ LOG = logging.getLogger(__name__)
class TestHostname(t_help.FilesystemMockingTestCase):
+
+ with_logs = True
+
def setUp(self):
super(TestHostname, self).setUp()
self.tmp = tempfile.mkdtemp()
+ util.ensure_dir(os.path.join(self.tmp, 'data'))
self.addCleanup(shutil.rmtree, self.tmp)
def _fetch_distro(self, kind):
cls = distros.fetch(kind)
- paths = helpers.Paths({})
+ paths = helpers.Paths({'cloud_dir': self.tmp})
return cls(kind, {}, paths)
def test_write_hostname_rhel(self):
@@ -34,7 +39,7 @@ class TestHostname(t_help.FilesystemMockingTestCase):
'hostname': 'blah.blah.blah.yahoo.com',
}
distro = self._fetch_distro('rhel')
- paths = helpers.Paths({})
+ paths = helpers.Paths({'cloud_dir': self.tmp})
ds = None
cc = cloud.Cloud(ds, paths, {}, distro, None)
self.patchUtils(self.tmp)
@@ -51,7 +56,7 @@ class TestHostname(t_help.FilesystemMockingTestCase):
'hostname': 'blah.blah.blah.yahoo.com',
}
distro = self._fetch_distro('debian')
- paths = helpers.Paths({})
+ paths = helpers.Paths({'cloud_dir': self.tmp})
ds = None
cc = cloud.Cloud(ds, paths, {}, distro, None)
self.patchUtils(self.tmp)
@@ -65,7 +70,7 @@ class TestHostname(t_help.FilesystemMockingTestCase):
'hostname': 'blah.blah.blah.suse.com',
}
distro = self._fetch_distro('sles')
- paths = helpers.Paths({})
+ paths = helpers.Paths({'cloud_dir': self.tmp})
ds = None
cc = cloud.Cloud(ds, paths, {}, distro, None)
self.patchUtils(self.tmp)
@@ -74,4 +79,48 @@ class TestHostname(t_help.FilesystemMockingTestCase):
contents = util.load_file(distro.hostname_conf_fn)
self.assertEqual('blah', contents.strip())
+ def test_multiple_calls_skips_unchanged_hostname(self):
+ """Only new hostname or fqdn values will generate a hostname call."""
+ distro = self._fetch_distro('debian')
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('hostname1', contents.strip())
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ self.assertIn(
+ 'DEBUG: No hostname changes. Skipping set-hostname\n',
+ self.logs.getvalue())
+ cc_set_hostname.handle(
+ 'cc_set_hostname', {'hostname': 'hostname2.me.com'}, cc, LOG, [])
+ contents = util.load_file("/etc/hostname")
+ self.assertEqual('hostname2', contents.strip())
+ self.assertIn(
+ 'Non-persistently setting the system hostname to hostname2',
+ self.logs.getvalue())
+
+ def test_error_on_distro_set_hostname_errors(self):
+ """Raise SetHostnameError on exceptions from distro.set_hostname."""
+ distro = self._fetch_distro('debian')
+
+ def set_hostname_error(hostname, fqdn):
+ raise Exception("OOPS on: %s" % fqdn)
+
+ distro.set_hostname = set_hostname_error
+ paths = helpers.Paths({'cloud_dir': self.tmp})
+ ds = None
+ cc = cloud.Cloud(ds, paths, {}, distro, None)
+ self.patchUtils(self.tmp)
+ with self.assertRaises(cc_set_hostname.SetHostnameError) as ctx_mgr:
+ cc_set_hostname.handle(
+ 'somename', {'hostname': 'hostname1.me.com'}, cc, LOG, [])
+ self.assertEqual(
+ 'Failed to set the hostname to hostname1.me.com (hostname1):'
+ ' OOPS on: hostname1.me.com',
+ str(ctx_mgr.exception))
+
# vi: ts=4 expandtab
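
Note: test_multiple_calls_skips_unchanged_hostname relies on cc_set_hostname remembering the previously applied hostname under the cloud data dir (hence the new 'cloud_dir' Paths and the ensure_dir of 'data' in setUp). A rough sketch of that kind of check; the cache file name and format here are assumptions for illustration only:

    import json
    import os


    def hostname_changed(data_dir, hostname, fqdn):
        """Return True when hostname/fqdn differ from the last applied pair."""
        cache = os.path.join(data_dir, 'set-hostname')
        current = {'hostname': hostname, 'fqdn': fqdn}
        if os.path.exists(cache):
            with open(cache) as fp:
                if json.load(fp) == current:
                    return False  # unchanged, skip set-hostname this boot
        with open(cache, 'w') as fp:
            json.dump(current, fp)
        return True
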
diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py
index 648573f6..ac41f124 100644
--- a/tests/unittests/test_handler/test_schema.py
+++ b/tests/unittests/test_handler/test_schema.py
@@ -6,7 +6,7 @@ from cloudinit.config.schema import (
validate_cloudconfig_schema, main)
from cloudinit.util import subp, write_file
-from cloudinit.tests.helpers import CiTestCase, mock, skipIf
+from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema
from copy import copy
import os
@@ -14,13 +14,6 @@ from six import StringIO
from textwrap import dedent
from yaml import safe_load
-try:
- import jsonschema
- assert jsonschema # avoid pyflakes error F401: import unused
- _missing_jsonschema_dep = False
-except ImportError:
- _missing_jsonschema_dep = True
-
class GetSchemaTest(CiTestCase):
@@ -33,6 +26,8 @@ class GetSchemaTest(CiTestCase):
'cc_ntp',
'cc_resizefs',
'cc_runcmd',
+ 'cc_snap',
+ 'cc_ubuntu_advantage',
'cc_zypper_add_repo'
],
[subschema['id'] for subschema in schema['allOf']])
@@ -73,7 +68,7 @@ class ValidateCloudConfigSchemaTest(CiTestCase):
with_logs = True
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_validateconfig_schema_non_strict_emits_warnings(self):
"""When strict is False validate_cloudconfig_schema emits warnings."""
schema = {'properties': {'p1': {'type': 'string'}}}
@@ -82,7 +77,7 @@ class ValidateCloudConfigSchemaTest(CiTestCase):
"Invalid config:\np1: -1 is not of type 'string'\n",
self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_validateconfig_schema_emits_warning_on_missing_jsonschema(self):
"""Warning from validate_cloudconfig_schema when missing jsonschema."""
schema = {'properties': {'p1': {'type': 'string'}}}
@@ -92,7 +87,7 @@ class ValidateCloudConfigSchemaTest(CiTestCase):
'Ignoring schema validation. python-jsonschema is not present',
self.logs.getvalue())
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_validateconfig_schema_strict_raises_errors(self):
"""When strict is True validate_cloudconfig_schema raises errors."""
schema = {'properties': {'p1': {'type': 'string'}}}
@@ -102,7 +97,7 @@ class ValidateCloudConfigSchemaTest(CiTestCase):
"Cloud config schema errors: p1: -1 is not of type 'string'",
str(context_mgr.exception))
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_validateconfig_schema_honors_formats(self):
"""With strict True, validate_cloudconfig_schema errors on format."""
schema = {
@@ -153,7 +148,7 @@ class ValidateCloudConfigFileTest(CiTestCase):
self.config_file),
str(context_mgr.exception))
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
     def test_validateconfig_file_strictly_validates_schema(self):
"""validate_cloudconfig_file raises errors on invalid schema."""
schema = {
@@ -336,11 +331,13 @@ class MainTest(CiTestCase):
def test_main_missing_args(self):
"""Main exits non-zero and reports an error on missing parameters."""
- with mock.patch('sys.argv', ['mycmd']):
- with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
- with self.assertRaises(SystemExit) as context_manager:
- main()
- self.assertEqual('1', str(context_manager.exception))
+ with mock.patch('sys.exit', side_effect=self.sys_exit):
+ with mock.patch('sys.argv', ['mycmd']):
+ with mock.patch('sys.stderr', new_callable=StringIO) as \
+ m_stderr:
+ with self.assertRaises(SystemExit) as context_manager:
+ main()
+ self.assertEqual(1, context_manager.exception.code)
self.assertEqual(
'Expected either --config-file argument or --doc\n',
m_stderr.getvalue())
@@ -374,7 +371,7 @@ class CloudTestsIntegrationTest(CiTestCase):
raises Warnings or errors on invalid cloud-config schema.
"""
- @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency")
+ @skipUnlessJsonSchema()
def test_all_integration_test_cloud_config_schema(self):
"""Validate schema of cloud_tests yaml files looking for warnings."""
schema = get_schema()
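
Note: every @skipIf(_missing_jsonschema_dep, ...) in these files is replaced by a shared skipUnlessJsonSchema() decorator imported from cloudinit.tests.helpers. A plausible definition of such a helper, sketched here for context (the project's actual implementation may differ in detail):

    from unittest import skipIf

    try:
        import jsonschema
        assert jsonschema  # avoid pyflakes F401: imported but unused
        _missing_jsonschema_dep = False
    except ImportError:
        _missing_jsonschema_dep = True


    def skipUnlessJsonSchema():
        return skipIf(
            _missing_jsonschema_dep, "No python-jsonschema dependency present.")
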
diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py
index ac33e8ef..c12a487a 100644
--- a/tests/unittests/test_net.py
+++ b/tests/unittests/test_net.py
@@ -12,10 +12,8 @@ from cloudinit.sources.helpers import openstack
from cloudinit import temp_utils
from cloudinit import util
-from cloudinit.tests.helpers import CiTestCase
-from cloudinit.tests.helpers import dir2dict
-from cloudinit.tests.helpers import mock
-from cloudinit.tests.helpers import populate_dir
+from cloudinit.tests.helpers import (
+ CiTestCase, FilesystemMockingTestCase, dir2dict, mock, populate_dir)
import base64
import copy
@@ -395,12 +393,6 @@ NETWORK_CONFIGS = {
eth1:
match:
macaddress: cf:d6:af:48:e8:80
- nameservers:
- addresses:
- - 1.2.3.4
- - 5.6.7.8
- search:
- - wark.maas
set-name: eth1
eth99:
addresses:
@@ -412,12 +404,9 @@ NETWORK_CONFIGS = {
addresses:
- 8.8.8.8
- 8.8.4.4
- - 1.2.3.4
- - 5.6.7.8
search:
- barley.maas
- sach.maas
- - wark.maas
routes:
- to: 0.0.0.0/0
via: 65.61.151.37
@@ -656,81 +645,27 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
eth0:
match:
macaddress: c0:d6:9f:2c:e8:80
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth0
eth1:
match:
macaddress: aa:d6:9f:2c:e8:80
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth1
eth2:
match:
macaddress: c0:bb:9f:2c:e8:80
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth2
eth3:
match:
macaddress: 66:bb:9f:2c:e8:80
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth3
eth4:
match:
macaddress: 98:bb:9f:2c:e8:80
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth4
eth5:
dhcp4: true
match:
macaddress: 98:bb:9f:2c:e8:8a
- nameservers:
- addresses:
- - 8.8.8.8
- - 4.4.4.4
- - 8.8.4.4
- search:
- - barley.maas
- - wark.maas
- - foobar.maas
set-name: eth5
bonds:
bond0:
@@ -750,6 +685,15 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
interfaces:
- eth3
- eth4
+ nameservers:
+ addresses:
+ - 8.8.8.8
+ - 4.4.4.4
+ - 8.8.4.4
+ search:
+ - barley.maas
+ - wark.maas
+ - foobar.maas
parameters:
ageing-time: 250
forward-delay: 1
@@ -758,6 +702,9 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true
path-cost:
eth3: 50
eth4: 75
+ port-priority:
+ eth3: 28
+ eth4: 14
priority: 22
stp: false
routes:
@@ -2183,27 +2130,49 @@ class TestCmdlineConfigParsing(CiTestCase):
self.assertEqual(found, self.simple_cfg)
-class TestCmdlineReadKernelConfig(CiTestCase):
+class TestCmdlineReadKernelConfig(FilesystemMockingTestCase):
macs = {
'eth0': '14:02:ec:42:48:00',
'eno1': '14:02:ec:42:48:01',
}
- def test_ip_cmdline_read_kernel_cmdline_ip(self):
- content = {'net-eth0.conf': DHCP_CONTENT_1}
- files = sorted(populate_dir(self.tmp_dir(), content))
+ def test_ip_cmdline_without_ip(self):
+ content = {'/run/net-eth0.conf': DHCP_CONTENT_1,
+ cmdline._OPEN_ISCSI_INTERFACE_FILE: "eth0\n"}
+ exp1 = copy.deepcopy(DHCP_EXPECTED_1)
+ exp1['mac_address'] = self.macs['eth0']
+
+ root = self.tmp_dir()
+ populate_dir(root, content)
+ self.reRoot(root)
+
found = cmdline.read_kernel_cmdline_config(
- files=files, cmdline='foo ip=dhcp', mac_addrs=self.macs)
+ cmdline='foo root=/root/bar', mac_addrs=self.macs)
+ self.assertEqual(found['version'], 1)
+ self.assertEqual(found['config'], [exp1])
+
+ def test_ip_cmdline_read_kernel_cmdline_ip(self):
+ content = {'/run/net-eth0.conf': DHCP_CONTENT_1}
exp1 = copy.deepcopy(DHCP_EXPECTED_1)
exp1['mac_address'] = self.macs['eth0']
+
+ root = self.tmp_dir()
+ populate_dir(root, content)
+ self.reRoot(root)
+
+ found = cmdline.read_kernel_cmdline_config(
+ cmdline='foo ip=dhcp', mac_addrs=self.macs)
self.assertEqual(found['version'], 1)
self.assertEqual(found['config'], [exp1])
def test_ip_cmdline_read_kernel_cmdline_ip6(self):
- content = {'net6-eno1.conf': DHCP6_CONTENT_1}
- files = sorted(populate_dir(self.tmp_dir(), content))
+ content = {'/run/net6-eno1.conf': DHCP6_CONTENT_1}
+ root = self.tmp_dir()
+ populate_dir(root, content)
+ self.reRoot(root)
+
found = cmdline.read_kernel_cmdline_config(
- files=files, cmdline='foo ip6=dhcp root=/dev/sda',
+ cmdline='foo ip6=dhcp root=/dev/sda',
mac_addrs=self.macs)
self.assertEqual(
found,
@@ -2223,18 +2192,23 @@ class TestCmdlineReadKernelConfig(CiTestCase):
self.assertIsNone(found)
def test_ip_cmdline_both_ip_ip6(self):
- content = {'net-eth0.conf': DHCP_CONTENT_1,
- 'net6-eth0.conf': DHCP6_CONTENT_1.replace('eno1', 'eth0')}
- files = sorted(populate_dir(self.tmp_dir(), content))
- found = cmdline.read_kernel_cmdline_config(
- files=files, cmdline='foo ip=dhcp ip6=dhcp', mac_addrs=self.macs)
-
+ content = {
+ '/run/net-eth0.conf': DHCP_CONTENT_1,
+ '/run/net6-eth0.conf': DHCP6_CONTENT_1.replace('eno1', 'eth0')}
eth0 = copy.deepcopy(DHCP_EXPECTED_1)
eth0['mac_address'] = self.macs['eth0']
eth0['subnets'].append(
{'control': 'manual', 'type': 'dhcp6',
'netmask': '64', 'dns_nameservers': ['2001:67c:1562:8010::2:1']})
expected = [eth0]
+
+ root = self.tmp_dir()
+ populate_dir(root, content)
+ self.reRoot(root)
+
+ found = cmdline.read_kernel_cmdline_config(
+ cmdline='foo ip=dhcp ip6=dhcp', mac_addrs=self.macs)
+
self.assertEqual(found['version'], 1)
self.assertEqual(found['config'], expected)
@@ -2306,6 +2280,9 @@ class TestNetplanRoundTrip(CiTestCase):
def testsimple_render_all(self):
entry = NETWORK_CONFIGS['all']
files = self._render_and_read(network_config=yaml.load(entry['yaml']))
+ print(entry['expected_netplan'])
+ print('-- expected ^ | v rendered --')
+ print(files['/etc/netplan/50-cloud-init.yaml'])
self.assertEqual(
entry['expected_netplan'].splitlines(),
files['/etc/netplan/50-cloud-init.yaml'].splitlines())
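
Note: the reworked TestCmdlineReadKernelConfig cases no longer pass files= explicitly; they populate /run/net-*.conf under a fake root (reRoot) and let read_kernel_cmdline_config decide from the kernel command line, or from the open-iscsi interface marker, whether those files should be read. A much-simplified sketch of that decision, with the marker path assumed for illustration:

    import os

    # assumed location of the marker written by an open-iscsi initramfs
    OPEN_ISCSI_INTERFACE_FILE = '/run/initramfs/open-iscsi.interface'


    def wants_initramfs_network_config(cmdline):
        """Return True if initramfs-written net config should be consulted."""
        tokens = cmdline.split()
        if any(tok.startswith('ip=') or tok.startswith('ip6=')
               for tok in tokens):
            return True
        # an iSCSI root brings networking up in the initramfs even when no
        # explicit ip=/ip6= parameter is present on the command line
        return os.path.exists(OPEN_ISCSI_INTERFACE_FILE)
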
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 4a92e741..8685b8e2 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -8,7 +8,9 @@ import shutil
import stat
import tempfile
+import json
import six
+import sys
import yaml
from cloudinit import importer, util
@@ -364,6 +366,56 @@ class TestMountinfoParsing(helpers.ResourceUsingTestCase):
expected = ('none', 'tmpfs', '/run/lock')
self.assertEqual(expected, util.parse_mount_info('/run/lock', lines))
+ @mock.patch('cloudinit.util.subp')
+ def test_get_device_info_from_zpool(self, zpool_output):
+        # mock the subp call made by util.get_device_info_from_zpool
+ zpool_output.return_value = (
+ self.readResource('zpool_status_simple.txt'), ''
+ )
+ # save function return values and do asserts
+ ret = util.get_device_info_from_zpool('vmzroot')
+ self.assertEqual('gpt/system', ret)
+ self.assertIsNotNone(ret)
+
+ @mock.patch('cloudinit.util.subp')
+ def test_get_device_info_from_zpool_on_error(self, zpool_output):
+        # mock the subp call made by util.get_device_info_from_zpool
+ zpool_output.return_value = (
+ self.readResource('zpool_status_simple.txt'), 'error'
+ )
+ # save function return values and do asserts
+ ret = util.get_device_info_from_zpool('vmzroot')
+ self.assertIsNone(ret)
+
+ @mock.patch('cloudinit.util.subp')
+ def test_parse_mount_with_ext(self, mount_out):
+ mount_out.return_value = (self.readResource('mount_parse_ext.txt'), '')
+ # this one is valid and exists in mount_parse_ext.txt
+ ret = util.parse_mount('/var')
+ self.assertEqual(('/dev/mapper/vg00-lv_var', 'ext4', '/var'), ret)
+ # another one that is valid and exists
+ ret = util.parse_mount('/')
+ self.assertEqual(('/dev/mapper/vg00-lv_root', 'ext4', '/'), ret)
+ # this one exists in mount_parse_ext.txt
+ ret = util.parse_mount('/sys/kernel/debug')
+ self.assertIsNone(ret)
+ # this one does not even exist in mount_parse_ext.txt
+ ret = util.parse_mount('/not/existing/mount')
+ self.assertIsNone(ret)
+
+ @mock.patch('cloudinit.util.subp')
+ def test_parse_mount_with_zfs(self, mount_out):
+ mount_out.return_value = (self.readResource('mount_parse_zfs.txt'), '')
+ # this one is valid and exists in mount_parse_zfs.txt
+ ret = util.parse_mount('/var')
+ self.assertEqual(('vmzroot/ROOT/freebsd/var', 'zfs', '/var'), ret)
+ # this one is the root, valid and also exists in mount_parse_zfs.txt
+ ret = util.parse_mount('/')
+ self.assertEqual(('vmzroot/ROOT/freebsd', 'zfs', '/'), ret)
+        # this one does not even exist in mount_parse_zfs.txt
+ ret = util.parse_mount('/not/existing/mount')
+ self.assertIsNone(ret)
+
class TestReadDMIData(helpers.FilesystemMockingTestCase):
@@ -630,6 +682,24 @@ class TestSubp(helpers.CiTestCase):
# but by using bash, we remove dependency on another program.
return([BASH, '-c', 'printf "$@"', 'printf'] + list(args))
+ def test_subp_handles_bytestrings(self):
+ """subp can run a bytestring command if shell is True."""
+ tmp_file = self.tmp_path('test.out')
+ cmd = 'echo HI MOM >> {tmp_file}'.format(tmp_file=tmp_file)
+ (out, _err) = util.subp(cmd.encode('utf-8'), shell=True)
+ self.assertEqual(u'', out)
+ self.assertEqual(u'', _err)
+ self.assertEqual('HI MOM\n', util.load_file(tmp_file))
+
+ def test_subp_handles_strings(self):
+ """subp can run a string command if shell is True."""
+ tmp_file = self.tmp_path('test.out')
+ cmd = 'echo HI MOM >> {tmp_file}'.format(tmp_file=tmp_file)
+ (out, _err) = util.subp(cmd, shell=True)
+ self.assertEqual(u'', out)
+ self.assertEqual(u'', _err)
+ self.assertEqual('HI MOM\n', util.load_file(tmp_file))
+
def test_subp_handles_utf8(self):
# The given bytes contain utf-8 accented characters as seen in e.g.
# the "deja dup" package in Ubuntu.
@@ -733,6 +803,71 @@ class TestSubp(helpers.CiTestCase):
self.assertEqual("/target/my/path/",
util.target_path("/target/", "///my/path/"))
+ def test_c_lang_can_take_utf8_args(self):
+ """Independent of system LC_CTYPE, args can contain utf-8 strings.
+
+ When python starts up, its default encoding gets set based on
+ the value of LC_CTYPE. If no system locale is set, the default
+ encoding for both python2 and python3 in some paths will end up
+ being ascii.
+        Attempts to use setlocale or to patch (or change) os.environ
+        in the current environment do not seem to be effective.
+ in the current environment seem to not be effective.
+
+ This test starts up a python with LC_CTYPE set to C so that
+ the default encoding will be set to ascii. In such an environment
+ Popen(['command', 'non-ascii-arg']) would cause a UnicodeDecodeError.
+ """
+ python_prog = '\n'.join([
+ 'import json, sys',
+ 'from cloudinit.util import subp',
+ 'data = sys.stdin.read()',
+ 'cmd = json.loads(data)',
+ 'subp(cmd, capture=False)',
+ ''])
+ cmd = [BASH, '-c', 'echo -n "$@"', '--',
+ self.utf8_valid.decode("utf-8")]
+ python_subp = [sys.executable, '-c', python_prog]
+
+ out, _err = util.subp(
+ python_subp, update_env={'LC_CTYPE': 'C'},
+ data=json.dumps(cmd).encode("utf-8"),
+ decode=False)
+ self.assertEqual(self.utf8_valid, out)
+
+ def test_bogus_command_logs_status_messages(self):
+        """status_cb gets status messages when a bogus command is provided."""
+ logs = []
+
+ def status_cb(log):
+ logs.append(log)
+
+ with self.assertRaises(util.ProcessExecutionError):
+ util.subp([self.bogus_command], status_cb=status_cb)
+
+ expected = [
+ 'Begin run command: {cmd}\n'.format(cmd=self.bogus_command),
+ 'ERROR: End run command: invalid command provided\n']
+ self.assertEqual(expected, logs)
+
+ def test_command_logs_exit_codes_to_status_cb(self):
+ """status_cb gets status messages containing command exit code."""
+ logs = []
+
+ def status_cb(log):
+ logs.append(log)
+
+ with self.assertRaises(util.ProcessExecutionError):
+ util.subp(['ls', '/I/dont/exist'], status_cb=status_cb)
+ util.subp(['ls'], status_cb=status_cb)
+
+ expected = [
+ 'Begin run command: ls /I/dont/exist\n',
+ 'ERROR: End run command: exit(2)\n',
+ 'Begin run command: ls\n',
+ 'End run command: exit(0)\n']
+ self.assertEqual(expected, logs)
+
class TestEncode(helpers.TestCase):
"""Test the encoding functions"""