author     Ben Howard <ben.howard@canonical.com>   2014-02-07 10:10:02 +0200
committer  Ben Howard <ben.howard@canonical.com>   2014-02-07 10:10:02 +0200
commit     3087da32ec5cf84c51e0ebdd80a71a84b174a392 (patch)
tree       1918a08395e97dbd84bbf43c65c5740bf4ee3365 /tests
parent     4fe439b9e137e0b59d00e919dc16aea9da35082a (diff)
parent     3cfe9b3d8958b1a4e450d5ff31d805c424945027 (diff)
Rebase on upstream
Diffstat (limited to 'tests')
-rw-r--r--  tests/unittests/helpers.py                                  3
-rw-r--r--  tests/unittests/test__init__.py                             6
-rw-r--r--  tests/unittests/test_data.py (renamed from tests/unittests/test_userdata.py)  176
-rw-r--r--  tests/unittests/test_datasource/test_configdrive.py         5
-rw-r--r--  tests/unittests/test_datasource/test_maas.py                5
-rw-r--r--  tests/unittests/test_datasource/test_nocloud.py            35
-rw-r--r--  tests/unittests/test_datasource/test_opennebula.py          8
-rw-r--r--  tests/unittests/test_datasource/test_smartos.py           145
-rw-r--r--  tests/unittests/test_ec2_util.py                          138
-rw-r--r--  tests/unittests/test_handler/test_handler_growpart.py      55
-rw-r--r--  tests/unittests/test_pathprefix2dict.py                    40
-rw-r--r--  tests/unittests/test_runs/test_merge_run.py                 4
-rw-r--r--  tests/unittests/test_runs/test_simple_run.py                4
13 files changed, 545 insertions(+), 79 deletions(-)
diff --git a/tests/unittests/helpers.py b/tests/unittests/helpers.py
index c0da0983..5b4f4208 100644
--- a/tests/unittests/helpers.py
+++ b/tests/unittests/helpers.py
@@ -187,7 +187,8 @@ class FilesystemMockingTestCase(ResourceUsingTestCase):
def populate_dir(path, files):
- os.makedirs(path)
+ if not os.path.exists(path):
+ os.makedirs(path)
for (name, content) in files.iteritems():
with open(os.path.join(path, name), "w") as fp:
fp.write(content)
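
The populate_dir() change above makes the helper idempotent, so it can be pointed at a directory that already exists (for example a seed dir the test harness has already created) without raising OSError/EEXIST. A minimal usage sketch; the path is illustrative only:

    from tests.unittests.helpers import populate_dir

    seed_d = "/tmp/seed/nocloud"   # illustrative path
    populate_dir(seed_d, {"meta-data": "instance-id: IID\n"})
    # a second call against the same directory is now safe
    populate_dir(seed_d, {"user-data": "#cloud-config\n{}\n"})
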
diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py
index b4b20e51..8c41c1ca 100644
--- a/tests/unittests/test__init__.py
+++ b/tests/unittests/test__init__.py
@@ -196,7 +196,7 @@ class TestCmdlineUrl(MockerTestCase):
mock_readurl = self.mocker.replace(url_helper.readurl,
passthrough=False)
mock_readurl(url, ARGS, KWARGS)
- self.mocker.result(util.StringResponse(payload))
+ self.mocker.result(url_helper.StringResponse(payload))
self.mocker.replay()
self.assertEqual((key, url, None),
@@ -212,7 +212,7 @@ class TestCmdlineUrl(MockerTestCase):
mock_readurl = self.mocker.replace(url_helper.readurl,
passthrough=False)
mock_readurl(url, ARGS, KWARGS)
- self.mocker.result(util.StringResponse(payload))
+ self.mocker.result(url_helper.StringResponse(payload))
self.mocker.replay()
self.assertEqual((key, url, payload),
@@ -225,7 +225,7 @@ class TestCmdlineUrl(MockerTestCase):
cmdline = "ro %s=%s bar=1" % (key, url)
self.mocker.replace(url_helper.readurl, passthrough=False)
- self.mocker.result(util.StringResponse(""))
+ self.mocker.result(url_helper.StringResponse(""))
self.mocker.replay()
self.assertEqual((None, None, None),
diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_data.py
index 5ffe8f0a..68729c57 100644
--- a/tests/unittests/test_userdata.py
+++ b/tests/unittests/test_data.py
@@ -13,6 +13,7 @@ from email.mime.multipart import MIMEMultipart
from cloudinit import handlers
from cloudinit import helpers as c_helpers
from cloudinit import log
+from cloudinit.settings import (PER_INSTANCE)
from cloudinit import sources
from cloudinit import stages
from cloudinit import util
@@ -24,10 +25,11 @@ from tests.unittests import helpers
class FakeDataSource(sources.DataSource):
- def __init__(self, userdata):
+ def __init__(self, userdata=None, vendordata=None):
sources.DataSource.__init__(self, {}, None, None)
self.metadata = {'instance-id': INSTANCE_ID}
self.userdata_raw = userdata
+ self.vendordata_raw = vendordata
# FIXME: these tests shouldn't be checking log output??
@@ -45,6 +47,11 @@ class TestConsumeUserData(helpers.FilesystemMockingTestCase):
if self._log_handler and self._log:
self._log.removeHandler(self._log_handler)
+ def _patchIn(self, root):
+ self.restore()
+ self.patchOS(root)
+ self.patchUtils(root)
+
def capture_log(self, lvl=logging.DEBUG):
log_file = StringIO.StringIO()
self._log_handler = logging.StreamHandler(log_file)
@@ -68,13 +75,89 @@ class TestConsumeUserData(helpers.FilesystemMockingTestCase):
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
cc = util.load_yaml(cc_contents)
self.assertEquals(2, len(cc))
self.assertEquals('qux', cc['baz'])
self.assertEquals('qux2', cc['bar'])
+ def test_simple_jsonp_vendor_and_user(self):
+ # test that user-data wins over vendor
+ user_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "qux" },
+ { "op": "add", "path": "/bar", "value": "qux2" }
+]
+'''
+ vendor_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "quxA" },
+ { "op": "add", "path": "/bar", "value": "quxB" },
+ { "op": "add", "path": "/foo", "value": "quxC" }
+]
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertIn('vendor_data', cfg)
+ self.assertEquals('qux', cfg['baz'])
+ self.assertEquals('qux2', cfg['bar'])
+ self.assertEquals('quxC', cfg['foo'])
+
+ def test_simple_jsonp_no_vendor_consumed(self):
+ # make sure that vendor data is not consumed
+ user_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "qux" },
+ { "op": "add", "path": "/bar", "value": "qux2" },
+ { "op": "add", "path": "/vendor_data", "value": {"enabled": "false"}}
+]
+'''
+ vendor_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "quxA" },
+ { "op": "add", "path": "/bar", "value": "quxB" },
+ { "op": "add", "path": "/foo", "value": "quxC" }
+]
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertEquals('qux', cfg['baz'])
+ self.assertEquals('qux2', cfg['bar'])
+ self.assertNotIn('foo', cfg)
+
def test_mixed_cloud_config(self):
blob_cc = '''
#cloud-config
@@ -105,12 +188,87 @@ c: d
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
cc = util.load_yaml(cc_contents)
self.assertEquals(1, len(cc))
self.assertEquals('c', cc['a'])
+ def test_vendor_user_yaml_cloud_config(self):
+ vendor_blob = '''
+#cloud-config
+a: b
+name: vendor
+run:
+ - x
+ - y
+'''
+
+ user_blob = '''
+#cloud-config
+a: c
+vendor_data:
+ enabled: True
+ prefix: /bin/true
+name: user
+run:
+ - z
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertIn('vendor_data', cfg)
+ self.assertEquals('c', cfg['a'])
+ self.assertEquals('user', cfg['name'])
+ self.assertNotIn('x', cfg['run'])
+ self.assertNotIn('y', cfg['run'])
+ self.assertIn('z', cfg['run'])
+
+ def test_vendordata_script(self):
+ vendor_blob = '''
+#!/bin/bash
+echo "test"
+'''
+
+ user_blob = '''
+#cloud-config
+vendor_data:
+ enabled: True
+ prefix: /bin/true
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ _cfg = mods.cfg
+ vendor_script = initer.paths.get_ipath_cur('vendor_scripts')
+ vendor_script_fns = "%s%s/part-001" % (new_root, vendor_script)
+ self.assertTrue(os.path.exists(vendor_script_fns))
+
def test_merging_cloud_config(self):
blob = '''
#cloud-config
@@ -185,7 +343,7 @@ p: 1
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertIn(
"Unhandled non-multipart (text/x-not-multipart) userdata:",
log_file.getvalue())
@@ -221,7 +379,7 @@ c: 4
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
contents = util.load_file(ci.paths.get_ipath("cloud_config"))
contents = util.load_yaml(contents)
self.assertTrue(isinstance(contents, dict))
@@ -244,7 +402,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertIn(
"Unhandled unknown content-type (text/plain)",
log_file.getvalue())
@@ -264,7 +422,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
def test_mime_text_x_shellscript(self):
@@ -284,7 +442,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
def test_mime_text_plain_shell(self):
@@ -304,5 +462,5 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py
index d5935294..1f4a0a0b 100644
--- a/tests/unittests/test_datasource/test_configdrive.py
+++ b/tests/unittests/test_datasource/test_configdrive.py
@@ -285,10 +285,11 @@ class TestConfigDriveDataSource(MockerTestCase):
self.assertEqual(["/dev/vdb", "/dev/zdd"],
ds.find_candidate_devs())
- # verify that partitions are not considered
+ # verify that partitions with the correct label are considered
devs_with_answers = {"TYPE=vfat": ["/dev/sda1"],
"TYPE=iso9660": [], "LABEL=config-2": ["/dev/vdb3"]}
- self.assertEqual([], ds.find_candidate_devs())
+ self.assertEqual(["/dev/vdb3"],
+ ds.find_candidate_devs())
finally:
util.find_devs_with = orig_find_devs_with
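
The config drive change above means correctly labelled partitions now count as candidate devices, not only whole disks. The lookup underneath is util.find_devs_with, which the test stubs out; against a real system it is used roughly like this (device names are illustrative):

    from cloudinit import util

    # partitions carrying the config-2 label are acceptable candidates
    devs = util.find_devs_with("LABEL=config-2")   # e.g. ['/dev/vdb3']
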
diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py
index 2007a6df..bd5d23fd 100644
--- a/tests/unittests/test_datasource/test_maas.py
+++ b/tests/unittests/test_datasource/test_maas.py
@@ -119,9 +119,10 @@ class TestMAASDataSource(mocker.MockerTestCase):
mock_request(url, headers=None, timeout=mocker.ANY,
data=mocker.ANY, sec_between=mocker.ANY,
ssl_details=mocker.ANY, retries=mocker.ANY,
- headers_cb=my_headers_cb)
+ headers_cb=my_headers_cb,
+ exception_cb=mocker.ANY)
resp = valid.get(key)
- self.mocker.result(util.StringResponse(resp))
+ self.mocker.result(url_helper.StringResponse(resp))
self.mocker.replay()
(userdata, metadata) = DataSourceMAAS.read_maas_seed_url(my_seed,
diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py
index 7328b240..af575a10 100644
--- a/tests/unittests/test_datasource/test_nocloud.py
+++ b/tests/unittests/test_datasource/test_nocloud.py
@@ -97,6 +97,41 @@ class TestNoCloudDataSource(MockerTestCase):
self.assertEqual(dsrc.metadata.get('instance-id'), 'IID')
self.assertTrue(ret)
+ def test_nocloud_seed_with_vendordata(self):
+ md = {'instance-id': 'IID', 'dsmode': 'local'}
+ ud = "USER_DATA_HERE"
+ vd = "THIS IS MY VENDOR_DATA"
+
+ populate_dir(os.path.join(self.paths.seed_dir, "nocloud"),
+ {'user-data': ud, 'meta-data': yaml.safe_dump(md),
+ 'vendor-data': vd})
+
+ sys_cfg = {
+ 'datasource': {'NoCloud': {'fs_label': None}}
+ }
+
+ ds = DataSourceNoCloud.DataSourceNoCloud
+
+ dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths)
+ ret = dsrc.get_data()
+ self.assertEqual(dsrc.userdata_raw, ud)
+ self.assertEqual(dsrc.metadata, md)
+ self.assertEqual(dsrc.vendordata, vd)
+ self.assertTrue(ret)
+
+ def test_nocloud_no_vendordata(self):
+ populate_dir(os.path.join(self.paths.seed_dir, "nocloud"),
+ {'user-data': "ud", 'meta-data': "instance-id: IID\n"})
+
+ sys_cfg = {'datasource': {'NoCloud': {'fs_label': None}}}
+
+ ds = DataSourceNoCloud.DataSourceNoCloud
+
+ dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths)
+ ret = dsrc.get_data()
+ self.assertEqual(dsrc.userdata_raw, "ud")
+ self.assertFalse(dsrc.vendordata)
+ self.assertTrue(ret)
class TestParseCommandLineData(MockerTestCase):
diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py
index e1812a88..6fc5b2ac 100644
--- a/tests/unittests/test_datasource/test_opennebula.py
+++ b/tests/unittests/test_datasource/test_opennebula.py
@@ -258,6 +258,14 @@ iface eth0 inet static
''')
+class TestParseShellConfig(MockerTestCase):
+ def test_no_seconds(self):
+ cfg = '\n'.join(["foo=bar", "SECONDS=2", "xx=foo"])
+ # we could test 'sleep 2', but that would make the test run slower.
+ ret = ds.parse_shell_config(cfg)
+ self.assertEqual(ret, {"foo": "bar", "xx": "foo"})
+
+
def populate_context_dir(path, variables):
data = "# Context variables generated by OpenNebula\n"
for (k, v) in variables.iteritems():
diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py
index 956767d8..ae427bb5 100644
--- a/tests/unittests/test_datasource/test_smartos.py
+++ b/tests/unittests/test_datasource/test_smartos.py
@@ -27,6 +27,10 @@ from cloudinit import helpers
from cloudinit.sources import DataSourceSmartOS
from mocker import MockerTestCase
+import os
+import os.path
+import re
+import stat
import uuid
MOCK_RETURNS = {
@@ -35,7 +39,11 @@ MOCK_RETURNS = {
'disable_iptables_flag': None,
'enable_motd_sys_info': None,
'test-var1': 'some data',
- 'user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']),
+ 'cloud-init:user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']),
+ 'sdc:datacenter_name': 'somewhere2',
+ 'sdc:operator-script': '\n'.join(['bin/true', '']),
+ 'user-data': '\n'.join(['something', '']),
+ 'user-script': '\n'.join(['/bin/true', '']),
}
DMI_DATA_RETURN = (str(uuid.uuid4()), 'smartdc')
@@ -101,6 +109,7 @@ class TestSmartOSDataSource(MockerTestCase):
def setUp(self):
# makeDir comes from MockerTestCase
self.tmp = self.makeDir()
+ self.legacy_user_d = self.makeDir()
# patch cloud_dir, so our 'seed_dir' is guaranteed empty
self.paths = helpers.Paths({'cloud_dir': self.tmp})
@@ -138,6 +147,7 @@ class TestSmartOSDataSource(MockerTestCase):
sys_cfg['datasource'] = sys_cfg.get('datasource', {})
sys_cfg['datasource']['SmartOS'] = ds_cfg
+ self.apply_patches([(mod, 'LEGACY_USER_D', self.legacy_user_d)])
self.apply_patches([(mod, 'get_serial', _get_serial)])
self.apply_patches([(mod, 'dmi_data', _dmi_data)])
dsrc = mod.DataSourceSmartOS(sys_cfg, distro=None,
@@ -194,7 +204,7 @@ class TestSmartOSDataSource(MockerTestCase):
# metadata provided base64_all of true
my_returns = MOCK_RETURNS.copy()
my_returns['base64_all'] = "true"
- for k in ('hostname', 'user-data'):
+ for k in ('hostname', 'cloud-init:user-data'):
my_returns[k] = base64.b64encode(my_returns[k])
dsrc = self._get_ds(mockdata=my_returns)
@@ -202,7 +212,7 @@ class TestSmartOSDataSource(MockerTestCase):
self.assertTrue(ret)
self.assertEquals(MOCK_RETURNS['hostname'],
dsrc.metadata['local-hostname'])
- self.assertEquals(MOCK_RETURNS['user-data'],
+ self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
dsrc.userdata_raw)
self.assertEquals(MOCK_RETURNS['root_authorized_keys'],
dsrc.metadata['public-keys'])
@@ -213,9 +223,9 @@ class TestSmartOSDataSource(MockerTestCase):
def test_b64_userdata(self):
my_returns = MOCK_RETURNS.copy()
- my_returns['b64-user-data'] = "true"
+ my_returns['b64-cloud-init:user-data'] = "true"
my_returns['b64-hostname'] = "true"
- for k in ('hostname', 'user-data'):
+ for k in ('hostname', 'cloud-init:user-data'):
my_returns[k] = base64.b64encode(my_returns[k])
dsrc = self._get_ds(mockdata=my_returns)
@@ -223,7 +233,8 @@ class TestSmartOSDataSource(MockerTestCase):
self.assertTrue(ret)
self.assertEquals(MOCK_RETURNS['hostname'],
dsrc.metadata['local-hostname'])
- self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw)
+ self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
+ dsrc.userdata_raw)
self.assertEquals(MOCK_RETURNS['root_authorized_keys'],
dsrc.metadata['public-keys'])
@@ -238,13 +249,131 @@ class TestSmartOSDataSource(MockerTestCase):
self.assertTrue(ret)
self.assertEquals(MOCK_RETURNS['hostname'],
dsrc.metadata['local-hostname'])
- self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw)
+ self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
+ dsrc.userdata_raw)
def test_userdata(self):
dsrc = self._get_ds(mockdata=MOCK_RETURNS)
ret = dsrc.get_data()
self.assertTrue(ret)
- self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw)
+ self.assertEquals(MOCK_RETURNS['user-data'],
+ dsrc.metadata['legacy-user-data'])
+ self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
+ dsrc.userdata_raw)
+
+ def test_sdc_scripts(self):
+ dsrc = self._get_ds(mockdata=MOCK_RETURNS)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertEquals(MOCK_RETURNS['user-script'],
+ dsrc.metadata['user-script'])
+
+ legacy_script_f = "%s/user-script" % self.legacy_user_d
+ self.assertTrue(os.path.exists(legacy_script_f))
+ self.assertTrue(os.path.islink(legacy_script_f))
+ user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
+ self.assertEquals(user_script_perm, '700')
+
+ def test_scripts_shebanged(self):
+ dsrc = self._get_ds(mockdata=MOCK_RETURNS)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertEquals(MOCK_RETURNS['user-script'],
+ dsrc.metadata['user-script'])
+
+ legacy_script_f = "%s/user-script" % self.legacy_user_d
+ self.assertTrue(os.path.exists(legacy_script_f))
+ self.assertTrue(os.path.islink(legacy_script_f))
+ shebang = None
+ with open(legacy_script_f, 'r') as f:
+ shebang = f.readlines()[0].strip()
+ self.assertEquals(shebang, "#!/bin/bash")
+ user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
+ self.assertEquals(user_script_perm, '700')
+
+ def test_scripts_shebang_not_added(self):
+ """
+ Test that scripts which already start with a shebang are left
+ alone: cloud-init must not prepend its own interpreter line to a
+ user script that already declares one (here #!/usr/bin/perl).
+ """
+
+ my_returns = MOCK_RETURNS.copy()
+ my_returns['user-script'] = '\n'.join(['#!/usr/bin/perl',
+ 'print("hi")', ''])
+
+ dsrc = self._get_ds(mockdata=my_returns)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertEquals(my_returns['user-script'],
+ dsrc.metadata['user-script'])
+
+ legacy_script_f = "%s/user-script" % self.legacy_user_d
+ self.assertTrue(os.path.exists(legacy_script_f))
+ self.assertTrue(os.path.islink(legacy_script_f))
+ shebang = None
+ with open(legacy_script_f, 'r') as f:
+ shebang = f.readlines()[0].strip()
+ self.assertEquals(shebang, "#!/usr/bin/perl")
+
+ def test_scripts_removed(self):
+ """
+ Since SmartOS requires that the user script is fetched on
+ each boot, we want to make sure the previous information is
+ backed up for later user review.
+
+ This tests the behavior when a script is removed. It makes
+ sure that a) the previous script is backed up, and b) that
+ no script remains afterwards.
+ """
+
+ script_d = os.path.join(self.tmp, "scripts", "per-boot")
+ os.makedirs(script_d)
+
+ test_script_f = "%s/99_user_script" % script_d
+ with open(test_script_f, 'w') as f:
+ f.write("TEST DATA")
+
+ my_returns = MOCK_RETURNS.copy()
+ del my_returns['user-script']
+
+ dsrc = self._get_ds(mockdata=my_returns)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertFalse(dsrc.metadata['user-script'])
+ self.assertFalse(os.path.exists(test_script_f))
+
+ def test_userdata_removed(self):
+ """
+ User-data in the SmartOS world is supposed to be written to a file
+ each and every boot. This tests to make sure that in the event the
+ legacy user-data is removed, the existing user-data is backed-up and
+ there is no /var/db/user-data left.
+ """
+
+ user_data_f = "%s/mdata-user-data" % self.legacy_user_d
+ with open(user_data_f, 'w') as f:
+ f.write("PREVIOUS")
+
+ my_returns = MOCK_RETURNS.copy()
+ del my_returns['user-data']
+
+ dsrc = self._get_ds(mockdata=my_returns)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertFalse(dsrc.metadata.get('legacy-user-data'))
+
+ found_new = False
+ for root, _dirs, files in os.walk(self.legacy_user_d):
+ for name in files:
+ name_f = os.path.join(root, name)
+ permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:]
+ if re.match(r'.*\/mdata-user-data$', name_f):
+ found_new = True
+ print name_f
+ self.assertEquals(permissions, '400')
+
+ self.assertFalse(found_new)
def test_disable_iptables_flag(self):
dsrc = self._get_ds(mockdata=MOCK_RETURNS)
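
The new SmartOS tests above pin down how the legacy user-script is delivered: on every boot the script is written out (and linked) under the legacy directory, made executable with mode 0700, and given a "#!/bin/bash" shebang only when the payload does not already start with an interpreter line. A rough sketch of that write-out step, assuming a legacy_d path and ignoring the symlink indirection the tests also check -- a model of the asserted behaviour, not the datasource's actual code:

    import os
    import stat

    def write_user_script(legacy_d, content):
        # prepend a shell shebang only if the payload lacks one
        if not content.startswith("#!"):
            content = "#!/bin/bash\n" + content
        script_f = os.path.join(legacy_d, "user-script")
        with open(script_f, "w") as fp:
            fp.write(content)
        os.chmod(script_f, stat.S_IRWXU)   # 0700, as the tests assert
        return script_f
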
diff --git a/tests/unittests/test_ec2_util.py b/tests/unittests/test_ec2_util.py
new file mode 100644
index 00000000..957dc3f2
--- /dev/null
+++ b/tests/unittests/test_ec2_util.py
@@ -0,0 +1,138 @@
+from tests.unittests import helpers
+
+from cloudinit import ec2_utils as eu
+
+import httpretty as hp
+
+
+class TestEc2Util(helpers.TestCase):
+ VERSION = 'latest'
+
+ @hp.activate
+ def test_userdata_fetch(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ body='stuff',
+ status=200)
+ userdata = eu.get_instance_userdata(self.VERSION)
+ self.assertEquals('stuff', userdata)
+
+ @hp.activate
+ def test_userdata_fetch_fail_not_found(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=404)
+ userdata = eu.get_instance_userdata(self.VERSION, retries=0)
+ self.assertEquals('', userdata)
+
+ @hp.activate
+ def test_userdata_fetch_fail_server_dead(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=500)
+ userdata = eu.get_instance_userdata(self.VERSION, retries=0)
+ self.assertEquals('', userdata)
+
+ @hp.activate
+ def test_userdata_fetch_fail_server_not_found(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=404)
+ userdata = eu.get_instance_userdata(self.VERSION)
+ self.assertEquals('', userdata)
+
+ @hp.activate
+ def test_metadata_fetch_no_keys(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'ami-launch-index']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'ami-launch-index'),
+ status=200, body='1')
+ md = eu.get_instance_metadata(self.VERSION, retries=0)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(md['ami-launch-index'], '1')
+
+ @hp.activate
+ def test_metadata_fetch_key(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'public-keys/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'public-keys/'),
+ status=200, body='0=my-public-key')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/0/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-public-key')
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(1, len(md['public-keys']))
+
+ @hp.activate
+ def test_metadata_fetch_with_2_keys(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'public-keys/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'public-keys/'),
+ status=200,
+ body="\n".join(['0=my-public-key', '1=my-other-key']))
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/0/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-public-key')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/1/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-other-key')
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(2, len(md['public-keys']))
+
+ @hp.activate
+ def test_metadata_fetch_bdm(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'block-device-mapping/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'block-device-mapping/'),
+ status=200,
+ body="\n".join(['ami', 'ephemeral0']))
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'block-device-mapping/ami'),
+ status=200,
+ body="sdb")
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url,
+ 'block-device-mapping/ephemeral0'),
+ status=200,
+ body="sdc")
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ bdm = md['block-device-mapping']
+ self.assertEquals(2, len(bdm))
+ self.assertEquals(bdm['ami'], 'sdb')
+ self.assertEquals(bdm['ephemeral0'], 'sdc')
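
test_ec2_util.py exercises cloudinit.ec2_utils against an httpretty-stubbed metadata service. Outside the tests the same helpers crawl the real 169.254.169.254 endpoint: userdata comes back as a string ('' when not found) and metadata as a nested dict, with trailing-slash keys expanded into sub-dictionaries. A brief usage sketch (returned values are illustrative):

    from cloudinit import ec2_utils as eu

    ud = eu.get_instance_userdata('latest', retries=0)
    md = eu.get_instance_metadata('latest', retries=0, timeout=0.1)
    # e.g. md['block-device-mapping'] -> {'ami': 'sdb', 'ephemeral0': 'sdc'}
    #      md['public-keys']          -> one entry per registered key
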
diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py
index c0497e08..996526d3 100644
--- a/tests/unittests/test_handler/test_handler_growpart.py
+++ b/tests/unittests/test_handler/test_handler_growpart.py
@@ -12,50 +12,9 @@ import re
import unittest
# growpart:
-# mode: auto # off, on, auto, 'growpart', 'parted'
+# mode: auto # off, on, auto, 'growpart'
# devices: ['root']
-HELP_PARTED_NO_RESIZE = """
-Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...]
-Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in
-interactive mode.
-
-OPTIONs:
-<SNIP>
-
-COMMANDs:
-<SNIP>
- quit exit program
- rescue START END rescue a lost partition near START
- and END
- resize NUMBER START END resize partition NUMBER and its file
- system
- rm NUMBER delete partition NUMBER
-<SNIP>
-Report bugs to bug-parted@gnu.org
-"""
-
-HELP_PARTED_RESIZE = """
-Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...]
-Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in
-interactive mode.
-
-OPTIONs:
-<SNIP>
-
-COMMANDs:
-<SNIP>
- quit exit program
- rescue START END rescue a lost partition near START
- and END
- resize NUMBER START END resize partition NUMBER and its file
- system
- resizepart NUMBER END resize partition NUMBER
- rm NUMBER delete partition NUMBER
-<SNIP>
-Report bugs to bug-parted@gnu.org
-"""
-
HELP_GROWPART_RESIZE = """
growpart disk partition
rewrite partition table so that partition takes up all the space it can
@@ -122,11 +81,8 @@ class TestConfig(MockerTestCase):
# Order must be correct
self.mocker.order()
- @unittest.skip("until LP: #1212444 fixed")
def test_no_resizers_auto_is_fine(self):
subp = self.mocker.replace(util.subp, passthrough=False)
- subp(['parted', '--help'], env={'LANG': 'C'})
- self.mocker.result((HELP_PARTED_NO_RESIZE, ""))
subp(['growpart', '--help'], env={'LANG': 'C'})
self.mocker.result((HELP_GROWPART_NO_RESIZE, ""))
self.mocker.replay()
@@ -144,15 +100,14 @@ class TestConfig(MockerTestCase):
self.assertRaises(ValueError, self.handle, self.name, config,
self.cloud_init, self.log, self.args)
- @unittest.skip("until LP: #1212444 fixed")
- def test_mode_auto_prefers_parted(self):
+ def test_mode_auto_prefers_growpart(self):
subp = self.mocker.replace(util.subp, passthrough=False)
- subp(['parted', '--help'], env={'LANG': 'C'})
- self.mocker.result((HELP_PARTED_RESIZE, ""))
+ subp(['growpart', '--help'], env={'LANG': 'C'})
+ self.mocker.result((HELP_GROWPART_RESIZE, ""))
self.mocker.replay()
ret = cc_growpart.resizer_factory(mode="auto")
- self.assertTrue(isinstance(ret, cc_growpart.ResizeParted))
+ self.assertTrue(isinstance(ret, cc_growpart.ResizeGrowPart))
def test_handle_with_no_growpart_entry(self):
#if no 'growpart' entry in config, then mode=auto should be used
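
With the parted resizer dropped, resizer_factory(mode="auto") now probes only the growpart utility (by inspecting 'growpart --help' output) and raises when no usable resizer is found; the cc_growpart handler tolerates that for mode=auto (test_no_resizers_auto_is_fine) but re-raises for an explicitly requested mode. A condensed model of that selection -- pick_resizer and supports_resize are stand-in names, not the module's verbatim code:

    from cloudinit import util
    from cloudinit.config import cc_growpart

    def pick_resizer(mode="auto"):
        # condensed model: only growpart is probed, parted never is
        (helptext, _err) = util.subp(['growpart', '--help'], env={'LANG': 'C'})
        if supports_resize(helptext):      # hypothetical help-text predicate
            return cc_growpart.ResizeGrowPart()
        # swallowed by the handler for mode=auto, fatal for explicit modes
        raise ValueError("No resizers available")
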
diff --git a/tests/unittests/test_pathprefix2dict.py b/tests/unittests/test_pathprefix2dict.py
new file mode 100644
index 00000000..c68c263c
--- /dev/null
+++ b/tests/unittests/test_pathprefix2dict.py
@@ -0,0 +1,40 @@
+from cloudinit import util
+
+from mocker import MockerTestCase
+from tests.unittests.helpers import populate_dir
+
+
+class TestPathPrefix2Dict(MockerTestCase):
+
+ def setUp(self):
+ self.tmp = self.makeDir()
+
+ def test_required_only(self):
+ dirdata = {'f1': 'f1content', 'f2': 'f2content'}
+ populate_dir(self.tmp, dirdata)
+
+ ret = util.pathprefix2dict(self.tmp, required=['f1', 'f2'])
+ self.assertEqual(dirdata, ret)
+
+ def test_required_missing(self):
+ dirdata = {'f1': 'f1content'}
+ populate_dir(self.tmp, dirdata)
+ kwargs = {'required': ['f1', 'f2']}
+ self.assertRaises(ValueError, util.pathprefix2dict, self.tmp, **kwargs)
+
+ def test_no_required_and_optional(self):
+ dirdata = {'f1': 'f1c', 'f2': 'f2c'}
+ populate_dir(self.tmp, dirdata)
+
+ ret = util.pathprefix2dict(self.tmp, required=None,
+ optional=['f1', 'f2'])
+ self.assertEqual(dirdata, ret)
+
+ def test_required_and_optional(self):
+ dirdata = {'f1': 'f1c', 'f2': 'f2c'}
+ populate_dir(self.tmp, dirdata)
+
+ ret = util.pathprefix2dict(self.tmp, required=['f1'], optional=['f2'])
+ self.assertEqual(dirdata, ret)
+
+# vi: ts=4 expandtab
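
util.pathprefix2dict, covered by the new tests above, reads a set of files under a directory prefix into a {name: contents} dict and raises ValueError if any required file is missing; optional files that are absent are simply left out of the result. A small usage sketch (the seed path is illustrative):

    from cloudinit import util

    seed_d = "/var/lib/cloud/seed/nocloud"   # illustrative path
    data = util.pathprefix2dict(seed_d,
                                required=['meta-data', 'user-data'],
                                optional=['vendor-data'])
    md_blob = data['meta-data']
    vd_blob = data.get('vendor-data')        # None if the file was absent
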
diff --git a/tests/unittests/test_runs/test_merge_run.py b/tests/unittests/test_runs/test_merge_run.py
index d9c3a455..5ffe95a2 100644
--- a/tests/unittests/test_runs/test_merge_run.py
+++ b/tests/unittests/test_runs/test_merge_run.py
@@ -35,8 +35,8 @@ class TestMergeRun(helpers.FilesystemMockingTestCase):
initer.datasource.userdata_raw = ud
_iid = initer.instancify()
initer.update()
- initer.cloudify().run('consume_userdata',
- initer.consume_userdata,
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
args=[PER_INSTANCE],
freq=PER_INSTANCE)
mirrors = initer.distro.get_option('package_mirrors')
diff --git a/tests/unittests/test_runs/test_simple_run.py b/tests/unittests/test_runs/test_simple_run.py
index 60ef812a..9a7178d1 100644
--- a/tests/unittests/test_runs/test_simple_run.py
+++ b/tests/unittests/test_runs/test_simple_run.py
@@ -66,8 +66,8 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase):
initer.update()
self.assertTrue(os.path.islink("var/lib/cloud/instance"))
- initer.cloudify().run('consume_userdata',
- initer.consume_userdata,
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
args=[PER_INSTANCE],
freq=PER_INSTANCE)