Diffstat (limited to 'tests')
-rw-r--r--  tests/unittests/test_data.py (renamed from tests/unittests/test_userdata.py)  176
-rw-r--r--  tests/unittests/test_datasource/test_opennebula.py                              8
-rw-r--r--  tests/unittests/test_ec2_util.py                                              130
-rw-r--r--  tests/unittests/test_handler/test_handler_growpart.py                          55
-rw-r--r--  tests/unittests/test_runs/test_merge_run.py                                     4
-rw-r--r--  tests/unittests/test_runs/test_simple_run.py                                    4
6 files changed, 314 insertions(+), 63 deletions(-)
diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_data.py
index 5ffe8f0a..68729c57 100644
--- a/tests/unittests/test_userdata.py
+++ b/tests/unittests/test_data.py
@@ -13,6 +13,7 @@ from email.mime.multipart import MIMEMultipart
from cloudinit import handlers
from cloudinit import helpers as c_helpers
from cloudinit import log
+from cloudinit.settings import (PER_INSTANCE)
from cloudinit import sources
from cloudinit import stages
from cloudinit import util
@@ -24,10 +25,11 @@ from tests.unittests import helpers
class FakeDataSource(sources.DataSource):
- def __init__(self, userdata):
+ def __init__(self, userdata=None, vendordata=None):
sources.DataSource.__init__(self, {}, None, None)
self.metadata = {'instance-id': INSTANCE_ID}
self.userdata_raw = userdata
+ self.vendordata_raw = vendordata
# FIXME: these tests shouldn't be checking log output??
@@ -45,6 +47,11 @@ class TestConsumeUserData(helpers.FilesystemMockingTestCase):
if self._log_handler and self._log:
self._log.removeHandler(self._log_handler)
+ def _patchIn(self, root):
+ self.restore()
+ self.patchOS(root)
+ self.patchUtils(root)
+
def capture_log(self, lvl=logging.DEBUG):
log_file = StringIO.StringIO()
self._log_handler = logging.StreamHandler(log_file)
@@ -68,13 +75,89 @@ class TestConsumeUserData(helpers.FilesystemMockingTestCase):
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
cc = util.load_yaml(cc_contents)
self.assertEquals(2, len(cc))
self.assertEquals('qux', cc['baz'])
self.assertEquals('qux2', cc['bar'])
+ def test_simple_jsonp_vendor_and_user(self):
+ # test that user-data wins over vendor
+ user_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "qux" },
+ { "op": "add", "path": "/bar", "value": "qux2" }
+]
+'''
+ vendor_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "quxA" },
+ { "op": "add", "path": "/bar", "value": "quxB" },
+ { "op": "add", "path": "/foo", "value": "quxC" }
+]
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertIn('vendor_data', cfg)
+ self.assertEquals('qux', cfg['baz'])
+ self.assertEquals('qux2', cfg['bar'])
+ self.assertEquals('quxC', cfg['foo'])
+
+ def test_simple_jsonp_no_vendor_consumed(self):
+ # make sure that vendor data is not consumed
+ user_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "qux" },
+ { "op": "add", "path": "/bar", "value": "qux2" },
+ { "op": "add", "path": "/vendor_data", "value": {"enabled": "false"}}
+]
+'''
+ vendor_blob = '''
+#cloud-config-jsonp
+[
+ { "op": "add", "path": "/baz", "value": "quxA" },
+ { "op": "add", "path": "/bar", "value": "quxB" },
+ { "op": "add", "path": "/foo", "value": "quxC" }
+]
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertEquals('qux', cfg['baz'])
+ self.assertEquals('qux2', cfg['bar'])
+ self.assertNotIn('foo', cfg)
+
def test_mixed_cloud_config(self):
blob_cc = '''
#cloud-config
@@ -105,12 +188,87 @@ c: d
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
cc_contents = util.load_file(ci.paths.get_ipath("cloud_config"))
cc = util.load_yaml(cc_contents)
self.assertEquals(1, len(cc))
self.assertEquals('c', cc['a'])
+ def test_vendor_user_yaml_cloud_config(self):
+ vendor_blob = '''
+#cloud-config
+a: b
+name: vendor
+run:
+ - x
+ - y
+'''
+
+ user_blob = '''
+#cloud-config
+a: c
+vendor_data:
+ enabled: True
+ prefix: /bin/true
+name: user
+run:
+ - z
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ cfg = mods.cfg
+ self.assertIn('vendor_data', cfg)
+ self.assertEquals('c', cfg['a'])
+ self.assertEquals('user', cfg['name'])
+ self.assertNotIn('x', cfg['run'])
+ self.assertNotIn('y', cfg['run'])
+ self.assertIn('z', cfg['run'])
+
+ def test_vendordata_script(self):
+ vendor_blob = '''
+#!/bin/bash
+echo "test"
+'''
+
+ user_blob = '''
+#cloud-config
+vendor_data:
+ enabled: True
+ prefix: /bin/true
+'''
+ new_root = self.makeDir()
+ self._patchIn(new_root)
+ initer = stages.Init()
+ initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
+ initer.read_cfg()
+ initer.initialize()
+ initer.fetch()
+ _iid = initer.instancify()
+ initer.update()
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
+ args=[PER_INSTANCE],
+ freq=PER_INSTANCE)
+ mods = stages.Modules(initer)
+ (_which_ran, _failures) = mods.run_section('cloud_init_modules')
+ _cfg = mods.cfg
+ vendor_script = initer.paths.get_ipath_cur('vendor_scripts')
+ vendor_script_fns = "%s%s/part-001" % (new_root, vendor_script)
+ self.assertTrue(os.path.exists(vendor_script_fns))
+
def test_merging_cloud_config(self):
blob = '''
#cloud-config
@@ -185,7 +343,7 @@ p: 1
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertIn(
"Unhandled non-multipart (text/x-not-multipart) userdata:",
log_file.getvalue())
@@ -221,7 +379,7 @@ c: 4
self.patchUtils(new_root)
self.patchOS(new_root)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
contents = util.load_file(ci.paths.get_ipath("cloud_config"))
contents = util.load_yaml(contents)
self.assertTrue(isinstance(contents, dict))
@@ -244,7 +402,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertIn(
"Unhandled unknown content-type (text/plain)",
log_file.getvalue())
@@ -264,7 +422,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
def test_mime_text_x_shellscript(self):
@@ -284,7 +442,7 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
def test_mime_text_plain_shell(self):
@@ -304,5 +462,5 @@ c: 4
log_file = self.capture_log(logging.WARNING)
ci.fetch()
- ci.consume_userdata()
+ ci.consume_data()
self.assertEqual("", log_file.getvalue())
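
The new vendor-data tests above all repeat the same scaffolding: patch the filesystem into a fresh temp root, hand FakeDataSource both a user blob and a vendor blob, drive stages.Init through fetch/instancify/update, run consume_data once per instance, and then inspect mods.cfg. A minimal sketch of how that shared setup could be factored into one helper (not part of this commit; it assumes the helper lives on TestConsumeUserData so makeDir and _patchIn are available):

    def _run_consume_data(self, user_blob, vendor_blob):
        # Sketch only; mirrors the blocks repeated in the tests above.
        new_root = self.makeDir()
        self._patchIn(new_root)
        initer = stages.Init()
        initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob)
        initer.read_cfg()
        initer.initialize()
        initer.fetch()
        _iid = initer.instancify()
        initer.update()
        # consume_data replaces the old consume_userdata entry point and
        # handles user-data and vendor-data in a single pass.
        initer.cloudify().run('consume_data',
                              initer.consume_data,
                              args=[PER_INSTANCE],
                              freq=PER_INSTANCE)
        mods = stages.Modules(initer)
        (_which_ran, _failures) = mods.run_section('cloud_init_modules')
        return mods.cfg
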
diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py
index e1812a88..6fc5b2ac 100644
--- a/tests/unittests/test_datasource/test_opennebula.py
+++ b/tests/unittests/test_datasource/test_opennebula.py
@@ -258,6 +258,14 @@ iface eth0 inet static
''')
+class TestParseShellConfig(MockerTestCase):
+ def test_no_seconds(self):
+ cfg = '\n'.join(["foo=bar", "SECONDS=2", "xx=foo"])
+ # we could test 'sleep 2', but that would make the test run slower.
+ ret = ds.parse_shell_config(cfg)
+ self.assertEqual(ret, {"foo": "bar", "xx": "foo"})
+
+
def populate_context_dir(path, variables):
data = "# Context variables generated by OpenNebula\n"
for (k, v) in variables.iteritems():
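
The new TestParseShellConfig case pins down the behaviour the OpenNebula datasource relies on: context files are shell-style KEY=value assignments, and shell builtins such as SECONDS must not leak into the parsed result. A hedged usage sketch (the import path is an assumption matching the ds alias used in this file; behaviour beyond the single assertion above is not implied):

    from cloudinit.sources import DataSourceOpenNebula as ds  # assumed import path

    cfg = "\n".join(["foo=bar", "SECONDS=2", "xx=foo"])
    # Per the test above, SECONDS is dropped and plain assignments survive.
    print(ds.parse_shell_config(cfg))   # {'foo': 'bar', 'xx': 'foo'}
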
diff --git a/tests/unittests/test_ec2_util.py b/tests/unittests/test_ec2_util.py
new file mode 100644
index 00000000..dd588aca
--- /dev/null
+++ b/tests/unittests/test_ec2_util.py
@@ -0,0 +1,130 @@
+from tests.unittests import helpers
+
+from cloudinit import ec2_utils as eu
+
+import httpretty as hp
+
+
+class TestEc2Util(helpers.TestCase):
+ VERSION = 'latest'
+
+ @hp.activate
+ def test_userdata_fetch(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ body='stuff',
+ status=200)
+ userdata = eu.get_instance_userdata(self.VERSION)
+ self.assertEquals('stuff', userdata)
+
+ @hp.activate
+ def test_userdata_fetch_fail_not_found(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=404)
+ userdata = eu.get_instance_userdata(self.VERSION, retries=0)
+ self.assertEquals('', userdata)
+
+ @hp.activate
+ def test_userdata_fetch_fail_server_dead(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=500)
+ userdata = eu.get_instance_userdata(self.VERSION, retries=0)
+ self.assertEquals('', userdata)
+
+ @hp.activate
+ def test_metadata_fetch_no_keys(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'ami-launch-index']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'ami-launch-index'),
+ status=200, body='1')
+ md = eu.get_instance_metadata(self.VERSION, retries=0)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(md['ami-launch-index'], '1')
+
+ @hp.activate
+ def test_metadata_fetch_key(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'public-keys/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'public-keys/'),
+ status=200, body='0=my-public-key')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/0/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-public-key')
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(1, len(md['public-keys']))
+
+ @hp.activate
+ def test_metadata_fetch_with_2_keys(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'public-keys/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'public-keys/'),
+ status=200,
+ body="\n".join(['0=my-public-key', '1=my-other-key']))
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/0/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-public-key')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'public-keys/1/openssh-key'),
+ status=200, body='ssh-rsa AAAA.....wZEf my-other-key')
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ self.assertEquals(2, len(md['public-keys']))
+
+ @hp.activate
+ def test_metadata_fetch_bdm(self):
+ base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
+ hp.register_uri(hp.GET, base_url, status=200,
+ body="\n".join(['hostname',
+ 'instance-id',
+ 'block-device-mapping/']))
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'hostname'),
+ status=200, body='ec2.fake.host.name.com')
+ hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
+ status=200, body='123')
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'block-device-mapping/'),
+ status=200,
+ body="\n".join(['ami', 'ephemeral0']))
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url, 'block-device-mapping/ami'),
+ status=200,
+ body="sdb")
+ hp.register_uri(hp.GET,
+ eu.combine_url(base_url,
+ 'block-device-mapping/ephemeral0'),
+ status=200,
+ body="sdc")
+ md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
+ self.assertEquals(md['hostname'], 'ec2.fake.host.name.com')
+ self.assertEquals(md['instance-id'], '123')
+ bdm = md['block-device-mapping']
+ self.assertEquals(2, len(bdm))
+ self.assertEquals(bdm['ami'], 'sdb')
+ self.assertEquals(bdm['ephemeral0'], 'sdc')
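
Every case in the new test_ec2_util.py follows the same httpretty recipe: register canned responses for the link-local metadata endpoints, then call the ec2_utils helpers and assert on the crawled dictionary. A sketch of one more case in that style (the reservation-id leaf and its value are purely illustrative, not part of this commit):

    @hp.activate
    def test_metadata_fetch_reservation_id(self):
        base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
        hp.register_uri(hp.GET, base_url, status=200,
                        body="\n".join(['instance-id', 'reservation-id']))
        hp.register_uri(hp.GET, eu.combine_url(base_url, 'instance-id'),
                        status=200, body='123')
        hp.register_uri(hp.GET, eu.combine_url(base_url, 'reservation-id'),
                        status=200, body='r-0abc123')
        md = eu.get_instance_metadata(self.VERSION, retries=0, timeout=0.1)
        self.assertEquals(md['instance-id'], '123')
        self.assertEquals(md['reservation-id'], 'r-0abc123')
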
diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py
index c0497e08..996526d3 100644
--- a/tests/unittests/test_handler/test_handler_growpart.py
+++ b/tests/unittests/test_handler/test_handler_growpart.py
@@ -12,50 +12,9 @@ import re
import unittest
# growpart:
-# mode: auto # off, on, auto, 'growpart', 'parted'
+# mode: auto # off, on, auto, 'growpart'
# devices: ['root']
-HELP_PARTED_NO_RESIZE = """
-Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...]
-Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in
-interactive mode.
-
-OPTIONs:
-<SNIP>
-
-COMMANDs:
-<SNIP>
- quit exit program
- rescue START END rescue a lost partition near START
- and END
- resize NUMBER START END resize partition NUMBER and its file
- system
- rm NUMBER delete partition NUMBER
-<SNIP>
-Report bugs to bug-parted@gnu.org
-"""
-
-HELP_PARTED_RESIZE = """
-Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...]
-Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in
-interactive mode.
-
-OPTIONs:
-<SNIP>
-
-COMMANDs:
-<SNIP>
- quit exit program
- rescue START END rescue a lost partition near START
- and END
- resize NUMBER START END resize partition NUMBER and its file
- system
- resizepart NUMBER END resize partition NUMBER
- rm NUMBER delete partition NUMBER
-<SNIP>
-Report bugs to bug-parted@gnu.org
-"""
-
HELP_GROWPART_RESIZE = """
growpart disk partition
rewrite partition table so that partition takes up all the space it can
@@ -122,11 +81,8 @@ class TestConfig(MockerTestCase):
# Order must be correct
self.mocker.order()
- @unittest.skip("until LP: #1212444 fixed")
def test_no_resizers_auto_is_fine(self):
subp = self.mocker.replace(util.subp, passthrough=False)
- subp(['parted', '--help'], env={'LANG': 'C'})
- self.mocker.result((HELP_PARTED_NO_RESIZE, ""))
subp(['growpart', '--help'], env={'LANG': 'C'})
self.mocker.result((HELP_GROWPART_NO_RESIZE, ""))
self.mocker.replay()
@@ -144,15 +100,14 @@ class TestConfig(MockerTestCase):
self.assertRaises(ValueError, self.handle, self.name, config,
self.cloud_init, self.log, self.args)
- @unittest.skip("until LP: #1212444 fixed")
- def test_mode_auto_prefers_parted(self):
+ def test_mode_auto_prefers_growpart(self):
subp = self.mocker.replace(util.subp, passthrough=False)
- subp(['parted', '--help'], env={'LANG': 'C'})
- self.mocker.result((HELP_PARTED_RESIZE, ""))
+ subp(['growpart', '--help'], env={'LANG': 'C'})
+ self.mocker.result((HELP_GROWPART_RESIZE, ""))
self.mocker.replay()
ret = cc_growpart.resizer_factory(mode="auto")
- self.assertTrue(isinstance(ret, cc_growpart.ResizeParted))
+ self.assertTrue(isinstance(ret, cc_growpart.ResizeGrowPart))
def test_handle_with_no_growpart_entry(self):
#if no 'growpart' entry in config, then mode=auto should be used
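
With the parted fallback removed, the updated growpart tests document a simpler contract: resizer selection only probes growpart. A rough usage sketch of the behaviour the mocks stand in for (only resizer_factory and ResizeGrowPart are taken from the diff; anything else here is an assumption):

    # Outside the test, this shells out to `growpart --help` for real; on a
    # host whose growpart advertises resizing, mode="auto" picks growpart.
    resizer = cc_growpart.resizer_factory(mode="auto")
    assert isinstance(resizer, cc_growpart.ResizeGrowPart)
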
diff --git a/tests/unittests/test_runs/test_merge_run.py b/tests/unittests/test_runs/test_merge_run.py
index d9c3a455..5ffe95a2 100644
--- a/tests/unittests/test_runs/test_merge_run.py
+++ b/tests/unittests/test_runs/test_merge_run.py
@@ -35,8 +35,8 @@ class TestMergeRun(helpers.FilesystemMockingTestCase):
initer.datasource.userdata_raw = ud
_iid = initer.instancify()
initer.update()
- initer.cloudify().run('consume_userdata',
- initer.consume_userdata,
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
args=[PER_INSTANCE],
freq=PER_INSTANCE)
mirrors = initer.distro.get_option('package_mirrors')
diff --git a/tests/unittests/test_runs/test_simple_run.py b/tests/unittests/test_runs/test_simple_run.py
index 60ef812a..9a7178d1 100644
--- a/tests/unittests/test_runs/test_simple_run.py
+++ b/tests/unittests/test_runs/test_simple_run.py
@@ -66,8 +66,8 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase):
initer.update()
self.assertTrue(os.path.islink("var/lib/cloud/instance"))
- initer.cloudify().run('consume_userdata',
- initer.consume_userdata,
+ initer.cloudify().run('consume_data',
+ initer.consume_data,
args=[PER_INSTANCE],
freq=PER_INSTANCE)