-rw-r--r--  cloudinit/handlers/__init__.py       15
-rw-r--r--  cloudinit/handlers/cloud_config.py   89
-rw-r--r--  cloudinit/mergers/__init__.py        59
-rw-r--r--  cloudinit/mergers/dict.py            11
-rw-r--r--  cloudinit/mergers/list.py            21
-rw-r--r--  cloudinit/mergers/str.py              5
-rw-r--r--  tests/unittests/test__init__.py      27
-rw-r--r--  tests/unittests/test_merging.py     205
-rw-r--r--  tests/unittests/test_userdata.py     80
9 files changed, 368 insertions, 144 deletions
diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py
index 566b61a7..63fdb948 100644
--- a/cloudinit/handlers/__init__.py
+++ b/cloudinit/handlers/__init__.py
@@ -87,7 +87,7 @@ class Handler(object):
raise NotImplementedError()
-def run_part(mod, data, filename, payload, headers, frequency):
+def run_part(mod, data, filename, payload, frequency, headers):
mod_freq = mod.frequency
if not (mod_freq == PER_ALWAYS or
(frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)):
@@ -98,8 +98,8 @@ def run_part(mod, data, filename, payload, headers, frequency):
mod_ver = int(mod_ver)
except (TypeError, ValueError, AttributeError):
mod_ver = 1
+ content_type = headers['Content-Type']
try:
- content_type = headers['Content-Type']
LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s",
mod, content_type, filename, mod_ver, frequency)
if mod_ver == 3:
@@ -123,17 +123,19 @@ def run_part(mod, data, filename, payload, headers, frequency):
def call_begin(mod, data, frequency):
+ # Create a fake header set
headers = {
'Content-Type': CONTENT_START,
}
- run_part(mod, data, None, None, headers, frequency)
+ run_part(mod, data, None, None, frequency, headers)
def call_end(mod, data, frequency):
+ # Create a fake header set
headers = {
'Content-Type': CONTENT_END,
}
- run_part(mod, data, None, None, headers, frequency)
+ run_part(mod, data, None, None, frequency, headers)
def walker_handle_handler(pdata, _ctype, _filename, payload):
@@ -191,12 +193,12 @@ def walker_callback(data, filename, payload, headers):
handlers = data['handlers']
if content_type in handlers:
run_part(handlers[content_type], data['data'], filename,
- payload, headers, data['frequency'])
+ payload, data['frequency'], headers)
elif payload:
# Extract the first line or 24 bytes for displaying in the log
start = _extract_first_or_bytes(payload, 24)
details = "'%s...'" % (_escape_string(start))
- if ctype == NOT_MULTIPART_TYPE:
+ if content_type == NOT_MULTIPART_TYPE:
LOG.warning("Unhandled non-multipart (%s) userdata: %s",
content_type, details)
else:
@@ -224,6 +226,7 @@ def walk(msg, callback, data):
filename = PART_FN_TPL % (partnum)
headers = dict(part)
+ LOG.debug(headers)
headers['Content-Type'] = ctype
callback(data, filename, part.get_payload(decode=True), headers)
partnum = partnum + 1
diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py
index 02a7ad9d..d458dee2 100644
--- a/cloudinit/handlers/cloud_config.py
+++ b/cloudinit/handlers/cloud_config.py
@@ -29,16 +29,19 @@ from cloudinit.settings import (PER_ALWAYS)
LOG = logging.getLogger(__name__)
-DEF_MERGE_TYPE = "list()+dict()+str()"
+DEF_MERGE_TYPE = "list(extend)+dict()+str(append)"
MERGE_HEADER = 'Merge-Type'
class CloudConfigPartHandler(handlers.Handler):
def __init__(self, paths, **_kwargs):
handlers.Handler.__init__(self, PER_ALWAYS, version=3)
- self.cloud_buf = {}
+ self.cloud_buf = None
self.cloud_fn = paths.get_ipath("cloud_config")
self.file_names = []
+ self.mergers = [
+ mergers.string_extract_mergers(DEF_MERGE_TYPE),
+ ]
def list_types(self):
return [
@@ -48,50 +51,64 @@ class CloudConfigPartHandler(handlers.Handler):
def _write_cloud_config(self, buf):
if not self.cloud_fn:
return
- # Write the combined & merged dictionary/yaml out
- lines = [
- "#cloud-config",
- '',
- ]
- # Write which files we merged from
+ # Capture which files we merged from...
+ file_lines = []
if self.file_names:
- lines.append("# from %s files" % (len(self.file_names)))
+ file_lines.append("# from %s files" % (len(self.file_names)))
for fn in self.file_names:
- lines.append("# %s" % (fn))
- lines.append("")
- lines.append(util.yaml_dumps(self.cloud_buf))
+ file_lines.append("# %s" % (fn))
+ file_lines.append("")
+ if self.cloud_buf is not None:
+ lines = [
+ "#cloud-config",
+ '',
+ ]
+ lines.extend(file_lines)
+ lines.append(util.yaml_dumps(self.cloud_buf))
+ else:
+ lines = []
util.write_file(self.cloud_fn, "\n".join(lines), 0600)
- def _merge_header_extract(self, payload_yaml):
- merge_header_yaml = ''
- for k in [MERGE_HEADER, MERGE_HEADER.lower(),
- MERGE_HEADER.lower().replace("-", "_")]:
- if k in payload_yaml:
- merge_header_yaml = str(payload_yaml[k])
+ def _extract_mergers(self, payload, headers):
+ merge_header_headers = ''
+ for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]:
+ tmp_h = headers.get(h, '')
+ if tmp_h:
+ merge_header_headers = tmp_h
break
- return merge_header_yaml
-
- def _merge_part(self, payload, headers):
- merge_header_headers = headers.get(MERGE_HEADER, '')
- payload_yaml = util.load_yaml(payload)
- merge_how = ''
# Select either the merge-type from the content
# or the merge type from the headers or default to our own set
- # if neither exists (or is empty) from the later
- merge_header_yaml = self._merge_header_extract(payload_yaml)
- for merge_i in [merge_header_yaml, merge_header_headers]:
- merge_i = merge_i.strip().lower()
- if merge_i:
- merge_how = merge_i
- break
- if not merge_how:
- merge_how = DEF_MERGE_TYPE
- merger = mergers.construct(merge_how)
- self.cloud_buf = merger.merge(self.cloud_buf, payload_yaml)
+ # if neither exists (or is empty) from the latter.
+ payload_yaml = util.load_yaml(payload)
+ mergers_yaml = mergers.dict_extract_mergers(payload_yaml)
+ mergers_header = mergers.string_extract_mergers(merge_header_headers)
+ all_mergers = []
+ all_mergers.extend(mergers_yaml)
+ all_mergers.extend(mergers_header)
+ if not all_mergers:
+ all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE)
+ return all_mergers
+
+ def _merge_part(self, payload, headers):
+ next_mergers = self._extract_mergers(payload, headers)
+ # Use the merger list from the last call, since it is the one
+ # that will be defining how to merge with the next payload.
+ curr_mergers = list(self.mergers[-1])
+ LOG.debug("Merging with %s", curr_mergers)
+ self.mergers.append(next_mergers)
+ merger = mergers.construct(curr_mergers)
+ if self.cloud_buf is None:
+ # First time through, merge with an empty dict...
+ self.cloud_buf = {}
+ self.cloud_buf = merger.merge(self.cloud_buf,
+ util.load_yaml(payload))
def _reset(self):
self.file_names = []
- self.cloud_buf = {}
+ self.cloud_buf = None
+ self.mergers = [
+ mergers.string_extract_mergers(DEF_MERGE_TYPE),
+ ]
def handle_part(self, _data, ctype, filename, payload, _freq, headers):
if ctype == handlers.CONTENT_START:
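
A minimal usage sketch, assuming the handler code above: a cloud-config part can
now pick its own merge behaviour via the Merge-Type (or X-Merge-Type) MIME header
or a merge_type key inside the payload, with DEF_MERGE_TYPE used only when neither
supplies anything. The example parts below are hypothetical and not part of this change.

    from email.mime.base import MIMEBase

    # Header route: attach a Merge-Type header to the part.
    part = MIMEBase("text", "cloud-config")
    part['Merge-Type'] = 'list(extend)+dict()+str(append)'
    part.set_payload("#cloud-config\nrun:\n - one\n")

    # Payload route: a merge_type key inside the YAML is picked up by
    # mergers.dict_extract_mergers(); payload-declared mergers are collected
    # first, then header-declared ones, before falling back to DEF_MERGE_TYPE.
    part2 = MIMEBase("text", "cloud-config")
    part2.set_payload("#cloud-config\n"
                      "merge_type: 'list(extend)+dict()+str()'\n"
                      "run:\n - two\n")
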
diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py
index 20658edc..4a112165 100644
--- a/cloudinit/mergers/__init__.py
+++ b/cloudinit/mergers/__init__.py
@@ -34,6 +34,13 @@ class UnknownMerger(object):
def _handle_unknown(self, meth_wanted, value, merge_with):
return value
+ # This merging will attempt to look for a '_on_X' method
+ # in our own object for a given source object of type X;
+ # if found, that method is called to perform the merge of the
+ # source object and the object to merge with.
+ #
+ # If not found, the merge will be given to a '_handle_unknown'
+ # function which can decide what to do with the 2 values.
def merge(self, source, merge_with):
type_name = util.obj_name(source)
type_name = type_name.lower()
@@ -56,6 +63,11 @@ class LookupMerger(UnknownMerger):
else:
self._lookups = lookups
+ # For items which can not be merged by the parent, this object
+ # will look up, in an internally maintained set of objects,
+ # which one of them can perform the merge. If any of the
+ # contained objects has the needed method, it will be called
+ # to perform the merge.
def _handle_unknown(self, meth_wanted, value, merge_with):
meth = None
for merger in self._lookups:
@@ -70,8 +82,33 @@ class LookupMerger(UnknownMerger):
return meth(value, merge_with)
-def _extract_merger_names(merge_how):
- names = []
+def dict_extract_mergers(config):
+ parsed_mergers = []
+ raw_mergers = config.get('merger_how')
+ if raw_mergers is None:
+ raw_mergers = config.get('merge_type')
+ if raw_mergers is None:
+ return parsed_mergers
+ if isinstance(raw_mergers, (str, basestring)):
+ return string_extract_mergers(raw_mergers)
+ for m in raw_mergers:
+ if isinstance(m, (dict)):
+ name = m['name']
+ name = name.replace("-", "_").strip()
+ opts = m['settings']
+ else:
+ name = m[0]
+ if len(m) >= 2:
+ opts = m[1:]
+ else:
+ opts = []
+ if name:
+ parsed_mergers.append((name, opts))
+ return parsed_mergers
+
+
+def string_extract_mergers(merge_how):
+ parsed_mergers = []
for m_name in merge_how.split("+"):
# Canonicalize the name (so that it can be found
# even when users alter it in various ways)
@@ -79,20 +116,20 @@ def _extract_merger_names(merge_how):
m_name = m_name.replace("-", "_")
if not m_name:
continue
- names.append(m_name)
- return names
-
-
-def construct(merge_how):
- mergers_to_be = []
- for name in _extract_merger_names(merge_how):
- match = NAME_MTCH.match(name)
+ match = NAME_MTCH.match(m_name)
if not match:
- msg = "Matcher identifer '%s' is not in the right format" % (name)
+ msg = "Matcher identifer '%s' is not in the right format" % (m_name)
raise ValueError(msg)
(m_name, m_ops) = match.groups()
m_ops = m_ops.strip().split(",")
m_ops = [m.strip().lower() for m in m_ops if m.strip()]
+ parsed_mergers.append((m_name, m_ops))
+ return parsed_mergers
+
+
+def construct(parsed_mergers):
+ mergers_to_be = []
+ for (m_name, m_ops) in parsed_mergers:
merger_locs = importer.find_module(m_name,
[__name__],
['Merger'])
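
A quick sketch of the reworked extraction/construction API (assuming cloudinit
with these changes applied is importable; the literal values are illustrative):

    from cloudinit import mergers

    # The string form splits on '+' and captures per-merger options.
    mergers.string_extract_mergers("list(extend)+dict()+str(append)")
    # -> [('list', ['extend']), ('dict', []), ('str', ['append'])]

    # The dict form pulls a merge specification out of parsed cloud-config.
    mergers.dict_extract_mergers({'merge_type': 'dict()+list()+str()'})
    # -> [('dict', []), ('list', []), ('str', [])]

    # construct() now takes the parsed (name, options) tuples directly.
    merger = mergers.construct(
        mergers.string_extract_mergers("list(extend)+dict()+str(append)"))
    merger.merge({'run': ['a'], 'msg': 'foo'}, {'run': ['b'], 'msg': 'bar'})
    # -> {'run': ['a', 'b'], 'msg': 'foobar'}
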
diff --git a/cloudinit/mergers/dict.py b/cloudinit/mergers/dict.py
index bc392afa..45a7d3a5 100644
--- a/cloudinit/mergers/dict.py
+++ b/cloudinit/mergers/dict.py
@@ -22,6 +22,17 @@ class Merger(object):
self._merger = merger
self._overwrite = 'overwrite' in opts
+ # This merging algorithm will attempt to merge with
+ # another dictionary; on encountering any other type of object
+ # it will not merge with said object, but will instead return
+ # the original value.
+ #
+ # On encountering a dictionary, it will create a new dictionary
+ # composed of the original and the one to merge with. If 'overwrite'
+ # is enabled then keys that exist in the original will be overwritten
+ # by keys in the one to merge with (and associated values). Otherwise,
+ # when not in overwrite mode, the 2 conflicting keys themselves will
+ # be merged.
def _on_dict(self, value, merge_with):
if not isinstance(merge_with, (dict)):
return value
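
A short sketch of the two dict behaviours described above, mirroring
test_dict_overwrite and test_dict_overwrite_layered further down (assumes
cloudinit is importable; expected results are inferred from those tests):

    from cloudinit import mergers

    base = {'Blah': ['blah2'], 'Blah3': {'f': '3'}}
    new = {'Blah': ['123'], 'Blah3': {'e': '2'}}

    plain = mergers.construct(
        mergers.string_extract_mergers("list()+dict()+str()"))
    plain.merge(base, new)
    # Without 'overwrite' conflicting values are merged themselves:
    # 'Blah' keeps ['blah2'] and 'Blah3' becomes {'f': '3', 'e': '2'}.

    ow = mergers.construct(
        mergers.string_extract_mergers("list()+dict(overwrite)+str()"))
    ow.merge(base, new)
    # With 'overwrite' the merged-in values win outright:
    # 'Blah' becomes ['123'] and 'Blah3' becomes {'e': '2'}.
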
diff --git a/cloudinit/mergers/list.py b/cloudinit/mergers/list.py
index a848b8d6..a56ff007 100644
--- a/cloudinit/mergers/list.py
+++ b/cloudinit/mergers/list.py
@@ -26,21 +26,24 @@ class Merger(object):
def _on_tuple(self, value, merge_with):
return self._on_list(list(value), merge_with)
+ # On encountering a list or tuple type this action will be applied:
+ # a new list will be returned. If the value to merge with is itself
+ # a list and we have been told to 'extend', then the new list will
+ # be extended with the other list; when not in 'extend' mode the
+ # original list is returned unchanged.
+ #
+ # If the value to merge with is not a list, and we are set to discard
+ # non-list values, then no modifications will take place; otherwise we
+ # will just append the value to merge with onto the end of our own list.
def _on_list(self, value, merge_with):
new_value = list(value)
if isinstance(merge_with, (tuple, list)):
if self._extend:
new_value.extend(merge_with)
else:
- # Merge instead
- for m_v in merge_with:
- m_am = 0
- for (i, o_v) in enumerate(new_value):
- if m_v == o_v:
- new_value[i] = self._merger.merge(o_v, m_v)
- m_am += 1
- if m_am == 0:
- new_value.append(m_v)
+ return new_value
else:
if not self._discard_non:
new_value.append(merge_with)
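
A tiny sketch of the simplified list behaviour, matching test_list_extend and
test_basic_merge below (assumes cloudinit is importable):

    from cloudinit import mergers

    extend = mergers.construct(
        mergers.string_extract_mergers("list(extend)+dict()+str()"))
    extend.merge(['abc'], ['123'])   # -> ['abc', '123']

    plain = mergers.construct(
        mergers.string_extract_mergers("list()+dict()+str()"))
    plain.merge(['abc'], ['123'])    # -> ['abc']; non-extend keeps the original
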
diff --git a/cloudinit/mergers/str.py b/cloudinit/mergers/str.py
index 14bc46ec..f1534c5b 100644
--- a/cloudinit/mergers/str.py
+++ b/cloudinit/mergers/str.py
@@ -21,9 +21,14 @@ class Merger(object):
def __init__(self, merger, opts):
self._append = 'append' in opts
+ # On encountering a unicode object to merge with, we will
+ # for now just proxy into the string method and let it handle it.
def _on_unicode(self, value, merge_with):
return self._on_str(value, merge_with)
+ # On encountering a string object to merge with we will
+ # perform the following action: if appending, we will
+ # concatenate them together; otherwise we will just return the value.
def _on_str(self, value, merge_with):
if not self._append:
return value
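
And the string behaviour, mirroring test_string_append below (assumes cloudinit
is importable):

    from cloudinit import mergers

    appender = mergers.construct(
        mergers.string_extract_mergers("list()+dict()+str(append)"))
    appender.merge({'Blah': 'blah2'}, {'Blah': '345'})   # -> {'Blah': 'blah2345'}

    keeper = mergers.construct(
        mergers.string_extract_mergers("list()+dict()+str()"))
    keeper.merge({'Blah': 'blah2'}, {'Blah': '345'})     # -> {'Blah': 'blah2'}
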
diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py
index ac082076..7924755a 100644
--- a/tests/unittests/test__init__.py
+++ b/tests/unittests/test__init__.py
@@ -22,8 +22,10 @@ class FakeModule(handlers.Handler):
def list_types(self):
return self.types
- def _handle_part(self, data, ctype, filename, payload, frequency):
+ def handle_part(self, data, ctype, filename, payload, frequency):
pass
+
+
class TestWalkerHandleHandler(MockerTestCase):
@@ -103,6 +105,9 @@ class TestHandlerHandlePart(MockerTestCase):
self.filename = "fake filename"
self.payload = "fake payload"
self.frequency = settings.PER_INSTANCE
+ self.headers = {
+ 'Content-Type': self.ctype,
+ }
def test_normal_version_1(self):
"""
@@ -118,8 +123,8 @@ class TestHandlerHandlePart(MockerTestCase):
self.payload)
self.mocker.replay()
- handlers.run_part(mod_mock, self.data, self.ctype, self.filename,
- self.payload, self.frequency)
+ handlers.run_part(mod_mock, self.data, self.filename,
+ self.payload, self.frequency, self.headers)
def test_normal_version_2(self):
"""
@@ -135,8 +140,8 @@ class TestHandlerHandlePart(MockerTestCase):
self.payload, self.frequency)
self.mocker.replay()
- handlers.run_part(mod_mock, self.data, self.ctype, self.filename,
- self.payload, self.frequency)
+ handlers.run_part(mod_mock, self.data, self.filename,
+ self.payload, self.frequency, self.headers)
def test_modfreq_per_always(self):
"""
@@ -152,8 +157,8 @@ class TestHandlerHandlePart(MockerTestCase):
self.payload)
self.mocker.replay()
- handlers.run_part(mod_mock, self.data, self.ctype, self.filename,
- self.payload, self.frequency)
+ handlers.run_part(mod_mock, self.data, self.filename,
+ self.payload, self.frequency, self.headers)
def test_no_handle_when_modfreq_once(self):
"""C{handle_part} is not called if frequency is once."""
@@ -163,8 +168,8 @@ class TestHandlerHandlePart(MockerTestCase):
self.mocker.result(settings.PER_ONCE)
self.mocker.replay()
- handlers.run_part(mod_mock, self.data, self.ctype, self.filename,
- self.payload, self.frequency)
+ handlers.run_part(mod_mock, self.data, self.filename,
+ self.payload, self.frequency, self.headers)
def test_exception_is_caught(self):
"""Exceptions within C{handle_part} are caught and logged."""
@@ -178,8 +183,8 @@ class TestHandlerHandlePart(MockerTestCase):
self.mocker.throw(Exception())
self.mocker.replay()
- handlers.run_part(mod_mock, self.data, self.ctype, self.filename,
- self.payload, self.frequency)
+ handlers.run_part(mod_mock, self.data, self.filename,
+ self.payload, self.frequency, self.headers)
class TestCmdlineUrl(MockerTestCase):
diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py
index 0037b966..fa7ee8e4 100644
--- a/tests/unittests/test_merging.py
+++ b/tests/unittests/test_merging.py
@@ -1,62 +1,143 @@
-from mocker import MockerTestCase
-
-from cloudinit import util
-
-
-class TestMergeDict(MockerTestCase):
- def test_simple_merge(self):
- """Test simple non-conflict merge."""
- source = {"key1": "value1"}
- candidate = {"key2": "value2"}
- result = util.mergedict(source, candidate)
- self.assertEqual({"key1": "value1", "key2": "value2"}, result)
-
- def test_nested_merge(self):
- """Test nested merge."""
- source = {"key1": {"key1.1": "value1.1"}}
- candidate = {"key1": {"key1.2": "value1.2"}}
- result = util.mergedict(source, candidate)
- self.assertEqual(
- {"key1": {"key1.1": "value1.1", "key1.2": "value1.2"}}, result)
-
- def test_merge_does_not_override(self):
- """Test that candidate doesn't override source."""
- source = {"key1": "value1", "key2": "value2"}
- candidate = {"key1": "value2", "key2": "NEW VALUE"}
- result = util.mergedict(source, candidate)
- self.assertEqual(source, result)
-
- def test_empty_candidate(self):
- """Test empty candidate doesn't change source."""
- source = {"key": "value"}
- candidate = {}
- result = util.mergedict(source, candidate)
- self.assertEqual(source, result)
-
- def test_empty_source(self):
- """Test empty source is replaced by candidate."""
- source = {}
- candidate = {"key": "value"}
- result = util.mergedict(source, candidate)
- self.assertEqual(candidate, result)
-
- def test_non_dict_candidate(self):
- """Test non-dict candidate is discarded."""
- source = {"key": "value"}
- candidate = "not a dict"
- result = util.mergedict(source, candidate)
- self.assertEqual(source, result)
-
- def test_non_dict_source(self):
- """Test non-dict source is not modified with a dict candidate."""
- source = "not a dict"
- candidate = {"key": "value"}
- result = util.mergedict(source, candidate)
- self.assertEqual(source, result)
-
- def test_neither_dict(self):
- """Test if neither candidate or source is dict source wins."""
- source = "source"
- candidate = "candidate"
- result = util.mergedict(source, candidate)
- self.assertEqual(source, result)
+import os
+
+from tests.unittests import helpers
+
+from cloudinit import mergers
+
+
+class TestSimpleRun(helpers.MockerTestCase):
+ def test_basic_merge(self):
+ source = {
+ 'Blah': ['blah2'],
+ 'Blah3': 'c',
+ }
+ merge_with = {
+ 'Blah2': ['blah3'],
+ 'Blah3': 'b',
+ 'Blah': ['123'],
+ }
+ # A basic merge should not do anything special
+ merge_how = "list()+dict()+str()"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged['Blah'], ['blah2'])
+ self.assertEquals(merged['Blah2'], ['blah3'])
+ self.assertEquals(merged['Blah3'], 'c')
+
+ def test_dict_overwrite(self):
+ source = {
+ 'Blah': ['blah2'],
+ }
+ merge_with = {
+ 'Blah': ['123'],
+ }
+ # Now let's try a dict overwrite
+ merge_how = "list()+dict(overwrite)+str()"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged['Blah'], ['123'])
+
+ def test_string_append(self):
+ source = {
+ 'Blah': 'blah2',
+ }
+ merge_with = {
+ 'Blah': '345',
+ }
+ merge_how = "list()+dict()+str(append)"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged['Blah'], 'blah2345')
+
+ def test_list_extend(self):
+ source = ['abc']
+ merge_with = ['123']
+ merge_how = "list(extend)+dict()+str()"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged, ['abc', '123'])
+
+ def test_deep_merge(self):
+ source = {
+ 'a': [1, 'b', 2],
+ 'b': 'blahblah',
+ 'c': {
+ 'e': [1, 2, 3],
+ 'f': 'bigblobof',
+ 'iamadict': {
+ 'ok': 'ok',
+ }
+ },
+ 'run': [
+ 'runme',
+ 'runme2',
+ ],
+ 'runmereally': [
+ 'e', ['a'], 'd',
+ ],
+ }
+ merge_with = {
+ 'a': ['e', 'f', 'g'],
+ 'b': 'more',
+ 'c': {
+ 'a': 'b',
+ 'f': 'stuff',
+ },
+ 'run': [
+ 'morecmd',
+ 'moremoremore',
+ ],
+ 'runmereally': [
+ 'blah', ['b'], 'e',
+ ],
+ }
+ merge_how = "list(extend)+dict()+str(append)"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged['a'], [1, 'b', 2, 'e', 'f', 'g'])
+ self.assertEquals(merged['b'], 'blahblahmore')
+ self.assertEquals(merged['c']['f'], 'bigblobofstuff')
+ self.assertEquals(merged['run'], ['runme', 'runme2', 'morecmd', 'moremoremore'])
+ self.assertEquals(merged['runmereally'], ['e', ['a'], 'd', 'blah', ['b'], 'e'])
+
+ def test_dict_overwrite_layered(self):
+ source = {
+ 'Blah3': {
+ 'f': '3',
+ 'g': {
+ 'a': 'b',
+ }
+ }
+ }
+ merge_with = {
+ 'Blah3': {
+ 'e': '2',
+ 'g': {
+ 'e': 'f',
+ }
+ }
+ }
+ merge_how = "list()+dict()+str()"
+ merger_set = mergers.string_extract_mergers(merge_how)
+ self.assertEquals(3, len(merger_set))
+ merger = mergers.construct(merger_set)
+ merged = merger.merge(source, merge_with)
+ self.assertEquals(merged['Blah3'], {
+ 'e': '2',
+ 'f': '3',
+ 'g': {
+ 'a': 'b',
+ 'e': 'f',
+ }
+ })
+
diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py
index 82a4c555..9e1fed7e 100644
--- a/tests/unittests/test_userdata.py
+++ b/tests/unittests/test_userdata.py
@@ -9,12 +9,17 @@ from email.mime.base import MIMEBase
from mocker import MockerTestCase
+from cloudinit import handlers
+from cloudinit import helpers as c_helpers
from cloudinit import log
from cloudinit import sources
from cloudinit import stages
+from cloudinit import util
INSTANCE_ID = "i-testing"
+from tests.unittests import helpers
+
class FakeDataSource(sources.DataSource):
@@ -26,22 +31,16 @@ class FakeDataSource(sources.DataSource):
# FIXME: these tests shouldn't be checking log output??
# Weirddddd...
-
-
-class TestConsumeUserData(MockerTestCase):
+class TestConsumeUserData(helpers.FilesystemMockingTestCase):
def setUp(self):
- MockerTestCase.setUp(self)
- # Replace the write so no actual files
- # get written out...
- self.mock_write = self.mocker.replace("cloudinit.util.write_file",
- passthrough=False)
+ helpers.FilesystemMockingTestCase.setUp(self)
self._log = None
self._log_file = None
self._log_handler = None
def tearDown(self):
- MockerTestCase.tearDown(self)
+ helpers.FilesystemMockingTestCase.tearDown(self)
if self._log_handler and self._log:
self._log.removeHandler(self._log_handler)
@@ -53,12 +52,71 @@ class TestConsumeUserData(MockerTestCase):
self._log.addHandler(self._log_handler)
return log_file
+ def test_merging_cloud_config(self):
+ blob = '''
+#cloud-config
+a: b
+e: f
+run:
+ - b
+ - c
+'''
+ message1 = MIMEBase("text", "cloud-config")
+ message1['Merge-Type'] = 'dict()+list(extend)+str(append)'
+ message1.set_payload(blob)
+
+ blob2 = '''
+#cloud-config
+a: e
+e: g
+run:
+ - stuff
+ - morestuff
+'''
+ message2 = MIMEBase("text", "cloud-config")
+ message2['Merge-Type'] = 'dict()+list(extend)+str()'
+ message2.set_payload(blob2)
+
+ blob3 = '''
+#cloud-config
+e:
+ - 1
+ - 2
+ - 3
+'''
+ message3 = MIMEBase("text", "cloud-config")
+ message3['Merge-Type'] = 'dict()+list()+str()'
+ message3.set_payload(blob3)
+
+ messages = [message1, message2, message3]
+
+ paths = c_helpers.Paths({}, ds=FakeDataSource(''))
+ cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths)
+
+ new_root = self.makeDir()
+ self.patchUtils(new_root)
+ self.patchOS(new_root)
+ cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, None)
+ for i, m in enumerate(messages):
+ headers = dict(m)
+ fn = "part-%s" % (i + 1)
+ payload = m.get_payload(decode=True)
+ cloud_cfg.handle_part(None, headers['Content-Type'],
+ fn, payload, None, headers)
+ cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None, None)
+ contents = util.load_file(paths.get_ipath('cloud_config'))
+ contents = util.load_yaml(contents)
+ self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff'])
+ self.assertEquals(contents['a'], 'be')
+ self.assertEquals(contents['e'], 'fg')
+
def test_unhandled_type_warning(self):
"""Raw text without magic is ignored but shows warning."""
ci = stages.Init()
data = "arbitrary text\n"
ci.datasource = FakeDataSource(data)
+ self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False)
self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
self.mocker.replay()
@@ -76,6 +134,7 @@ class TestConsumeUserData(MockerTestCase):
message.set_payload("Just text")
ci.datasource = FakeDataSource(message.as_string())
+ self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False)
self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
self.mocker.replay()
@@ -93,6 +152,7 @@ class TestConsumeUserData(MockerTestCase):
ci.datasource = FakeDataSource(script)
outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
+ self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False)
self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
self.mock_write(outpath, script, 0700)
self.mocker.replay()
@@ -111,6 +171,7 @@ class TestConsumeUserData(MockerTestCase):
ci.datasource = FakeDataSource(message.as_string())
outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
+ self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False)
self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
self.mock_write(outpath, script, 0700)
self.mocker.replay()
@@ -129,6 +190,7 @@ class TestConsumeUserData(MockerTestCase):
ci.datasource = FakeDataSource(message.as_string())
outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
+ self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False)
self.mock_write(outpath, script, 0700)
self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
self.mocker.replay()