From 8bc85abd97e06d964bbd26208eb732e80eb87c10 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 20 Nov 2012 20:02:48 -0800 Subject: Start allowing different merging types to be applied After user data handling splits apart all the different content types into there various mime messages it is nice to be able to have each message specify how it should be merged (mainly for cloud-config or cloud-archive) into the single cloud config that is eventually used. This starts to add a plugable merging framework and the needed components to activate said headers and merging. --- cloudinit/handlers/__init__.py | 49 +++++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 22 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 8d6dcd4d..bfccfd89 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -69,7 +69,6 @@ INCLUSION_SRCH = sorted(list(INCLUSION_TYPES_MAP.keys()), class Handler(object): - __metaclass__ = abc.ABCMeta def __init__(self, frequency, version=2): @@ -83,15 +82,12 @@ class Handler(object): def list_types(self): raise NotImplementedError() - def handle_part(self, data, ctype, filename, payload, frequency): - return self._handle_part(data, ctype, filename, payload, frequency) - @abc.abstractmethod - def _handle_part(self, data, ctype, filename, payload, frequency): + def handle_part(self, *args, **kwargs): raise NotImplementedError() -def run_part(mod, data, ctype, filename, payload, frequency): +def run_part(mod, data, filename, payload, headers, frequency): mod_freq = mod.frequency if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): @@ -102,19 +98,25 @@ def run_part(mod, data, ctype, filename, payload, frequency): mod_ver = int(mod_ver) except: mod_ver = 1 + content_type = headers['Content-Type'] try: LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", - mod, ctype, filename, mod_ver, frequency) - if mod_ver >= 2: + mod, content_type, filename, mod_ver, frequency) + if mod_ver == 3: + # Treat as v. 3 which does get a frequency + headers + mod.handle_part(data, content_type, filename, + payload, frequency, headers) + elif mod_ver == 2: # Treat as v. 2 which does get a frequency - mod.handle_part(data, ctype, filename, payload, frequency) + mod.handle_part(data, content_type, filename, + payload, frequency) else: # Treat as v. 
1 which gets no frequency - mod.handle_part(data, ctype, filename, payload) + mod.handle_part(data, content_type, filename, payload) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" " with frequency %s"), - mod, ctype, filename, + mod, content_type, filename, mod_ver, frequency) @@ -173,26 +175,27 @@ def _escape_string(text): return text -def walker_callback(pdata, ctype, filename, payload): - if ctype in PART_CONTENT_TYPES: - walker_handle_handler(pdata, ctype, filename, payload) +def walker_callback(data, filename, payload, headers): + content_type = headers['Content-Type'] + if content_type in PART_CONTENT_TYPES: + walker_handle_handler(data, content_type, filename, payload) return - handlers = pdata['handlers'] - if ctype in pdata['handlers']: - run_part(handlers[ctype], pdata['data'], ctype, filename, - payload, pdata['frequency']) + handlers = data['handlers'] + if content_type in handlers: + run_part(handlers[content_type], data['data'], filename, + payload, headers, data['frequency']) elif payload: # Extract the first line or 24 bytes for displaying in the log start = _extract_first_or_bytes(payload, 24) details = "'%s...'" % (_escape_string(start)) if ctype == NOT_MULTIPART_TYPE: LOG.warning("Unhandled non-multipart (%s) userdata: %s", - ctype, details) + content_type, details) else: LOG.warning("Unhandled unknown content-type (%s) userdata: %s", - ctype, details) + content_type, details) else: - LOG.debug("empty payload of type %s" % ctype) + LOG.debug("Empty payload of type %s", content_type) # Callback is a function that will be called with @@ -212,7 +215,9 @@ def walk(msg, callback, data): if not filename: filename = PART_FN_TPL % (partnum) - callback(data, ctype, filename, part.get_payload(decode=True)) + callback(data, ctype, filename, + part.get_payload(decode=True), + dict(part)) partnum = partnum + 1 -- cgit v1.2.3 From eded09c1e260330107a19bd0b5a351686fe49e80 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 22 Nov 2012 08:21:37 -0800 Subject: Continue working on merging prototype. --- cloudinit/handlers/__init__.py | 26 +++++++++++++++++--------- cloudinit/handlers/cloud_config.py | 27 +++++++++++++++++++++------ 2 files changed, 38 insertions(+), 15 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index bfccfd89..566b61a7 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -92,14 +92,14 @@ def run_part(mod, data, filename, payload, headers, frequency): if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): return - mod_ver = mod.handler_version # Sanity checks on version (should be an int convertable) try: + mod_ver = mod.handler_version mod_ver = int(mod_ver) - except: + except (TypeError, ValueError, AttributeError): mod_ver = 1 - content_type = headers['Content-Type'] try: + content_type = headers['Content-Type'] LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, content_type, filename, mod_ver, frequency) if mod_ver == 3: @@ -110,9 +110,11 @@ def run_part(mod, data, filename, payload, headers, frequency): # Treat as v. 2 which does get a frequency mod.handle_part(data, content_type, filename, payload, frequency) - else: + elif mod_ver == 1: # Treat as v. 
1 which gets no frequency mod.handle_part(data, content_type, filename, payload) + else: + raise ValueError("Unknown module version %s" % (mod_ver)) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" " with frequency %s"), @@ -121,11 +123,17 @@ def run_part(mod, data, filename, payload, headers, frequency): def call_begin(mod, data, frequency): - run_part(mod, data, CONTENT_START, None, None, frequency) + headers = { + 'Content-Type': CONTENT_START, + } + run_part(mod, data, None, None, headers, frequency) def call_end(mod, data, frequency): - run_part(mod, data, CONTENT_END, None, None, frequency) + headers = { + 'Content-Type': CONTENT_END, + } + run_part(mod, data, None, None, headers, frequency) def walker_handle_handler(pdata, _ctype, _filename, payload): @@ -215,9 +223,9 @@ def walk(msg, callback, data): if not filename: filename = PART_FN_TPL % (partnum) - callback(data, ctype, filename, - part.get_payload(decode=True), - dict(part)) + headers = dict(part) + headers['Content-Type'] = ctype + callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 86027187..22ced20d 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,6 +29,8 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) +DEF_MERGE_TYPE = "list+dict+str" + class CloudConfigPartHandler(handlers.Handler): def __init__(self, paths, **_kwargs): @@ -44,10 +46,25 @@ class CloudConfigPartHandler(handlers.Handler): def _write_cloud_config(self, buf): if not self.cloud_fn: return - payload = util.yaml_dumps(self.cloud_buf) - util.write_file(self.cloud_fn, payload, 0600) + lines = ["#cloud-config", util.yaml_dumps(self.cloud_buf)] + util.write_file(self.cloud_fn, "\n".join(lines), 0600) + + def _merge_part(self, payload, headers, filename): + merge_how = headers.get("Merge-Type") + try: + payload_y = util.load_yaml(payload) + if not merge_how: + merge_how = payload_y.pop("Merge-Type", '') + merge_how = merge_how.strip().lower() + if not merge_how: + merge_how = DEF_MERGE_TYPE + merger = mergers.construct(merge_how) + self.cloud_buf = merger.merge(self.cloud_buf, payload_y) + except: + util.logexc(LOG, "Failed at merging in cloud config part from %s", + filename) - def handle_part(self, _data, ctype, filename, payload, _frequency, headers): + def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: self.cloud_buf = {} return @@ -55,6 +72,4 @@ class CloudConfigPartHandler(handlers.Handler): self._write_cloud_config(self.cloud_buf) self.cloud_buf = {} return - merge_how = headers.get("Merge-Type", 'list+dict+str') - merger = mergers.construct(merge_how) - self.cloud_buf = merger.merge(self.cloud_buf, util.load_yaml(payload)) + self._merge_part(payload, headers, filename) -- cgit v1.2.3 From 2653a9172e375484b4d0a88c3de56334136fa134 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 5 Mar 2013 19:16:01 -0800 Subject: Add in a bunch of changes and tests. 
--- cloudinit/handlers/__init__.py | 15 +-- cloudinit/handlers/cloud_config.py | 89 +++++++++------- cloudinit/mergers/__init__.py | 59 +++++++++-- cloudinit/mergers/dict.py | 11 ++ cloudinit/mergers/list.py | 21 ++-- cloudinit/mergers/str.py | 5 + tests/unittests/test__init__.py | 27 +++-- tests/unittests/test_merging.py | 205 ++++++++++++++++++++++++++----------- tests/unittests/test_userdata.py | 80 +++++++++++++-- 9 files changed, 368 insertions(+), 144 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 566b61a7..63fdb948 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -87,7 +87,7 @@ class Handler(object): raise NotImplementedError() -def run_part(mod, data, filename, payload, headers, frequency): +def run_part(mod, data, filename, payload, frequency, headers): mod_freq = mod.frequency if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): @@ -98,8 +98,8 @@ def run_part(mod, data, filename, payload, headers, frequency): mod_ver = int(mod_ver) except (TypeError, ValueError, AttributeError): mod_ver = 1 + content_type = headers['Content-Type'] try: - content_type = headers['Content-Type'] LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, content_type, filename, mod_ver, frequency) if mod_ver == 3: @@ -123,17 +123,19 @@ def run_part(mod, data, filename, payload, headers, frequency): def call_begin(mod, data, frequency): + # Create a fake header set headers = { 'Content-Type': CONTENT_START, } - run_part(mod, data, None, None, headers, frequency) + run_part(mod, data, None, None, frequency, headers) def call_end(mod, data, frequency): + # Create a fake header set headers = { 'Content-Type': CONTENT_END, } - run_part(mod, data, None, None, headers, frequency) + run_part(mod, data, None, None, frequency, headers) def walker_handle_handler(pdata, _ctype, _filename, payload): @@ -191,12 +193,12 @@ def walker_callback(data, filename, payload, headers): handlers = data['handlers'] if content_type in handlers: run_part(handlers[content_type], data['data'], filename, - payload, headers, data['frequency']) + payload, data['frequency'], headers) elif payload: # Extract the first line or 24 bytes for displaying in the log start = _extract_first_or_bytes(payload, 24) details = "'%s...'" % (_escape_string(start)) - if ctype == NOT_MULTIPART_TYPE: + if content_type == NOT_MULTIPART_TYPE: LOG.warning("Unhandled non-multipart (%s) userdata: %s", content_type, details) else: @@ -224,6 +226,7 @@ def walk(msg, callback, data): filename = PART_FN_TPL % (partnum) headers = dict(part) + LOG.debug(headers) headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 02a7ad9d..d458dee2 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,16 +29,19 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) -DEF_MERGE_TYPE = "list()+dict()+str()" +DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" MERGE_HEADER = 'Merge-Type' class CloudConfigPartHandler(handlers.Handler): def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS, version=3) - self.cloud_buf = {} + self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") self.file_names = [] + self.mergers = [ + 
mergers.string_extract_mergers(DEF_MERGE_TYPE), + ] def list_types(self): return [ @@ -48,50 +51,64 @@ class CloudConfigPartHandler(handlers.Handler): def _write_cloud_config(self, buf): if not self.cloud_fn: return - # Write the combined & merged dictionary/yaml out - lines = [ - "#cloud-config", - '', - ] - # Write which files we merged from + # Capture which files we merged from... + file_lines = [] if self.file_names: - lines.append("# from %s files" % (len(self.file_names))) + file_lines.append("# from %s files" % (len(self.file_names))) for fn in self.file_names: - lines.append("# %s" % (fn)) - lines.append("") - lines.append(util.yaml_dumps(self.cloud_buf)) + file_lines.append("# %s" % (fn)) + file_lines.append("") + if self.cloud_buf is not None: + lines = [ + "#cloud-config", + '', + ] + lines.extend(file_lines) + lines.append(util.yaml_dumps(self.cloud_buf)) + else: + lines = [] util.write_file(self.cloud_fn, "\n".join(lines), 0600) - def _merge_header_extract(self, payload_yaml): - merge_header_yaml = '' - for k in [MERGE_HEADER, MERGE_HEADER.lower(), - MERGE_HEADER.lower().replace("-", "_")]: - if k in payload_yaml: - merge_header_yaml = str(payload_yaml[k]) + def _extract_mergers(self, payload, headers): + merge_header_headers = '' + for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]: + tmp_h = headers.get(h, '') + if tmp_h: + merge_header_headers = tmp_h break - return merge_header_yaml - - def _merge_part(self, payload, headers): - merge_header_headers = headers.get(MERGE_HEADER, '') - payload_yaml = util.load_yaml(payload) - merge_how = '' # Select either the merge-type from the content # or the merge type from the headers or default to our own set - # if neither exists (or is empty) from the later - merge_header_yaml = self._merge_header_extract(payload_yaml) - for merge_i in [merge_header_yaml, merge_header_headers]: - merge_i = merge_i.strip().lower() - if merge_i: - merge_how = merge_i - break - if not merge_how: - merge_how = DEF_MERGE_TYPE - merger = mergers.construct(merge_how) - self.cloud_buf = merger.merge(self.cloud_buf, payload_yaml) + # if neither exists (or is empty) from the later. + payload_yaml = util.load_yaml(payload) + mergers_yaml = mergers.dict_extract_mergers(payload_yaml) + mergers_header = mergers.string_extract_mergers(merge_header_headers) + all_mergers = [] + all_mergers.extend(mergers_yaml) + all_mergers.extend(mergers_header) + if not all_mergers: + all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE) + return all_mergers + + def _merge_part(self, payload, headers): + next_mergers = self._extract_mergers(payload, headers) + # Use the merger list from the last call, since it is the one + # that will be defining how to merge with the next payload. + curr_mergers = list(self.mergers[-1]) + LOG.debug("Merging with %s", curr_mergers) + self.mergers.append(next_mergers) + merger = mergers.construct(curr_mergers) + if self.cloud_buf is None: + # First time through, merge with an empty dict... 
+ self.cloud_buf = {} + self.cloud_buf = merger.merge(self.cloud_buf, + util.load_yaml(payload)) def _reset(self): self.file_names = [] - self.cloud_buf = {} + self.cloud_buf = None + self.mergers = [ + mergers.string_extract_mergers(DEF_MERGE_TYPE), + ] def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 20658edc..4a112165 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -34,6 +34,13 @@ class UnknownMerger(object): def _handle_unknown(self, meth_wanted, value, merge_with): return value + # This merging will attempt to look for a '_on_X' method + # in our own object for a given object Y with type X, + # if found it will be called to perform the merge of a source + # object and a object to merge_with. + # + # If not found the merge will be given to a '_handle_unknown' + # function which can decide what to do wit the 2 values. def merge(self, source, merge_with): type_name = util.obj_name(source) type_name = type_name.lower() @@ -56,6 +63,11 @@ class LookupMerger(UnknownMerger): else: self._lookups = lookups + # For items which can not be merged by the parent this object + # will lookup in a internally maintained set of objects and + # find which one of those objects can perform the merge. If + # any of the contained objects have the needed method, they + # will be called to perform the merge. def _handle_unknown(self, meth_wanted, value, merge_with): meth = None for merger in self._lookups: @@ -70,8 +82,33 @@ class LookupMerger(UnknownMerger): return meth(value, merge_with) -def _extract_merger_names(merge_how): - names = [] +def dict_extract_mergers(config): + parsed_mergers = [] + raw_mergers = config.get('merger_how') + if raw_mergers is None: + raw_mergers = config.get('merge_type') + if raw_mergers is None: + return parsed_mergers + if isinstance(raw_mergers, (str, basestring)): + return string_extract_mergers(raw_mergers) + for m in raw_mergers: + if isinstance(m, (dict)): + name = m['name'] + name = name.replace("-", "_").strip() + opts = m['settings'] + else: + name = m[0] + if len(m) >= 2: + opts = m[1:] + else: + opts = [] + if name: + parsed_mergers.append((name, opts)) + return parsed_mergers + + +def string_extract_mergers(merge_how): + parsed_mergers = [] for m_name in merge_how.split("+"): # Canonicalize the name (so that it can be found # even when users alter it in various ways) @@ -79,20 +116,20 @@ def _extract_merger_names(merge_how): m_name = m_name.replace("-", "_") if not m_name: continue - names.append(m_name) - return names - - -def construct(merge_how): - mergers_to_be = [] - for name in _extract_merger_names(merge_how): - match = NAME_MTCH.match(name) + match = NAME_MTCH.match(m_name) if not match: - msg = "Matcher identifer '%s' is not in the right format" % (name) + msg = "Matcher identifer '%s' is not in the right format" % (m_name) raise ValueError(msg) (m_name, m_ops) = match.groups() m_ops = m_ops.strip().split(",") m_ops = [m.strip().lower() for m in m_ops if m.strip()] + parsed_mergers.append((m_name, m_ops)) + return parsed_mergers + + +def construct(parsed_mergers): + mergers_to_be = [] + for (m_name, m_ops) in parsed_mergers: merger_locs = importer.find_module(m_name, [__name__], ['Merger']) diff --git a/cloudinit/mergers/dict.py b/cloudinit/mergers/dict.py index bc392afa..45a7d3a5 100644 --- a/cloudinit/mergers/dict.py +++ b/cloudinit/mergers/dict.py @@ -22,6 +22,17 @@ class 
Merger(object): self._merger = merger self._overwrite = 'overwrite' in opts + # This merging algorithm will attempt to merge with + # another dictionary, on encountering any other type of object + # it will not merge with said object, but will instead return + # the original value + # + # On encountering a dictionary, it will create a new dictionary + # composed of the original and the one to merge with, if 'overwrite' + # is enabled then keys that exist in the original will be overwritten + # by keys in the one to merge with (and associated values). Otherwise + # if not in overwrite mode the 2 conflicting keys themselves will + # be merged. def _on_dict(self, value, merge_with): if not isinstance(merge_with, (dict)): return value diff --git a/cloudinit/mergers/list.py b/cloudinit/mergers/list.py index a848b8d6..a56ff007 100644 --- a/cloudinit/mergers/list.py +++ b/cloudinit/mergers/list.py @@ -26,21 +26,24 @@ class Merger(object): def _on_tuple(self, value, merge_with): return self._on_list(list(value), merge_with) + # On encountering a list or tuple type this action will be applied + # a new list will be returned, if the value to merge with is itself + # a list and we have been told to 'extend', then the value here will + # be extended with the other list. If in 'extend' mode then we will + # attempt to merge instead, which means that values from the list + # to merge with will replace values in te original list (they will + # also be merged recursively). + # + # If the value to merge with is not a list, and we are set to discared + # then no modifications will take place, otherwise we will just append + # the value to merge with onto the end of our own list. def _on_list(self, value, merge_with): new_value = list(value) if isinstance(merge_with, (tuple, list)): if self._extend: new_value.extend(merge_with) else: - # Merge instead - for m_v in merge_with: - m_am = 0 - for (i, o_v) in enumerate(new_value): - if m_v == o_v: - new_value[i] = self._merger.merge(o_v, m_v) - m_am += 1 - if m_am == 0: - new_value.append(m_v) + return new_value else: if not self._discard_non: new_value.append(merge_with) diff --git a/cloudinit/mergers/str.py b/cloudinit/mergers/str.py index 14bc46ec..f1534c5b 100644 --- a/cloudinit/mergers/str.py +++ b/cloudinit/mergers/str.py @@ -21,9 +21,14 @@ class Merger(object): def __init__(self, merger, opts): self._append = 'append' in opts + # On encountering a unicode object to merge value with + # we will for now just proxy into the string method to let it handle it. def _on_unicode(self, value, merge_with): return self._on_str(value, merge_with) + # On encountering a string object to merge with we will + # perform the following action, if appending we will + # merge them together, otherwise we will just return value. 
def _on_str(self, value, merge_with): if not self._append: return value diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py index ac082076..7924755a 100644 --- a/tests/unittests/test__init__.py +++ b/tests/unittests/test__init__.py @@ -22,8 +22,10 @@ class FakeModule(handlers.Handler): def list_types(self): return self.types - def _handle_part(self, data, ctype, filename, payload, frequency): + def handle_part(self, data, ctype, filename, payload, frequency): pass + + class TestWalkerHandleHandler(MockerTestCase): @@ -103,6 +105,9 @@ class TestHandlerHandlePart(MockerTestCase): self.filename = "fake filename" self.payload = "fake payload" self.frequency = settings.PER_INSTANCE + self.headers = { + 'Content-Type': self.ctype, + } def test_normal_version_1(self): """ @@ -118,8 +123,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_normal_version_2(self): """ @@ -135,8 +140,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload, self.frequency) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_modfreq_per_always(self): """ @@ -152,8 +157,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_no_handle_when_modfreq_once(self): """C{handle_part} is not called if frequency is once.""" @@ -163,8 +168,8 @@ class TestHandlerHandlePart(MockerTestCase): self.mocker.result(settings.PER_ONCE) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_exception_is_caught(self): """Exceptions within C{handle_part} are caught and logged.""" @@ -178,8 +183,8 @@ class TestHandlerHandlePart(MockerTestCase): self.mocker.throw(Exception()) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) class TestCmdlineUrl(MockerTestCase): diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py index 0037b966..fa7ee8e4 100644 --- a/tests/unittests/test_merging.py +++ b/tests/unittests/test_merging.py @@ -1,62 +1,143 @@ -from mocker import MockerTestCase - -from cloudinit import util - - -class TestMergeDict(MockerTestCase): - def test_simple_merge(self): - """Test simple non-conflict merge.""" - source = {"key1": "value1"} - candidate = {"key2": "value2"} - result = util.mergedict(source, candidate) - self.assertEqual({"key1": "value1", "key2": "value2"}, result) - - def test_nested_merge(self): - """Test nested merge.""" - source = {"key1": {"key1.1": "value1.1"}} - candidate = {"key1": {"key1.2": "value1.2"}} - result = util.mergedict(source, candidate) - self.assertEqual( - {"key1": {"key1.1": "value1.1", "key1.2": "value1.2"}}, result) - - def 
test_merge_does_not_override(self): - """Test that candidate doesn't override source.""" - source = {"key1": "value1", "key2": "value2"} - candidate = {"key1": "value2", "key2": "NEW VALUE"} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_empty_candidate(self): - """Test empty candidate doesn't change source.""" - source = {"key": "value"} - candidate = {} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_empty_source(self): - """Test empty source is replaced by candidate.""" - source = {} - candidate = {"key": "value"} - result = util.mergedict(source, candidate) - self.assertEqual(candidate, result) - - def test_non_dict_candidate(self): - """Test non-dict candidate is discarded.""" - source = {"key": "value"} - candidate = "not a dict" - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_non_dict_source(self): - """Test non-dict source is not modified with a dict candidate.""" - source = "not a dict" - candidate = {"key": "value"} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_neither_dict(self): - """Test if neither candidate or source is dict source wins.""" - source = "source" - candidate = "candidate" - result = util.mergedict(source, candidate) - self.assertEqual(source, result) +import os + +from tests.unittests import helpers + +from cloudinit import mergers + + +class TestSimpleRun(helpers.MockerTestCase): + def test_basic_merge(self): + source = { + 'Blah': ['blah2'], + 'Blah3': 'c', + } + merge_with = { + 'Blah2': ['blah3'], + 'Blah3': 'b', + 'Blah': ['123'], + } + # Basic merge should not do thing special + merge_how = "list()+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], ['blah2']) + self.assertEquals(merged['Blah2'], ['blah3']) + self.assertEquals(merged['Blah3'], 'c') + + def test_dict_overwrite(self): + source = { + 'Blah': ['blah2'], + } + merge_with = { + 'Blah': ['123'], + } + # Now lets try a dict overwrite + merge_how = "list()+dict(overwrite)+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], ['123']) + + def test_string_append(self): + source = { + 'Blah': 'blah2', + } + merge_with = { + 'Blah': '345', + } + merge_how = "list()+dict()+str(append)" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], 'blah2345') + + def test_list_extend(self): + source = ['abc'] + merge_with = ['123'] + merge_how = "list(extend)+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged, ['abc', '123']) + + def test_deep_merge(self): + source = { + 'a': [1, 'b', 2], + 'b': 'blahblah', + 'c': { + 'e': [1, 2, 3], + 'f': 'bigblobof', + 'iamadict': { + 'ok': 'ok', + } + }, + 'run': [ + 'runme', + 'runme2', + ], + 'runmereally': [ + 'e', ['a'], 'd', + ], + } + merge_with = { + 'a': ['e', 'f', 'g'], + 'b': 'more', + 'c': { + 'a': 'b', + 'f': 
'stuff', + }, + 'run': [ + 'morecmd', + 'moremoremore', + ], + 'runmereally': [ + 'blah', ['b'], 'e', + ], + } + merge_how = "list(extend)+dict()+str(append)" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['a'], [1, 'b', 2, 'e', 'f', 'g']) + self.assertEquals(merged['b'], 'blahblahmore') + self.assertEquals(merged['c']['f'], 'bigblobofstuff') + self.assertEquals(merged['run'], ['runme', 'runme2', 'morecmd', 'moremoremore']) + self.assertEquals(merged['runmereally'], ['e', ['a'], 'd', 'blah', ['b'], 'e']) + + def test_dict_overwrite_layered(self): + source = { + 'Blah3': { + 'f': '3', + 'g': { + 'a': 'b', + } + } + } + merge_with = { + 'Blah3': { + 'e': '2', + 'g': { + 'e': 'f', + } + } + } + merge_how = "list()+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah3'], { + 'e': '2', + 'f': '3', + 'g': { + 'a': 'b', + 'e': 'f', + } + }) + diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py index 82a4c555..9e1fed7e 100644 --- a/tests/unittests/test_userdata.py +++ b/tests/unittests/test_userdata.py @@ -9,12 +9,17 @@ from email.mime.base import MIMEBase from mocker import MockerTestCase +from cloudinit import handlers +from cloudinit import helpers as c_helpers from cloudinit import log from cloudinit import sources from cloudinit import stages +from cloudinit import util INSTANCE_ID = "i-testing" +from tests.unittests import helpers + class FakeDataSource(sources.DataSource): @@ -26,22 +31,16 @@ class FakeDataSource(sources.DataSource): # FIXME: these tests shouldn't be checking log output?? # Weirddddd... - - -class TestConsumeUserData(MockerTestCase): +class TestConsumeUserData(helpers.FilesystemMockingTestCase): def setUp(self): - MockerTestCase.setUp(self) - # Replace the write so no actual files - # get written out... 
- self.mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) + helpers.FilesystemMockingTestCase.setUp(self) self._log = None self._log_file = None self._log_handler = None def tearDown(self): - MockerTestCase.tearDown(self) + helpers.FilesystemMockingTestCase.tearDown(self) if self._log_handler and self._log: self._log.removeHandler(self._log_handler) @@ -53,12 +52,71 @@ class TestConsumeUserData(MockerTestCase): self._log.addHandler(self._log_handler) return log_file + def test_merging_cloud_config(self): + blob = ''' +#cloud-config +a: b +e: f +run: + - b + - c +''' + message1 = MIMEBase("text", "cloud-config") + message1['Merge-Type'] = 'dict()+list(extend)+str(append)' + message1.set_payload(blob) + + blob2 = ''' +#cloud-config +a: e +e: g +run: + - stuff + - morestuff +''' + message2 = MIMEBase("text", "cloud-config") + message2['Merge-Type'] = 'dict()+list(extend)+str()' + message2.set_payload(blob2) + + blob3 = ''' +#cloud-config +e: + - 1 + - 2 + - 3 +''' + message3 = MIMEBase("text", "cloud-config") + message3['Merge-Type'] = 'dict()+list()+str()' + message3.set_payload(blob3) + + messages = [message1, message2, message3] + + paths = c_helpers.Paths({}, ds=FakeDataSource('')) + cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths) + + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, None) + for i, m in enumerate(messages): + headers = dict(m) + fn = "part-%s" % (i + 1) + payload = m.get_payload(decode=True) + cloud_cfg.handle_part(None, headers['Content-Type'], + fn, payload, None, headers) + cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None, None) + contents = util.load_file(paths.get_ipath('cloud_config')) + contents = util.load_yaml(contents) + self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) + self.assertEquals(contents['a'], 'be') + self.assertEquals(contents['e'], 'fg') + def test_unhandled_type_warning(self): """Raw text without magic is ignored but shows warning.""" ci = stages.Init() data = "arbitrary text\n" ci.datasource = FakeDataSource(data) + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() @@ -76,6 +134,7 @@ class TestConsumeUserData(MockerTestCase): message.set_payload("Just text") ci.datasource = FakeDataSource(message.as_string()) + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() @@ -93,6 +152,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = FakeDataSource(script) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mock_write(outpath, script, 0700) self.mocker.replay() @@ -111,6 +171,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = FakeDataSource(message.as_string()) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mock_write(outpath, script, 0700) self.mocker.replay() @@ -129,6 +190,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = 
FakeDataSource(message.as_string()) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(outpath, script, 0700) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() -- cgit v1.2.3 From fc6aa5aa54ee35ff0a3eff823bae0d3cf9b34bc1 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 6 Mar 2013 19:24:05 -0800 Subject: Continue working on merging code. --- cloudinit/config/cc_landscape.py | 3 ++- cloudinit/config/cc_mounts.py | 3 ++- cloudinit/distros/__init__.py | 15 +++++++------ cloudinit/handlers/__init__.py | 3 ++- cloudinit/handlers/cloud_config.py | 15 ++++++------- cloudinit/helpers.py | 3 ++- cloudinit/mergers/__init__.py | 13 +++++++++--- cloudinit/sources/DataSourceAltCloud.py | 5 +++-- cloudinit/sources/DataSourceCloudStack.py | 3 --- cloudinit/sources/DataSourceConfigDrive.py | 4 +++- cloudinit/sources/DataSourceEc2.py | 3 --- cloudinit/sources/DataSourceMAAS.py | 3 ++- cloudinit/sources/DataSourceNoCloud.py | 5 ++--- cloudinit/sources/DataSourceNone.py | 3 --- cloudinit/sources/DataSourceOVF.py | 3 ++- cloudinit/sources/__init__.py | 10 ++++++--- cloudinit/stages.py | 9 ++++---- cloudinit/type_utils.py | 34 ++++++++++++++++++++++++++++++ cloudinit/util.py | 33 ++++++++++------------------- tests/unittests/test_userdata.py | 4 +++- 20 files changed, 104 insertions(+), 70 deletions(-) create mode 100644 cloudinit/type_utils.py (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py index 02610dd0..6734efee 100644 --- a/cloudinit/config/cc_landscape.py +++ b/cloudinit/config/cc_landscape.py @@ -24,6 +24,7 @@ from StringIO import StringIO from configobj import ConfigObj +from cloudinit import type_utils from cloudinit import util from cloudinit.settings import PER_INSTANCE @@ -58,7 +59,7 @@ def handle(_name, cfg, cloud, log, _args): if not isinstance(ls_cloudcfg, (dict)): raise RuntimeError(("'landscape' key existed in config," " but not a dictionary type," - " is a %s instead"), util.obj_name(ls_cloudcfg)) + " is a %s instead"), type_utils.obj_name(ls_cloudcfg)) if not ls_cloudcfg: return diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index cb772c86..6ebe563d 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -22,6 +22,7 @@ from string import whitespace # pylint: disable=W0402 import re +from cloudinit import type_utils from cloudinit import util # Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 @@ -60,7 +61,7 @@ def handle(_name, cfg, cloud, log, _args): # skip something that wasn't a list if not isinstance(cfgmnt[i], list): log.warn("Mount option %s not a list, got a %s instead", - (i + 1), util.obj_name(cfgmnt[i])) + (i + 1), type_utils.obj_name(cfgmnt[i])) continue startname = str(cfgmnt[i][0]) diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 6a684b89..eeea6af1 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -31,6 +31,7 @@ import re from cloudinit import importer from cloudinit import log as logging from cloudinit import ssh_util +from cloudinit import type_utils from cloudinit import util from cloudinit.distros.parsers import hosts @@ -427,7 +428,7 @@ class Distro(object): lines.append("%s %s" % (user, rules)) else: msg = "Can not create sudoers rule addition with type %r" - raise TypeError(msg % 
(util.obj_name(rules))) + raise TypeError(msg % (type_utils.obj_name(rules))) content = "\n".join(lines) content += "\n" # trailing newline @@ -550,7 +551,7 @@ def _normalize_groups(grp_cfg): c_grp_cfg[k] = [v] else: raise TypeError("Bad group member type %s" % - util.obj_name(v)) + type_utils.obj_name(v)) else: if isinstance(v, (list)): c_grp_cfg[k].extend(v) @@ -558,13 +559,13 @@ def _normalize_groups(grp_cfg): c_grp_cfg[k].append(v) else: raise TypeError("Bad group member type %s" % - util.obj_name(v)) + type_utils.obj_name(v)) elif isinstance(i, (str, basestring)): if i not in c_grp_cfg: c_grp_cfg[i] = [] else: raise TypeError("Unknown group name type %s" % - util.obj_name(i)) + type_utils.obj_name(i)) grp_cfg = c_grp_cfg groups = {} if isinstance(grp_cfg, (dict)): @@ -573,7 +574,7 @@ def _normalize_groups(grp_cfg): else: raise TypeError(("Group config must be list, dict " " or string types only and not %s") % - util.obj_name(grp_cfg)) + type_utils.obj_name(grp_cfg)) return groups @@ -604,7 +605,7 @@ def _normalize_users(u_cfg, def_user_cfg=None): ad_ucfg.append(v) else: raise TypeError(("Unmappable user value type %s" - " for key %s") % (util.obj_name(v), k)) + " for key %s") % (type_utils.obj_name(v), k)) u_cfg = ad_ucfg elif isinstance(u_cfg, (str, basestring)): u_cfg = util.uniq_merge_sorted(u_cfg) @@ -629,7 +630,7 @@ def _normalize_users(u_cfg, def_user_cfg=None): else: raise TypeError(("User config must be dictionary/list " " or string types only and not %s") % - util.obj_name(user_config)) + type_utils.obj_name(user_config)) # Ensure user options are in the right python friendly format if users: diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 63fdb948..924463ce 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -27,6 +27,7 @@ from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES) from cloudinit import importer from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -76,7 +77,7 @@ class Handler(object): self.frequency = frequency def __repr__(self): - return "%s: [%s]" % (util.obj_name(self), self.list_types()) + return "%s: [%s]" % (type_utils.obj_name(self), self.list_types()) @abc.abstractmethod def list_types(self): diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index d458dee2..5f519f78 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,8 +29,8 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) -DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" MERGE_HEADER = 'Merge-Type' +DEF_MERGERS = mergers.default_mergers() class CloudConfigPartHandler(handlers.Handler): @@ -39,9 +39,7 @@ class CloudConfigPartHandler(handlers.Handler): self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") self.file_names = [] - self.mergers = [ - mergers.string_extract_mergers(DEF_MERGE_TYPE), - ] + self.mergers = [DEF_MERGERS] def list_types(self): return [ @@ -59,6 +57,7 @@ class CloudConfigPartHandler(handlers.Handler): file_lines.append("# %s" % (fn)) file_lines.append("") if self.cloud_buf is not None: + # Something was actually gathered.... 
lines = [ "#cloud-config", '', @@ -86,7 +85,7 @@ class CloudConfigPartHandler(handlers.Handler): all_mergers.extend(mergers_yaml) all_mergers.extend(mergers_header) if not all_mergers: - all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE) + all_mergers = DEF_MERGERS return all_mergers def _merge_part(self, payload, headers): @@ -94,7 +93,7 @@ class CloudConfigPartHandler(handlers.Handler): # Use the merger list from the last call, since it is the one # that will be defining how to merge with the next payload. curr_mergers = list(self.mergers[-1]) - LOG.debug("Merging with %s", curr_mergers) + LOG.debug("Merging by applying %s", curr_mergers) self.mergers.append(next_mergers) merger = mergers.construct(curr_mergers) if self.cloud_buf is None: @@ -106,9 +105,7 @@ class CloudConfigPartHandler(handlers.Handler): def _reset(self): self.file_names = [] self.cloud_buf = None - self.mergers = [ - mergers.string_extract_mergers(DEF_MERGE_TYPE), - ] + self.mergers = [DEF_MERGERS] def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 2077401c..a4e6fb03 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -32,6 +32,7 @@ from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, CFG_ENV_NAME) from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -68,7 +69,7 @@ class FileLock(object): self.fn = fn def __str__(self): - return "<%s using file %r>" % (util.obj_name(self), self.fn) + return "<%s using file %r>" % (type_utils.obj_name(self), self.fn) def canon_sem_name(name): diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 4a112165..453426af 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -20,11 +20,12 @@ import re from cloudinit import importer from cloudinit import log as logging -from cloudinit import util +from cloudinit import type_utils NAME_MTCH = re.compile(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$") LOG = logging.getLogger(__name__) +DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" class UnknownMerger(object): @@ -42,7 +43,7 @@ class UnknownMerger(object): # If not found the merge will be given to a '_handle_unknown' # function which can decide what to do wit the 2 values. 
def merge(self, source, merge_with): - type_name = util.obj_name(source) + type_name = type_utils.obj_name(source) type_name = type_name.lower() method_name = "_on_%s" % (type_name) meth = None @@ -127,6 +128,10 @@ def string_extract_mergers(merge_how): return parsed_mergers +def default_mergers(): + return tuple(string_extract_mergers(DEF_MERGE_TYPE)) + + def construct(parsed_mergers): mergers_to_be = [] for (m_name, m_ops) in parsed_mergers: @@ -145,4 +150,6 @@ def construct(parsed_mergers): root = LookupMerger(mergers) for (attr, opts) in mergers_to_be: mergers.append(attr(root, opts)) - return root \ No newline at end of file + return root + + diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index 9812bdcb..64548d43 100644 --- a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -30,6 +30,7 @@ import os.path from cloudinit import log as logging from cloudinit import sources from cloudinit import util + from cloudinit.util import ProcessExecutionError LOG = logging.getLogger(__name__) @@ -91,8 +92,8 @@ class DataSourceAltCloud(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - mstr = "%s [seed=%s]" % (util.obj_name(self), self.seed) - return mstr + root = sources.DataSource.__str__(self) + return "%s [seed=%s]" % (root, self.seed) def get_cloud_type(self): ''' diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index 076dba5a..c0e1a23c 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -59,9 +59,6 @@ class DataSourceCloudStack(sources.DataSource): return gw return None - def __str__(self): - return util.obj_name(self) - def _get_url_settings(self): mcfg = self.ds_cfg if not mcfg: diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index c7826851..46abd772 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -51,7 +51,9 @@ class DataSourceConfigDrive(sources.DataSource): self.ec2_metadata = None def __str__(self): - mstr = "%s [%s,ver=%s]" % (util.obj_name(self), self.dsmode, + root = sources.DataSource.__str__(self) + mstr = "%s [%s,ver=%s]" % (root, + self.dsmode, self.version) mstr += "[source=%s]" % (self.source) return mstr diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 2db53446..f010e640 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -49,9 +49,6 @@ class DataSourceEc2(sources.DataSource): self.seed_dir = os.path.join(paths.seed_dir, "ec2") self.api_ver = DEF_MD_VERSION - def __str__(self): - return util.obj_name(self) - def get_data(self): seed_ret = {} if util.read_optional_seed(seed_ret, base=(self.seed_dir + "/")): diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index b55d8a21..612d8ffa 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -50,7 +50,8 @@ class DataSourceMAAS(sources.DataSource): self.oauth_clockskew = None def __str__(self): - return "%s [%s]" % (util.obj_name(self), self.base_url) + root = sources.DataSource.__str__(self) + return "%s [%s]" % (root, self.base_url) def get_data(self): mcfg = self.ds_cfg diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index bed500a2..9a770d38 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ 
b/cloudinit/sources/DataSourceNoCloud.py @@ -40,9 +40,8 @@ class DataSourceNoCloud(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - mstr = "%s [seed=%s][dsmode=%s]" % (util.obj_name(self), - self.seed, self.dsmode) - return mstr + root = sources.DataSource.__str__(self) + return "%s [seed=%s][dsmode=%s]" % (root, self.seed, self.dsmode) def get_data(self): defaults = { diff --git a/cloudinit/sources/DataSourceNone.py b/cloudinit/sources/DataSourceNone.py index c2125bee..e2175e1f 100644 --- a/cloudinit/sources/DataSourceNone.py +++ b/cloudinit/sources/DataSourceNone.py @@ -41,9 +41,6 @@ class DataSourceNone(sources.DataSource): def get_instance_id(self): return 'iid-datasource-none' - def __str__(self): - return util.obj_name(self) - @property def is_disconnected(self): return True diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py index e90150c6..ae139074 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -43,7 +43,8 @@ class DataSourceOVF(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - return "%s [seed=%s]" % (util.obj_name(self), self.seed) + root = sources.DataSource.__str__(self) + return "%s [seed=%s]" % (root, self.seed) def get_data(self): found = [] diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 96baff90..d8fbacdd 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -25,6 +25,7 @@ import os from cloudinit import importer from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import user_data as ud from cloudinit import util @@ -52,7 +53,7 @@ class DataSource(object): self.userdata = None self.metadata = None self.userdata_raw = None - name = util.obj_name(self) + name = type_utils.obj_name(self) if name.startswith(DS_PREFIX): name = name[len(DS_PREFIX):] self.ds_cfg = util.get_cfg_by_path(self.sys_cfg, @@ -62,6 +63,9 @@ class DataSource(object): else: self.ud_proc = ud_proc + def __str__(self): + return type_utils.obj_name(self) + def get_userdata(self, apply_filter=False): if self.userdata is None: self.userdata = self.ud_proc.process(self.get_userdata_raw()) @@ -214,7 +218,7 @@ def normalize_pubkey_data(pubkey_data): def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): ds_list = list_sources(cfg_list, ds_deps, pkg_list) - ds_names = [util.obj_name(f) for f in ds_list] + ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) for cls in ds_list: @@ -222,7 +226,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): - return (s, util.obj_name(cls)) + return (s, type_utils.obj_name(cls)) except Exception: util.logexc(LOG, "Getting data from %s failed", cls) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 94a267df..531e7997 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -43,6 +43,7 @@ from cloudinit import helpers from cloudinit import importer from cloudinit import log as logging from cloudinit import sources +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -220,7 +221,7 @@ class Init(object): # Any config provided??? 
pkg_list = self.cfg.get('datasource_pkg_list') or [] # Add the defaults at the end - for n in ['', util.obj_name(sources)]: + for n in ['', type_utils.obj_name(sources)]: if n not in pkg_list: pkg_list.append(n) cfg_list = self.cfg.get('datasource_list') or [] @@ -280,7 +281,7 @@ class Init(object): dp = self.paths.get_cpath('data') # Write what the datasource was and is.. - ds = "%s: %s" % (util.obj_name(self.datasource), self.datasource) + ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource) previous_ds = None ds_fn = os.path.join(idir, 'datasource') try: @@ -497,7 +498,7 @@ class Modules(object): else: raise TypeError(("Failed to read '%s' item in config," " unknown type %s") % - (item, util.obj_name(item))) + (item, type_utils.obj_name(item))) return module_list def _fixup_modules(self, raw_mods): @@ -515,7 +516,7 @@ class Modules(object): # Reset it so when ran it will get set to a known value freq = None mod_locs = importer.find_module(mod_name, - ['', util.obj_name(config)], + ['', type_utils.obj_name(config)], ['handle']) if not mod_locs: LOG.warn("Could not find module named %s", mod_name) diff --git a/cloudinit/type_utils.py b/cloudinit/type_utils.py new file mode 100644 index 00000000..2decbfc5 --- /dev/null +++ b/cloudinit/type_utils.py @@ -0,0 +1,34 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2012 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012 Yahoo! Inc. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# Author: Joshua Harlow +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# pylint: disable=C0302 + +import types + + +def obj_name(obj): + if isinstance(obj, (types.TypeType, + types.ModuleType, + types.FunctionType, + types.LambdaType)): + return str(obj.__name__) + return obj_name(obj.__class__) diff --git a/cloudinit/util.py b/cloudinit/util.py index ab918433..73bf6304 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -43,14 +43,15 @@ import subprocess import sys import tempfile import time -import types import urlparse import yaml from cloudinit import importer from cloudinit import log as logging +from cloudinit import mergers from cloudinit import safeyaml +from cloudinit import type_utils from cloudinit import url_helper as uhelp from cloudinit import version @@ -194,11 +195,12 @@ def fork_cb(child_cb, *args): os._exit(0) # pylint: disable=W0212 except: logexc(LOG, ("Failed forking and" - " calling callback %s"), obj_name(child_cb)) + " calling callback %s"), + type_utils.obj_name(child_cb)) os._exit(1) # pylint: disable=W0212 else: LOG.debug("Forked child %s who will run callback %s", - fid, obj_name(child_cb)) + fid, type_utils.obj_name(child_cb)) def is_true(val, addons=None): @@ -513,15 +515,6 @@ def make_url(scheme, host, port=None, return urlparse.urlunparse(pieces) -def obj_name(obj): - if isinstance(obj, (types.TypeType, - types.ModuleType, - types.FunctionType, - types.LambdaType)): - return str(obj.__name__) - return obj_name(obj.__class__) - - def mergemanydict(srcs, reverse=False): if reverse: srcs = reversed(srcs) @@ -538,13 +531,9 @@ def mergedict(src, cand): If C{src} has a key C{cand} will not override. Nested dictionaries are merged recursively. """ - if isinstance(src, dict) and isinstance(cand, dict): - for (k, v) in cand.iteritems(): - if k not in src: - src[k] = v - else: - src[k] = mergedict(src[k], v) - return src + raw_mergers = mergers.default_mergers() + merger = mergers.construct(raw_mergers) + return merger.merge(src, cand) @contextlib.contextmanager @@ -645,7 +634,7 @@ def load_yaml(blob, default=None, allowed=(dict,)): # Yes this will just be caught, but thats ok for now... 
raise TypeError(("Yaml load allows %s root types," " but got %s instead") % - (allowed, obj_name(converted))) + (allowed, type_utils.obj_name(converted))) loaded = converted except (yaml.YAMLError, TypeError, ValueError): if len(blob) == 0: @@ -714,7 +703,7 @@ def read_conf_with_confd(cfgfile): if not isinstance(confd, (str, basestring)): raise TypeError(("Config file %s contains 'conf_d' " "with non-string type %s") % - (cfgfile, obj_name(confd))) + (cfgfile, type_utils.obj_name(confd))) else: confd = str(confd).strip() elif os.path.isdir("%s.d" % cfgfile): @@ -1472,7 +1461,7 @@ def shellify(cmdlist, add_header=True): else: raise RuntimeError(("Unable to shellify type %s" " which is not a list or string") - % (obj_name(args))) + % (type_utils.obj_name(args))) LOG.debug("Shellified %s commands.", cmds_made) return content diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py index 9e1fed7e..ef0dd7b8 100644 --- a/tests/unittests/test_userdata.py +++ b/tests/unittests/test_userdata.py @@ -74,7 +74,7 @@ run: - morestuff ''' message2 = MIMEBase("text", "cloud-config") - message2['Merge-Type'] = 'dict()+list(extend)+str()' + message2['X-Merge-Type'] = 'dict()+list(extend)+str()' message2.set_payload(blob2) blob3 = ''' @@ -83,6 +83,7 @@ e: - 1 - 2 - 3 +p: 1 ''' message3 = MIMEBase("text", "cloud-config") message3['Merge-Type'] = 'dict()+list()+str()' @@ -109,6 +110,7 @@ e: self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) self.assertEquals(contents['a'], 'be') self.assertEquals(contents['e'], 'fg') + self.assertEquals(contents['p'], 1) def test_unhandled_type_warning(self): """Raw text without magic is ignored but shows warning.""" -- cgit v1.2.3