From 8bc85abd97e06d964bbd26208eb732e80eb87c10 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 20 Nov 2012 20:02:48 -0800 Subject: Start allowing different merging types to be applied After user data handling splits apart all the different content types into there various mime messages it is nice to be able to have each message specify how it should be merged (mainly for cloud-config or cloud-archive) into the single cloud config that is eventually used. This starts to add a plugable merging framework and the needed components to activate said headers and merging. --- cloudinit/handlers/__init__.py | 49 +++++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 22 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 8d6dcd4d..bfccfd89 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -69,7 +69,6 @@ INCLUSION_SRCH = sorted(list(INCLUSION_TYPES_MAP.keys()), class Handler(object): - __metaclass__ = abc.ABCMeta def __init__(self, frequency, version=2): @@ -83,15 +82,12 @@ class Handler(object): def list_types(self): raise NotImplementedError() - def handle_part(self, data, ctype, filename, payload, frequency): - return self._handle_part(data, ctype, filename, payload, frequency) - @abc.abstractmethod - def _handle_part(self, data, ctype, filename, payload, frequency): + def handle_part(self, *args, **kwargs): raise NotImplementedError() -def run_part(mod, data, ctype, filename, payload, frequency): +def run_part(mod, data, filename, payload, headers, frequency): mod_freq = mod.frequency if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): @@ -102,19 +98,25 @@ def run_part(mod, data, ctype, filename, payload, frequency): mod_ver = int(mod_ver) except: mod_ver = 1 + content_type = headers['Content-Type'] try: LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", - mod, ctype, filename, mod_ver, frequency) - if mod_ver >= 2: + mod, content_type, filename, mod_ver, frequency) + if mod_ver == 3: + # Treat as v. 3 which does get a frequency + headers + mod.handle_part(data, content_type, filename, + payload, frequency, headers) + elif mod_ver == 2: # Treat as v. 2 which does get a frequency - mod.handle_part(data, ctype, filename, payload, frequency) + mod.handle_part(data, content_type, filename, + payload, frequency) else: # Treat as v. 
1 which gets no frequency - mod.handle_part(data, ctype, filename, payload) + mod.handle_part(data, content_type, filename, payload) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" " with frequency %s"), - mod, ctype, filename, + mod, content_type, filename, mod_ver, frequency) @@ -173,26 +175,27 @@ def _escape_string(text): return text -def walker_callback(pdata, ctype, filename, payload): - if ctype in PART_CONTENT_TYPES: - walker_handle_handler(pdata, ctype, filename, payload) +def walker_callback(data, filename, payload, headers): + content_type = headers['Content-Type'] + if content_type in PART_CONTENT_TYPES: + walker_handle_handler(data, content_type, filename, payload) return - handlers = pdata['handlers'] - if ctype in pdata['handlers']: - run_part(handlers[ctype], pdata['data'], ctype, filename, - payload, pdata['frequency']) + handlers = data['handlers'] + if content_type in handlers: + run_part(handlers[content_type], data['data'], filename, + payload, headers, data['frequency']) elif payload: # Extract the first line or 24 bytes for displaying in the log start = _extract_first_or_bytes(payload, 24) details = "'%s...'" % (_escape_string(start)) if ctype == NOT_MULTIPART_TYPE: LOG.warning("Unhandled non-multipart (%s) userdata: %s", - ctype, details) + content_type, details) else: LOG.warning("Unhandled unknown content-type (%s) userdata: %s", - ctype, details) + content_type, details) else: - LOG.debug("empty payload of type %s" % ctype) + LOG.debug("Empty payload of type %s", content_type) # Callback is a function that will be called with @@ -212,7 +215,9 @@ def walk(msg, callback, data): if not filename: filename = PART_FN_TPL % (partnum) - callback(data, ctype, filename, part.get_payload(decode=True)) + callback(data, ctype, filename, + part.get_payload(decode=True), + dict(part)) partnum = partnum + 1 -- cgit v1.2.3 From eded09c1e260330107a19bd0b5a351686fe49e80 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 22 Nov 2012 08:21:37 -0800 Subject: Continue working on merging prototype. --- cloudinit/handlers/__init__.py | 26 +++++++++++++++++--------- cloudinit/handlers/cloud_config.py | 27 +++++++++++++++++++++------ 2 files changed, 38 insertions(+), 15 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index bfccfd89..566b61a7 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -92,14 +92,14 @@ def run_part(mod, data, filename, payload, headers, frequency): if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): return - mod_ver = mod.handler_version # Sanity checks on version (should be an int convertable) try: + mod_ver = mod.handler_version mod_ver = int(mod_ver) - except: + except (TypeError, ValueError, AttributeError): mod_ver = 1 - content_type = headers['Content-Type'] try: + content_type = headers['Content-Type'] LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, content_type, filename, mod_ver, frequency) if mod_ver == 3: @@ -110,9 +110,11 @@ def run_part(mod, data, filename, payload, headers, frequency): # Treat as v. 2 which does get a frequency mod.handle_part(data, content_type, filename, payload, frequency) - else: + elif mod_ver == 1: # Treat as v. 
1 which gets no frequency mod.handle_part(data, content_type, filename, payload) + else: + raise ValueError("Unknown module version %s" % (mod_ver)) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" " with frequency %s"), @@ -121,11 +123,17 @@ def run_part(mod, data, filename, payload, headers, frequency): def call_begin(mod, data, frequency): - run_part(mod, data, CONTENT_START, None, None, frequency) + headers = { + 'Content-Type': CONTENT_START, + } + run_part(mod, data, None, None, headers, frequency) def call_end(mod, data, frequency): - run_part(mod, data, CONTENT_END, None, None, frequency) + headers = { + 'Content-Type': CONTENT_END, + } + run_part(mod, data, None, None, headers, frequency) def walker_handle_handler(pdata, _ctype, _filename, payload): @@ -215,9 +223,9 @@ def walk(msg, callback, data): if not filename: filename = PART_FN_TPL % (partnum) - callback(data, ctype, filename, - part.get_payload(decode=True), - dict(part)) + headers = dict(part) + headers['Content-Type'] = ctype + callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 86027187..22ced20d 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,6 +29,8 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) +DEF_MERGE_TYPE = "list+dict+str" + class CloudConfigPartHandler(handlers.Handler): def __init__(self, paths, **_kwargs): @@ -44,10 +46,25 @@ class CloudConfigPartHandler(handlers.Handler): def _write_cloud_config(self, buf): if not self.cloud_fn: return - payload = util.yaml_dumps(self.cloud_buf) - util.write_file(self.cloud_fn, payload, 0600) + lines = ["#cloud-config", util.yaml_dumps(self.cloud_buf)] + util.write_file(self.cloud_fn, "\n".join(lines), 0600) + + def _merge_part(self, payload, headers, filename): + merge_how = headers.get("Merge-Type") + try: + payload_y = util.load_yaml(payload) + if not merge_how: + merge_how = payload_y.pop("Merge-Type", '') + merge_how = merge_how.strip().lower() + if not merge_how: + merge_how = DEF_MERGE_TYPE + merger = mergers.construct(merge_how) + self.cloud_buf = merger.merge(self.cloud_buf, payload_y) + except: + util.logexc(LOG, "Failed at merging in cloud config part from %s", + filename) - def handle_part(self, _data, ctype, filename, payload, _frequency, headers): + def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: self.cloud_buf = {} return @@ -55,6 +72,4 @@ class CloudConfigPartHandler(handlers.Handler): self._write_cloud_config(self.cloud_buf) self.cloud_buf = {} return - merge_how = headers.get("Merge-Type", 'list+dict+str') - merger = mergers.construct(merge_how) - self.cloud_buf = merger.merge(self.cloud_buf, util.load_yaml(payload)) + self._merge_part(payload, headers, filename) -- cgit v1.2.3 From 2653a9172e375484b4d0a88c3de56334136fa134 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 5 Mar 2013 19:16:01 -0800 Subject: Add in a bunch of changes and tests. 
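
This round fleshes out the merger framework: merge-type strings such as
"list(extend)+dict()+str(append)" are parsed by mergers.string_extract_mergers(),
an equivalent list/dict form can be pulled out of a cloud-config payload by
mergers.dict_extract_mergers(), and the resulting (name, options) pairs are handed
to mergers.construct() to build the object that performs the actual merge. As a
rough illustration of how the pieces fit together (this mirrors the unit tests
added below rather than being additional shipped code), the API is exercised
roughly like this:

    from cloudinit import mergers

    # "list(extend)+dict()+str(append)" parses into
    # [('list', ['extend']), ('dict', []), ('str', ['append'])]
    merger_set = mergers.string_extract_mergers("list(extend)+dict()+str(append)")
    merger = mergers.construct(merger_set)

    # Lists are extended, keys only present in the second dict are added, and
    # keys present on both sides are merged recursively (strings appended)
    # instead of being overwritten.
    merged = merger.merge({'run': ['a'], 'msg': 'hello'},
                          {'run': ['b'], 'msg': ' world', 'extra': 1})
    # merged == {'run': ['a', 'b'], 'msg': 'hello world', 'extra': 1}

A cloud-config part can select its own merge behavior through a "Merge-Type" (or
"X-Merge-Type") MIME header, or through a merge_type entry inside the YAML
payload itself; when neither is given, the handler falls back to the default
"list(extend)+dict()+str(append)" behavior.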
--- cloudinit/handlers/__init__.py | 15 +-- cloudinit/handlers/cloud_config.py | 89 +++++++++------- cloudinit/mergers/__init__.py | 59 +++++++++-- cloudinit/mergers/dict.py | 11 ++ cloudinit/mergers/list.py | 21 ++-- cloudinit/mergers/str.py | 5 + tests/unittests/test__init__.py | 27 +++-- tests/unittests/test_merging.py | 205 ++++++++++++++++++++++++++----------- tests/unittests/test_userdata.py | 80 +++++++++++++-- 9 files changed, 368 insertions(+), 144 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 566b61a7..63fdb948 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -87,7 +87,7 @@ class Handler(object): raise NotImplementedError() -def run_part(mod, data, filename, payload, headers, frequency): +def run_part(mod, data, filename, payload, frequency, headers): mod_freq = mod.frequency if not (mod_freq == PER_ALWAYS or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): @@ -98,8 +98,8 @@ def run_part(mod, data, filename, payload, headers, frequency): mod_ver = int(mod_ver) except (TypeError, ValueError, AttributeError): mod_ver = 1 + content_type = headers['Content-Type'] try: - content_type = headers['Content-Type'] LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, content_type, filename, mod_ver, frequency) if mod_ver == 3: @@ -123,17 +123,19 @@ def run_part(mod, data, filename, payload, headers, frequency): def call_begin(mod, data, frequency): + # Create a fake header set headers = { 'Content-Type': CONTENT_START, } - run_part(mod, data, None, None, headers, frequency) + run_part(mod, data, None, None, frequency, headers) def call_end(mod, data, frequency): + # Create a fake header set headers = { 'Content-Type': CONTENT_END, } - run_part(mod, data, None, None, headers, frequency) + run_part(mod, data, None, None, frequency, headers) def walker_handle_handler(pdata, _ctype, _filename, payload): @@ -191,12 +193,12 @@ def walker_callback(data, filename, payload, headers): handlers = data['handlers'] if content_type in handlers: run_part(handlers[content_type], data['data'], filename, - payload, headers, data['frequency']) + payload, data['frequency'], headers) elif payload: # Extract the first line or 24 bytes for displaying in the log start = _extract_first_or_bytes(payload, 24) details = "'%s...'" % (_escape_string(start)) - if ctype == NOT_MULTIPART_TYPE: + if content_type == NOT_MULTIPART_TYPE: LOG.warning("Unhandled non-multipart (%s) userdata: %s", content_type, details) else: @@ -224,6 +226,7 @@ def walk(msg, callback, data): filename = PART_FN_TPL % (partnum) headers = dict(part) + LOG.debug(headers) headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 02a7ad9d..d458dee2 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,16 +29,19 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) -DEF_MERGE_TYPE = "list()+dict()+str()" +DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" MERGE_HEADER = 'Merge-Type' class CloudConfigPartHandler(handlers.Handler): def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS, version=3) - self.cloud_buf = {} + self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") self.file_names = [] + self.mergers = [ + 
mergers.string_extract_mergers(DEF_MERGE_TYPE), + ] def list_types(self): return [ @@ -48,50 +51,64 @@ class CloudConfigPartHandler(handlers.Handler): def _write_cloud_config(self, buf): if not self.cloud_fn: return - # Write the combined & merged dictionary/yaml out - lines = [ - "#cloud-config", - '', - ] - # Write which files we merged from + # Capture which files we merged from... + file_lines = [] if self.file_names: - lines.append("# from %s files" % (len(self.file_names))) + file_lines.append("# from %s files" % (len(self.file_names))) for fn in self.file_names: - lines.append("# %s" % (fn)) - lines.append("") - lines.append(util.yaml_dumps(self.cloud_buf)) + file_lines.append("# %s" % (fn)) + file_lines.append("") + if self.cloud_buf is not None: + lines = [ + "#cloud-config", + '', + ] + lines.extend(file_lines) + lines.append(util.yaml_dumps(self.cloud_buf)) + else: + lines = [] util.write_file(self.cloud_fn, "\n".join(lines), 0600) - def _merge_header_extract(self, payload_yaml): - merge_header_yaml = '' - for k in [MERGE_HEADER, MERGE_HEADER.lower(), - MERGE_HEADER.lower().replace("-", "_")]: - if k in payload_yaml: - merge_header_yaml = str(payload_yaml[k]) + def _extract_mergers(self, payload, headers): + merge_header_headers = '' + for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]: + tmp_h = headers.get(h, '') + if tmp_h: + merge_header_headers = tmp_h break - return merge_header_yaml - - def _merge_part(self, payload, headers): - merge_header_headers = headers.get(MERGE_HEADER, '') - payload_yaml = util.load_yaml(payload) - merge_how = '' # Select either the merge-type from the content # or the merge type from the headers or default to our own set - # if neither exists (or is empty) from the later - merge_header_yaml = self._merge_header_extract(payload_yaml) - for merge_i in [merge_header_yaml, merge_header_headers]: - merge_i = merge_i.strip().lower() - if merge_i: - merge_how = merge_i - break - if not merge_how: - merge_how = DEF_MERGE_TYPE - merger = mergers.construct(merge_how) - self.cloud_buf = merger.merge(self.cloud_buf, payload_yaml) + # if neither exists (or is empty) from the later. + payload_yaml = util.load_yaml(payload) + mergers_yaml = mergers.dict_extract_mergers(payload_yaml) + mergers_header = mergers.string_extract_mergers(merge_header_headers) + all_mergers = [] + all_mergers.extend(mergers_yaml) + all_mergers.extend(mergers_header) + if not all_mergers: + all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE) + return all_mergers + + def _merge_part(self, payload, headers): + next_mergers = self._extract_mergers(payload, headers) + # Use the merger list from the last call, since it is the one + # that will be defining how to merge with the next payload. + curr_mergers = list(self.mergers[-1]) + LOG.debug("Merging with %s", curr_mergers) + self.mergers.append(next_mergers) + merger = mergers.construct(curr_mergers) + if self.cloud_buf is None: + # First time through, merge with an empty dict... 
+ self.cloud_buf = {} + self.cloud_buf = merger.merge(self.cloud_buf, + util.load_yaml(payload)) def _reset(self): self.file_names = [] - self.cloud_buf = {} + self.cloud_buf = None + self.mergers = [ + mergers.string_extract_mergers(DEF_MERGE_TYPE), + ] def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 20658edc..4a112165 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -34,6 +34,13 @@ class UnknownMerger(object): def _handle_unknown(self, meth_wanted, value, merge_with): return value + # This merging will attempt to look for a '_on_X' method + # in our own object for a given object Y with type X, + # if found it will be called to perform the merge of a source + # object and a object to merge_with. + # + # If not found the merge will be given to a '_handle_unknown' + # function which can decide what to do wit the 2 values. def merge(self, source, merge_with): type_name = util.obj_name(source) type_name = type_name.lower() @@ -56,6 +63,11 @@ class LookupMerger(UnknownMerger): else: self._lookups = lookups + # For items which can not be merged by the parent this object + # will lookup in a internally maintained set of objects and + # find which one of those objects can perform the merge. If + # any of the contained objects have the needed method, they + # will be called to perform the merge. def _handle_unknown(self, meth_wanted, value, merge_with): meth = None for merger in self._lookups: @@ -70,8 +82,33 @@ class LookupMerger(UnknownMerger): return meth(value, merge_with) -def _extract_merger_names(merge_how): - names = [] +def dict_extract_mergers(config): + parsed_mergers = [] + raw_mergers = config.get('merger_how') + if raw_mergers is None: + raw_mergers = config.get('merge_type') + if raw_mergers is None: + return parsed_mergers + if isinstance(raw_mergers, (str, basestring)): + return string_extract_mergers(raw_mergers) + for m in raw_mergers: + if isinstance(m, (dict)): + name = m['name'] + name = name.replace("-", "_").strip() + opts = m['settings'] + else: + name = m[0] + if len(m) >= 2: + opts = m[1:] + else: + opts = [] + if name: + parsed_mergers.append((name, opts)) + return parsed_mergers + + +def string_extract_mergers(merge_how): + parsed_mergers = [] for m_name in merge_how.split("+"): # Canonicalize the name (so that it can be found # even when users alter it in various ways) @@ -79,20 +116,20 @@ def _extract_merger_names(merge_how): m_name = m_name.replace("-", "_") if not m_name: continue - names.append(m_name) - return names - - -def construct(merge_how): - mergers_to_be = [] - for name in _extract_merger_names(merge_how): - match = NAME_MTCH.match(name) + match = NAME_MTCH.match(m_name) if not match: - msg = "Matcher identifer '%s' is not in the right format" % (name) + msg = "Matcher identifer '%s' is not in the right format" % (m_name) raise ValueError(msg) (m_name, m_ops) = match.groups() m_ops = m_ops.strip().split(",") m_ops = [m.strip().lower() for m in m_ops if m.strip()] + parsed_mergers.append((m_name, m_ops)) + return parsed_mergers + + +def construct(parsed_mergers): + mergers_to_be = [] + for (m_name, m_ops) in parsed_mergers: merger_locs = importer.find_module(m_name, [__name__], ['Merger']) diff --git a/cloudinit/mergers/dict.py b/cloudinit/mergers/dict.py index bc392afa..45a7d3a5 100644 --- a/cloudinit/mergers/dict.py +++ b/cloudinit/mergers/dict.py @@ -22,6 +22,17 @@ class 
Merger(object): self._merger = merger self._overwrite = 'overwrite' in opts + # This merging algorithm will attempt to merge with + # another dictionary, on encountering any other type of object + # it will not merge with said object, but will instead return + # the original value + # + # On encountering a dictionary, it will create a new dictionary + # composed of the original and the one to merge with, if 'overwrite' + # is enabled then keys that exist in the original will be overwritten + # by keys in the one to merge with (and associated values). Otherwise + # if not in overwrite mode the 2 conflicting keys themselves will + # be merged. def _on_dict(self, value, merge_with): if not isinstance(merge_with, (dict)): return value diff --git a/cloudinit/mergers/list.py b/cloudinit/mergers/list.py index a848b8d6..a56ff007 100644 --- a/cloudinit/mergers/list.py +++ b/cloudinit/mergers/list.py @@ -26,21 +26,24 @@ class Merger(object): def _on_tuple(self, value, merge_with): return self._on_list(list(value), merge_with) + # On encountering a list or tuple type this action will be applied + # a new list will be returned, if the value to merge with is itself + # a list and we have been told to 'extend', then the value here will + # be extended with the other list. If in 'extend' mode then we will + # attempt to merge instead, which means that values from the list + # to merge with will replace values in te original list (they will + # also be merged recursively). + # + # If the value to merge with is not a list, and we are set to discared + # then no modifications will take place, otherwise we will just append + # the value to merge with onto the end of our own list. def _on_list(self, value, merge_with): new_value = list(value) if isinstance(merge_with, (tuple, list)): if self._extend: new_value.extend(merge_with) else: - # Merge instead - for m_v in merge_with: - m_am = 0 - for (i, o_v) in enumerate(new_value): - if m_v == o_v: - new_value[i] = self._merger.merge(o_v, m_v) - m_am += 1 - if m_am == 0: - new_value.append(m_v) + return new_value else: if not self._discard_non: new_value.append(merge_with) diff --git a/cloudinit/mergers/str.py b/cloudinit/mergers/str.py index 14bc46ec..f1534c5b 100644 --- a/cloudinit/mergers/str.py +++ b/cloudinit/mergers/str.py @@ -21,9 +21,14 @@ class Merger(object): def __init__(self, merger, opts): self._append = 'append' in opts + # On encountering a unicode object to merge value with + # we will for now just proxy into the string method to let it handle it. def _on_unicode(self, value, merge_with): return self._on_str(value, merge_with) + # On encountering a string object to merge with we will + # perform the following action, if appending we will + # merge them together, otherwise we will just return value. 
def _on_str(self, value, merge_with): if not self._append: return value diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py index ac082076..7924755a 100644 --- a/tests/unittests/test__init__.py +++ b/tests/unittests/test__init__.py @@ -22,8 +22,10 @@ class FakeModule(handlers.Handler): def list_types(self): return self.types - def _handle_part(self, data, ctype, filename, payload, frequency): + def handle_part(self, data, ctype, filename, payload, frequency): pass + + class TestWalkerHandleHandler(MockerTestCase): @@ -103,6 +105,9 @@ class TestHandlerHandlePart(MockerTestCase): self.filename = "fake filename" self.payload = "fake payload" self.frequency = settings.PER_INSTANCE + self.headers = { + 'Content-Type': self.ctype, + } def test_normal_version_1(self): """ @@ -118,8 +123,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_normal_version_2(self): """ @@ -135,8 +140,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload, self.frequency) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_modfreq_per_always(self): """ @@ -152,8 +157,8 @@ class TestHandlerHandlePart(MockerTestCase): self.payload) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_no_handle_when_modfreq_once(self): """C{handle_part} is not called if frequency is once.""" @@ -163,8 +168,8 @@ class TestHandlerHandlePart(MockerTestCase): self.mocker.result(settings.PER_ONCE) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) def test_exception_is_caught(self): """Exceptions within C{handle_part} are caught and logged.""" @@ -178,8 +183,8 @@ class TestHandlerHandlePart(MockerTestCase): self.mocker.throw(Exception()) self.mocker.replay() - handlers.run_part(mod_mock, self.data, self.ctype, self.filename, - self.payload, self.frequency) + handlers.run_part(mod_mock, self.data, self.filename, + self.payload, self.frequency, self.headers) class TestCmdlineUrl(MockerTestCase): diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py index 0037b966..fa7ee8e4 100644 --- a/tests/unittests/test_merging.py +++ b/tests/unittests/test_merging.py @@ -1,62 +1,143 @@ -from mocker import MockerTestCase - -from cloudinit import util - - -class TestMergeDict(MockerTestCase): - def test_simple_merge(self): - """Test simple non-conflict merge.""" - source = {"key1": "value1"} - candidate = {"key2": "value2"} - result = util.mergedict(source, candidate) - self.assertEqual({"key1": "value1", "key2": "value2"}, result) - - def test_nested_merge(self): - """Test nested merge.""" - source = {"key1": {"key1.1": "value1.1"}} - candidate = {"key1": {"key1.2": "value1.2"}} - result = util.mergedict(source, candidate) - self.assertEqual( - {"key1": {"key1.1": "value1.1", "key1.2": "value1.2"}}, result) - - def 
test_merge_does_not_override(self): - """Test that candidate doesn't override source.""" - source = {"key1": "value1", "key2": "value2"} - candidate = {"key1": "value2", "key2": "NEW VALUE"} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_empty_candidate(self): - """Test empty candidate doesn't change source.""" - source = {"key": "value"} - candidate = {} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_empty_source(self): - """Test empty source is replaced by candidate.""" - source = {} - candidate = {"key": "value"} - result = util.mergedict(source, candidate) - self.assertEqual(candidate, result) - - def test_non_dict_candidate(self): - """Test non-dict candidate is discarded.""" - source = {"key": "value"} - candidate = "not a dict" - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_non_dict_source(self): - """Test non-dict source is not modified with a dict candidate.""" - source = "not a dict" - candidate = {"key": "value"} - result = util.mergedict(source, candidate) - self.assertEqual(source, result) - - def test_neither_dict(self): - """Test if neither candidate or source is dict source wins.""" - source = "source" - candidate = "candidate" - result = util.mergedict(source, candidate) - self.assertEqual(source, result) +import os + +from tests.unittests import helpers + +from cloudinit import mergers + + +class TestSimpleRun(helpers.MockerTestCase): + def test_basic_merge(self): + source = { + 'Blah': ['blah2'], + 'Blah3': 'c', + } + merge_with = { + 'Blah2': ['blah3'], + 'Blah3': 'b', + 'Blah': ['123'], + } + # Basic merge should not do thing special + merge_how = "list()+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], ['blah2']) + self.assertEquals(merged['Blah2'], ['blah3']) + self.assertEquals(merged['Blah3'], 'c') + + def test_dict_overwrite(self): + source = { + 'Blah': ['blah2'], + } + merge_with = { + 'Blah': ['123'], + } + # Now lets try a dict overwrite + merge_how = "list()+dict(overwrite)+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], ['123']) + + def test_string_append(self): + source = { + 'Blah': 'blah2', + } + merge_with = { + 'Blah': '345', + } + merge_how = "list()+dict()+str(append)" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah'], 'blah2345') + + def test_list_extend(self): + source = ['abc'] + merge_with = ['123'] + merge_how = "list(extend)+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged, ['abc', '123']) + + def test_deep_merge(self): + source = { + 'a': [1, 'b', 2], + 'b': 'blahblah', + 'c': { + 'e': [1, 2, 3], + 'f': 'bigblobof', + 'iamadict': { + 'ok': 'ok', + } + }, + 'run': [ + 'runme', + 'runme2', + ], + 'runmereally': [ + 'e', ['a'], 'd', + ], + } + merge_with = { + 'a': ['e', 'f', 'g'], + 'b': 'more', + 'c': { + 'a': 'b', + 'f': 
'stuff', + }, + 'run': [ + 'morecmd', + 'moremoremore', + ], + 'runmereally': [ + 'blah', ['b'], 'e', + ], + } + merge_how = "list(extend)+dict()+str(append)" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['a'], [1, 'b', 2, 'e', 'f', 'g']) + self.assertEquals(merged['b'], 'blahblahmore') + self.assertEquals(merged['c']['f'], 'bigblobofstuff') + self.assertEquals(merged['run'], ['runme', 'runme2', 'morecmd', 'moremoremore']) + self.assertEquals(merged['runmereally'], ['e', ['a'], 'd', 'blah', ['b'], 'e']) + + def test_dict_overwrite_layered(self): + source = { + 'Blah3': { + 'f': '3', + 'g': { + 'a': 'b', + } + } + } + merge_with = { + 'Blah3': { + 'e': '2', + 'g': { + 'e': 'f', + } + } + } + merge_how = "list()+dict()+str()" + merger_set = mergers.string_extract_mergers(merge_how) + self.assertEquals(3, len(merger_set)) + merger = mergers.construct(merger_set) + merged = merger.merge(source, merge_with) + self.assertEquals(merged['Blah3'], { + 'e': '2', + 'f': '3', + 'g': { + 'a': 'b', + 'e': 'f', + } + }) + diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py index 82a4c555..9e1fed7e 100644 --- a/tests/unittests/test_userdata.py +++ b/tests/unittests/test_userdata.py @@ -9,12 +9,17 @@ from email.mime.base import MIMEBase from mocker import MockerTestCase +from cloudinit import handlers +from cloudinit import helpers as c_helpers from cloudinit import log from cloudinit import sources from cloudinit import stages +from cloudinit import util INSTANCE_ID = "i-testing" +from tests.unittests import helpers + class FakeDataSource(sources.DataSource): @@ -26,22 +31,16 @@ class FakeDataSource(sources.DataSource): # FIXME: these tests shouldn't be checking log output?? # Weirddddd... - - -class TestConsumeUserData(MockerTestCase): +class TestConsumeUserData(helpers.FilesystemMockingTestCase): def setUp(self): - MockerTestCase.setUp(self) - # Replace the write so no actual files - # get written out... 
- self.mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) + helpers.FilesystemMockingTestCase.setUp(self) self._log = None self._log_file = None self._log_handler = None def tearDown(self): - MockerTestCase.tearDown(self) + helpers.FilesystemMockingTestCase.tearDown(self) if self._log_handler and self._log: self._log.removeHandler(self._log_handler) @@ -53,12 +52,71 @@ class TestConsumeUserData(MockerTestCase): self._log.addHandler(self._log_handler) return log_file + def test_merging_cloud_config(self): + blob = ''' +#cloud-config +a: b +e: f +run: + - b + - c +''' + message1 = MIMEBase("text", "cloud-config") + message1['Merge-Type'] = 'dict()+list(extend)+str(append)' + message1.set_payload(blob) + + blob2 = ''' +#cloud-config +a: e +e: g +run: + - stuff + - morestuff +''' + message2 = MIMEBase("text", "cloud-config") + message2['Merge-Type'] = 'dict()+list(extend)+str()' + message2.set_payload(blob2) + + blob3 = ''' +#cloud-config +e: + - 1 + - 2 + - 3 +''' + message3 = MIMEBase("text", "cloud-config") + message3['Merge-Type'] = 'dict()+list()+str()' + message3.set_payload(blob3) + + messages = [message1, message2, message3] + + paths = c_helpers.Paths({}, ds=FakeDataSource('')) + cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths) + + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, None) + for i, m in enumerate(messages): + headers = dict(m) + fn = "part-%s" % (i + 1) + payload = m.get_payload(decode=True) + cloud_cfg.handle_part(None, headers['Content-Type'], + fn, payload, None, headers) + cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None, None) + contents = util.load_file(paths.get_ipath('cloud_config')) + contents = util.load_yaml(contents) + self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) + self.assertEquals(contents['a'], 'be') + self.assertEquals(contents['e'], 'fg') + def test_unhandled_type_warning(self): """Raw text without magic is ignored but shows warning.""" ci = stages.Init() data = "arbitrary text\n" ci.datasource = FakeDataSource(data) + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() @@ -76,6 +134,7 @@ class TestConsumeUserData(MockerTestCase): message.set_payload("Just text") ci.datasource = FakeDataSource(message.as_string()) + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() @@ -93,6 +152,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = FakeDataSource(script) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mock_write(outpath, script, 0700) self.mocker.replay() @@ -111,6 +171,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = FakeDataSource(message.as_string()) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mock_write(outpath, script, 0700) self.mocker.replay() @@ -129,6 +190,7 @@ class TestConsumeUserData(MockerTestCase): ci.datasource = 
FakeDataSource(message.as_string()) outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + self.mock_write = self.mocker.replace("cloudinit.util.write_file", passthrough=False) self.mock_write(outpath, script, 0700) self.mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) self.mocker.replay() -- cgit v1.2.3 From fc6aa5aa54ee35ff0a3eff823bae0d3cf9b34bc1 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 6 Mar 2013 19:24:05 -0800 Subject: Continue working on merging code. --- cloudinit/config/cc_landscape.py | 3 ++- cloudinit/config/cc_mounts.py | 3 ++- cloudinit/distros/__init__.py | 15 +++++++------ cloudinit/handlers/__init__.py | 3 ++- cloudinit/handlers/cloud_config.py | 15 ++++++------- cloudinit/helpers.py | 3 ++- cloudinit/mergers/__init__.py | 13 +++++++++--- cloudinit/sources/DataSourceAltCloud.py | 5 +++-- cloudinit/sources/DataSourceCloudStack.py | 3 --- cloudinit/sources/DataSourceConfigDrive.py | 4 +++- cloudinit/sources/DataSourceEc2.py | 3 --- cloudinit/sources/DataSourceMAAS.py | 3 ++- cloudinit/sources/DataSourceNoCloud.py | 5 ++--- cloudinit/sources/DataSourceNone.py | 3 --- cloudinit/sources/DataSourceOVF.py | 3 ++- cloudinit/sources/__init__.py | 10 ++++++--- cloudinit/stages.py | 9 ++++---- cloudinit/type_utils.py | 34 ++++++++++++++++++++++++++++++ cloudinit/util.py | 33 ++++++++++------------------- tests/unittests/test_userdata.py | 4 +++- 20 files changed, 104 insertions(+), 70 deletions(-) create mode 100644 cloudinit/type_utils.py (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py index 02610dd0..6734efee 100644 --- a/cloudinit/config/cc_landscape.py +++ b/cloudinit/config/cc_landscape.py @@ -24,6 +24,7 @@ from StringIO import StringIO from configobj import ConfigObj +from cloudinit import type_utils from cloudinit import util from cloudinit.settings import PER_INSTANCE @@ -58,7 +59,7 @@ def handle(_name, cfg, cloud, log, _args): if not isinstance(ls_cloudcfg, (dict)): raise RuntimeError(("'landscape' key existed in config," " but not a dictionary type," - " is a %s instead"), util.obj_name(ls_cloudcfg)) + " is a %s instead"), type_utils.obj_name(ls_cloudcfg)) if not ls_cloudcfg: return diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index cb772c86..6ebe563d 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -22,6 +22,7 @@ from string import whitespace # pylint: disable=W0402 import re +from cloudinit import type_utils from cloudinit import util # Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 @@ -60,7 +61,7 @@ def handle(_name, cfg, cloud, log, _args): # skip something that wasn't a list if not isinstance(cfgmnt[i], list): log.warn("Mount option %s not a list, got a %s instead", - (i + 1), util.obj_name(cfgmnt[i])) + (i + 1), type_utils.obj_name(cfgmnt[i])) continue startname = str(cfgmnt[i][0]) diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 6a684b89..eeea6af1 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -31,6 +31,7 @@ import re from cloudinit import importer from cloudinit import log as logging from cloudinit import ssh_util +from cloudinit import type_utils from cloudinit import util from cloudinit.distros.parsers import hosts @@ -427,7 +428,7 @@ class Distro(object): lines.append("%s %s" % (user, rules)) else: msg = "Can not create sudoers rule addition with type %r" - raise TypeError(msg % 
(util.obj_name(rules))) + raise TypeError(msg % (type_utils.obj_name(rules))) content = "\n".join(lines) content += "\n" # trailing newline @@ -550,7 +551,7 @@ def _normalize_groups(grp_cfg): c_grp_cfg[k] = [v] else: raise TypeError("Bad group member type %s" % - util.obj_name(v)) + type_utils.obj_name(v)) else: if isinstance(v, (list)): c_grp_cfg[k].extend(v) @@ -558,13 +559,13 @@ def _normalize_groups(grp_cfg): c_grp_cfg[k].append(v) else: raise TypeError("Bad group member type %s" % - util.obj_name(v)) + type_utils.obj_name(v)) elif isinstance(i, (str, basestring)): if i not in c_grp_cfg: c_grp_cfg[i] = [] else: raise TypeError("Unknown group name type %s" % - util.obj_name(i)) + type_utils.obj_name(i)) grp_cfg = c_grp_cfg groups = {} if isinstance(grp_cfg, (dict)): @@ -573,7 +574,7 @@ def _normalize_groups(grp_cfg): else: raise TypeError(("Group config must be list, dict " " or string types only and not %s") % - util.obj_name(grp_cfg)) + type_utils.obj_name(grp_cfg)) return groups @@ -604,7 +605,7 @@ def _normalize_users(u_cfg, def_user_cfg=None): ad_ucfg.append(v) else: raise TypeError(("Unmappable user value type %s" - " for key %s") % (util.obj_name(v), k)) + " for key %s") % (type_utils.obj_name(v), k)) u_cfg = ad_ucfg elif isinstance(u_cfg, (str, basestring)): u_cfg = util.uniq_merge_sorted(u_cfg) @@ -629,7 +630,7 @@ def _normalize_users(u_cfg, def_user_cfg=None): else: raise TypeError(("User config must be dictionary/list " " or string types only and not %s") % - util.obj_name(user_config)) + type_utils.obj_name(user_config)) # Ensure user options are in the right python friendly format if users: diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 63fdb948..924463ce 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -27,6 +27,7 @@ from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES) from cloudinit import importer from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -76,7 +77,7 @@ class Handler(object): self.frequency = frequency def __repr__(self): - return "%s: [%s]" % (util.obj_name(self), self.list_types()) + return "%s: [%s]" % (type_utils.obj_name(self), self.list_types()) @abc.abstractmethod def list_types(self): diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index d458dee2..5f519f78 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -29,8 +29,8 @@ from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) -DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" MERGE_HEADER = 'Merge-Type' +DEF_MERGERS = mergers.default_mergers() class CloudConfigPartHandler(handlers.Handler): @@ -39,9 +39,7 @@ class CloudConfigPartHandler(handlers.Handler): self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") self.file_names = [] - self.mergers = [ - mergers.string_extract_mergers(DEF_MERGE_TYPE), - ] + self.mergers = [DEF_MERGERS] def list_types(self): return [ @@ -59,6 +57,7 @@ class CloudConfigPartHandler(handlers.Handler): file_lines.append("# %s" % (fn)) file_lines.append("") if self.cloud_buf is not None: + # Something was actually gathered.... 
lines = [ "#cloud-config", '', @@ -86,7 +85,7 @@ class CloudConfigPartHandler(handlers.Handler): all_mergers.extend(mergers_yaml) all_mergers.extend(mergers_header) if not all_mergers: - all_mergers = mergers.string_extract_mergers(DEF_MERGE_TYPE) + all_mergers = DEF_MERGERS return all_mergers def _merge_part(self, payload, headers): @@ -94,7 +93,7 @@ class CloudConfigPartHandler(handlers.Handler): # Use the merger list from the last call, since it is the one # that will be defining how to merge with the next payload. curr_mergers = list(self.mergers[-1]) - LOG.debug("Merging with %s", curr_mergers) + LOG.debug("Merging by applying %s", curr_mergers) self.mergers.append(next_mergers) merger = mergers.construct(curr_mergers) if self.cloud_buf is None: @@ -106,9 +105,7 @@ class CloudConfigPartHandler(handlers.Handler): def _reset(self): self.file_names = [] self.cloud_buf = None - self.mergers = [ - mergers.string_extract_mergers(DEF_MERGE_TYPE), - ] + self.mergers = [DEF_MERGERS] def handle_part(self, _data, ctype, filename, payload, _freq, headers): if ctype == handlers.CONTENT_START: diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 2077401c..a4e6fb03 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -32,6 +32,7 @@ from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, CFG_ENV_NAME) from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -68,7 +69,7 @@ class FileLock(object): self.fn = fn def __str__(self): - return "<%s using file %r>" % (util.obj_name(self), self.fn) + return "<%s using file %r>" % (type_utils.obj_name(self), self.fn) def canon_sem_name(name): diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 4a112165..453426af 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -20,11 +20,12 @@ import re from cloudinit import importer from cloudinit import log as logging -from cloudinit import util +from cloudinit import type_utils NAME_MTCH = re.compile(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$") LOG = logging.getLogger(__name__) +DEF_MERGE_TYPE = "list(extend)+dict()+str(append)" class UnknownMerger(object): @@ -42,7 +43,7 @@ class UnknownMerger(object): # If not found the merge will be given to a '_handle_unknown' # function which can decide what to do wit the 2 values. 
def merge(self, source, merge_with): - type_name = util.obj_name(source) + type_name = type_utils.obj_name(source) type_name = type_name.lower() method_name = "_on_%s" % (type_name) meth = None @@ -127,6 +128,10 @@ def string_extract_mergers(merge_how): return parsed_mergers +def default_mergers(): + return tuple(string_extract_mergers(DEF_MERGE_TYPE)) + + def construct(parsed_mergers): mergers_to_be = [] for (m_name, m_ops) in parsed_mergers: @@ -145,4 +150,6 @@ def construct(parsed_mergers): root = LookupMerger(mergers) for (attr, opts) in mergers_to_be: mergers.append(attr(root, opts)) - return root \ No newline at end of file + return root + + diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index 9812bdcb..64548d43 100644 --- a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -30,6 +30,7 @@ import os.path from cloudinit import log as logging from cloudinit import sources from cloudinit import util + from cloudinit.util import ProcessExecutionError LOG = logging.getLogger(__name__) @@ -91,8 +92,8 @@ class DataSourceAltCloud(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - mstr = "%s [seed=%s]" % (util.obj_name(self), self.seed) - return mstr + root = sources.DataSource.__str__(self) + return "%s [seed=%s]" % (root, self.seed) def get_cloud_type(self): ''' diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index 076dba5a..c0e1a23c 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -59,9 +59,6 @@ class DataSourceCloudStack(sources.DataSource): return gw return None - def __str__(self): - return util.obj_name(self) - def _get_url_settings(self): mcfg = self.ds_cfg if not mcfg: diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index c7826851..46abd772 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -51,7 +51,9 @@ class DataSourceConfigDrive(sources.DataSource): self.ec2_metadata = None def __str__(self): - mstr = "%s [%s,ver=%s]" % (util.obj_name(self), self.dsmode, + root = sources.DataSource.__str__(self) + mstr = "%s [%s,ver=%s]" % (root, + self.dsmode, self.version) mstr += "[source=%s]" % (self.source) return mstr diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 2db53446..f010e640 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -49,9 +49,6 @@ class DataSourceEc2(sources.DataSource): self.seed_dir = os.path.join(paths.seed_dir, "ec2") self.api_ver = DEF_MD_VERSION - def __str__(self): - return util.obj_name(self) - def get_data(self): seed_ret = {} if util.read_optional_seed(seed_ret, base=(self.seed_dir + "/")): diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index b55d8a21..612d8ffa 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -50,7 +50,8 @@ class DataSourceMAAS(sources.DataSource): self.oauth_clockskew = None def __str__(self): - return "%s [%s]" % (util.obj_name(self), self.base_url) + root = sources.DataSource.__str__(self) + return "%s [%s]" % (root, self.base_url) def get_data(self): mcfg = self.ds_cfg diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index bed500a2..9a770d38 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ 
b/cloudinit/sources/DataSourceNoCloud.py @@ -40,9 +40,8 @@ class DataSourceNoCloud(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - mstr = "%s [seed=%s][dsmode=%s]" % (util.obj_name(self), - self.seed, self.dsmode) - return mstr + root = sources.DataSource.__str__(self) + return "%s [seed=%s][dsmode=%s]" % (root, self.seed, self.dsmode) def get_data(self): defaults = { diff --git a/cloudinit/sources/DataSourceNone.py b/cloudinit/sources/DataSourceNone.py index c2125bee..e2175e1f 100644 --- a/cloudinit/sources/DataSourceNone.py +++ b/cloudinit/sources/DataSourceNone.py @@ -41,9 +41,6 @@ class DataSourceNone(sources.DataSource): def get_instance_id(self): return 'iid-datasource-none' - def __str__(self): - return util.obj_name(self) - @property def is_disconnected(self): return True diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py index e90150c6..ae139074 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -43,7 +43,8 @@ class DataSourceOVF(sources.DataSource): self.supported_seed_starts = ("/", "file://") def __str__(self): - return "%s [seed=%s]" % (util.obj_name(self), self.seed) + root = sources.DataSource.__str__(self) + return "%s [seed=%s]" % (root, self.seed) def get_data(self): found = [] diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 96baff90..d8fbacdd 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -25,6 +25,7 @@ import os from cloudinit import importer from cloudinit import log as logging +from cloudinit import type_utils from cloudinit import user_data as ud from cloudinit import util @@ -52,7 +53,7 @@ class DataSource(object): self.userdata = None self.metadata = None self.userdata_raw = None - name = util.obj_name(self) + name = type_utils.obj_name(self) if name.startswith(DS_PREFIX): name = name[len(DS_PREFIX):] self.ds_cfg = util.get_cfg_by_path(self.sys_cfg, @@ -62,6 +63,9 @@ class DataSource(object): else: self.ud_proc = ud_proc + def __str__(self): + return type_utils.obj_name(self) + def get_userdata(self, apply_filter=False): if self.userdata is None: self.userdata = self.ud_proc.process(self.get_userdata_raw()) @@ -214,7 +218,7 @@ def normalize_pubkey_data(pubkey_data): def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): ds_list = list_sources(cfg_list, ds_deps, pkg_list) - ds_names = [util.obj_name(f) for f in ds_list] + ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) for cls in ds_list: @@ -222,7 +226,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): - return (s, util.obj_name(cls)) + return (s, type_utils.obj_name(cls)) except Exception: util.logexc(LOG, "Getting data from %s failed", cls) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 94a267df..531e7997 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -43,6 +43,7 @@ from cloudinit import helpers from cloudinit import importer from cloudinit import log as logging from cloudinit import sources +from cloudinit import type_utils from cloudinit import util LOG = logging.getLogger(__name__) @@ -220,7 +221,7 @@ class Init(object): # Any config provided??? 
pkg_list = self.cfg.get('datasource_pkg_list') or [] # Add the defaults at the end - for n in ['', util.obj_name(sources)]: + for n in ['', type_utils.obj_name(sources)]: if n not in pkg_list: pkg_list.append(n) cfg_list = self.cfg.get('datasource_list') or [] @@ -280,7 +281,7 @@ class Init(object): dp = self.paths.get_cpath('data') # Write what the datasource was and is.. - ds = "%s: %s" % (util.obj_name(self.datasource), self.datasource) + ds = "%s: %s" % (type_utils.obj_name(self.datasource), self.datasource) previous_ds = None ds_fn = os.path.join(idir, 'datasource') try: @@ -497,7 +498,7 @@ class Modules(object): else: raise TypeError(("Failed to read '%s' item in config," " unknown type %s") % - (item, util.obj_name(item))) + (item, type_utils.obj_name(item))) return module_list def _fixup_modules(self, raw_mods): @@ -515,7 +516,7 @@ class Modules(object): # Reset it so when ran it will get set to a known value freq = None mod_locs = importer.find_module(mod_name, - ['', util.obj_name(config)], + ['', type_utils.obj_name(config)], ['handle']) if not mod_locs: LOG.warn("Could not find module named %s", mod_name) diff --git a/cloudinit/type_utils.py b/cloudinit/type_utils.py new file mode 100644 index 00000000..2decbfc5 --- /dev/null +++ b/cloudinit/type_utils.py @@ -0,0 +1,34 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2012 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012 Yahoo! Inc. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# Author: Joshua Harlow +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# pylint: disable=C0302 + +import types + + +def obj_name(obj): + if isinstance(obj, (types.TypeType, + types.ModuleType, + types.FunctionType, + types.LambdaType)): + return str(obj.__name__) + return obj_name(obj.__class__) diff --git a/cloudinit/util.py b/cloudinit/util.py index ab918433..73bf6304 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -43,14 +43,15 @@ import subprocess import sys import tempfile import time -import types import urlparse import yaml from cloudinit import importer from cloudinit import log as logging +from cloudinit import mergers from cloudinit import safeyaml +from cloudinit import type_utils from cloudinit import url_helper as uhelp from cloudinit import version @@ -194,11 +195,12 @@ def fork_cb(child_cb, *args): os._exit(0) # pylint: disable=W0212 except: logexc(LOG, ("Failed forking and" - " calling callback %s"), obj_name(child_cb)) + " calling callback %s"), + type_utils.obj_name(child_cb)) os._exit(1) # pylint: disable=W0212 else: LOG.debug("Forked child %s who will run callback %s", - fid, obj_name(child_cb)) + fid, type_utils.obj_name(child_cb)) def is_true(val, addons=None): @@ -513,15 +515,6 @@ def make_url(scheme, host, port=None, return urlparse.urlunparse(pieces) -def obj_name(obj): - if isinstance(obj, (types.TypeType, - types.ModuleType, - types.FunctionType, - types.LambdaType)): - return str(obj.__name__) - return obj_name(obj.__class__) - - def mergemanydict(srcs, reverse=False): if reverse: srcs = reversed(srcs) @@ -538,13 +531,9 @@ def mergedict(src, cand): If C{src} has a key C{cand} will not override. Nested dictionaries are merged recursively. """ - if isinstance(src, dict) and isinstance(cand, dict): - for (k, v) in cand.iteritems(): - if k not in src: - src[k] = v - else: - src[k] = mergedict(src[k], v) - return src + raw_mergers = mergers.default_mergers() + merger = mergers.construct(raw_mergers) + return merger.merge(src, cand) @contextlib.contextmanager @@ -645,7 +634,7 @@ def load_yaml(blob, default=None, allowed=(dict,)): # Yes this will just be caught, but thats ok for now... 
raise TypeError(("Yaml load allows %s root types," " but got %s instead") % - (allowed, obj_name(converted))) + (allowed, type_utils.obj_name(converted))) loaded = converted except (yaml.YAMLError, TypeError, ValueError): if len(blob) == 0: @@ -714,7 +703,7 @@ def read_conf_with_confd(cfgfile): if not isinstance(confd, (str, basestring)): raise TypeError(("Config file %s contains 'conf_d' " "with non-string type %s") % - (cfgfile, obj_name(confd))) + (cfgfile, type_utils.obj_name(confd))) else: confd = str(confd).strip() elif os.path.isdir("%s.d" % cfgfile): @@ -1472,7 +1461,7 @@ def shellify(cmdlist, add_header=True): else: raise RuntimeError(("Unable to shellify type %s" " which is not a list or string") - % (obj_name(args))) + % (type_utils.obj_name(args))) LOG.debug("Shellified %s commands.", cmds_made) return content diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py index 9e1fed7e..ef0dd7b8 100644 --- a/tests/unittests/test_userdata.py +++ b/tests/unittests/test_userdata.py @@ -74,7 +74,7 @@ run: - morestuff ''' message2 = MIMEBase("text", "cloud-config") - message2['Merge-Type'] = 'dict()+list(extend)+str()' + message2['X-Merge-Type'] = 'dict()+list(extend)+str()' message2.set_payload(blob2) blob3 = ''' @@ -83,6 +83,7 @@ e: - 1 - 2 - 3 +p: 1 ''' message3 = MIMEBase("text", "cloud-config") message3['Merge-Type'] = 'dict()+list()+str()' @@ -109,6 +110,7 @@ e: self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) self.assertEquals(contents['a'], 'be') self.assertEquals(contents['e'], 'fg') + self.assertEquals(contents['p'], 1) def test_unhandled_type_warning(self): """Raw text without magic is ignored but shows warning.""" -- cgit v1.2.3 From 944623f4ad3e4c7319758c64053d06a3b05555a2 Mon Sep 17 00:00:00 2001 From: Juerg Haefliger Date: Wed, 19 Jun 2013 08:44:00 +0200 Subject: fix and cleanup usage of util.logexc --- cloudinit/config/cc_bootcmd.py | 5 ++--- cloudinit/config/cc_growpart.py | 8 +++++--- cloudinit/config/cc_phone_home.py | 14 +++++++------- cloudinit/config/cc_rightscale_userdata.py | 10 +++++----- cloudinit/config/cc_set_hostname.py | 6 +++--- cloudinit/config/cc_set_passwords.py | 6 +++--- cloudinit/config/cc_ssh.py | 10 +++++----- cloudinit/config/cc_ssh_import_id.py | 6 +++--- cloudinit/config/cc_update_hostname.py | 6 +++--- cloudinit/distros/__init__.py | 26 ++++++++++++------------- cloudinit/distros/rhel.py | 7 +++---- cloudinit/handlers/__init__.py | 13 ++++++------- cloudinit/handlers/boot_hook.py | 4 ++-- cloudinit/helpers.py | 18 ++++++++--------- cloudinit/sources/DataSourceAltCloud.py | 31 +++++++++++++++--------------- cloudinit/sources/DataSourceCloudStack.py | 6 ++++-- cloudinit/sources/DataSourceNoCloud.py | 6 +++--- cloudinit/ssh_util.py | 8 +++----- cloudinit/stages.py | 7 +++---- cloudinit/util.py | 5 ++--- 20 files changed, 100 insertions(+), 102 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/config/cc_bootcmd.py b/cloudinit/config/cc_bootcmd.py index 896cb4d0..3ac22967 100644 --- a/cloudinit/config/cc_bootcmd.py +++ b/cloudinit/config/cc_bootcmd.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2011 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. 
# # Author: Scott Moser # Author: Juerg Haefliger @@ -50,6 +50,5 @@ def handle(name, cfg, cloud, log, _args): cmd = ['/bin/sh', tmpf.name] util.subp(cmd, env=env, capture=False) except: - util.logexc(log, - ("Failed to run bootcmd module %s"), name) + util.logexc(log, "Failed to run bootcmd module %s", name) raise diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py index b6e1fd37..4f8c8f80 100644 --- a/cloudinit/config/cc_growpart.py +++ b/cloudinit/config/cc_growpart.py @@ -1,8 +1,10 @@ # vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. +# Copyright (C) 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser +# Author: Juerg Haefliger # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -121,15 +123,15 @@ class ResizeGrowPart(object): util.subp(["growpart", '--dry-run', diskdev, partnum]) except util.ProcessExecutionError as e: if e.exit_code != 1: - util.logexc(LOG, ("Failed growpart --dry-run for (%s, %s)" % - (diskdev, partnum))) + util.logexc(LOG, "Failed growpart --dry-run for (%s, %s)", + diskdev, partnum) raise ResizeFailedException(e) return (before, before) try: util.subp(["growpart", diskdev, partnum]) except util.ProcessExecutionError as e: - util.logexc(LOG, "Failed: growpart %s %s" % (diskdev, partnum)) + util.logexc(LOG, "Failed: growpart %s %s", diskdev, partnum) raise ResizeFailedException(e) return (before, get_size(partdev)) diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index c873c8a8..2e058ccd 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser # Author: Juerg Haefliger @@ -65,8 +65,8 @@ def handle(name, cfg, cloud, log, args): tries = int(tries) except: tries = 10 - util.logexc(log, ("Configuration entry 'tries'" - " is not an integer, using %s instead"), tries) + util.logexc(log, "Configuration entry 'tries' is not an integer, " + "using %s instead", tries) if post_list == "all": post_list = POST_LIST_ALL @@ -85,8 +85,8 @@ def handle(name, cfg, cloud, log, args): try: all_keys[n] = util.load_file(path) except: - util.logexc(log, ("%s: failed to open, can not" - " phone home that data!"), path) + util.logexc(log, "%s: failed to open, can not phone home that " + "data!", path) submit_keys = {} for k in post_list: @@ -115,5 +115,5 @@ def handle(name, cfg, cloud, log, args): retries=tries, sec_between=3, ssl_details=util.fetch_ssl_details(cloud.paths)) except: - util.logexc(log, ("Failed to post phone home data to" - " %s in %s tries"), url, tries) + util.logexc(log, "Failed to post phone home data to %s in %s tries", + url, tries) diff --git a/cloudinit/config/cc_rightscale_userdata.py b/cloudinit/config/cc_rightscale_userdata.py index 4bf18516..c771728d 100644 --- a/cloudinit/config/cc_rightscale_userdata.py +++ b/cloudinit/config/cc_rightscale_userdata.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. 
# # Author: Scott Moser # Author: Juerg Haefliger @@ -64,8 +64,8 @@ def handle(name, _cfg, cloud, log, _args): " raw userdata"), name, MY_HOOKNAME) return except: - util.logexc(log, ("Failed to parse query string %s" - " into a dictionary"), ud) + util.logexc(log, "Failed to parse query string %s into a dictionary", + ud) raise wrote_fns = [] @@ -86,8 +86,8 @@ def handle(name, _cfg, cloud, log, _args): wrote_fns.append(fname) except Exception as e: captured_excps.append(e) - util.logexc(log, "%s failed to read %s and write %s", - MY_NAME, url, fname) + util.logexc(log, "%s failed to read %s and write %s", MY_NAME, url, + fname) if wrote_fns: log.debug("Wrote out rightscale userdata to %s files", len(wrote_fns)) diff --git a/cloudinit/config/cc_set_hostname.py b/cloudinit/config/cc_set_hostname.py index 2b32fc94..5d7f4331 100644 --- a/cloudinit/config/cc_set_hostname.py +++ b/cloudinit/config/cc_set_hostname.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser # Author: Juerg Haefliger @@ -32,6 +32,6 @@ def handle(name, cfg, cloud, log, _args): log.debug("Setting the hostname to %s (%s)", fqdn, hostname) cloud.distro.set_hostname(hostname, fqdn) except Exception: - util.logexc(log, "Failed to set the hostname to %s (%s)", - fqdn, hostname) + util.logexc(log, "Failed to set the hostname to %s (%s)", fqdn, + hostname) raise diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index c6bf62fd..e93c8c6f 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser # Author: Juerg Haefliger @@ -81,8 +81,8 @@ def handle(_name, cfg, cloud, log, args): util.subp(['chpasswd'], ch_in) except Exception as e: errors.append(e) - util.logexc(log, - "Failed to set passwords with chpasswd for %s", users) + util.logexc(log, "Failed to set passwords with chpasswd for %s", + users) if len(randlist): blurb = ("Set the following 'random' passwords\n", diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index 7ef20d9f..64a5e3cb 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. 
# # Author: Scott Moser # Author: Juerg Haefliger @@ -85,8 +85,8 @@ def handle(_name, cfg, cloud, log, _args): util.subp(cmd, capture=False) log.debug("Generated a key for %s from %s", pair[0], pair[1]) except: - util.logexc(log, ("Failed generated a key" - " for %s from %s"), pair[0], pair[1]) + util.logexc(log, "Failed generated a key for %s from %s", + pair[0], pair[1]) else: # if not, generate them genkeys = util.get_cfg_option_list(cfg, @@ -102,8 +102,8 @@ def handle(_name, cfg, cloud, log, _args): with util.SeLinuxGuard("/etc/ssh", recursive=True): util.subp(cmd, capture=False) except: - util.logexc(log, ("Failed generating key type" - " %s to file %s"), keytype, keyfile) + util.logexc(log, "Failed generating key type %s to " + "file %s", keytype, keyfile) try: (users, _groups) = ds.normalize_users_groups(cfg, cloud.distro) diff --git a/cloudinit/config/cc_ssh_import_id.py b/cloudinit/config/cc_ssh_import_id.py index 83af36e9..50d96e15 100644 --- a/cloudinit/config/cc_ssh_import_id.py +++ b/cloudinit/config/cc_ssh_import_id.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser # Author: Juerg Haefliger @@ -71,8 +71,8 @@ def handle(_name, cfg, cloud, log, args): try: import_ssh_ids(import_ids, user, log) except Exception as exc: - util.logexc(log, "ssh-import-id failed for: %s %s" % - (user, import_ids), exc) + util.logexc(log, "ssh-import-id failed for: %s %s", user, + import_ids) elist.append(exc) if len(elist): diff --git a/cloudinit/config/cc_update_hostname.py b/cloudinit/config/cc_update_hostname.py index 52225cd8..e396ba13 100644 --- a/cloudinit/config/cc_update_hostname.py +++ b/cloudinit/config/cc_update_hostname.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2011 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser # Author: Juerg Haefliger @@ -38,6 +38,6 @@ def handle(name, cfg, cloud, log, _args): log.debug("Updating hostname to %s (%s)", fqdn, hostname) cloud.distro.update_hostname(hostname, fqdn, prev_fn) except Exception: - util.logexc(log, "Failed to update the hostname to %s (%s)", - fqdn, hostname) + util.logexc(log, "Failed to update the hostname to %s (%s)", fqdn, + hostname) raise diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 50d52594..e99cb16f 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. 
# # Author: Scott Moser @@ -142,8 +142,8 @@ class Distro(object): try: util.subp(['hostname', hostname]) except util.ProcessExecutionError: - util.logexc(LOG, ("Failed to non-persistently adjust" - " the system hostname to %s"), hostname) + util.logexc(LOG, "Failed to non-persistently adjust the system " + "hostname to %s", hostname) @abc.abstractmethod def _select_hostname(self, hostname, fqdn): @@ -200,8 +200,8 @@ class Distro(object): try: self._write_hostname(hostname, fn) except IOError: - util.logexc(LOG, "Failed to write hostname %s to %s", - hostname, fn) + util.logexc(LOG, "Failed to write hostname %s to %s", hostname, + fn) if (sys_hostname and prev_hostname and sys_hostname != prev_hostname): @@ -347,7 +347,7 @@ class Distro(object): try: util.subp(adduser_cmd, logstring=x_adduser_cmd) except Exception as e: - util.logexc(LOG, "Failed to create user %s due to error.", e) + util.logexc(LOG, "Failed to create user %s", name) raise e # Set password if plain-text password provided @@ -360,8 +360,8 @@ class Distro(object): try: util.subp(['passwd', '--lock', name]) except Exception as e: - util.logexc(LOG, ("Failed to disable password logins for" - "user %s" % name), e) + util.logexc(LOG, "Failed to disable password logins for " + "user %s", name) raise e # Configure sudo access @@ -385,7 +385,7 @@ class Distro(object): try: util.subp(cmd, pass_string, logstring="chpasswd for %s" % user) except Exception as e: - util.logexc(LOG, "Failed to set password for %s" % user) + util.logexc(LOG, "Failed to set password for %s", user) raise e return True @@ -427,7 +427,7 @@ class Distro(object): util.append_file(sudo_base, sudoers_contents) LOG.debug("Added '#includedir %s' to %s" % (path, sudo_base)) except IOError as e: - util.logexc(LOG, "Failed to write %s" % sudo_base, e) + util.logexc(LOG, "Failed to write %s", sudo_base) raise e util.ensure_dir(path, 0750) @@ -478,15 +478,15 @@ class Distro(object): try: util.subp(group_add_cmd) LOG.info("Created new group %s" % name) - except Exception as e: - util.logexc("Failed to create group %s" % name, e) + except Exception: + util.logexc("Failed to create group %s", name) # Add members to the group, if so defined if len(members) > 0: for member in members: if not util.is_user(member): LOG.warn("Unable to add group member '%s' to group '%s'" - "; user does not exist." % (member, name)) + "; user does not exist.", member, name) continue util.subp(['usermod', '-a', '-G', name, member]) diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index 174da3ab..0727ecd1 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser @@ -72,9 +72,8 @@ class Distro(distros.Distro): r_conf = ResolvConf(util.load_file(self.resolve_conf_fn)) r_conf.parse() except IOError: - util.logexc(LOG, - "Failed at parsing %s reverting to an empty instance", - self.resolve_conf_fn) + util.logexc(LOG, "Failed at parsing %s reverting to an empty " + "instance", self.resolve_conf_fn) r_conf = ResolvConf('') r_conf.parse() if dns_servers: diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 924463ce..497d68c5 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. 
-# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser @@ -117,10 +117,9 @@ def run_part(mod, data, filename, payload, frequency, headers): else: raise ValueError("Unknown module version %s" % (mod_ver)) except: - util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" - " with frequency %s"), - mod, content_type, filename, - mod_ver, frequency) + util.logexc(LOG, "Failed calling handler %s (%s, %s, %s) with " + "frequency %s", mod, content_type, filename, mod_ver, + frequency) def call_begin(mod, data, frequency): @@ -158,8 +157,8 @@ def walker_handle_handler(pdata, _ctype, _filename, payload): handlers.register(mod) pdata['handlercount'] = curcount + 1 except: - util.logexc(LOG, ("Failed at registering python file: %s" - " (part handler %s)"), modfname, curcount) + util.logexc(LOG, "Failed at registering python file: %s (part " + "handler %s)", modfname, curcount) def _extract_first_or_bytes(blob, size): diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index bf2899ab..11ac4fe5 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -70,5 +70,5 @@ class BootHookPartHandler(handlers.Handler): except util.ProcessExecutionError: util.logexc(LOG, "Boothooks script %s execution error", filepath) except Exception: - util.logexc(LOG, ("Boothooks unknown " - "error when running %s"), filepath) + util.logexc(LOG, "Boothooks unknown error when running %s", + filepath) diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index a4e6fb03..b91c1290 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser @@ -216,8 +216,8 @@ class ConfigMerger(object): if ds_cfg and isinstance(ds_cfg, (dict)): d_cfgs.append(ds_cfg) except: - util.logexc(LOG, ("Failed loading of datasource" - " config object from %s"), self._ds) + util.logexc(LOG, "Failed loading of datasource config object " + "from %s", self._ds) return d_cfgs def _get_env_configs(self): @@ -227,8 +227,8 @@ class ConfigMerger(object): try: e_cfgs.append(util.read_conf(e_fn)) except: - util.logexc(LOG, ('Failed loading of env. config' - ' from %s'), e_fn) + util.logexc(LOG, 'Failed loading of env. config from %s', + e_fn) return e_cfgs def _get_instance_configs(self): @@ -242,8 +242,8 @@ class ConfigMerger(object): try: i_cfgs.append(util.read_conf(cc_fn)) except: - util.logexc(LOG, ('Failed loading of cloud-config' - ' from %s'), cc_fn) + util.logexc(LOG, 'Failed loading of cloud-config from %s', + cc_fn) return i_cfgs def _read_cfg(self): @@ -259,8 +259,8 @@ class ConfigMerger(object): try: cfgs.append(util.read_conf(c_fn)) except: - util.logexc(LOG, ("Failed loading of configuration" - " from %s"), c_fn) + util.logexc(LOG, "Failed loading of configuration from %s", + c_fn) cfgs.extend(self._get_env_configs()) cfgs.extend(self._get_instance_configs()) diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index 64548d43..a834f8eb 100644 --- a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -1,10 +1,11 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2010 Canonical Ltd. 
-# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Joe VLcek +# Author: Juerg Haefliger # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -79,7 +80,7 @@ def read_user_data_callback(mount_dir): try: user_data = util.load_file(user_data_file).strip() except IOError: - util.logexc(LOG, ('Failed accessing user data file.')) + util.logexc(LOG, 'Failed accessing user data file.') return None return user_data @@ -178,7 +179,7 @@ class DataSourceAltCloud(sources.DataSource): return False # No user data found - util.logexc(LOG, ('Failed accessing user data.')) + util.logexc(LOG, 'Failed accessing user data.') return False def user_data_rhevm(self): @@ -205,12 +206,12 @@ class DataSourceAltCloud(sources.DataSource): (cmd_out, _err) = util.subp(cmd) LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) except ProcessExecutionError, _err: - util.logexc(LOG, (('Failed command: %s\n%s') % \ - (' '.join(cmd), _err.message))) + util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), + _err.message) return False except OSError, _err: - util.logexc(LOG, (('Failed command: %s\n%s') % \ - (' '.join(cmd), _err.message))) + util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), + _err.message) return False floppy_dev = '/dev/fd0' @@ -222,12 +223,12 @@ class DataSourceAltCloud(sources.DataSource): (cmd_out, _err) = util.subp(cmd) LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) except ProcessExecutionError, _err: - util.logexc(LOG, (('Failed command: %s\n%s') % \ - (' '.join(cmd), _err.message))) + util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), + _err.message) return False except OSError, _err: - util.logexc(LOG, (('Failed command: %s\n%s') % \ - (' '.join(cmd), _err.message))) + util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), + _err.message) return False try: @@ -236,8 +237,8 @@ class DataSourceAltCloud(sources.DataSource): if err.errno != errno.ENOENT: raise except util.MountFailedError: - util.logexc(LOG, ("Failed to mount %s" - " when looking for user data"), floppy_dev) + util.logexc(LOG, "Failed to mount %s when looking for user data", + floppy_dev) self.userdata_raw = return_str self.metadata = META_DATA_NOT_SUPPORTED @@ -272,8 +273,8 @@ class DataSourceAltCloud(sources.DataSource): if err.errno != errno.ENOENT: raise except util.MountFailedError: - util.logexc(LOG, ("Failed to mount %s" - " when looking for user data"), cdrom_dev) + util.logexc(LOG, "Failed to mount %s when looking for user " + "data", cdrom_dev) self.userdata_raw = return_str self.metadata = META_DATA_NOT_SUPPORTED diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index 81c8cda9..08f661e4 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -4,11 +4,13 @@ # Copyright (C) 2012 Cosmin Luta # Copyright (C) 2012 Yahoo! Inc. # Copyright (C) 2012 Gerard Dethier +# Copyright (C) 2013 Hewlett-Packard Development Company, L.P. 
# # Author: Cosmin Luta # Author: Scott Moser # Author: Joshua Harlow # Author: Gerard Dethier +# Author: Juerg Haefliger # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -109,8 +111,8 @@ class DataSourceCloudStack(sources.DataSource): int(time.time() - start_time)) return True except Exception: - util.logexc(LOG, ('Failed fetching from metadata ' - 'service %s'), self.metadata_address) + util.logexc(LOG, 'Failed fetching from metadata service %s', + self.metadata_address) return False def get_instance_id(self): diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 084abca7..4ef92a56 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser @@ -119,8 +119,8 @@ class DataSourceNoCloud(sources.DataSource): if e.errno != errno.ENOENT: raise except util.MountFailedError: - util.logexc(LOG, ("Failed to mount %s" - " when looking for data"), dev) + util.logexc(LOG, "Failed to mount %s when looking for " + "data", dev) # There was no indication on kernel cmdline or data # in the seeddir suggesting this handler should be used. diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 95133236..70a577bc 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -229,11 +229,9 @@ def extract_authorized_keys(username): except (IOError, OSError): # Give up and use a default key filename auth_key_fn = os.path.join(ssh_dir, 'authorized_keys') - util.logexc(LOG, ("Failed extracting 'AuthorizedKeysFile'" - " in ssh config" - " from %r, using 'AuthorizedKeysFile' file" - " %r instead"), - DEF_SSHD_CFG, auth_key_fn) + util.logexc(LOG, "Failed extracting 'AuthorizedKeysFile' in ssh " + "config from %r, using 'AuthorizedKeysFile' file " + "%r instead", DEF_SSHD_CFG, auth_key_fn) return (auth_key_fn, parse_authorized_keys(auth_key_fn)) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 543d247f..df49cabb 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser @@ -154,9 +154,8 @@ class Init(object): try: util.chownbyname(log_file, u, g) except OSError: - util.logexc(LOG, ("Unable to change the ownership" - " of %s to user %s, group %s"), - log_file, u, g) + util.logexc(LOG, "Unable to change the ownership of %s to " + "user %s, group %s", log_file, u, g) def read_cfg(self, extra_fns=None): # None check so that we don't keep on re-loading if empty diff --git a/cloudinit/util.py b/cloudinit/util.py index b27b3567..c45aae06 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1,7 +1,7 @@ # vi: ts=4 expandtab # # Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012, 2013 Hewlett-Packard Development Company, L.P. # Copyright (C) 2012 Yahoo! Inc. 
# # Author: Scott Moser @@ -219,8 +219,7 @@ def fork_cb(child_cb, *args): child_cb(*args) os._exit(0) # pylint: disable=W0212 except: - logexc(LOG, ("Failed forking and" - " calling callback %s"), + logexc(LOG, "Failed forking and calling callback %s", type_utils.obj_name(child_cb)) os._exit(1) # pylint: disable=W0212 else: -- cgit v1.2.3 From 36bbd898e9b8bef508b5d185dc1e52af0f13cfd0 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 05:56:30 -0700 Subject: Add usage of '#json-patch' --- cloudinit/handlers/__init__.py | 1 + cloudinit/handlers/cloud_config.py | 17 ++++++++--------- 2 files changed, 9 insertions(+), 9 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 497d68c5..297e7451 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -62,6 +62,7 @@ INCLUSION_TYPES_MAP = { '#part-handler': 'text/part-handler', '#cloud-boothook': 'text/cloud-boothook', '#cloud-config-archive': 'text/cloud-config-archive', + '#json-patch': 'application/json-patch+json', } # Sorted longest first diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 8d1ba37f..84653375 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -66,22 +66,21 @@ class CloudConfigPartHandler(handlers.Handler): def list_types(self): ctypes_handled = [ handlers.type_from_starts_with("#cloud-config"), - JSON_PATCH_CTYPE, + handlers.type_from_starts_with("#json-patch"), ] return ctypes_handled def _write_cloud_config(self): - if not self.cloud_fn: + if not self.cloud_fn or not len(self.file_names): return # Capture which files we merged from... file_lines = [] - if self.file_names: - file_lines.append("# from %s files" % (len(self.file_names))) - for fn in self.file_names: - if not fn: - fn = '?' - file_lines.append("# %s" % (fn)) - file_lines.append("") + file_lines.append("# from %s files" % (len(self.file_names))) + for fn in self.file_names: + if not fn: + fn = '?' + file_lines.append("# %s" % (fn)) + file_lines.append("") if self.cloud_buf is not None: # Something was actually gathered.... lines = [ -- cgit v1.2.3 From 2849c8d3eb44b186e9eaed46080796d56e9529f2 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 13:06:55 -0700 Subject: Also handle custom handlers correctly. LP: #1203368 --- cloudinit/handlers/__init__.py | 9 ++++++--- cloudinit/helpers.py | 2 ++ cloudinit/stages.py | 28 +++++++++++----------------- 3 files changed, 19 insertions(+), 20 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 497d68c5..93df5b61 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -151,10 +151,12 @@ def walker_handle_handler(pdata, _ctype, _filename, payload): try: mod = fixup_handler(importer.import_module(modname)) call_begin(mod, pdata['data'], frequency) - # Only register and increment - # after the above have worked (so we don't if it - # fails) + # Only register and increment after the above have worked, so we don't + # register if it fails starting. handlers.register(mod) + # Ensure that it gets finalized by marking said module as having been + # initialized correctly. 
+ handlers.markings[mod].append('initialized') pdata['handlercount'] = curcount + 1 except: util.logexc(LOG, "Failed at registering python file: %s (part " @@ -230,6 +232,7 @@ def walk(msg, callback, data): headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 + return partnum def fixup_handler(mod, def_freq=PER_INSTANCE): diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index b91c1290..bd37b8a3 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -22,6 +22,7 @@ from time import time +import collections import contextlib import io import os @@ -281,6 +282,7 @@ class ContentHandlers(object): def __init__(self): self.registered = {} + self.markings = collections.defaultdict(list) def __contains__(self, item): return self.is_registered(item) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ed995628..43eaca1b 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -383,21 +383,15 @@ class Init(object): # Form our cloud interface data = self.cloudify() - # This list contains the modules initialized (so that we only finalize - # ones that were actually initialized) - inited_handlers = [] - def init_handlers(): # Init the handlers first - called = [] for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: + if 'initialized' in c_handlers.markings[mod]: # Avoid initing the same module twice (if said module # is registered to more than one content-type). continue handlers.call_begin(mod, data, frequency) - inited_handlers.append(mod) - called.append(mod) + c_handlers.markings[mod].append('initialized') def walk_handlers(): # Walk the user data @@ -413,22 +407,22 @@ class Init(object): # names... 'handlercount': 0, } - handlers.walk(user_data_msg, handlers.walker_callback, - data=part_data) + return handlers.walk(user_data_msg, handlers.walker_callback, + data=part_data) def finalize_handlers(): # Give callbacks opportunity to finalize - called = [] for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: - # Avoid finalizing the same module twice (if said module - # is registered to more than one content-type). - continue - if mod not in inited_handlers: + mod_markings = c_handlers.markings[mod] + if 'initialized' not in mod_markings: # Said module was never inited in the first place, so lets # not attempt to finalize those that never got called. continue - called.append(mod) + if 'finalized' in mod_markings: + # Avoid finalizing the same module twice (if said module + # is registered to more than one content-type). + continue + c_handlers.markings[mod].append('finalized') try: handlers.call_end(mod, data, frequency) except: -- cgit v1.2.3 From bbfc76fb74595881b25acc1bbbd426314c2390ed Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 13:13:02 -0700 Subject: Remove return not used. 
--- cloudinit/handlers/__init__.py | 1 - cloudinit/stages.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 93df5b61..f9b90323 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -232,7 +232,6 @@ def walk(msg, callback, data): headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 - return partnum def fixup_handler(mod, def_freq=PER_INSTANCE): diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 43eaca1b..ba974a3e 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -407,8 +407,8 @@ class Init(object): # names... 'handlercount': 0, } - return handlers.walk(user_data_msg, handlers.walker_callback, - data=part_data) + handlers.walk(user_data_msg, handlers.walker_callback, + data=part_data) def finalize_handlers(): # Give callbacks opportunity to finalize -- cgit v1.2.3 From 971c2b2366c6e58921e1d2dd3ba18e597cbc20e8 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 10:45:29 -0700 Subject: Just use an initialized array. --- cloudinit/handlers/__init__.py | 5 +---- cloudinit/helpers.py | 7 ++++--- cloudinit/stages.py | 13 ++++--------- 3 files changed, 9 insertions(+), 16 deletions(-) (limited to 'cloudinit/handlers/__init__.py') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index f9b90323..1d450061 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -153,10 +153,7 @@ def walker_handle_handler(pdata, _ctype, _filename, payload): call_begin(mod, pdata['data'], frequency) # Only register and increment after the above have worked, so we don't # register if it fails starting. - handlers.register(mod) - # Ensure that it gets finalized by marking said module as having been - # initialized correctly. - handlers.markings[mod].append('initialized') + handlers.register(mod, initialized=True) pdata['handlercount'] = curcount + 1 except: util.logexc(LOG, "Failed at registering python file: %s (part " diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index bd37b8a3..1c46efde 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -22,7 +22,6 @@ from time import time -import collections import contextlib import io import os @@ -282,7 +281,7 @@ class ContentHandlers(object): def __init__(self): self.registered = {} - self.markings = collections.defaultdict(list) + self.initialized = [] def __contains__(self, item): return self.is_registered(item) @@ -293,11 +292,13 @@ class ContentHandlers(object): def is_registered(self, content_type): return content_type in self.registered - def register(self, mod): + def register(self, mod, initialized=False): types = set() for t in mod.list_types(): self.registered[t] = mod types.add(t) + if initialized and mod not in self.initialized: + self.initialized.append(mod) return types def _get_handler(self, content_type): diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ba974a3e..fade1182 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -386,12 +386,12 @@ class Init(object): def init_handlers(): # Init the handlers first for (_ctype, mod) in c_handlers.iteritems(): - if 'initialized' in c_handlers.markings[mod]: + if mod in c_handlers.initialized: # Avoid initing the same module twice (if said module # is registered to more than one content-type). 
continue
                 handlers.call_begin(mod, data, frequency)
-                c_handlers.markings[mod].append('initialized')
+                c_handlers.initialized.append(mod)
 
         def walk_handlers():
             # Walk the user data
@@ -413,16 +413,11 @@ class Init(object):
         def finalize_handlers():
             # Give callbacks opportunity to finalize
             for (_ctype, mod) in c_handlers.iteritems():
-                mod_markings = c_handlers.markings[mod]
-                if 'initialized' not in mod_markings:
+                if mod not in c_handlers.initialized:
                     # Said module was never inited in the first place, so lets
                     # not attempt to finalize those that never got called.
                     continue
-                if 'finalized' in mod_markings:
-                    # Avoid finalizing the same module twice (if said module
-                    # is registered to more than one content-type).
-                    continue
-                c_handlers.markings[mod].append('finalized')
+                c_handlers.initialized.remove(mod)
                 try:
                     handlers.call_end(mod, data, frequency)
                 except:
--
cgit v1.2.3


From 243df010c49de52be0ca9159e15378bb335b1163 Mon Sep 17 00:00:00 2001
From: Scott Moser
Date: Wed, 24 Jul 2013 11:04:56 -0400
Subject: change 'json-patch' to 'cloud-config-jsonp'

---
 cloudinit/handlers/__init__.py     | 2 +-
 cloudinit/handlers/cloud_config.py | 9 +++++----
 2 files changed, 6 insertions(+), 5 deletions(-)

(limited to 'cloudinit/handlers/__init__.py')

diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py
index 4c7c9295..2ddc75f4 100644
--- a/cloudinit/handlers/__init__.py
+++ b/cloudinit/handlers/__init__.py
@@ -62,7 +62,7 @@ INCLUSION_TYPES_MAP = {
     '#part-handler': 'text/part-handler',
     '#cloud-boothook': 'text/cloud-boothook',
     '#cloud-config-archive': 'text/cloud-config-archive',
-    '#json-patch': 'application/json-patch+json',
+    '#cloud-config-jsonp': 'text/cloud-config-jsonp',
 }
 
 # Sorted longest first
diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py
index 0f080e66..8bbc904d 100644
--- a/cloudinit/handlers/cloud_config.py
+++ b/cloudinit/handlers/cloud_config.py
@@ -54,8 +54,9 @@ DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()')
 CLOUD_PREFIX = "#cloud-config"
 
 # The file header -> content types this module will handle.
+CC_JSONP_PRE = "#cloud-config-jsonp"
 CC_TYPES = {
-    '#json-patch': handlers.type_from_starts_with("#json-patch"),
+    CC_JSONP_PRE: handlers.type_from_starts_with(CC_JSONP_PRE),
     '#cloud-config': handlers.type_from_starts_with("#cloud-config"),
 }
 
@@ -116,12 +117,12 @@ class CloudConfigPartHandler(handlers.Handler):
 
     def _merge_patch(self, payload):
         payload = payload.lstrip()
-        if payload.lower().startswith("#json-patch"):
+        if payload.lower().startswith(CC_JSONP_PRE):
             # JSON doesn't handle comments in this manner, so ensure that
             # if we started with this 'type' that we remove it before
             # attempting to load it as json (which the jsonpatch library will
             # attempt to do).
-            payload = payload[len("#json-patch"):]
+            payload = payload[len(CC_JSONP_PRE):]
         patch = jsonpatch.JsonPatch.from_string(payload)
         LOG.debug("Merging by applying json patch %s", patch)
         self.cloud_buf = patch.apply(self.cloud_buf, in_place=False)
@@ -149,7 +150,7 @@ class CloudConfigPartHandler(handlers.Handler):
         # First time through, merge with an empty dict...
         if self.cloud_buf is None or not self.file_names:
             self.cloud_buf = {}
-        if ctype == CC_TYPES['#json-patch']:
+        if ctype == CC_TYPES[CC_JSONP_PRE]:
            self._merge_patch(payload)
         else:
             self._merge_part(payload, headers)
--
cgit v1.2.3
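
For reference, a minimal standalone sketch of how a '#cloud-config-jsonp' user-data part is applied on top of an already-merged cloud-config dict, following the jsonpatch calls used in CloudConfigPartHandler._merge_patch above. The helper name apply_jsonp_part and the sample payload are illustrative assumptions, not part of the cloud-init API; only the python-json-patch calls (JsonPatch.from_string, patch.apply) come from the patches above.

    # Illustrative sketch (not cloud-init code): apply a '#cloud-config-jsonp'
    # part to an already-merged cloud-config dict, mirroring _merge_patch above.
    import jsonpatch

    CC_JSONP_PRE = "#cloud-config-jsonp"

    def apply_jsonp_part(cloud_buf, payload):
        payload = payload.lstrip()
        if payload.lower().startswith(CC_JSONP_PRE):
            # Strip the '#cloud-config-jsonp' marker so the remainder is plain
            # JSON that the jsonpatch library can parse.
            payload = payload[len(CC_JSONP_PRE):]
        patch = jsonpatch.JsonPatch.from_string(payload)
        # Returns a new dict; the original merged config is left untouched.
        return patch.apply(cloud_buf, in_place=False)

    merged = {'run': ['b', 'c'], 'a': 'be'}
    part = '#cloud-config-jsonp\n[{"op": "add", "path": "/run/-", "value": "d"}]'
    print(apply_jsonp_part(merged, part))
    # Expected result (key order may vary): {'run': ['b', 'c', 'd'], 'a': 'be'}

Similarly, a sketch of how a user-data part can request a specific merge strategy via the merge-type header exercised in test_userdata.py above; the header name and merger string are taken from that test, while the surrounding MIME assembly is plain standard-library usage and the payload is made up.

    # Illustrative sketch: mark a cloud-config MIME part with a merge strategy
    # so lists are extended rather than replaced when parts are combined.
    from email.mime.base import MIMEBase

    part = MIMEBase("text", "cloud-config")
    part['X-Merge-Type'] = 'dict()+list(extend)+str()'
    part.set_payload('run:\n - stuff\n - morestuff\n')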