From 6b7e65e4f57902c25363c78a7e47aa2caa579b7b Mon Sep 17 00:00:00 2001 From: Ben Howard Date: Thu, 18 Jul 2013 15:37:18 -0600 Subject: Added SmartOS datasource and unit tests. --- cloudinit/settings.py | 1 + cloudinit/sources/DataSourceSmartOS.py | 172 +++++++++++++++++++++++++++++++++ cloudinit/util.py | 18 ++++ 3 files changed, 191 insertions(+) create mode 100644 cloudinit/sources/DataSourceSmartOS.py (limited to 'cloudinit') diff --git a/cloudinit/settings.py b/cloudinit/settings.py index dc371cd2..9f6badae 100644 --- a/cloudinit/settings.py +++ b/cloudinit/settings.py @@ -37,6 +37,7 @@ CFG_BUILTIN = { 'MAAS', 'Ec2', 'CloudStack', + 'SmartOS', # At the end to act as a 'catch' when none of the above work... 'None', ], diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py new file mode 100644 index 00000000..f9b724eb --- /dev/null +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -0,0 +1,172 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2013 Canonical Ltd. +# +# Author: Ben Howard +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# +# Datasource for provisioning on SmartOS. This works on Joyent +# and public/private Clouds using SmartOS. +# +# SmartOS hosts use a serial console (/dev/ttyS1) on Linux Guests. +# The meta-data is transmitted via key/value pairs made by +# requests on the console. For example, to get the hostname, you +# would send "GET hostname" on /dev/ttyS1. 
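# An illustrative aside, not part of this patch: condensed from the
# query_data() helper added later in this file, the exchange described
# above looks roughly like this (the device path and 60-second timeout
# are the defaults this datasource uses).

import serial

def example_get(key, device='/dev/ttyS1', timeout=60):
    ser = serial.Serial(device, timeout=timeout)
    ser.write("GET %s\n" % key)
    status = str(ser.readline()).rstrip()   # first line is the status
    value = []
    if 'SUCCESS' in status:                 # anything else means failure
        while True:
            line = ser.readline()
            if line.rstrip() == ".":        # a lone "." ends the response
                break
            value.append(line)
    ser.close()
    return "".join(value)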
+# + + +import os +import os.path +import serial +from cloudinit import log as logging +from cloudinit import sources +from cloudinit import util + + +TTY_LOC = '/dev/ttyS1' +LOG = logging.getLogger(__name__) + + +class DataSourceSmartOS(sources.DataSource): + def __init__(self, sys_cfg, distro, paths): + sources.DataSource.__init__(self, sys_cfg, distro, paths) + self.seed_dir = os.path.join(paths.seed_dir, 'sdc') + self.seed = None + self.is_smartdc = None + + def __str__(self): + root = sources.DataSource.__str__(self) + return "%s [seed=%s]" % (root, self.seed) + + def get_data(self): + md = {} + ud = "" + + if not os.path.exists(TTY_LOC): + LOG.debug("Host does not appear to be on SmartOS") + return False + self.seed = TTY_LOC + + system_uuid, system_type = dmi_data() + if 'smartdc' not in system_type.lower(): + LOG.debug("Host is not on SmartOS") + return False + self.is_smartdc = True + + hostname = query_data("hostname", strip=True) + if not hostname: + hostname = system_uuid + + md['local-hostname'] = hostname + md['instance-id'] = system_uuid + md['public-keys'] = query_data("root_authorized_keys", strip=True) + ud = query_data("user-script") + md['iptables_disable'] = query_data("disable_iptables_flag", + strip=True) + md['motd_sys_info'] = query_data("enable_motd_sys_info", strip=True) + + self.metadata = md + self.userdata_raw = ud + return True + + def get_instance_id(self): + return self.metadata['instance-id'] + + +def get_serial(): + """This is replaced in unit testing, allowing us to replace + serial.Serial with a mocked class""" + return serial.Serial() + + +def query_data(noun, strip=False): + """Makes a request to via the serial console via "GET " + + In the response, the first line is the status, while subsequent lines + are is the value. A blank line with a "." is used to indicate end of + response. + + The timeout value of 60 seconds should never be hit. The value + is taken from SmartOS own provisioning tools. Since we are reading + each line individually up until the single ".", the transfer is + usually very fast (i.e. microseconds) to get the response. 
+ """ + if not noun: + return False + + ser = get_serial() + ser.port = '/dev/ttyS1' + ser.open() + if not ser.isOpen(): + LOG.debug("Serial console is not open") + return False + + ser.write("GET %s\n" % noun.rstrip()) + status = str(ser.readline()).rstrip() + response = [] + eom_found = False + + if 'SUCCESS' not in status: + ser.close() + return None + + while not eom_found: + m = ser.readline() + if m.rstrip() == ".": + eom_found = True + else: + response.append(m) + + ser.close() + if not strip: + return "".join(response) + else: + return "".join(response).rstrip() + + return None + + +def dmi_data(): + sys_uuid, sys_type = None, None + dmidecode_path = util.which('dmidecode') + if not dmidecode_path: + return False + + sys_uuid_cmd = [dmidecode_path, "-s", "system-uuid"] + try: + LOG.debug("Getting hostname from dmidecode") + (sys_uuid, _err) = util.subp(sys_uuid_cmd) + except Exception as e: + util.logexc(LOG, "Failed to get system UUID", e) + + sys_type_cmd = [dmidecode_path, "-s", "system-product-name"] + try: + LOG.debug("Determining hypervisor product name via dmidecode") + (sys_type, _err) = util.subp(sys_type_cmd) + except Exception as e: + util.logexc(LOG, "Failed to get system UUID", e) + + return sys_uuid.lower(), sys_type + + +# Used to match classes to dependencies +datasources = [ + (DataSourceSmartOS, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), +] + + +# Return a list of data sources that match this set of dependencies +def get_datasource_list(depends): + return sources.list_from_depends(depends, datasources) diff --git a/cloudinit/util.py b/cloudinit/util.py index c45aae06..7163225f 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1743,3 +1743,21 @@ def get_mount_info(path, log=LOG): mountinfo_path = '/proc/%s/mountinfo' % os.getpid() lines = load_file(mountinfo_path).splitlines() return parse_mount_info(path, lines, log) + +def which(program): + # Return path of program for execution if found in path + def is_exe(fpath): + return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + + fpath, fname = os.path.split(program) + if fpath: + if is_exe(program): + return program + else: + for path in os.environ["PATH"].split(os.pathsep): + path = path.strip('"') + exe_file = os.path.join(path, program) + if is_exe(exe_file): + return exe_file + + return None -- cgit v1.2.3 From c818ddba06ff7d486a085edae531896156c14e9d Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 19 Jul 2013 15:49:35 -0700 Subject: Add the ability to decompress MIME gzip. Instead of being restricted to only gzip compressing the overall mime segment or individual included segments, allow for each mime segment to be gzip compressed. LP: #1203203 --- cloudinit/user_data.py | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index df069ff8..23c31fde 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -48,6 +48,18 @@ ARCHIVE_TYPES = ["text/cloud-config-archive"] UNDEF_TYPE = "text/plain" ARCHIVE_UNDEF_TYPE = "text/cloud-config" +# This seems to hit most of the gzip possible content types. 
+DECOMP_TYPES = [ + 'application/gzip', + 'application/gzip-compressed', + 'application/gzipped', + 'application/x-compress', + 'application/x-compressed', + 'application/x-gunzip', + 'application/x-gzip', + 'application/x-gzip-compressed', +] + # Msg header used to track attachments ATTACHMENT_FIELD = 'Number-Attachments' @@ -67,6 +79,13 @@ class UserDataProcessor(object): return accumulating_msg def _process_msg(self, base_msg, append_msg): + + def replace_header(part, key, value): + if key in part: + part.replace_header(key, value) + else: + part[key] = value + for part in base_msg.walk(): if is_skippable(part): continue @@ -75,6 +94,18 @@ class UserDataProcessor(object): ctype_orig = part.get_content_type() payload = part.get_payload(decode=True) + # When the message states it is of a gzipped content type ensure + # that we attempt to decode said payload so that the decompressed + # data can be examined (instead of the compressed data). + if ctype_orig in DECOMP_TYPES: + try: + payload = util.decomp_gzip(payload, quiet=False) + ctype_orig = UNDEF_TYPE + # TODO(harlowja): should we also set the payload to the + # decompressed value?? + except util.DecompressionError: + pass + if not ctype_orig: ctype_orig = UNDEF_TYPE @@ -85,10 +116,7 @@ class UserDataProcessor(object): ctype = ctype_orig if ctype != ctype_orig: - if CONTENT_TYPE in part: - part.replace_header(CONTENT_TYPE, ctype) - else: - part[CONTENT_TYPE] = ctype + replace_header(part, CONTENT_TYPE, ctype) if ctype in INCLUDE_TYPES: self._do_include(payload, append_msg) @@ -100,10 +128,7 @@ class UserDataProcessor(object): # Should this be happening, shouldn't # the part header be modified and not the base? - if CONTENT_TYPE in base_msg: - base_msg.replace_header(CONTENT_TYPE, ctype) - else: - base_msg[CONTENT_TYPE] = ctype + replace_header(base_msg, CONTENT_TYPE, ctype) self._attach_part(append_msg, part) -- cgit v1.2.3 From 79b384341eb756bb2d5a9e5f53fbaa4aa40d8527 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 12:18:44 -0700 Subject: Bump the version. LP: #1203364 --- cloudinit/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'cloudinit') diff --git a/cloudinit/version.py b/cloudinit/version.py index 024d5118..4b29a587 100644 --- a/cloudinit/version.py +++ b/cloudinit/version.py @@ -20,7 +20,7 @@ from distutils import version as vr def version(): - return vr.StrictVersion("0.7.2") + return vr.StrictVersion("0.7.3") def version_string(): -- cgit v1.2.3 From 4e9a13142f1ee81c905a2cc9401a88f115ec778e Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 12:38:25 -0700 Subject: Init and finalize refactor. Instead of previously initializing and not finalizing the handles that completed successfully when a handler initializing or running failed we should attempt to always give said handlers a chance to finalize (even when another handler fails). 
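In outline, the change below is the classic try/finally shape: record which handlers were successfully initialized, then finalize exactly those, no matter what fails in between. A minimal, self-contained sketch of the pattern (illustrative names, not the actual stages.py identifiers):

    def run_handlers(handlers_by_type, init, walk, finalize):
        # Handlers that were initialized are always finalized, even if a
        # later init or the walk itself raises part-way through.
        inited = []
        try:
            for mod in handlers_by_type.values():
                if mod in inited:
                    continue        # registered for several content-types
                init(mod)           # may raise
                inited.append(mod)
            walk()                  # may raise
        finally:
            for mod in inited:
                try:
                    finalize(mod)
                except Exception:
                    pass            # log and keep finalizing the rest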
--- cloudinit/stages.py | 82 ++++++++++++++++++++++++++++++++++------------------- 1 file changed, 53 insertions(+), 29 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index df49cabb..6893afd9 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -383,36 +383,60 @@ class Init(object): # Form our cloud interface data = self.cloudify() - # Init the handlers first - called = [] - for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: - continue - handlers.call_begin(mod, data, frequency) - called.append(mod) - - # Walk the user data - part_data = { - 'handlers': c_handlers, - # Any new handlers that are encountered get writen here - 'handlerdir': idir, - 'data': data, - # The default frequency if handlers don't have one - 'frequency': frequency, - # This will be used when new handlers are found - # to help write there contents to files with numbered - # names... - 'handlercount': 0, - } - handlers.walk(user_data_msg, handlers.walker_callback, data=part_data) + # This list contains the modules initialized (so that we only finalize + # ones that were actually initialized) + inited_handlers = [] + + def init_handlers(): + # Init the handlers first + called = [] + for (_ctype, mod) in c_handlers.iteritems(): + if mod in called: + # Avoid initing the same module twice (if said module + # is registered to more than one content-type). + continue + handlers.call_begin(mod, data, frequency) + inited_handlers.append(mod) + called.append(mod) + + def walk_handlers(): + # Walk the user data + part_data = { + 'handlers': c_handlers, + # Any new handlers that are encountered get writen here + 'handlerdir': idir, + 'data': data, + # The default frequency if handlers don't have one + 'frequency': frequency, + # This will be used when new handlers are found + # to help write there contents to files with numbered + # names... + 'handlercount': 0, + } + handlers.walk(user_data_msg, handlers.walker_callback, + data=part_data) + + def finalize_handlers(): + # Give callbacks opportunity to finalize + called = [] + for (_ctype, mod) in c_handlers.iteritems(): + if mod in called: + # Avoid finalizing the same module twice (if said module + # is registered to more than one content-type). + continue + if mod not in inited_handlers: + continue + called.append(mod) + try: + handlers.call_end(mod, data, frequency) + except: + util.logexc(LOG, "Failed to finalize handler: %s", mod) - # Give callbacks opportunity to finalize - called = [] - for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: - continue - handlers.call_end(mod, data, frequency) - called.append(mod) + try: + init_handlers() + walk_handlers() + finally: + finalize_handlers() # Perform post-consumption adjustments so that # modules that run during the init stage reflect -- cgit v1.2.3 From a3ef9d24c6c913676d22dd7017a1f1b235d47a45 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 12:43:51 -0700 Subject: Update comments + link to bug. LP: #1203368 --- cloudinit/stages.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'cloudinit') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 6893afd9..ed995628 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -425,6 +425,8 @@ class Init(object): # is registered to more than one content-type). continue if mod not in inited_handlers: + # Said module was never inited in the first place, so lets + # not attempt to finalize those that never got called. 
continue called.append(mod) try: -- cgit v1.2.3 From 2849c8d3eb44b186e9eaed46080796d56e9529f2 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 13:06:55 -0700 Subject: Also handle custom handlers correctly. LP: #1203368 --- cloudinit/handlers/__init__.py | 9 ++++++--- cloudinit/helpers.py | 2 ++ cloudinit/stages.py | 28 +++++++++++----------------- 3 files changed, 19 insertions(+), 20 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 497d68c5..93df5b61 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -151,10 +151,12 @@ def walker_handle_handler(pdata, _ctype, _filename, payload): try: mod = fixup_handler(importer.import_module(modname)) call_begin(mod, pdata['data'], frequency) - # Only register and increment - # after the above have worked (so we don't if it - # fails) + # Only register and increment after the above have worked, so we don't + # register if it fails starting. handlers.register(mod) + # Ensure that it gets finalized by marking said module as having been + # initialized correctly. + handlers.markings[mod].append('initialized') pdata['handlercount'] = curcount + 1 except: util.logexc(LOG, "Failed at registering python file: %s (part " @@ -230,6 +232,7 @@ def walk(msg, callback, data): headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 + return partnum def fixup_handler(mod, def_freq=PER_INSTANCE): diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index b91c1290..bd37b8a3 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -22,6 +22,7 @@ from time import time +import collections import contextlib import io import os @@ -281,6 +282,7 @@ class ContentHandlers(object): def __init__(self): self.registered = {} + self.markings = collections.defaultdict(list) def __contains__(self, item): return self.is_registered(item) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ed995628..43eaca1b 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -383,21 +383,15 @@ class Init(object): # Form our cloud interface data = self.cloudify() - # This list contains the modules initialized (so that we only finalize - # ones that were actually initialized) - inited_handlers = [] - def init_handlers(): # Init the handlers first - called = [] for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: + if 'initialized' in c_handlers.markings[mod]: # Avoid initing the same module twice (if said module # is registered to more than one content-type). continue handlers.call_begin(mod, data, frequency) - inited_handlers.append(mod) - called.append(mod) + c_handlers.markings[mod].append('initialized') def walk_handlers(): # Walk the user data @@ -413,22 +407,22 @@ class Init(object): # names... 'handlercount': 0, } - handlers.walk(user_data_msg, handlers.walker_callback, - data=part_data) + return handlers.walk(user_data_msg, handlers.walker_callback, + data=part_data) def finalize_handlers(): # Give callbacks opportunity to finalize - called = [] for (_ctype, mod) in c_handlers.iteritems(): - if mod in called: - # Avoid finalizing the same module twice (if said module - # is registered to more than one content-type). - continue - if mod not in inited_handlers: + mod_markings = c_handlers.markings[mod] + if 'initialized' not in mod_markings: # Said module was never inited in the first place, so lets # not attempt to finalize those that never got called. 
continue - called.append(mod) + if 'finalized' in mod_markings: + # Avoid finalizing the same module twice (if said module + # is registered to more than one content-type). + continue + c_handlers.markings[mod].append('finalized') try: handlers.call_end(mod, data, frequency) except: -- cgit v1.2.3 From bbfc76fb74595881b25acc1bbbd426314c2390ed Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 13:13:02 -0700 Subject: Remove return not used. --- cloudinit/handlers/__init__.py | 1 - cloudinit/stages.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 93df5b61..f9b90323 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -232,7 +232,6 @@ def walk(msg, callback, data): headers['Content-Type'] = ctype callback(data, filename, part.get_payload(decode=True), headers) partnum = partnum + 1 - return partnum def fixup_handler(mod, def_freq=PER_INSTANCE): diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 43eaca1b..ba974a3e 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -407,8 +407,8 @@ class Init(object): # names... 'handlercount': 0, } - return handlers.walk(user_data_msg, handlers.walker_callback, - data=part_data) + handlers.walk(user_data_msg, handlers.walker_callback, + data=part_data) def finalize_handlers(): # Give callbacks opportunity to finalize -- cgit v1.2.3 From 7880588f804ea035f03eba9335af71f3322dab97 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 14:34:00 -0700 Subject: Ensure we reset the part after decompression. --- cloudinit/user_data.py | 36 +++++++++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 9 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index 23c31fde..97853e51 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -23,8 +23,10 @@ import os import email + from email.mime.base import MIMEBase from email.mime.multipart import MIMEMultipart +from email.mime.nonmultipart import MIMENonMultipart from email.mime.text import MIMEText from cloudinit import handlers @@ -80,6 +82,10 @@ class UserDataProcessor(object): def _process_msg(self, base_msg, append_msg): + def find_ctype(payload): + ctype = handlers.type_from_starts_with(payload) + return ctype + def replace_header(part, key, value): if key in part: part.replace_header(key, value) @@ -93,6 +99,7 @@ class UserDataProcessor(object): ctype = None ctype_orig = part.get_content_type() payload = part.get_payload(decode=True) + was_compressed = False # When the message states it is of a gzipped content type ensure # that we attempt to decode said payload so that the decompressed @@ -100,21 +107,32 @@ class UserDataProcessor(object): if ctype_orig in DECOMP_TYPES: try: payload = util.decomp_gzip(payload, quiet=False) - ctype_orig = UNDEF_TYPE - # TODO(harlowja): should we also set the payload to the - # decompressed value?? - except util.DecompressionError: - pass + # At this point we don't know what the content-type is + # since we just decompressed it. 
+ ctype_orig = None + was_compressed = True + except util.DecompressionError as e: + LOG.warn("Failed decompressing payload from %s of length" + " %s due to: %s", ctype_orig, len(payload), e) + continue + # Attempt to figure out the payloads content-type if not ctype_orig: ctype_orig = UNDEF_TYPE - if ctype_orig in TYPE_NEEDED: - ctype = handlers.type_from_starts_with(payload) - + ctype = find_ctype(payload) if ctype is None: ctype = ctype_orig + # In the case where the data was compressed, we want to make sure + # that we create a new message that contains the found content + # type with the uncompressed content since later traversals of the + # messages will expect a part not compressed. + if was_compressed: + maintype, subtype = ctype.split("/", 1) + part = MIMENonMultipart(maintype, subtype) + part.set_payload(payload) + if ctype != ctype_orig: replace_header(part, CONTENT_TYPE, ctype) @@ -126,7 +144,7 @@ class UserDataProcessor(object): self._explode_archive(payload, append_msg) continue - # Should this be happening, shouldn't + # TODO(harlowja): Should this be happening, shouldn't # the part header be modified and not the base? replace_header(base_msg, CONTENT_TYPE, ctype) -- cgit v1.2.3 From 64c69053c11385cc43b6c628dbe8a1bf28ccc49c Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 14:57:42 -0700 Subject: Keep filename from original part. --- cloudinit/user_data.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index 97853e51..e17bcaee 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -130,8 +130,12 @@ class UserDataProcessor(object): # messages will expect a part not compressed. if was_compressed: maintype, subtype = ctype.split("/", 1) - part = MIMENonMultipart(maintype, subtype) - part.set_payload(payload) + n_part = MIMENonMultipart(maintype, subtype) + n_part.set_payload(payload) + if part.get_filename(): + n_part.add_header('Content-Disposition', 'attachment', + filename=part.get_filename()) + part = n_part if ctype != ctype_orig: replace_header(part, CONTENT_TYPE, ctype) -- cgit v1.2.3 From 432778cf2890c19940f29f47f9efc2cb8e784f43 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 16:34:39 -0700 Subject: Unify filename, header replacement. 
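Some background on the helpers introduced below (an illustrative aside, not from the patch): Message.replace_header() raises KeyError when the header is absent, which is what forced the earlier if/else around CONTENT_TYPE; deleting first and then re-adding covers both cases, because deleting a missing header from an email.message.Message is a no-op. For example:

    from email.mime.text import MIMEText

    msg = MIMEText("hello")
    del msg['Launch-Index']            # header absent: no exception
    msg['Launch-Index'] = '0'
    del msg['Launch-Index']            # header present: removed
    msg['Launch-Index'] = '1'
    assert msg['Launch-Index'] == '1'  # exactly one occurrence remains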
--- cloudinit/user_data.py | 56 ++++++++++++++++++++++++++++---------------------- 1 file changed, 32 insertions(+), 24 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index e17bcaee..454f3c06 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -70,6 +70,19 @@ ATTACHMENT_FIELD = 'Number-Attachments' EXAMINE_FOR_LAUNCH_INDEX = ["text/cloud-config"] +def _replace_header(msg, key, value): + del msg[key] + msg[key] = value + + +def _set_filename(msg, filename): + if not filename: + return + del msg['Content-Disposition'] + msg.add_header('Content-Disposition', + 'attachment', filename=str(filename)) + + class UserDataProcessor(object): def __init__(self, paths): self.paths = paths @@ -83,14 +96,7 @@ class UserDataProcessor(object): def _process_msg(self, base_msg, append_msg): def find_ctype(payload): - ctype = handlers.type_from_starts_with(payload) - return ctype - - def replace_header(part, key, value): - if key in part: - part.replace_header(key, value) - else: - part[key] = value + return handlers.type_from_starts_with(payload) for part in base_msg.walk(): if is_skippable(part): @@ -132,13 +138,17 @@ class UserDataProcessor(object): maintype, subtype = ctype.split("/", 1) n_part = MIMENonMultipart(maintype, subtype) n_part.set_payload(payload) - if part.get_filename(): - n_part.add_header('Content-Disposition', 'attachment', - filename=part.get_filename()) + # Copy various headers from the old part to the new one, + # but don't include all the headers since some are not useful + # after decoding and decompression. + _set_filename(n_part, part.get_filename()) + for h in ('Launch-Index',): + if h in part: + _replace_header(n_part, h, str(part[h])) part = n_part if ctype != ctype_orig: - replace_header(part, CONTENT_TYPE, ctype) + _replace_header(part, CONTENT_TYPE, ctype) if ctype in INCLUDE_TYPES: self._do_include(payload, append_msg) @@ -150,7 +160,7 @@ class UserDataProcessor(object): # TODO(harlowja): Should this be happening, shouldn't # the part header be modified and not the base? 
- replace_header(base_msg, CONTENT_TYPE, ctype) + _replace_header(base_msg, CONTENT_TYPE, ctype) self._attach_part(append_msg, part) @@ -185,8 +195,7 @@ class UserDataProcessor(object): def _process_before_attach(self, msg, attached_id): if not msg.get_filename(): - msg.add_header('Content-Disposition', - 'attachment', filename=PART_FN_TPL % (attached_id)) + _set_filename(msg, PART_FN_TPL % (attached_id)) self._attach_launch_index(msg) def _do_include(self, content, append_msg): @@ -264,13 +273,15 @@ class UserDataProcessor(object): msg.set_payload(content) if 'filename' in ent: - msg.add_header('Content-Disposition', - 'attachment', filename=ent['filename']) + _set_filename(msg, ent['filename']) if 'launch-index' in ent: msg.add_header('Launch-Index', str(ent['launch-index'])) for header in list(ent.keys()): - if header in ('content', 'filename', 'type', 'launch-index'): + if header.lower() in ('content', 'filename', 'type', + 'launch-index', 'content-disposition', + ATTACHMENT_FIELD.lower(), + CONTENT_TYPE.lower()): continue msg.add_header(header, ent[header]) @@ -285,13 +296,13 @@ class UserDataProcessor(object): outer_msg[ATTACHMENT_FIELD] = '0' if new_count is not None: - outer_msg.replace_header(ATTACHMENT_FIELD, str(new_count)) + _replace_header(outer_msg, ATTACHMENT_FIELD, str(new_count)) fetched_count = 0 try: fetched_count = int(outer_msg.get(ATTACHMENT_FIELD)) except (ValueError, TypeError): - outer_msg.replace_header(ATTACHMENT_FIELD, str(fetched_count)) + _replace_header(outer_msg, ATTACHMENT_FIELD, str(fetched_count)) return fetched_count def _attach_part(self, outer_msg, part): @@ -323,10 +334,7 @@ def convert_string(raw_data, headers=None): if "mime-version:" in data[0:4096].lower(): msg = email.message_from_string(data) for (key, val) in headers.iteritems(): - if key in msg: - msg.replace_header(key, val) - else: - msg[key] = val + _replace_header(msg, key, val) else: mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE) maintype, subtype = mtype.split("/", 1) -- cgit v1.2.3 From 251317563bd36a339e6fa7a08a0fc05b5ee975a4 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 20 Jul 2013 16:40:11 -0700 Subject: Just check the filename existing. --- cloudinit/user_data.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index 454f3c06..d49ea094 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -76,8 +76,6 @@ def _replace_header(msg, key, value): def _set_filename(msg, filename): - if not filename: - return del msg['Content-Disposition'] msg.add_header('Content-Disposition', 'attachment', filename=str(filename)) @@ -141,7 +139,8 @@ class UserDataProcessor(object): # Copy various headers from the old part to the new one, # but don't include all the headers since some are not useful # after decoding and decompression. - _set_filename(n_part, part.get_filename()) + if part.get_filename(): + _set_filename(n_part, part.get_filename()) for h in ('Launch-Index',): if h in part: _replace_header(n_part, h, str(part[h])) -- cgit v1.2.3 From 7022512f3ceb955be2834844f05d4683f78ff276 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 09:26:44 -0700 Subject: Use constants for repeated type strings. 
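The boothook handler's version of the pattern, sketched standalone (BOOTHOOK_PREFIX matches the constant added below; strip_marker is an illustrative stand-in for the slicing done in _write_part): defining the sentinel once means the registered content-type and the prefix stripping cannot drift apart.

    BOOTHOOK_PREFIX = "#cloud-boothook"

    def strip_marker(payload):
        # Drop the "#cloud-boothook" line (and the character after it)
        # before writing the script out.
        if payload.startswith(BOOTHOOK_PREFIX):
            return payload[len(BOOTHOOK_PREFIX) + 1:]
        return payload

    print(strip_marker("#cloud-boothook\necho hi"))   # -> "echo hi"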
--- cloudinit/handlers/boot_hook.py | 13 ++++++------- cloudinit/handlers/cloud_config.py | 5 +++-- cloudinit/handlers/shell_script.py | 3 ++- cloudinit/handlers/upstart_job.py | 3 ++- 4 files changed, 13 insertions(+), 11 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index 11ac4fe5..5e7b6204 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -29,6 +29,7 @@ from cloudinit import util from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) +BOOTHOOK_PREFIX = "#cloud-boothook" class BootHookPartHandler(handlers.Handler): @@ -41,18 +42,16 @@ class BootHookPartHandler(handlers.Handler): def list_types(self): return [ - handlers.type_from_starts_with("#cloud-boothook"), + handlers.type_from_starts_with(BOOTHOOK_PREFIX), ] def _write_part(self, payload, filename): filename = util.clean_filename(filename) - payload = util.dos2unix(payload) - prefix = "#cloud-boothook" - start = 0 - if payload.startswith(prefix): - start = len(prefix) + 1 filepath = os.path.join(self.boothook_dir, filename) - contents = payload[start:] + contents = util.dos2unix(payload) + if contents.startswith(BOOTHOOK_PREFIX): + real_start = len(BOOTHOOK_PREFIX) + 1 + contents = contents[real_start:] util.write_file(filepath, contents, 0700) return filepath diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index c97ca3e8..730672d7 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -49,6 +49,7 @@ MERGE_HEADER = 'Merge-Type' # # This gets loaded into yaml with final result {'a': 22} DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()') +CLOUD_PREFIX = "#cloud-config" class CloudConfigPartHandler(handlers.Handler): @@ -60,7 +61,7 @@ class CloudConfigPartHandler(handlers.Handler): def list_types(self): return [ - handlers.type_from_starts_with("#cloud-config"), + handlers.type_from_starts_with(CLOUD_PREFIX), ] def _write_cloud_config(self): @@ -78,7 +79,7 @@ class CloudConfigPartHandler(handlers.Handler): if self.cloud_buf is not None: # Something was actually gathered.... lines = [ - "#cloud-config", + CLOUD_PREFIX, '', ] lines.extend(file_lines) diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py index b185c374..62289d98 100644 --- a/cloudinit/handlers/shell_script.py +++ b/cloudinit/handlers/shell_script.py @@ -29,6 +29,7 @@ from cloudinit import util from cloudinit.settings import (PER_ALWAYS) LOG = logging.getLogger(__name__) +SHELL_PREFIX = "#!" 
class ShellScriptPartHandler(handlers.Handler): @@ -38,7 +39,7 @@ class ShellScriptPartHandler(handlers.Handler): def list_types(self): return [ - handlers.type_from_starts_with("#!"), + handlers.type_from_starts_with(SHELL_PREFIX), ] def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py index 7a73d1b2..bac4cad2 100644 --- a/cloudinit/handlers/upstart_job.py +++ b/cloudinit/handlers/upstart_job.py @@ -31,6 +31,7 @@ from cloudinit import util from cloudinit.settings import (PER_INSTANCE) LOG = logging.getLogger(__name__) +UPSTART_PREFIX = "#upstart-job" class UpstartJobPartHandler(handlers.Handler): @@ -40,7 +41,7 @@ class UpstartJobPartHandler(handlers.Handler): def list_types(self): return [ - handlers.type_from_starts_with("#upstart-job"), + handlers.type_from_starts_with(UPSTART_PREFIX), ] def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 -- cgit v1.2.3 From 27f096a1ab2e60222f85d87c961e388fdefaf92c Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 09:34:26 -0700 Subject: Use a util helper to do prefix/suffix removal. --- cloudinit/handlers/boot_hook.py | 8 +++----- cloudinit/util.py | 8 ++++++++ 2 files changed, 11 insertions(+), 5 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index 5e7b6204..1848ce2c 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -48,11 +48,9 @@ class BootHookPartHandler(handlers.Handler): def _write_part(self, payload, filename): filename = util.clean_filename(filename) filepath = os.path.join(self.boothook_dir, filename) - contents = util.dos2unix(payload) - if contents.startswith(BOOTHOOK_PREFIX): - real_start = len(BOOTHOOK_PREFIX) + 1 - contents = contents[real_start:] - util.write_file(filepath, contents, 0700) + contents = util.strip_prefix_suffix(util.dos2unix(payload), + prefix=BOOTHOOK_PREFIX) + util.write_file(filepath, contents.lstrip(), 0700) return filepath def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 diff --git a/cloudinit/util.py b/cloudinit/util.py index c45aae06..47d71ef4 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1530,6 +1530,14 @@ def shellify(cmdlist, add_header=True): return content +def strip_prefix_suffix(line, prefix=None, suffix=None): + if prefix and line.startswith(prefix): + line = line[len(prefix):] + if suffix and line.endswith(suffix): + line = line[:-len(suffix)] + return line + + def is_container(): """ Checks to see if this code running in a container of some sort -- cgit v1.2.3 From 971c2b2366c6e58921e1d2dd3ba18e597cbc20e8 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 10:45:29 -0700 Subject: Just use an initialized array. --- cloudinit/handlers/__init__.py | 5 +---- cloudinit/helpers.py | 7 ++++--- cloudinit/stages.py | 13 ++++--------- 3 files changed, 9 insertions(+), 16 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index f9b90323..1d450061 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -153,10 +153,7 @@ def walker_handle_handler(pdata, _ctype, _filename, payload): call_begin(mod, pdata['data'], frequency) # Only register and increment after the above have worked, so we don't # register if it fails starting. 
- handlers.register(mod) - # Ensure that it gets finalized by marking said module as having been - # initialized correctly. - handlers.markings[mod].append('initialized') + handlers.register(mod, initialized=True) pdata['handlercount'] = curcount + 1 except: util.logexc(LOG, "Failed at registering python file: %s (part " diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index bd37b8a3..1c46efde 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -22,7 +22,6 @@ from time import time -import collections import contextlib import io import os @@ -282,7 +281,7 @@ class ContentHandlers(object): def __init__(self): self.registered = {} - self.markings = collections.defaultdict(list) + self.initialized = [] def __contains__(self, item): return self.is_registered(item) @@ -293,11 +292,13 @@ class ContentHandlers(object): def is_registered(self, content_type): return content_type in self.registered - def register(self, mod): + def register(self, mod, initialized=False): types = set() for t in mod.list_types(): self.registered[t] = mod types.add(t) + if initialized and mod not in self.initialized: + self.initialized.append(mod) return types def _get_handler(self, content_type): diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ba974a3e..fade1182 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -386,12 +386,12 @@ class Init(object): def init_handlers(): # Init the handlers first for (_ctype, mod) in c_handlers.iteritems(): - if 'initialized' in c_handlers.markings[mod]: + if mod in c_handlers.initialized: # Avoid initing the same module twice (if said module # is registered to more than one content-type). continue handlers.call_begin(mod, data, frequency) - c_handlers.markings[mod].append('initialized') + c_handlers.initialized.append(mod) def walk_handlers(): # Walk the user data @@ -413,16 +413,11 @@ class Init(object): def finalize_handlers(): # Give callbacks opportunity to finalize for (_ctype, mod) in c_handlers.iteritems(): - mod_markings = c_handlers.markings[mod] - if 'initialized' not in mod_markings: + if mod not in c_handlers.initialized: # Said module was never inited in the first place, so lets # not attempt to finalize those that never got called. continue - if 'finalized' in mod_markings: - # Avoid finalizing the same module twice (if said module - # is registered to more than one content-type). - continue - c_handlers.markings[mod].append('finalized') + c_handlers.initialized.remove(mod) try: handlers.call_end(mod, data, frequency) except: -- cgit v1.2.3 From a5dd2146bb98874219eb449ae06f57203099d4d4 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 11:01:55 -0700 Subject: Also make the dir handler registration a simple function. --- cloudinit/stages.py | 57 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 33 insertions(+), 24 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index fade1182..f08589a7 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -344,12 +344,13 @@ class Init(object): cdir = self.paths.get_cpath("handlers") idir = self._get_ipath("handlers") - # Add the path to the plugins dir to the top of our list for import - # instance dir should be read before cloud-dir - if cdir and cdir not in sys.path: - sys.path.insert(0, cdir) - if idir and idir not in sys.path: - sys.path.insert(0, idir) + # Add the path to the plugins dir to the top of our list for importing + # new handlers. 
+ # + # Note(harlowja): instance dir should be read before cloud-dir + for d in [cdir, idir]: + if d and d not in sys.path: + sys.path.insert(0, d) # Ensure datasource fetched before activation (just incase) user_data_msg = self.datasource.get_userdata(True) @@ -357,24 +358,32 @@ class Init(object): # This keeps track of all the active handlers c_handlers = helpers.ContentHandlers() - # Add handlers in cdir - potential_handlers = util.find_modules(cdir) - for (fname, mod_name) in potential_handlers.iteritems(): - try: - mod_locs = importer.find_module(mod_name, [''], - ['list_types', - 'handle_part']) - if not mod_locs: - LOG.warn(("Could not find a valid user-data handler" - " named %s in file %s"), mod_name, fname) - continue - mod = importer.import_module(mod_locs[0]) - mod = handlers.fixup_handler(mod) - types = c_handlers.register(mod) - LOG.debug("Added handler for %s from %s", types, fname) - except: - util.logexc(LOG, "Failed to register handler from %s", fname) - + def register_handlers_in_dir(path): + # Attempts to register any handler modules under the given path. + potential_handlers = util.find_modules(path) + for (fname, mod_name) in potential_handlers.iteritems(): + try: + mod_locs = importer.find_module(mod_name, [''], + ['list_types', + 'handle_part']) + if not mod_locs: + LOG.warn(("Could not find a valid user-data handler" + " named %s in file %s"), mod_name, fname) + continue + mod = importer.import_module(mod_locs[0]) + mod = handlers.fixup_handler(mod) + types = c_handlers.register(mod) + LOG.debug("Added handler for %s from %s", types, fname) + except Exception: + util.logexc(LOG, "Failed to register handler from %s", + fname) + + # Add any handlers in the cloud-dir + register_handlers_in_dir(cdir) + + # Register any other handlers that come from the default set. This + # is done after the cloud-dir handlers so that the cdir modules can + # take over the default user-data handler content-types. def_handlers = self._default_userdata_handlers() applied_def_handlers = c_handlers.register_defaults(def_handlers) if applied_def_handlers: -- cgit v1.2.3 From d655d019fb0a45389d87db39b0ef5001e27e2616 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 21 Jul 2013 11:04:55 -0700 Subject: Ensure what we are searching over is a directory. --- cloudinit/stages.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'cloudinit') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index f08589a7..3e49e8c5 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -360,6 +360,8 @@ class Init(object): def register_handlers_in_dir(path): # Attempts to register any handler modules under the given path. + if not path or not os.path.isdir(path): + return potential_handlers = util.find_modules(path) for (fname, mod_name) in potential_handlers.iteritems(): try: -- cgit v1.2.3 From 1072010fdde26203bc69b911e4a478953323a6ef Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 23 Jul 2013 11:45:34 -0400 Subject: Fix password setting for rhel5. Changing password via 'chpasswd' command in rhel5 would fail, if input to the 'chpasswd' command doesn't end with '\n'. The fix is just to append a carriage return to the input. --- ChangeLog | 2 ++ cloudinit/config/cc_set_passwords.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'cloudinit') diff --git a/ChangeLog b/ChangeLog index dcc67626..4cdd20ad 100644 --- a/ChangeLog +++ b/ChangeLog @@ -5,6 +5,8 @@ 'initctl reload-configuration' (LP: #1124384). If so, then invoke it. - add Azure datasource. 
- add support for SuSE / SLES [Juerg Haefliger] + - add a trailing carriage return to chpasswd input, which reportedly + caused a problem on rhel5 if missing. 0.7.2: - add a debian watch file - add 'sudo' entry to ubuntu's default user (LP: #1080717) diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index e93c8c6f..56a36906 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -75,7 +75,7 @@ def handle(_name, cfg, cloud, log, args): plist_in.append("%s:%s" % (u, p)) users.append(u) - ch_in = '\n'.join(plist_in) + ch_in = '\n'.join(plist_in) + '\n' try: log.debug("Changing password for %s:", users) util.subp(['chpasswd'], ch_in) -- cgit v1.2.3 From ccbdf8c360e4272055208afb013a17a218c9f097 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 23 Jul 2013 13:10:33 -0400 Subject: alias 'availability_zone' to availability-zone in metadata service. The place this was noticed was in trying to use the 'nova.clouds.archive.ubuntu.com' mirror selection. Because the config-drive-v2 has a metadata entry of 'availability_zone', it didn't get found by the availabilty_zone property in cloudinit/sources/__init__.py LP: #1190431 --- cloudinit/sources/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'cloudinit') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index d8fbacdd..974c0407 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -135,7 +135,8 @@ class DataSource(object): @property def availability_zone(self): - return self.metadata.get('availability-zone') + return self.metadata.get('availability-zone', + self.metadata.get('availability_zone')) def get_instance_id(self): if not self.metadata or 'instance-id' not in self.metadata: -- cgit v1.2.3 From c67cc904bfddc9ed462f54d85d31236b39b6285f Mon Sep 17 00:00:00 2001 From: Ben Howard Date: Tue, 23 Jul 2013 12:36:15 -0600 Subject: Move more functionality into get_serial() --- cloudinit/sources/DataSourceSmartOS.py | 42 ++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 15 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index f9b724eb..129020ec 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -35,7 +35,8 @@ from cloudinit import sources from cloudinit import util -TTY_LOC = '/dev/ttyS1' +DEF_TTY_LOC = '/dev/ttyS1' +TTY_LOC = None LOG = logging.getLogger(__name__) @@ -54,6 +55,7 @@ class DataSourceSmartOS(sources.DataSource): md = {} ud = "" + TTY_LOC = self.sys_cfg.get("serial_device", DEF_TTY_LOC) if not os.path.exists(TTY_LOC): LOG.debug("Host does not appear to be on SmartOS") return False @@ -72,11 +74,17 @@ class DataSourceSmartOS(sources.DataSource): md['local-hostname'] = hostname md['instance-id'] = system_uuid md['public-keys'] = query_data("root_authorized_keys", strip=True) - ud = query_data("user-script") + md['user-script'] = query_data("user-script") + md['user-data'] = query_data("user-script") md['iptables_disable'] = query_data("disable_iptables_flag", strip=True) md['motd_sys_info'] = query_data("enable_motd_sys_info", strip=True) + if md['user-data']: + ud = md['user-data'] + else: + ud = md['user-script'] + self.metadata = md self.userdata_raw = ud return True @@ -87,8 +95,22 @@ class DataSourceSmartOS(sources.DataSource): def get_serial(): """This is replaced in unit testing, allowing us to replace - serial.Serial with a 
mocked class""" - return serial.Serial() + serial.Serial with a mocked class + + The timeout value of 60 seconds should never be hit. The value + is taken from SmartOS own provisioning tools. Since we are reading + each line individually up until the single ".", the transfer is + usually very fast (i.e. microseconds) to get the response. + """ + if not TTY_LOC: + raise AttributeError("TTY_LOC value is not set") + + _ret = serial.Serial(TTY_LOC, timeout=60) + if not _ret.isOpen(): + raise SystemError("Unable to open %s" % TTY_LOC) + + return _ret + def query_data(noun, strip=False): @@ -97,22 +119,12 @@ def query_data(noun, strip=False): In the response, the first line is the status, while subsequent lines are is the value. A blank line with a "." is used to indicate end of response. + """ - The timeout value of 60 seconds should never be hit. The value - is taken from SmartOS own provisioning tools. Since we are reading - each line individually up until the single ".", the transfer is - usually very fast (i.e. microseconds) to get the response. - """ if not noun: return False ser = get_serial() - ser.port = '/dev/ttyS1' - ser.open() - if not ser.isOpen(): - LOG.debug("Serial console is not open") - return False - ser.write("GET %s\n" % noun.rstrip()) status = str(ser.readline()).rstrip() response = [] -- cgit v1.2.3 From a4310ee3db0b394dcebd4f6b49d3b25bba37fedf Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 23 Jul 2013 15:17:50 -0400 Subject: on azure datasource, grab use Deployment as the instance-id LP: #1204190 --- cloudinit/sources/DataSourceAzure.py | 44 ++++++++++++++++++++++++++- tests/unittests/test_datasource/test_azure.py | 23 +++++++++++++- 2 files changed, 65 insertions(+), 2 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index c90d7b07..0a5caebe 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -114,7 +114,8 @@ class DataSourceAzureNet(sources.DataSource): # claim the datasource even if the command failed util.logexc(LOG, "agent command '%s' failed.", mycfg['cmd']) - wait_for = [os.path.join(mycfg['datadir'], "SharedConfig.xml")] + shcfgxml = os.path.join(mycfg['datadir'], "SharedConfig.xml") + wait_for = [shcfgxml] fp_files = [] for pk in self.cfg.get('_pubkeys', []): @@ -129,6 +130,14 @@ class DataSourceAzureNet(sources.DataSource): LOG.debug("waited %.3f seconds for %d files to appear", time.time() - start, len(wait_for)) + if shcfgxml in missing: + LOG.warn("SharedConfig.xml missing, using static instance-id") + else: + try: + self.metadata['instance-id'] = iid_from_shared_config(shcfgxml) + except ValueError as e: + LOG.warn("failed to get instance id in %s: %s" % (shcfgxml, e)) + pubkeys = pubkeys_from_crt_files(fp_files) self.metadata['public-keys'] = pubkeys @@ -252,6 +261,20 @@ def load_azure_ovf_pubkeys(sshnode): return found +def single_node_at_path(node, pathlist): + curnode = node + for tok in pathlist: + results = find_child(curnode, lambda n: n.localName == tok) + if len(results) == 0: + raise ValueError("missing %s token in %s" % (tok, str(pathlist))) + if len(results) > 1: + raise ValueError("found %s nodes of type %s looking for %s" % + (len(results), tok, str(pathlist))) + curnode = results[0] + + return curnode + + def read_azure_ovf(contents): try: dom = minidom.parseString(contents) @@ -362,6 +385,25 @@ def load_azure_ds_dir(source_dir): return (md, ud, cfg, {'ovf-env.xml': contents}) +def iid_from_shared_config(path): + 
with open(path, "rb") as fp: + content = fp.read() + return iid_from_shared_config_content(content) + + +def iid_from_shared_config_content(content): + """ + find INSTANCE_ID in: + + + + + """ + dom = minidom.parseString(content) + depnode = single_node_at_path(dom, ["SharedConfig", "Deployment"]) + return depnode.attributes.get('name').value + + class BrokenAzureDataSource(Exception): pass diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index c79c25d8..2e8583f9 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -99,6 +99,10 @@ class TestAzureDataSource(MockerTestCase): data['pubkey_files'] = flist return ["pubkey_from: %s" % f for f in flist] + def _iid_from_shared_config(path): + data['iid_from_shared_cfg'] = path + return 'i-my-azure-id' + if data.get('ovfcontent') is not None: populate_dir(os.path.join(self.paths.seed_dir, "azure"), {'ovf-env.xml': data['ovfcontent']}) @@ -112,7 +116,9 @@ class TestAzureDataSource(MockerTestCase): (mod, 'write_files', _write_files), (mod, 'wait_for_files', _wait_for_files), (mod, 'pubkeys_from_crt_files', - _pubkeys_from_crt_files)]) + _pubkeys_from_crt_files), + (mod, 'iid_from_shared_config', + _iid_from_shared_config), ]) dsrc = mod.DataSourceAzureNet( data.get('sys_cfg', {}), distro=None, paths=self.paths) @@ -131,6 +137,7 @@ class TestAzureDataSource(MockerTestCase): self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) self.assertTrue('ovf-env.xml' in data['files']) self.assertEqual(0700, data['datadir_mode']) + self.assertEqual(dsrc.metadata['instance-id'], 'i-my-azure-id') def test_user_cfg_set_agent_command(self): cfg = {'agent_command': "my_command"} @@ -227,6 +234,20 @@ class TestReadAzureOvf(MockerTestCase): self.assertIn(mypk, cfg['_pubkeys']) +class TestReadAzureSharedConfig(MockerTestCase): + def test_valid_content(self): + xml = """ + + + + + + + """ + ret = DataSourceAzure.iid_from_shared_config_content(xml) + self.assertEqual("MY_INSTANCE_ID", ret) + + def apply_patches(patches): ret = [] for (ref, name, replace) in patches: -- cgit v1.2.3 From 4b41f7dc3d37d5bf7397bbc34d8a5e0c56798ac7 Mon Sep 17 00:00:00 2001 From: Ben Howard Date: Tue, 23 Jul 2013 16:33:46 -0600 Subject: Changed get_serial to be fully parameterized and return the serial initialized. Added a mapping of attributes between cloud-init and smartos. 
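In outline, the mapping-driven collection added below works like this (a simplified sketch with only a few of the map's entries; query stands in for the query_data() call made in the patch):

    SMARTOS_ATTRIB_MAP = {
        # cloud-init key : (SmartOS metadata key, strip line endings?)
        'local-hostname': ('hostname', True),
        'public-keys': ('root_authorized_keys', True),
        'user-data': ('user-data', False),
    }

    def collect_metadata(query, device, timeout):
        md = {}
        for ci_key, (smartos_key, strip) in SMARTOS_ATTRIB_MAP.items():
            md[ci_key] = query(smartos_key, device, timeout, strip=strip)
        return md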
--- cloudinit/sources/DataSourceSmartOS.py | 64 ++++++++++++++----------- cloudinit/util.py | 5 +- tests/unittests/test_datasource/test_smartos.py | 10 ++-- 3 files changed, 43 insertions(+), 36 deletions(-) (limited to 'cloudinit') diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 129020ec..d6589f57 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -27,25 +27,37 @@ # -import os -import os.path -import serial from cloudinit import log as logging from cloudinit import sources from cloudinit import util +import os +import os.path +import serial DEF_TTY_LOC = '/dev/ttyS1' -TTY_LOC = None +DEF_TTY_TIMEOUT = 60 LOG = logging.getLogger(__name__) +SMARTOS_ATTRIB_MAP = { + #Cloud-init Key : (SmartOS Key, Strip line endings) + 'local-hostname': ('hostname', True), + 'public-keys': ('root_authorized_keys', True), + 'user-script': ('user-script', False), + 'user-data': ('user-data', False), + 'iptables_disable': ('iptables_disable', True), + 'motd_sys_info': ('motd_sys_info', True), +} + class DataSourceSmartOS(sources.DataSource): def __init__(self, sys_cfg, distro, paths): sources.DataSource.__init__(self, sys_cfg, distro, paths) self.seed_dir = os.path.join(paths.seed_dir, 'sdc') - self.seed = None self.is_smartdc = None + self.seed = self.sys_cfg.get("serial_device", DEF_TTY_LOC) + self.seed_timeout = self.sys_cfg.get("serial_timeout", + DEF_TTY_TIMEOUT) def __str__(self): root = sources.DataSource.__str__(self) @@ -55,30 +67,25 @@ class DataSourceSmartOS(sources.DataSource): md = {} ud = "" - TTY_LOC = self.sys_cfg.get("serial_device", DEF_TTY_LOC) - if not os.path.exists(TTY_LOC): + if not os.path.exists(self.seed): LOG.debug("Host does not appear to be on SmartOS") return False - self.seed = TTY_LOC + self.seed = self.seed system_uuid, system_type = dmi_data() if 'smartdc' not in system_type.lower(): LOG.debug("Host is not on SmartOS") return False self.is_smartdc = True + md['instance-id'] = system_uuid - hostname = query_data("hostname", strip=True) - if not hostname: - hostname = system_uuid + for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems(): + smartos_noun, strip = attribute + md[ci_noun] = query_data(smartos_noun, self.seed, + self.seed_timeout, strip=strip) - md['local-hostname'] = hostname - md['instance-id'] = system_uuid - md['public-keys'] = query_data("root_authorized_keys", strip=True) - md['user-script'] = query_data("user-script") - md['user-data'] = query_data("user-script") - md['iptables_disable'] = query_data("disable_iptables_flag", - strip=True) - md['motd_sys_info'] = query_data("enable_motd_sys_info", strip=True) + if not md['local-hostname']: + md['local-hostname'] = system_uuid if md['user-data']: ud = md['user-data'] @@ -93,7 +100,7 @@ class DataSourceSmartOS(sources.DataSource): return self.metadata['instance-id'] -def get_serial(): +def get_serial(seed_device, seed_timeout): """This is replaced in unit testing, allowing us to replace serial.Serial with a mocked class @@ -102,18 +109,17 @@ def get_serial(): each line individually up until the single ".", the transfer is usually very fast (i.e. microseconds) to get the response. 
""" - if not TTY_LOC: - raise AttributeError("TTY_LOC value is not set") - - _ret = serial.Serial(TTY_LOC, timeout=60) - if not _ret.isOpen(): - raise SystemError("Unable to open %s" % TTY_LOC) + if not seed_device: + raise AttributeError("seed_device value is not set") - return _ret + ser = serial.Serial(seed_device, timeout=seed_timeout) + if not ser.isOpen(): + raise SystemError("Unable to open %s" % seed_device) + return ser -def query_data(noun, strip=False): +def query_data(noun, seed_device, seed_timeout, strip=False): """Makes a request to via the serial console via "GET " In the response, the first line is the status, while subsequent lines @@ -124,7 +130,7 @@ def query_data(noun, strip=False): if not noun: return False - ser = get_serial() + ser = get_serial(seed_device, seed_timeout) ser.write("GET %s\n" % noun.rstrip()) status = str(ser.readline()).rstrip() response = [] diff --git a/cloudinit/util.py b/cloudinit/util.py index 7163225f..a2fbc004 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1744,13 +1744,14 @@ def get_mount_info(path, log=LOG): lines = load_file(mountinfo_path).splitlines() return parse_mount_info(path, lines, log) + def which(program): # Return path of program for execution if found in path def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) - fpath, fname = os.path.split(program) - if fpath: + _fpath, _ = os.path.split(program) + if _fpath: if is_exe(program): return program else: diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py index 494f9828..6c12f1e2 100644 --- a/tests/unittests/test_datasource/test_smartos.py +++ b/tests/unittests/test_datasource/test_smartos.py @@ -31,8 +31,8 @@ import uuid mock_returns = { 'hostname': 'test-host', 'root_authorized_keys': 'ssh-rsa AAAAB3Nz...aC1yc2E= keyname', - 'disable_iptables_flag': False, - 'enable_motd_sys_info': False, + 'disable_iptables_flag': None, + 'enable_motd_sys_info': None, 'system_uuid': str(uuid.uuid4()), 'smartdc': 'smartdc', 'userdata': """ @@ -118,7 +118,7 @@ class TestSmartOSDataSource(MockerTestCase): def _get_ds(self): - def _get_serial(): + def _get_serial(*_): return MockSerial() def _dmi_data(): @@ -169,14 +169,14 @@ class TestSmartOSDataSource(MockerTestCase): dsrc = self._get_ds() ret = dsrc.get_data() self.assertTrue(ret) - self.assertEquals(str(mock_returns['disable_iptables_flag']), + self.assertEquals(mock_returns['disable_iptables_flag'], dsrc.metadata['iptables_disable']) def test_motd_sys_info(self): dsrc = self._get_ds() ret = dsrc.get_data() self.assertTrue(ret) - self.assertEquals(str(mock_returns['enable_motd_sys_info']), + self.assertEquals(mock_returns['enable_motd_sys_info'], dsrc.metadata['motd_sys_info']) -- cgit v1.2.3 From 0891f6611d1c264220a6f71306802db1e70651fc Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 24 Jul 2013 10:44:21 -0400 Subject: DataSourceSmartOS: fix issue if dmidecode is not present --- cloudinit/sources/DataSourceSmartOS.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) (limited to 'cloudinit') diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index d6589f57..1ce20c10 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -72,7 +72,12 @@ class DataSourceSmartOS(sources.DataSource): return False self.seed = self.seed - system_uuid, system_type = dmi_data() + dmi_info = dmi_data() + if dmi_info is False: + LOG.debug("No dmidata utility found") + return 
False + + system_uuid, system_type = dmi_info if 'smartdc' not in system_type.lower(): LOG.debug("Host is not on SmartOS") return False -- cgit v1.2.3
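Taken together, the final fix above amounts to the guard sketched here (illustrative and self-contained; _find_executable is a simplified stand-in for the util.which() helper added at the start of this series, and the tuple returned by dmi_data() is a placeholder):

    import os

    def _find_executable(name):
        # Search PATH for an executable file with the given name.
        for path in os.environ.get("PATH", "").split(os.pathsep):
            candidate = os.path.join(path.strip('"'), name)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
        return None

    def dmi_data():
        if not _find_executable('dmidecode'):
            return False                    # tool missing: signal "no data"
        return ('placeholder-uuid', 'placeholder-product-name')

    info = dmi_data()
    if info is False:                       # the guard the fix adds
        print("No dmidecode utility found")
    else:
        system_uuid, system_type = info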