diff options
Diffstat (limited to 'cloudinit/handlers')
-rw-r--r-- | cloudinit/handlers/__init__.py | 159 | ||||
-rw-r--r-- | cloudinit/handlers/boot_hook.py | 21 | ||||
-rw-r--r-- | cloudinit/handlers/cloud_config.py | 38 | ||||
-rw-r--r-- | cloudinit/handlers/jinja_template.py | 120 | ||||
-rw-r--r-- | cloudinit/handlers/shell_script.py | 15 | ||||
-rw-r--r-- | cloudinit/handlers/shell_script_by_frequency.py | 62 | ||||
-rw-r--r-- | cloudinit/handlers/upstart_job.py | 22 |
7 files changed, 298 insertions, 139 deletions
diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index a409ff8a..7d8a9208 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -13,9 +13,8 @@ import os from cloudinit import importer from cloudinit import log as logging -from cloudinit import type_utils -from cloudinit import util -from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES) +from cloudinit import type_utils, util +from cloudinit.settings import FREQUENCIES, PER_ALWAYS, PER_INSTANCE LOG = logging.getLogger(__name__) @@ -24,7 +23,7 @@ LOG = logging.getLogger(__name__) NOT_MULTIPART_TYPE = "text/x-not-multipart" # When none is assigned this gets used -OCTET_TYPE = 'application/octet-stream' +OCTET_TYPE = "application/octet-stream" # Special content types that signal the start and end of processing CONTENT_END = "__end__" @@ -34,32 +33,39 @@ CONTENT_SIGNALS = [CONTENT_START, CONTENT_END] # Used when a part-handler type is encountered # to allow for registration of new types. 
PART_CONTENT_TYPES = ["text/part-handler"] -PART_HANDLER_FN_TMPL = 'part-handler-%03d' +PART_HANDLER_FN_TMPL = "part-handler-%03d" # For parts without filenames -PART_FN_TPL = 'part-%03d' +PART_FN_TPL = "part-%03d" # Different file beginnings to their content type INCLUSION_TYPES_MAP = { - '#include': 'text/x-include-url', - '#include-once': 'text/x-include-once-url', - '#!': 'text/x-shellscript', - '#cloud-config': 'text/cloud-config', - '#upstart-job': 'text/upstart-job', - '#part-handler': 'text/part-handler', - '#cloud-boothook': 'text/cloud-boothook', - '#cloud-config-archive': 'text/cloud-config-archive', - '#cloud-config-jsonp': 'text/cloud-config-jsonp', - '## template: jinja': 'text/jinja2', + "#include": "text/x-include-url", + "#include-once": "text/x-include-once-url", + "#!": "text/x-shellscript", + "#cloud-config": "text/cloud-config", + "#upstart-job": "text/upstart-job", + "#part-handler": "text/part-handler", + "#cloud-boothook": "text/cloud-boothook", + "#cloud-config-archive": "text/cloud-config-archive", + "#cloud-config-jsonp": "text/cloud-config-jsonp", + "## template: jinja": "text/jinja2", + # Note: for the next 3 entries, the prefix doesn't matter because these + # are for types that can only be used as part of a MIME message. However, + # including these entries suppresses warnings during `cloudinit devel + # make-mime`, which otherwise would require `--force`. 
+ "text/x-shellscript-per-boot": "text/x-shellscript-per-boot", + "text/x-shellscript-per-instance": "text/x-shellscript-per-instance", + "text/x-shellscript-per-once": "text/x-shellscript-per-once", } # Sorted longest first -INCLUSION_SRCH = sorted(list(INCLUSION_TYPES_MAP.keys()), - key=(lambda e: 0 - len(e))) +INCLUSION_SRCH = sorted( + list(INCLUSION_TYPES_MAP.keys()), key=(lambda e: 0 - len(e)) +) class Handler(metaclass=abc.ABCMeta): - def __init__(self, frequency, version=2): self.handler_version = version self.frequency = frequency @@ -69,11 +75,13 @@ class Handler(metaclass=abc.ABCMeta): def list_types(self): # Each subclass must define the supported content prefixes it handles. - if not hasattr(self, 'prefixes'): - raise NotImplementedError('Missing prefixes subclass attribute') + if not hasattr(self, "prefixes"): + raise NotImplementedError("Missing prefixes subclass attribute") else: - return [INCLUSION_TYPES_MAP[prefix] - for prefix in getattr(self, 'prefixes')] + return [ + INCLUSION_TYPES_MAP[prefix] + for prefix in getattr(self, "prefixes") + ] @abc.abstractmethod def handle_part(self, *args, **kwargs): @@ -82,8 +90,10 @@ class Handler(metaclass=abc.ABCMeta): def run_part(mod, data, filename, payload, frequency, headers): mod_freq = mod.frequency - if not (mod_freq == PER_ALWAYS or - (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE)): + if not ( + mod_freq == PER_ALWAYS + or (frequency == PER_INSTANCE and mod_freq == PER_INSTANCE) + ): return # Sanity checks on version (should be an int convertable) try: @@ -91,33 +101,45 @@ def run_part(mod, data, filename, payload, frequency, headers): mod_ver = int(mod_ver) except (TypeError, ValueError, AttributeError): mod_ver = 1 - content_type = headers['Content-Type'] + content_type = headers["Content-Type"] try: - LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", - mod, content_type, filename, mod_ver, frequency) + LOG.debug( + "Calling handler %s (%s, %s, %s) with frequency %s", + mod, 
+ content_type, + filename, + mod_ver, + frequency, + ) if mod_ver == 3: # Treat as v. 3 which does get a frequency + headers - mod.handle_part(data, content_type, filename, - payload, frequency, headers) + mod.handle_part( + data, content_type, filename, payload, frequency, headers + ) elif mod_ver == 2: # Treat as v. 2 which does get a frequency - mod.handle_part(data, content_type, filename, - payload, frequency) + mod.handle_part(data, content_type, filename, payload, frequency) elif mod_ver == 1: # Treat as v. 1 which gets no frequency mod.handle_part(data, content_type, filename, payload) else: raise ValueError("Unknown module version %s" % (mod_ver)) except Exception: - util.logexc(LOG, "Failed calling handler %s (%s, %s, %s) with " - "frequency %s", mod, content_type, filename, mod_ver, - frequency) + util.logexc( + LOG, + "Failed calling handler %s (%s, %s, %s) with frequency %s", + mod, + content_type, + filename, + mod_ver, + frequency, + ) def call_begin(mod, data, frequency): # Create a fake header set headers = { - 'Content-Type': CONTENT_START, + "Content-Type": CONTENT_START, } run_part(mod, data, None, None, frequency, headers) @@ -125,31 +147,35 @@ def call_begin(mod, data, frequency): def call_end(mod, data, frequency): # Create a fake header set headers = { - 'Content-Type': CONTENT_END, + "Content-Type": CONTENT_END, } run_part(mod, data, None, None, frequency, headers) def walker_handle_handler(pdata, _ctype, _filename, payload): - curcount = pdata['handlercount'] + curcount = pdata["handlercount"] modname = PART_HANDLER_FN_TMPL % (curcount) - frequency = pdata['frequency'] - modfname = os.path.join(pdata['handlerdir'], "%s" % (modname)) + frequency = pdata["frequency"] + modfname = os.path.join(pdata["handlerdir"], "%s" % (modname)) if not modfname.endswith(".py"): modfname = "%s.py" % (modfname) # TODO(harlowja): Check if path exists?? 
util.write_file(modfname, payload, 0o600) - handlers = pdata['handlers'] + handlers = pdata["handlers"] try: mod = fixup_handler(importer.import_module(modname)) - call_begin(mod, pdata['data'], frequency) + call_begin(mod, pdata["data"], frequency) # Only register and increment after the above have worked, so we don't # register if it fails starting. handlers.register(mod, initialized=True) - pdata['handlercount'] = curcount + 1 + pdata["handlercount"] = curcount + 1 except Exception: - util.logexc(LOG, "Failed at registering python file: %s (part " - "handler %s)", modfname, curcount) + util.logexc( + LOG, + "Failed at registering python file: %s (part handler %s)", + modfname, + curcount, + ) def _extract_first_or_bytes(blob, size): @@ -161,7 +187,7 @@ def _extract_first_or_bytes(blob, size): else: # We want to avoid decoding the whole blob (it might be huge) # By taking 4*size bytes we guarantee to decode size utf8 chars - start = blob[:4 * size].decode(errors='ignore').split("\n", 1)[0] + start = blob[: 4 * size].decode(errors="ignore").split("\n", 1)[0] if len(start) >= size: start = start[:size] except UnicodeDecodeError: @@ -176,7 +202,7 @@ def _escape_string(text): except (LookupError, TypeError): try: # Unicode (and Python 3's str) doesn't support string_escape... - return text.encode('unicode_escape') + return text.encode("unicode_escape") except TypeError: # Give up... 
pass @@ -189,28 +215,40 @@ def _escape_string(text): def walker_callback(data, filename, payload, headers): - content_type = headers['Content-Type'] - if content_type in data.get('excluded'): + content_type = headers["Content-Type"] + if content_type in data.get("excluded"): LOG.debug('content_type "%s" is excluded', content_type) return if content_type in PART_CONTENT_TYPES: walker_handle_handler(data, content_type, filename, payload) return - handlers = data['handlers'] + handlers = data["handlers"] if content_type in handlers: - run_part(handlers[content_type], data['data'], filename, - payload, data['frequency'], headers) + run_part( + handlers[content_type], + data["data"], + filename, + payload, + data["frequency"], + headers, + ) elif payload: # Extract the first line or 24 bytes for displaying in the log start = _extract_first_or_bytes(payload, 24) details = "'%s...'" % (_escape_string(start)) if content_type == NOT_MULTIPART_TYPE: - LOG.warning("Unhandled non-multipart (%s) userdata: %s", - content_type, details) + LOG.warning( + "Unhandled non-multipart (%s) userdata: %s", + content_type, + details, + ) else: - LOG.warning("Unhandled unknown content-type (%s) userdata: %s", - content_type, details) + LOG.warning( + "Unhandled unknown content-type (%s) userdata: %s", + content_type, + details, + ) else: LOG.debug("Empty payload of type %s", content_type) @@ -221,7 +259,7 @@ def walk(msg, callback, data): partnum = 0 for part in msg.walk(): # multipart/* are just containers - if part.get_content_maintype() == 'multipart': + if part.get_content_maintype() == "multipart": continue ctype = part.get_content_type() @@ -234,7 +272,7 @@ def walk(msg, callback, data): headers = dict(part) LOG.debug(headers) - headers['Content-Type'] = ctype + headers["Content-Type"] = ctype payload = util.fully_decoded_payload(part) callback(data, filename, payload, headers) partnum = partnum + 1 @@ -243,8 +281,8 @@ def walk(msg, callback, data): def fixup_handler(mod, 
def_freq=PER_INSTANCE): if not hasattr(mod, "handler_version"): setattr(mod, "handler_version", 1) - if not hasattr(mod, 'frequency'): - setattr(mod, 'frequency', def_freq) + if not hasattr(mod, "frequency"): + setattr(mod, "frequency", def_freq) else: freq = mod.frequency if freq and freq not in FREQUENCIES: @@ -263,4 +301,5 @@ def type_from_starts_with(payload, default=None): return INCLUSION_TYPES_MAP[text] return default + # vi: ts=4 expandtab diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index c6205097..602800ed 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -12,10 +12,8 @@ import os from cloudinit import handlers from cloudinit import log as logging -from cloudinit import subp -from cloudinit import util - -from cloudinit.settings import (PER_ALWAYS) +from cloudinit import subp, util +from cloudinit.settings import PER_ALWAYS LOG = logging.getLogger(__name__) @@ -23,7 +21,7 @@ LOG = logging.getLogger(__name__) class BootHookPartHandler(handlers.Handler): # The content prefixes this handler understands. 
- prefixes = ['#cloud-boothook'] + prefixes = ["#cloud-boothook"] def __init__(self, paths, datasource, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS) @@ -35,8 +33,9 @@ class BootHookPartHandler(handlers.Handler): def _write_part(self, payload, filename): filename = util.clean_filename(filename) filepath = os.path.join(self.boothook_dir, filename) - contents = util.strip_prefix_suffix(util.dos2unix(payload), - prefix=self.prefixes[0]) + contents = util.strip_prefix_suffix( + util.dos2unix(payload), prefix=self.prefixes[0] + ) util.write_file(filepath, contents.lstrip(), 0o700) return filepath @@ -48,12 +47,14 @@ class BootHookPartHandler(handlers.Handler): try: env = os.environ.copy() if self.instance_id is not None: - env['INSTANCE_ID'] = str(self.instance_id) + env["INSTANCE_ID"] = str(self.instance_id) subp.subp([filepath], env=env) except subp.ProcessExecutionError: util.logexc(LOG, "Boothooks script %s execution error", filepath) except Exception: - util.logexc(LOG, "Boothooks unknown error when running %s", - filepath) + util.logexc( + LOG, "Boothooks unknown error when running %s", filepath + ) + # vi: ts=4 expandtab diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 2a307364..8070c6cb 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -12,15 +12,12 @@ import jsonpatch from cloudinit import handlers from cloudinit import log as logging -from cloudinit import mergers -from cloudinit import util -from cloudinit import safeyaml - -from cloudinit.settings import (PER_ALWAYS) +from cloudinit import mergers, safeyaml, util +from cloudinit.settings import PER_ALWAYS LOG = logging.getLogger(__name__) -MERGE_HEADER = 'Merge-Type' +MERGE_HEADER = "Merge-Type" # Due to the way the loading of yaml configuration was done previously, # where previously each cloud config part was appended to a larger yaml @@ -39,7 +36,7 @@ MERGE_HEADER = 'Merge-Type' # a: 22 # # This gets loaded into 
yaml with final result {'a': 22} -DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()') +DEF_MERGERS = mergers.string_extract_mergers("dict(replace)+list()+str()") CLOUD_PREFIX = "#cloud-config" JSONP_PREFIX = "#cloud-config-jsonp" @@ -53,7 +50,7 @@ class CloudConfigPartHandler(handlers.Handler): handlers.Handler.__init__(self, PER_ALWAYS, version=3) self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") - if 'cloud_config_path' in _kwargs: + if "cloud_config_path" in _kwargs: self.cloud_fn = paths.get_ipath(_kwargs["cloud_config_path"]) self.file_names = [] @@ -66,14 +63,14 @@ class CloudConfigPartHandler(handlers.Handler): file_lines.append("# from %s files" % (len(self.file_names))) for fn in self.file_names: if not fn: - fn = '?' + fn = "?" file_lines.append("# %s" % (fn)) file_lines.append("") if self.cloud_buf is not None: # Something was actually gathered.... lines = [ CLOUD_PREFIX, - '', + "", ] lines.extend(file_lines) lines.append(safeyaml.dumps(self.cloud_buf)) @@ -82,9 +79,9 @@ class CloudConfigPartHandler(handlers.Handler): util.write_file(self.cloud_fn, "\n".join(lines), 0o600) def _extract_mergers(self, payload, headers): - merge_header_headers = '' - for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]: - tmp_h = headers.get(h, '') + merge_header_headers = "" + for h in [MERGE_HEADER, "X-%s" % (MERGE_HEADER)]: + tmp_h = headers.get(h, "") if tmp_h: merge_header_headers = tmp_h break @@ -92,6 +89,9 @@ class CloudConfigPartHandler(handlers.Handler): # or the merge type from the headers or default to our own set # if neither exists (or is empty) from the later. 
payload_yaml = util.load_yaml(payload) + if payload_yaml is None: + raise ValueError("empty cloud config") + mergers_yaml = mergers.dict_extract_mergers(payload_yaml) mergers_header = mergers.string_extract_mergers(merge_header_headers) all_mergers = [] @@ -142,8 +142,16 @@ class CloudConfigPartHandler(handlers.Handler): for i in ("\n", "\r", "\t"): filename = filename.replace(i, " ") self.file_names.append(filename.strip()) + except ValueError as err: + LOG.warning( + "Failed at merging in cloud config part from %s: %s", + filename, + err, + ) except Exception: - util.logexc(LOG, "Failed at merging in cloud config part from %s", - filename) + util.logexc( + LOG, "Failed at merging in cloud config part from %s", filename + ) + # vi: ts=4 expandtab diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py index aadfbf86..1f9caa64 100644 --- a/cloudinit/handlers/jinja_template.py +++ b/cloudinit/handlers/jinja_template.py @@ -1,63 +1,75 @@ # This file is part of cloud-init. See LICENSE file for license information. 
-from errno import EACCES +import copy import os import re +from errno import EACCES +from typing import Optional try: from jinja2.exceptions import UndefinedError as JUndefinedError + from jinja2.lexer import operator_re except ImportError: # No jinja2 dependency JUndefinedError = Exception + operator_re = re.compile(r"[-.]") from cloudinit import handlers from cloudinit import log as logging -from cloudinit.sources import INSTANCE_JSON_FILE -from cloudinit.templater import render_string, MISSING_JINJA_PREFIX -from cloudinit.util import b64d, load_file, load_json, json_dumps - from cloudinit.settings import PER_ALWAYS +from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE +from cloudinit.templater import MISSING_JINJA_PREFIX, render_string +from cloudinit.util import b64d, json_dumps, load_file, load_json LOG = logging.getLogger(__name__) class JinjaTemplatePartHandler(handlers.Handler): - prefixes = ['## template: jinja'] + prefixes = ["## template: jinja"] def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS, version=3) self.paths = paths self.sub_handlers = {} - for handler in _kwargs.get('sub_handlers', []): + for handler in _kwargs.get("sub_handlers", []): for ctype in handler.list_types(): self.sub_handlers[ctype] = handler def handle_part(self, data, ctype, filename, payload, frequency, headers): if ctype in handlers.CONTENT_SIGNALS: return - jinja_json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE) + jinja_json_file = os.path.join( + self.paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE + ) rendered_payload = render_jinja_payload_from_file( - payload, filename, jinja_json_file) + payload, filename, jinja_json_file + ) if not rendered_payload: return subtype = handlers.type_from_starts_with(rendered_payload) sub_handler = self.sub_handlers.get(subtype) if not sub_handler: LOG.warning( - 'Ignoring jinja template for %s. 
Could not find supported' - ' sub-handler for type %s', filename, subtype) + "Ignoring jinja template for %s. Could not find supported" + " sub-handler for type %s", + filename, + subtype, + ) return if sub_handler.handler_version == 3: sub_handler.handle_part( - data, ctype, filename, rendered_payload, frequency, headers) + data, ctype, filename, rendered_payload, frequency, headers + ) elif sub_handler.handler_version == 2: sub_handler.handle_part( - data, ctype, filename, rendered_payload, frequency) + data, ctype, filename, rendered_payload, frequency + ) def render_jinja_payload_from_file( - payload, payload_fn, instance_data_file, debug=False): + payload, payload_fn, instance_data_file, debug=False +): """Render a jinja template payload sourcing variables from jinja_vars_path. @param payload: String of jinja template content. Should begin with @@ -75,19 +87,21 @@ def render_jinja_payload_from_file( rendered_payload = None if not os.path.exists(instance_data_file): raise RuntimeError( - 'Cannot render jinja template vars. Instance data not yet' - ' present at %s' % instance_data_file) + "Cannot render jinja template vars. Instance data not yet" + " present at %s" % instance_data_file + ) try: instance_data = load_json(load_file(instance_data_file)) except (IOError, OSError) as e: if e.errno == EACCES: raise RuntimeError( - 'Cannot render jinja template vars. No read permission on' + "Cannot render jinja template vars. No read permission on" " '%s'. 
Try sudo" % instance_data_file ) from e rendered_payload = render_jinja_payload( - payload, payload_fn, instance_data, debug) + payload, payload_fn, instance_data, debug + ) if not rendered_payload: return None return rendered_payload @@ -96,51 +110,87 @@ def render_jinja_payload_from_file( def render_jinja_payload(payload, payload_fn, instance_data, debug=False): instance_jinja_vars = convert_jinja_instance_data( instance_data, - decode_paths=instance_data.get('base64-encoded-keys', [])) + decode_paths=instance_data.get("base64-encoded-keys", []), + include_key_aliases=True, + ) if debug: - LOG.debug('Converted jinja variables\n%s', - json_dumps(instance_jinja_vars)) + LOG.debug( + "Converted jinja variables\n%s", json_dumps(instance_jinja_vars) + ) try: rendered_payload = render_string(payload, instance_jinja_vars) except (TypeError, JUndefinedError) as e: - LOG.warning( - 'Ignoring jinja template for %s: %s', payload_fn, str(e)) + LOG.warning("Ignoring jinja template for %s: %s", payload_fn, str(e)) return None warnings = [ - "'%s'" % var.replace(MISSING_JINJA_PREFIX, '') + "'%s'" % var.replace(MISSING_JINJA_PREFIX, "") for var in re.findall( - r'%s[^\s]+' % MISSING_JINJA_PREFIX, rendered_payload)] + r"%s[^\s]+" % MISSING_JINJA_PREFIX, rendered_payload + ) + ] if warnings: LOG.warning( "Could not render jinja template variables in file '%s': %s", - payload_fn, ', '.join(warnings)) + payload_fn, + ", ".join(warnings), + ) return rendered_payload -def convert_jinja_instance_data(data, prefix='', sep='/', decode_paths=()): +def get_jinja_variable_alias(orig_name: str) -> Optional[str]: + """Return a jinja variable alias, replacing any operators with underscores. + + Provide underscore-delimited key aliases to simplify dot-notation + attribute references for keys which contain operators "." or "-". + This provides for simpler short-hand jinja attribute notation + allowing one to avoid quoting keys which contain operators. 
+ {{ ds.v1_0.config.user_network_config }} instead of + {{ ds['v1.0'].config["user.network-config"] }}. + + :param orig_name: String representing a jinja variable name to scrub/alias. + + :return: A string with any jinja operators replaced if needed. Otherwise, + none if no alias required. + """ + alias_name = re.sub(operator_re, "_", orig_name) + if alias_name != orig_name: + return alias_name + return None + + +def convert_jinja_instance_data( + data, prefix="", sep="/", decode_paths=(), include_key_aliases=False +): """Process instance-data.json dict for use in jinja templates. Replace hyphens with underscores for jinja templates and decode any base64_encoded_keys. """ result = {} - decode_paths = [path.replace('-', '_') for path in decode_paths] + decode_paths = [path.replace("-", "_") for path in decode_paths] for key, value in sorted(data.items()): - if '-' in key: - # Standardize keys for use in #cloud-config/shell templates - key = key.replace('-', '_') - key_path = '{0}{1}{2}'.format(prefix, sep, key) if prefix else key + key_path = "{0}{1}{2}".format(prefix, sep, key) if prefix else key if key_path in decode_paths: value = b64d(value) if isinstance(value, dict): result[key] = convert_jinja_instance_data( - value, key_path, sep=sep, decode_paths=decode_paths) - if re.match(r'v\d+', key): + value, + key_path, + sep=sep, + decode_paths=decode_paths, + include_key_aliases=include_key_aliases, + ) + if re.match(r"v\d+$", key): # Copy values to top-level aliases for subkey, subvalue in result[key].items(): - result[subkey] = subvalue + result[subkey] = copy.deepcopy(subvalue) else: result[key] = value + if include_key_aliases: + alias_name = get_jinja_variable_alias(key) + if alias_name: + result[alias_name] = copy.deepcopy(result[key]) return result + # vi: ts=4 expandtab diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py index 9917f551..44061838 100644 --- a/cloudinit/handlers/shell_script.py +++ 
b/cloudinit/handlers/shell_script.py @@ -10,21 +10,19 @@ import os -from cloudinit import handlers -from cloudinit import util - -from cloudinit.settings import (PER_ALWAYS) +from cloudinit import handlers, util +from cloudinit.settings import PER_ALWAYS class ShellScriptPartHandler(handlers.Handler): - prefixes = ['#!'] + prefixes = ["#!"] def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS) - self.script_dir = paths.get_ipath_cur('scripts') - if 'script_path' in _kwargs: - self.script_dir = paths.get_ipath_cur(_kwargs['script_path']) + self.script_dir = paths.get_ipath_cur("scripts") + if "script_path" in _kwargs: + self.script_dir = paths.get_ipath_cur(_kwargs["script_path"]) def handle_part(self, data, ctype, filename, payload, frequency): if ctype in handlers.CONTENT_SIGNALS: @@ -36,4 +34,5 @@ class ShellScriptPartHandler(handlers.Handler): path = os.path.join(self.script_dir, filename) util.write_file(path, payload, 0o700) + # vi: ts=4 expandtab diff --git a/cloudinit/handlers/shell_script_by_frequency.py b/cloudinit/handlers/shell_script_by_frequency.py new file mode 100644 index 00000000..923cca57 --- /dev/null +++ b/cloudinit/handlers/shell_script_by_frequency.py @@ -0,0 +1,62 @@ +import os + +from cloudinit import log, util +from cloudinit.handlers import Handler +from cloudinit.settings import PER_ALWAYS, PER_INSTANCE, PER_ONCE + +LOG = log.getLogger(__name__) + +# cloudinit/settings.py defines PER_*** frequency constants. It makes sense to +# use them here, instead of hardcodes, and map them to the 'per-***' frequency- +# specific folders in /v/l/c/scripts. It might make sense to expose this at a +# higher level or in a more general module -- eg maybe in cloudinit/settings.py +# itself -- but for now it's here. 
+path_map = { + PER_ALWAYS: "per-boot", + PER_INSTANCE: "per-instance", + PER_ONCE: "per-once", +} + + +def get_mime_type_by_frequency(freq): + mime_type = f"text/x-shellscript-{path_map[freq]}" + return mime_type + + +def get_script_folder_by_frequency(freq, scripts_dir): + """Return the frequency-specific subfolder for a given frequency constant + and parent folder.""" + freqPath = path_map[freq] + folder = os.path.join(scripts_dir, freqPath) + return folder + + +def write_script_by_frequency(script_path, payload, frequency, scripts_dir): + """Given a filename, a payload, a frequency, and a scripts folder, write + the payload to the correct frequency-specific path""" + filename = os.path.basename(script_path) + filename = util.clean_filename(filename) + folder = get_script_folder_by_frequency(frequency, scripts_dir) + path = os.path.join(folder, filename) + payload = util.dos2unix(payload) + util.write_file(path, payload, 0o700) + + +class ShellScriptByFreqPartHandler(Handler): + """Common base class for the frequency-specific script handlers.""" + + def __init__(self, script_frequency, paths, **_kwargs): + Handler.__init__(self, PER_ALWAYS) + self.prefixes = [get_mime_type_by_frequency(script_frequency)] + self.script_frequency = script_frequency + self.scripts_dir = paths.get_cpath("scripts") + if "script_path" in _kwargs: + self.scripts_dir = paths.get_cpath(_kwargs["script_path"]) + + def handle_part(self, data, ctype, script_path, payload, frequency): + if script_path is not None: + filename = os.path.basename(script_path) + filename = util.clean_filename(filename) + write_script_by_frequency( + script_path, payload, self.script_frequency, self.scripts_dir + ) diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py index a9d29537..4bc95f97 100644 --- a/cloudinit/handlers/upstart_job.py +++ b/cloudinit/handlers/upstart_job.py @@ -13,17 +13,15 @@ import re from cloudinit import handlers from cloudinit import log as logging -from 
cloudinit import subp -from cloudinit import util - -from cloudinit.settings import (PER_INSTANCE) +from cloudinit import subp, util +from cloudinit.settings import PER_INSTANCE LOG = logging.getLogger(__name__) class UpstartJobPartHandler(handlers.Handler): - prefixes = ['#upstart-job'] + prefixes = ["#upstart-job"] def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_INSTANCE) @@ -43,7 +41,7 @@ class UpstartJobPartHandler(handlers.Handler): filename = util.clean_filename(filename) (_name, ext) = os.path.splitext(filename) if not ext: - ext = '' + ext = "" ext = ext.lower() if ext != ".conf": filename = filename + ".conf" @@ -78,9 +76,10 @@ def _has_suitable_upstart(): if not os.path.exists("/usr/bin/dpkg-query"): return False try: - (dpkg_ver, _err) = subp.subp(["dpkg-query", - "--showformat=${Version}", - "--show", "upstart"], rcs=[0, 1]) + (dpkg_ver, _err) = subp.subp( + ["dpkg-query", "--showformat=${Version}", "--show", "upstart"], + rcs=[0, 1], + ) except Exception: util.logexc(LOG, "dpkg-query failed") return False @@ -93,8 +92,9 @@ def _has_suitable_upstart(): if e.exit_code == 1: pass else: - util.logexc(LOG, "dpkg --compare-versions failed [%s]", - e.exit_code) + util.logexc( + LOG, "dpkg --compare-versions failed [%s]", e.exit_code + ) except Exception: util.logexc(LOG, "dpkg --compare-versions failed") return False |