-rw-r--r--  bash_completion/cloud-init                              |   2
-rw-r--r--  cloudinit/cmd/devel/__init__.py                         |  25
-rw-r--r--  cloudinit/cmd/devel/parser.py                           |   5
-rwxr-xr-x  cloudinit/cmd/devel/render.py                           |  90
-rw-r--r--  cloudinit/cmd/devel/tests/test_render.py                | 101
-rw-r--r--  cloudinit/cmd/main.py                                   |  16
-rw-r--r--  cloudinit/handlers/__init__.py                          |  11
-rw-r--r--  cloudinit/handlers/boot_hook.py                         |  12
-rw-r--r--  cloudinit/handlers/cloud_config.py                      |  15
-rw-r--r--  cloudinit/handlers/jinja_template.py                    | 137
-rw-r--r--  cloudinit/handlers/shell_script.py                      |   9
-rw-r--r--  cloudinit/handlers/upstart_job.py                       |   9
-rw-r--r--  cloudinit/helpers.py                                    |   4
-rw-r--r--  cloudinit/log.py                                        |  12
-rw-r--r--  cloudinit/sources/__init__.py                           |  47
-rw-r--r--  cloudinit/sources/tests/test_init.py                    |  75
-rw-r--r--  cloudinit/stages.py                                     |  22
-rw-r--r--  cloudinit/templater.py                                  |  28
-rw-r--r--  cloudinit/tests/helpers.py                              |   9
-rw-r--r--  doc/rtd/topics/capabilities.rst                         |  15
-rw-r--r--  doc/rtd/topics/datasources.rst                          |  47
-rw-r--r--  doc/rtd/topics/format.rst                               |  21
-rw-r--r--  tests/cloud_tests/testcases/base.py                     |   8
-rw-r--r--  tests/unittests/test_builtin_handlers.py                | 324
-rw-r--r--  tests/unittests/test_handler/test_handler_etc_hosts.py |   1
-rw-r--r--  tests/unittests/test_handler/test_handler_ntp.py       |   1
-rw-r--r--  tests/unittests/test_templating.py                      |  23
27 files changed, 959 insertions(+), 110 deletions(-)
diff --git a/bash_completion/cloud-init b/bash_completion/cloud-init
index f38164b0..b3a5ced3 100644
--- a/bash_completion/cloud-init
+++ b/bash_completion/cloud-init
@@ -62,6 +62,8 @@ _cloudinit_complete()
net-convert)
COMPREPLY=($(compgen -W "--help --network-data --kind --directory --output-kind" -- $cur_word))
;;
+ render)
+            COMPREPLY=($(compgen -W "--help --instance-data --debug" -- $cur_word))
+            ;;
schema)
COMPREPLY=($(compgen -W "--help --config-file --doc --annotate" -- $cur_word))
;;
diff --git a/cloudinit/cmd/devel/__init__.py b/cloudinit/cmd/devel/__init__.py
index e69de29b..3ae28b69 100644
--- a/cloudinit/cmd/devel/__init__.py
+++ b/cloudinit/cmd/devel/__init__.py
@@ -0,0 +1,25 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Common cloud-init devel commandline utility functions."""
+
+
+import logging
+
+from cloudinit import log
+from cloudinit.stages import Init
+
+
+def addLogHandlerCLI(logger, log_level):
+ """Add a commandline logging handler to emit messages to stderr."""
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
+ log.setupBasicLogging(log_level, formatter=formatter)
+ return logger
+
+
+def read_cfg_paths():
+ """Return a Paths object based on the system configuration on disk."""
+ init = Init(ds_deps=[])
+ init.read_cfg()
+ return init.paths
+
+# vi: ts=4 expandtab
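
A quick sketch of how these two helpers are intended to be used together (the
logger name 'demo' is arbitrary, and read_cfg_paths assumes the host's
cloud-init configuration is readable):

    import logging

    from cloudinit.cmd.devel import addLogHandlerCLI, read_cfg_paths

    # Attach a stderr handler using the terse 'LEVEL: message' CLI format.
    LOG = addLogHandlerCLI(logging.getLogger('demo'), logging.DEBUG)
    paths = read_cfg_paths()  # Paths object built from on-disk system config
    LOG.debug('run_dir resolved to %s', paths.run_dir)
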
diff --git a/cloudinit/cmd/devel/parser.py b/cloudinit/cmd/devel/parser.py
index 40a4b019..99a234ce 100644
--- a/cloudinit/cmd/devel/parser.py
+++ b/cloudinit/cmd/devel/parser.py
@@ -8,6 +8,7 @@ import argparse
from cloudinit.config import schema
from . import net_convert
+from . import render
def get_parser(parser=None):
@@ -22,7 +23,9 @@ def get_parser(parser=None):
('schema', 'Validate cloud-config files for document schema',
schema.get_parser, schema.handle_schema_args),
(net_convert.NAME, net_convert.__doc__,
- net_convert.get_parser, net_convert.handle_args)
+ net_convert.get_parser, net_convert.handle_args),
+ (render.NAME, render.__doc__,
+ render.get_parser, render.handle_args)
]
for (subcmd, helpmsg, get_parser, handler) in subcmds:
parser = subparsers.add_parser(subcmd, help=helpmsg)
diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py
new file mode 100755
index 00000000..e85933db
--- /dev/null
+++ b/cloudinit/cmd/devel/render.py
@@ -0,0 +1,90 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Debug jinja template rendering of user-data."""
+
+import argparse
+import os
+import sys
+
+from cloudinit.handlers.jinja_template import render_jinja_payload_from_file
+from cloudinit import log
+from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit import util
+from . import addLogHandlerCLI, read_cfg_paths
+
+NAME = 'render'
+DEFAULT_INSTANCE_DATA = '/run/cloud-init/instance-data.json'
+
+LOG = log.getLogger(NAME)
+
+
+def get_parser(parser=None):
+ """Build or extend and arg parser for jinja render utility.
+
+ @param parser: Optional existing ArgumentParser instance representing the
+ subcommand which will be extended to support the args of this utility.
+
+ @returns: ArgumentParser with proper argument configuration.
+ """
+ if not parser:
+ parser = argparse.ArgumentParser(prog=NAME, description=__doc__)
+ parser.add_argument(
+ 'user_data', type=str, help='Path to the user-data file to render')
+ parser.add_argument(
+ '-i', '--instance-data', type=str,
+ help=('Optional path to instance-data.json file. Defaults to'
+ ' /run/cloud-init/instance-data.json'))
+ parser.add_argument('-d', '--debug', action='store_true', default=False,
+ help='Add verbose messages during template render')
+ return parser
+
+
+def handle_args(name, args):
+ """Render the provided user-data template file using instance-data values.
+
+    Also set up CLI log handlers to report to stderr, since this is a
+    development utility intended to be run by a human on the CLI.
+
+ @return 0 on success, 1 on failure.
+ """
+ addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING)
+ if not args.instance_data:
+ paths = read_cfg_paths()
+ instance_data_fn = os.path.join(
+ paths.run_dir, INSTANCE_JSON_FILE)
+ else:
+ instance_data_fn = args.instance_data
+ try:
+ with open(instance_data_fn) as stream:
+ instance_data = stream.read()
+ instance_data = util.load_json(instance_data)
+ except IOError:
+ LOG.error('Missing instance-data.json file: %s', instance_data_fn)
+ return 1
+ try:
+ with open(args.user_data) as stream:
+ user_data = stream.read()
+ except IOError:
+ LOG.error('Missing user-data file: %s', args.user_data)
+ return 1
+ rendered_payload = render_jinja_payload_from_file(
+ payload=user_data, payload_fn=args.user_data,
+ instance_data_file=instance_data_fn,
+ debug=True if args.debug else False)
+ if not rendered_payload:
+ LOG.error('Unable to render user-data file: %s', args.user_data)
+ return 1
+ sys.stdout.write(rendered_payload)
+ return 0
+
+
+def main():
+ args = get_parser().parse_args()
+    return handle_args(NAME, args)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
+
+
+# vi: ts=4 expandtab
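
A sketch of driving the new subcommand programmatically, mirroring the unit
tests added below; the file paths are placeholders, jinja2 must be installed,
and the user-data file must start with the '## template: jinja' header:

    from collections import namedtuple

    from cloudinit.cmd.devel import render

    Args = namedtuple('Args', 'user_data instance_data debug')
    args = Args(user_data='/tmp/user-data.jinja',          # placeholder path
                instance_data='/run/cloud-init/instance-data.json',
                debug=True)
    exit_code = render.handle_args(render.NAME, args)  # 0 on success, 1 on error
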
diff --git a/cloudinit/cmd/devel/tests/test_render.py b/cloudinit/cmd/devel/tests/test_render.py
new file mode 100644
index 00000000..fc5d2c0d
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/test_render.py
@@ -0,0 +1,101 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from six import StringIO
+import os
+
+from collections import namedtuple
+from cloudinit.cmd.devel import render
+from cloudinit.helpers import Paths
+from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja
+from cloudinit.util import ensure_dir, write_file
+
+
+class TestRender(CiTestCase):
+
+ with_logs = True
+
+ args = namedtuple('renderargs', 'user_data instance_data debug')
+
+ def setUp(self):
+ super(TestRender, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ def test_handle_args_error_on_missing_user_data(self):
+ """When user_data file path does not exist, log an error."""
+ absent_file = self.tmp_path('user-data', dir=self.tmp)
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{}')
+ args = self.args(
+ user_data=absent_file, instance_data=instance_data, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'Missing user-data file: %s' % absent_file,
+ self.logs.getvalue())
+
+ def test_handle_args_error_on_missing_instance_data(self):
+ """When instance_data file path does not exist, log an error."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ absent_file = self.tmp_path('instance-data', dir=self.tmp)
+ args = self.args(
+ user_data=user_data, instance_data=absent_file, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'Missing instance-data.json file: %s' % absent_file,
+ self.logs.getvalue())
+
+ def test_handle_args_defaults_instance_data(self):
+ """When no instance_data argument, default to configured run_dir."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ run_dir = self.tmp_path('run_dir', dir=self.tmp)
+ ensure_dir(run_dir)
+ paths = Paths({'run_dir': run_dir})
+ self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
+ self.m_paths.return_value = paths
+ args = self.args(
+ user_data=user_data, instance_data=None, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
+ self.assertIn(
+ 'Missing instance-data.json file: %s' % json_file,
+ self.logs.getvalue())
+
+ @skipUnlessJinja()
+ def test_handle_args_renders_instance_data_vars_in_template(self):
+ """If user_data file is a jinja template render instance-data vars."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{"my-var": "jinja worked"}')
+ args = self.args(
+ user_data=user_data, instance_data=instance_data, debug=True)
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_console_err:
+ with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
+ self.assertEqual(0, render.handle_args('anyname', args))
+ self.assertIn(
+ 'DEBUG: Converted jinja variables\n{', self.logs.getvalue())
+ self.assertIn(
+ 'DEBUG: Converted jinja variables\n{', m_console_err.getvalue())
+ self.assertEqual('rendering: jinja worked', m_stdout.getvalue())
+
+ @skipUnlessJinja()
+ def test_handle_args_warns_and_gives_up_on_invalid_jinja_operation(self):
+ """If user_data file has invalid jinja operations log warnings."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ write_file(user_data, '##template: jinja\nrendering: {{ my-var }}')
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{"my-var": "jinja worked"}')
+ args = self.args(
+ user_data=user_data, instance_data=instance_data, debug=True)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'WARNING: Ignoring jinja template for %s: Undefined jinja'
+ ' variable: "my-var". Jinja tried subtraction. Perhaps you meant'
+ ' "my_var"?' % user_data,
+ self.logs.getvalue())
+
+# vi: ts=4 expandtab
diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
index 4ea4fe7f..0eee583c 100644
--- a/cloudinit/cmd/main.py
+++ b/cloudinit/cmd/main.py
@@ -348,6 +348,7 @@ def main_init(name, args):
LOG.debug("[%s] barreling on in force mode without datasource",
mode)
+ _maybe_persist_instance_data(init)
# Stage 6
iid = init.instancify()
LOG.debug("[%s] %s will now be targeting instance id: %s. new=%s",
@@ -490,6 +491,7 @@ def main_modules(action_name, args):
print_exc(msg)
if not args.force:
return [(msg)]
+ _maybe_persist_instance_data(init)
# Stage 3
mods = stages.Modules(init, extract_fns(args), reporter=args.reporter)
# Stage 4
@@ -541,6 +543,7 @@ def main_single(name, args):
" likely bad things to come!"))
if not args.force:
return 1
+ _maybe_persist_instance_data(init)
# Stage 3
mods = stages.Modules(init, extract_fns(args), reporter=args.reporter)
mod_args = args.module_args
@@ -688,6 +691,15 @@ def status_wrapper(name, args, data_d=None, link_d=None):
return len(v1[mode]['errors'])
+def _maybe_persist_instance_data(init):
+ """Write instance-data.json file if absent and datasource is restored."""
+ if init.ds_restored:
+ instance_data_file = os.path.join(
+ init.paths.run_dir, sources.INSTANCE_JSON_FILE)
+ if not os.path.exists(instance_data_file):
+ init.datasource.persist_instance_data()
+
+
def _maybe_set_hostname(init, stage, retry_stage):
"""Call set-hostname if metadata, vendordata or userdata provides it.
@@ -887,6 +899,8 @@ def main(sysv_args=None):
if __name__ == '__main__':
if 'TZ' not in os.environ:
os.environ['TZ'] = ":/etc/localtime"
- main(sys.argv)
+ return_value = main(sys.argv)
+ if return_value:
+ sys.exit(return_value)
# vi: ts=4 expandtab
diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py
index c3576c04..0db75af9 100644
--- a/cloudinit/handlers/__init__.py
+++ b/cloudinit/handlers/__init__.py
@@ -41,7 +41,7 @@ PART_HANDLER_FN_TMPL = 'part-handler-%03d'
# For parts without filenames
PART_FN_TPL = 'part-%03d'
-# Different file beginnings to there content type
+# Different file beginnings to their content type
INCLUSION_TYPES_MAP = {
'#include': 'text/x-include-url',
'#include-once': 'text/x-include-once-url',
@@ -52,6 +52,7 @@ INCLUSION_TYPES_MAP = {
'#cloud-boothook': 'text/cloud-boothook',
'#cloud-config-archive': 'text/cloud-config-archive',
'#cloud-config-jsonp': 'text/cloud-config-jsonp',
+ '## template: jinja': 'text/jinja2',
}
# Sorted longest first
@@ -69,9 +70,13 @@ class Handler(object):
def __repr__(self):
return "%s: [%s]" % (type_utils.obj_name(self), self.list_types())
- @abc.abstractmethod
def list_types(self):
- raise NotImplementedError()
+ # Each subclass must define the supported content prefixes it handles.
+ if not hasattr(self, 'prefixes'):
+ raise NotImplementedError('Missing prefixes subclass attribute')
+ else:
+ return [INCLUSION_TYPES_MAP[prefix]
+ for prefix in getattr(self, 'prefixes')]
@abc.abstractmethod
def handle_part(self, *args, **kwargs):
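
With this change a part handler only declares the prefixes it understands and
inherits list_types(). A minimal sketch (DemoHandler is a hypothetical name;
the prefixes must be keys of INCLUSION_TYPES_MAP):

    from cloudinit import handlers
    from cloudinit.settings import PER_ALWAYS


    class DemoHandler(handlers.Handler):
        """Hypothetical handler, shown only to illustrate the new contract."""

        prefixes = ['#cloud-boothook']  # looked up in INCLUSION_TYPES_MAP

        def __init__(self):
            handlers.Handler.__init__(self, PER_ALWAYS)

        def handle_part(self, *args, **kwargs):
            pass


    print(DemoHandler().list_types())  # ['text/cloud-boothook']
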
diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py
index 057b4dbc..dca50a49 100644
--- a/cloudinit/handlers/boot_hook.py
+++ b/cloudinit/handlers/boot_hook.py
@@ -17,10 +17,13 @@ from cloudinit import util
from cloudinit.settings import (PER_ALWAYS)
LOG = logging.getLogger(__name__)
-BOOTHOOK_PREFIX = "#cloud-boothook"
class BootHookPartHandler(handlers.Handler):
+
+ # The content prefixes this handler understands.
+ prefixes = ['#cloud-boothook']
+
def __init__(self, paths, datasource, **_kwargs):
handlers.Handler.__init__(self, PER_ALWAYS)
self.boothook_dir = paths.get_ipath("boothooks")
@@ -28,16 +31,11 @@ class BootHookPartHandler(handlers.Handler):
if datasource:
self.instance_id = datasource.get_instance_id()
- def list_types(self):
- return [
- handlers.type_from_starts_with(BOOTHOOK_PREFIX),
- ]
-
def _write_part(self, payload, filename):
filename = util.clean_filename(filename)
filepath = os.path.join(self.boothook_dir, filename)
contents = util.strip_prefix_suffix(util.dos2unix(payload),
- prefix=BOOTHOOK_PREFIX)
+ prefix=self.prefixes[0])
util.write_file(filepath, contents.lstrip(), 0o700)
return filepath
diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py
index 178a5b9b..99bf0e61 100644
--- a/cloudinit/handlers/cloud_config.py
+++ b/cloudinit/handlers/cloud_config.py
@@ -42,14 +42,12 @@ DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()')
CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"
-# The file header -> content types this module will handle.
-CC_TYPES = {
- JSONP_PREFIX: handlers.type_from_starts_with(JSONP_PREFIX),
- CLOUD_PREFIX: handlers.type_from_starts_with(CLOUD_PREFIX),
-}
-
class CloudConfigPartHandler(handlers.Handler):
+
+ # The content prefixes this handler understands.
+ prefixes = [CLOUD_PREFIX, JSONP_PREFIX]
+
def __init__(self, paths, **_kwargs):
handlers.Handler.__init__(self, PER_ALWAYS, version=3)
self.cloud_buf = None
@@ -58,9 +56,6 @@ class CloudConfigPartHandler(handlers.Handler):
self.cloud_fn = paths.get_ipath(_kwargs["cloud_config_path"])
self.file_names = []
- def list_types(self):
- return list(CC_TYPES.values())
-
def _write_cloud_config(self):
if not self.cloud_fn:
return
@@ -138,7 +133,7 @@ class CloudConfigPartHandler(handlers.Handler):
# First time through, merge with an empty dict...
if self.cloud_buf is None or not self.file_names:
self.cloud_buf = {}
- if ctype == CC_TYPES[JSONP_PREFIX]:
+ if ctype == handlers.INCLUSION_TYPES_MAP[JSONP_PREFIX]:
self._merge_patch(payload)
else:
self._merge_part(payload, headers)
diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py
new file mode 100644
index 00000000..3fa4097e
--- /dev/null
+++ b/cloudinit/handlers/jinja_template.py
@@ -0,0 +1,137 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+import os
+import re
+
+try:
+ from jinja2.exceptions import UndefinedError as JUndefinedError
+except ImportError:
+ # No jinja2 dependency
+ JUndefinedError = Exception
+
+from cloudinit import handlers
+from cloudinit import log as logging
+from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit.templater import render_string, MISSING_JINJA_PREFIX
+from cloudinit.util import b64d, load_file, load_json, json_dumps
+
+from cloudinit.settings import PER_ALWAYS
+
+LOG = logging.getLogger(__name__)
+
+
+class JinjaTemplatePartHandler(handlers.Handler):
+
+ prefixes = ['## template: jinja']
+
+ def __init__(self, paths, **_kwargs):
+ handlers.Handler.__init__(self, PER_ALWAYS, version=3)
+ self.paths = paths
+ self.sub_handlers = {}
+ for handler in _kwargs.get('sub_handlers', []):
+ for ctype in handler.list_types():
+ self.sub_handlers[ctype] = handler
+
+ def handle_part(self, data, ctype, filename, payload, frequency, headers):
+ if ctype in handlers.CONTENT_SIGNALS:
+ return
+ jinja_json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
+ rendered_payload = render_jinja_payload_from_file(
+ payload, filename, jinja_json_file)
+ if not rendered_payload:
+ return
+ subtype = handlers.type_from_starts_with(rendered_payload)
+ sub_handler = self.sub_handlers.get(subtype)
+ if not sub_handler:
+ LOG.warning(
+ 'Ignoring jinja template for %s. Could not find supported'
+ ' sub-handler for type %s', filename, subtype)
+ return
+ if sub_handler.handler_version == 3:
+ sub_handler.handle_part(
+ data, ctype, filename, rendered_payload, frequency, headers)
+ elif sub_handler.handler_version == 2:
+ sub_handler.handle_part(
+ data, ctype, filename, rendered_payload, frequency)
+
+
+def render_jinja_payload_from_file(
+ payload, payload_fn, instance_data_file, debug=False):
+ """Render a jinja template payload sourcing variables from jinja_vars_path.
+
+ @param payload: String of jinja template content. Should begin with
+ ## template: jinja\n.
+    @param payload_fn: String representing the filename from which the payload
+        was read; used in error reporting. Generally in part-handling this is
+        'part-##'.
+ @param instance_data_file: A path to a json file containing variables that
+ will be used as jinja template variables.
+
+ @return: A string of jinja-rendered content with the jinja header removed.
+ Returns None on error.
+ """
+ instance_data = {}
+ rendered_payload = None
+ if not os.path.exists(instance_data_file):
+ raise RuntimeError(
+ 'Cannot render jinja template vars. Instance data not yet'
+ ' present at %s' % instance_data_file)
+ instance_data = load_json(load_file(instance_data_file))
+ rendered_payload = render_jinja_payload(
+ payload, payload_fn, instance_data, debug)
+ if not rendered_payload:
+ return None
+ return rendered_payload
+
+
+def render_jinja_payload(payload, payload_fn, instance_data, debug=False):
+ instance_jinja_vars = convert_jinja_instance_data(
+ instance_data,
+ decode_paths=instance_data.get('base64-encoded-keys', []))
+ if debug:
+ LOG.debug('Converted jinja variables\n%s',
+ json_dumps(instance_jinja_vars))
+ try:
+ rendered_payload = render_string(payload, instance_jinja_vars)
+ except (TypeError, JUndefinedError) as e:
+ LOG.warning(
+ 'Ignoring jinja template for %s: %s', payload_fn, str(e))
+ return None
+ warnings = [
+ "'%s'" % var.replace(MISSING_JINJA_PREFIX, '')
+ for var in re.findall(
+ r'%s[^\s]+' % MISSING_JINJA_PREFIX, rendered_payload)]
+ if warnings:
+ LOG.warning(
+ "Could not render jinja template variables in file '%s': %s",
+ payload_fn, ', '.join(warnings))
+ return rendered_payload
+
+
+def convert_jinja_instance_data(data, prefix='', sep='/', decode_paths=()):
+ """Process instance-data.json dict for use in jinja templates.
+
+ Replace hyphens with underscores for jinja templates and decode any
+ base64_encoded_keys.
+ """
+ result = {}
+ decode_paths = [path.replace('-', '_') for path in decode_paths]
+ for key, value in sorted(data.items()):
+ if '-' in key:
+ # Standardize keys for use in #cloud-config/shell templates
+ key = key.replace('-', '_')
+ key_path = '{0}{1}{2}'.format(prefix, sep, key) if prefix else key
+ if key_path in decode_paths:
+ value = b64d(value)
+ if isinstance(value, dict):
+ result[key] = convert_jinja_instance_data(
+ value, key_path, sep=sep, decode_paths=decode_paths)
+ if re.match(r'v\d+', key):
+ # Copy values to top-level aliases
+ for subkey, subvalue in result[key].items():
+ result[subkey] = subvalue
+ else:
+ result[key] = value
+ return result
+
+# vi: ts=4 expandtab
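
A small sketch of the two helpers above using made-up instance data (requires
jinja2). Hyphens become underscores, and keys under versioned namespaces such
as 'v1' are also promoted to the top level:

    from cloudinit.handlers.jinja_template import (
        convert_jinja_instance_data, render_jinja_payload)

    instance_data = {'v1': {'cloud-name': 'nocloud'},
                     'ds': {'meta-data': {'x': 'y'}}}
    print(convert_jinja_instance_data(instance_data))
    # {'cloud_name': 'nocloud', 'ds': {'meta_data': {'x': 'y'}},
    #  'v1': {'cloud_name': 'nocloud'}}   (key order may vary)

    payload = '## template: jinja\n#!/bin/sh\necho {{ v1.cloud_name }}'
    print(render_jinja_payload(payload, 'part-01', instance_data))
    # #!/bin/sh
    # echo nocloud
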
diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py
index e4945a23..214714bc 100644
--- a/cloudinit/handlers/shell_script.py
+++ b/cloudinit/handlers/shell_script.py
@@ -17,21 +17,18 @@ from cloudinit import util
from cloudinit.settings import (PER_ALWAYS)
LOG = logging.getLogger(__name__)
-SHELL_PREFIX = "#!"
class ShellScriptPartHandler(handlers.Handler):
+
+ prefixes = ['#!']
+
def __init__(self, paths, **_kwargs):
handlers.Handler.__init__(self, PER_ALWAYS)
self.script_dir = paths.get_ipath_cur('scripts')
if 'script_path' in _kwargs:
self.script_dir = paths.get_ipath_cur(_kwargs['script_path'])
- def list_types(self):
- return [
- handlers.type_from_starts_with(SHELL_PREFIX),
- ]
-
def handle_part(self, data, ctype, filename, payload, frequency):
if ctype in handlers.CONTENT_SIGNALS:
# TODO(harlowja): maybe delete existing things here
diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py
index dc338769..83fb0724 100644
--- a/cloudinit/handlers/upstart_job.py
+++ b/cloudinit/handlers/upstart_job.py
@@ -18,19 +18,16 @@ from cloudinit import util
from cloudinit.settings import (PER_INSTANCE)
LOG = logging.getLogger(__name__)
-UPSTART_PREFIX = "#upstart-job"
class UpstartJobPartHandler(handlers.Handler):
+
+ prefixes = ['#upstart-job']
+
def __init__(self, paths, **_kwargs):
handlers.Handler.__init__(self, PER_INSTANCE)
self.upstart_dir = paths.upstart_conf_d
- def list_types(self):
- return [
- handlers.type_from_starts_with(UPSTART_PREFIX),
- ]
-
def handle_part(self, data, ctype, filename, payload, frequency):
if ctype in handlers.CONTENT_SIGNALS:
return
diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py
index 1979cd96..3cc1fb19 100644
--- a/cloudinit/helpers.py
+++ b/cloudinit/helpers.py
@@ -449,4 +449,8 @@ class DefaultingConfigParser(RawConfigParser):
contents = '\n'.join([header, contents, ''])
return contents
+
+def identity(object):
+ return object
+
# vi: ts=4 expandtab
diff --git a/cloudinit/log.py b/cloudinit/log.py
index 1d75c9ff..5ae312ba 100644
--- a/cloudinit/log.py
+++ b/cloudinit/log.py
@@ -38,10 +38,18 @@ DEF_CON_FORMAT = '%(asctime)s - %(filename)s[%(levelname)s]: %(message)s'
logging.Formatter.converter = time.gmtime
-def setupBasicLogging(level=DEBUG):
+def setupBasicLogging(level=DEBUG, formatter=None):
+ if not formatter:
+ formatter = logging.Formatter(DEF_CON_FORMAT)
root = logging.getLogger()
+ for handler in root.handlers:
+ if hasattr(handler, 'stream') and hasattr(handler.stream, 'name'):
+ if handler.stream.name == '<stderr>':
+ handler.setLevel(level)
+ return
+ # Didn't have an existing stderr handler; create a new handler
console = logging.StreamHandler(sys.stderr)
- console.setFormatter(logging.Formatter(DEF_CON_FORMAT))
+ console.setFormatter(formatter)
console.setLevel(level)
root.addHandler(console)
root.setLevel(level)
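
A sketch of the reworked behavior: a second call no longer stacks another
stderr handler, it just adjusts the level of the existing one. The formatter
shown is the one addLogHandlerCLI passes in:

    import logging

    from cloudinit import log

    cli_fmt = logging.Formatter('%(levelname)s: %(message)s')
    log.setupBasicLogging(log.DEBUG, formatter=cli_fmt)  # adds stderr handler
    log.setupBasicLogging(log.WARNING)  # reuses it, only raising its level
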
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 41fde9ba..a775f1a8 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -58,22 +58,27 @@ class InvalidMetaDataException(Exception):
pass
-def process_base64_metadata(metadata, key_path=''):
- """Strip ci-b64 prefix and return metadata with base64-encoded-keys set."""
+def process_instance_metadata(metadata, key_path=''):
+ """Process all instance metadata cleaning it up for persisting as json.
+
+ Strip ci-b64 prefix and catalog any 'base64_encoded_keys' as a list
+
+ @return Dict copy of processed metadata.
+ """
md_copy = copy.deepcopy(metadata)
- md_copy['base64-encoded-keys'] = []
+ md_copy['base64_encoded_keys'] = []
for key, val in metadata.items():
if key_path:
sub_key_path = key_path + '/' + key
else:
sub_key_path = key
if isinstance(val, str) and val.startswith('ci-b64:'):
- md_copy['base64-encoded-keys'].append(sub_key_path)
+ md_copy['base64_encoded_keys'].append(sub_key_path)
md_copy[key] = val.replace('ci-b64:', '')
if isinstance(val, dict):
- return_val = process_base64_metadata(val, sub_key_path)
- md_copy['base64-encoded-keys'].extend(
- return_val.pop('base64-encoded-keys'))
+ return_val = process_instance_metadata(val, sub_key_path)
+ md_copy['base64_encoded_keys'].extend(
+ return_val.pop('base64_encoded_keys'))
md_copy[key] = return_val
return md_copy
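
A sketch of the renamed helper with sample metadata: 'ci-b64:' markers are
stripped and the affected key paths are catalogued under 'base64_encoded_keys':

    from cloudinit.sources import process_instance_metadata

    md = {'ds': {'user-data': 'ci-b64:EjM='}, 'region': 'us-east-2'}
    print(process_instance_metadata(md))
    # {'base64_encoded_keys': ['ds/user-data'],
    #  'ds': {'user-data': 'EjM='}, 'region': 'us-east-2'}
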
@@ -180,15 +185,24 @@ class DataSource(object):
"""
self._dirty_cache = True
return_value = self._get_data()
- json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
if not return_value:
return return_value
+ self.persist_instance_data()
+ return return_value
+
+ def persist_instance_data(self):
+ """Process and write INSTANCE_JSON_FILE with all instance metadata.
+ Replace any hyphens with underscores in key names for use in template
+ processing.
+
+ @return True on successful write, False otherwise.
+ """
instance_data = {
'ds': {
- 'meta-data': self.metadata,
- 'user-data': self.get_userdata_raw(),
- 'vendor-data': self.get_vendordata_raw()}}
+ 'meta_data': self.metadata,
+ 'user_data': self.get_userdata_raw(),
+ 'vendor_data': self.get_vendordata_raw()}}
if hasattr(self, 'network_json'):
network_json = getattr(self, 'network_json')
if network_json != UNSET:
@@ -202,16 +216,17 @@ class DataSource(object):
try:
# Process content base64encoding unserializable values
content = util.json_dumps(instance_data)
- # Strip base64: prefix and return base64-encoded-keys
- processed_data = process_base64_metadata(json.loads(content))
+ # Strip base64: prefix and set base64_encoded_keys list.
+ processed_data = process_instance_metadata(json.loads(content))
except TypeError as e:
LOG.warning('Error persisting instance-data.json: %s', str(e))
- return return_value
+ return False
except UnicodeDecodeError as e:
LOG.warning('Error persisting instance-data.json: %s', str(e))
- return return_value
+ return False
+ json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
write_json(json_file, processed_data, mode=0o600)
- return return_value
+ return True
def _get_data(self):
"""Walk metadata sources, process crawled data and save attributes."""
diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py
index 9e939c1e..8299af23 100644
--- a/cloudinit/sources/tests/test_init.py
+++ b/cloudinit/sources/tests/test_init.py
@@ -20,10 +20,12 @@ class DataSourceTestSubclassNet(DataSource):
dsname = 'MyTestSubclass'
url_max_wait = 55
- def __init__(self, sys_cfg, distro, paths, custom_userdata=None):
+ def __init__(self, sys_cfg, distro, paths, custom_userdata=None,
+ get_data_retval=True):
super(DataSourceTestSubclassNet, self).__init__(
sys_cfg, distro, paths)
self._custom_userdata = custom_userdata
+ self._get_data_retval = get_data_retval
def _get_cloud_name(self):
return 'SubclassCloudName'
@@ -37,7 +39,7 @@ class DataSourceTestSubclassNet(DataSource):
else:
self.userdata_raw = 'userdata_raw'
self.vendordata_raw = 'vendordata_raw'
- return True
+ return self._get_data_retval
class InvalidDataSourceTestSubclassNet(DataSource):
@@ -264,7 +266,18 @@ class TestDataSource(CiTestCase):
self.assertEqual('fqdnhostname.domain.com',
datasource.get_hostname(fqdn=True))
- def test_get_data_write_json_instance_data(self):
+ def test_get_data_does_not_write_instance_data_on_failure(self):
+ """get_data does not write INSTANCE_JSON_FILE on get_data False."""
+ tmp = self.tmp_dir()
+ datasource = DataSourceTestSubclassNet(
+ self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
+ get_data_retval=False)
+ self.assertFalse(datasource.get_data())
+ json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
+ self.assertFalse(
+ os.path.exists(json_file), 'Found unexpected file %s' % json_file)
+
+ def test_get_data_writes_json_instance_data_on_success(self):
"""get_data writes INSTANCE_JSON_FILE to run_dir as readonly root."""
tmp = self.tmp_dir()
datasource = DataSourceTestSubclassNet(
@@ -273,7 +286,7 @@ class TestDataSource(CiTestCase):
json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
content = util.load_file(json_file)
expected = {
- 'base64-encoded-keys': [],
+ 'base64_encoded_keys': [],
'v1': {
'availability-zone': 'myaz',
'cloud-name': 'subclasscloudname',
@@ -281,11 +294,12 @@ class TestDataSource(CiTestCase):
'local-hostname': 'test-subclass-hostname',
'region': 'myregion'},
'ds': {
- 'meta-data': {'availability_zone': 'myaz',
+ 'meta_data': {'availability_zone': 'myaz',
'local-hostname': 'test-subclass-hostname',
'region': 'myregion'},
- 'user-data': 'userdata_raw',
- 'vendor-data': 'vendordata_raw'}}
+ 'user_data': 'userdata_raw',
+ 'vendor_data': 'vendordata_raw'}}
+ self.maxDiff = None
self.assertEqual(expected, util.load_json(content))
file_stat = os.stat(json_file)
self.assertEqual(0o600, stat.S_IMODE(file_stat.st_mode))
@@ -296,7 +310,7 @@ class TestDataSource(CiTestCase):
datasource = DataSourceTestSubclassNet(
self.sys_cfg, self.distro, Paths({'run_dir': tmp}),
custom_userdata={'key1': 'val1', 'key2': {'key2.1': self.paths}})
- self.assertTrue(datasource.get_data())
+ datasource.get_data()
json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
content = util.load_file(json_file)
expected_userdata = {
@@ -306,7 +320,40 @@ class TestDataSource(CiTestCase):
" 'cloudinit.helpers.Paths'>"}}
instance_json = util.load_json(content)
self.assertEqual(
- expected_userdata, instance_json['ds']['user-data'])
+ expected_userdata, instance_json['ds']['user_data'])
+
+ def test_persist_instance_data_writes_ec2_metadata_when_set(self):
+ """When ec2_metadata class attribute is set, persist to json."""
+ tmp = self.tmp_dir()
+ datasource = DataSourceTestSubclassNet(
+ self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
+ datasource.ec2_metadata = UNSET
+ datasource.get_data()
+ json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
+ instance_data = util.load_json(util.load_file(json_file))
+ self.assertNotIn('ec2_metadata', instance_data['ds'])
+ datasource.ec2_metadata = {'ec2stuff': 'is good'}
+ datasource.persist_instance_data()
+ instance_data = util.load_json(util.load_file(json_file))
+ self.assertEqual(
+ {'ec2stuff': 'is good'},
+ instance_data['ds']['ec2_metadata'])
+
+ def test_persist_instance_data_writes_network_json_when_set(self):
+ """When network_data.json class attribute is set, persist to json."""
+ tmp = self.tmp_dir()
+ datasource = DataSourceTestSubclassNet(
+ self.sys_cfg, self.distro, Paths({'run_dir': tmp}))
+ datasource.get_data()
+ json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
+ instance_data = util.load_json(util.load_file(json_file))
+ self.assertNotIn('network_json', instance_data['ds'])
+ datasource.network_json = {'network_json': 'is good'}
+ datasource.persist_instance_data()
+ instance_data = util.load_json(util.load_file(json_file))
+ self.assertEqual(
+ {'network_json': 'is good'},
+ instance_data['ds']['network_json'])
@skipIf(not six.PY3, "json serialization on <= py2.7 handles bytes")
def test_get_data_base64encodes_unserializable_bytes(self):
@@ -320,11 +367,11 @@ class TestDataSource(CiTestCase):
content = util.load_file(json_file)
instance_json = util.load_json(content)
self.assertEqual(
- ['ds/user-data/key2/key2.1'],
- instance_json['base64-encoded-keys'])
+ ['ds/user_data/key2/key2.1'],
+ instance_json['base64_encoded_keys'])
self.assertEqual(
{'key1': 'val1', 'key2': {'key2.1': 'EjM='}},
- instance_json['ds']['user-data'])
+ instance_json['ds']['user_data'])
@skipIf(not six.PY2, "json serialization on <= py2.7 handles bytes")
def test_get_data_handles_bytes_values(self):
@@ -337,10 +384,10 @@ class TestDataSource(CiTestCase):
json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
content = util.load_file(json_file)
instance_json = util.load_json(content)
- self.assertEqual([], instance_json['base64-encoded-keys'])
+ self.assertEqual([], instance_json['base64_encoded_keys'])
self.assertEqual(
{'key1': 'val1', 'key2': {'key2.1': '\x123'}},
- instance_json['ds']['user-data'])
+ instance_json['ds']['user_data'])
@skipIf(not six.PY2, "Only python2 hits UnicodeDecodeErrors on non-utf8")
def test_non_utf8_encoding_logs_warning(self):
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 8874d405..ef5c6996 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -17,10 +17,11 @@ from cloudinit.settings import (
from cloudinit import handlers
# Default handlers (used if not overridden)
-from cloudinit.handlers import boot_hook as bh_part
-from cloudinit.handlers import cloud_config as cc_part
-from cloudinit.handlers import shell_script as ss_part
-from cloudinit.handlers import upstart_job as up_part
+from cloudinit.handlers.boot_hook import BootHookPartHandler
+from cloudinit.handlers.cloud_config import CloudConfigPartHandler
+from cloudinit.handlers.jinja_template import JinjaTemplatePartHandler
+from cloudinit.handlers.shell_script import ShellScriptPartHandler
+from cloudinit.handlers.upstart_job import UpstartJobPartHandler
from cloudinit.event import EventType
@@ -413,12 +414,17 @@ class Init(object):
'datasource': self.datasource,
})
# TODO(harlowja) Hmmm, should we dynamically import these??
+ cloudconfig_handler = CloudConfigPartHandler(**opts)
+ shellscript_handler = ShellScriptPartHandler(**opts)
def_handlers = [
- cc_part.CloudConfigPartHandler(**opts),
- ss_part.ShellScriptPartHandler(**opts),
- bh_part.BootHookPartHandler(**opts),
- up_part.UpstartJobPartHandler(**opts),
+ cloudconfig_handler,
+ shellscript_handler,
+ BootHookPartHandler(**opts),
+ UpstartJobPartHandler(**opts),
]
+ opts.update(
+ {'sub_handlers': [cloudconfig_handler, shellscript_handler]})
+ def_handlers.append(JinjaTemplatePartHandler(**opts))
return def_handlers
def _default_userdata_handlers(self):
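
A sketch of the new wiring outside of stages.Init, mirroring _default_handlers
and the unit tests; the cloud_dir/run_dir values are placeholders:

    from cloudinit import helpers
    from cloudinit.handlers.cloud_config import CloudConfigPartHandler
    from cloudinit.handlers.jinja_template import JinjaTemplatePartHandler
    from cloudinit.handlers.shell_script import ShellScriptPartHandler

    paths = helpers.Paths({'cloud_dir': '/tmp/cloud', 'run_dir': '/tmp/run'})
    shell = ShellScriptPartHandler(paths)
    cloudcfg = CloudConfigPartHandler(paths)
    jinja = JinjaTemplatePartHandler(paths, sub_handlers=[shell, cloudcfg])
    print(sorted(jinja.sub_handlers))
    # ['text/cloud-config', 'text/cloud-config-jsonp', 'text/x-shellscript']
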
diff --git a/cloudinit/templater.py b/cloudinit/templater.py
index 7e7acb86..b668674b 100644
--- a/cloudinit/templater.py
+++ b/cloudinit/templater.py
@@ -13,6 +13,7 @@
import collections
import re
+
try:
from Cheetah.Template import Template as CTemplate
CHEETAH_AVAILABLE = True
@@ -20,23 +21,44 @@ except (ImportError, AttributeError):
CHEETAH_AVAILABLE = False
try:
- import jinja2
+ from jinja2.runtime import implements_to_string
from jinja2 import Template as JTemplate
+ from jinja2 import DebugUndefined as JUndefined
JINJA_AVAILABLE = True
except (ImportError, AttributeError):
+ from cloudinit.helpers import identity
+ implements_to_string = identity
JINJA_AVAILABLE = False
+ JUndefined = object
from cloudinit import log as logging
from cloudinit import type_utils as tu
from cloudinit import util
+
LOG = logging.getLogger(__name__)
TYPE_MATCHER = re.compile(r"##\s*template:(.*)", re.I)
BASIC_MATCHER = re.compile(r'\$\{([A-Za-z0-9_.]+)\}|\$([A-Za-z0-9_.]+)')
+MISSING_JINJA_PREFIX = u'CI_MISSING_JINJA_VAR/'
+
+
+@implements_to_string # Needed for python2.7. Otherwise cached super.__str__
+class UndefinedJinjaVariable(JUndefined):
+ """Class used to represent any undefined jinja template varible."""
+
+ def __str__(self):
+ return u'%s%s' % (MISSING_JINJA_PREFIX, self._undefined_name)
+
+ def __sub__(self, other):
+ other = str(other).replace(MISSING_JINJA_PREFIX, '')
+ raise TypeError(
+ 'Undefined jinja variable: "{this}-{other}". Jinja tried'
+ ' subtraction. Perhaps you meant "{this}_{other}"?'.format(
+ this=self._undefined_name, other=other))
def basic_render(content, params):
- """This does simple replacement of bash variable like templates.
+ """This does sumple replacement of bash variable like templates.
It identifies patterns like ${a} or $a and can also identify patterns like
${a.b} or $a.b which will look for a key 'b' in the dictionary rooted
@@ -82,7 +104,7 @@ def detect_template(text):
# keep_trailing_newline is in jinja2 2.7+, not 2.6
add = "\n" if content.endswith("\n") else ""
return JTemplate(content,
- undefined=jinja2.StrictUndefined,
+ undefined=UndefinedJinjaVariable,
trim_blocks=True).render(**params) + add
if text.find("\n") != -1:
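
A sketch of the new undefined-variable behavior (requires jinja2): instead of
a StrictUndefined error, missing variables render with the
CI_MISSING_JINJA_VAR/ prefix so the rest of the payload still renders:

    from cloudinit.templater import render_string

    tmpl = '## template: jinja\nhello {{ name }} from {{ missing }}\n'
    print(render_string(tmpl, {'name': 'bob'}))
    # hello bob from CI_MISSING_JINJA_VAR/missing
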
diff --git a/cloudinit/tests/helpers.py b/cloudinit/tests/helpers.py
index 42f56c27..2eb7b0cd 100644
--- a/cloudinit/tests/helpers.py
+++ b/cloudinit/tests/helpers.py
@@ -32,6 +32,7 @@ from cloudinit import cloud
from cloudinit import distros
from cloudinit import helpers as ch
from cloudinit.sources import DataSourceNone
+from cloudinit.templater import JINJA_AVAILABLE
from cloudinit import util
_real_subp = util.subp
@@ -518,6 +519,14 @@ def skipUnlessJsonSchema():
_missing_jsonschema_dep, "No python-jsonschema dependency present.")
+def skipUnlessJinja():
+ return skipIf(not JINJA_AVAILABLE, "No jinja dependency present.")
+
+
+def skipIfJinja():
+ return skipIf(JINJA_AVAILABLE, "Jinja dependency present.")
+
+
# older versions of mock do not have the useful 'assert_not_called'
if not hasattr(mock.Mock, 'assert_not_called'):
def __mock_assert_not_called(mmock):
diff --git a/doc/rtd/topics/capabilities.rst b/doc/rtd/topics/capabilities.rst
index 3e2c9e31..2d8e2538 100644
--- a/doc/rtd/topics/capabilities.rst
+++ b/doc/rtd/topics/capabilities.rst
@@ -16,13 +16,15 @@ User configurability
`Cloud-init`_ 's behavior can be configured via user-data.
- User-data can be given by the user at instance launch time.
+ User-data can be given by the user at instance launch time. See
+ :ref:`user_data_formats` for acceptable user-data content.
+
This is done via the ``--user-data`` or ``--user-data-file`` argument to
ec2-run-instances for example.
-* Check your local clients documentation for how to provide a `user-data`
- string or `user-data` file for usage by cloud-init on instance creation.
+* Check your local client's documentation for how to provide a `user-data`
+ string or `user-data` file to cloud-init on instance creation.
Feature detection
@@ -166,6 +168,13 @@ likely be promoted to top-level subcommands when stable.
validation is work in progress and supports a subset of cloud-config
modules.
+ * ``cloud-init devel render``: Use cloud-init's jinja template rendering to
+ process **#cloud-config** or **custom-scripts**, injecting any variables
+ from ``/run/cloud-init/instance-data.json``. It accepts a user-data file
+ containing the jinja template header ``## template: jinja`` and renders
+ that content with any instance-data.json variables present.
+
+
.. _cli_clean:
cloud-init clean
diff --git a/doc/rtd/topics/datasources.rst b/doc/rtd/topics/datasources.rst
index 83034589..14432e65 100644
--- a/doc/rtd/topics/datasources.rst
+++ b/doc/rtd/topics/datasources.rst
@@ -18,6 +18,8 @@ single way to access the different cloud systems methods to provide this data
through the typical usage of subclasses.
+.. _instance_metadata:
+
instance-data
-------------
For reference, cloud-init stores all the metadata, vendordata and userdata
@@ -110,6 +112,51 @@ Below is an instance-data.json example from an OpenStack instance:
}
}
+
+As of cloud-init v. 18.4, any values present in
+``/run/cloud-init/instance-data.json`` can be used in cloud-init user data
+scripts or cloud config data. This allows consumers to use cloud-init's
+vendor-neutral, standardized metadata keys as well as datasource-specific
+content for any scripts or cloud-config modules they are using.
+
+To use instance-data.json values in scripts and **#cloud-config** files, the
+user-data must contain ``## template: jinja`` as its first line. Cloud-init
+will source all variables defined in ``/run/cloud-init/instance-data.json``
+and allow scripts or cloud-config files to reference those paths. Below are
+two examples:
+
+* Cloud config calling home with the ec2 public hostname and availability-zone::
+
+    ## template: jinja
+    #cloud-config
+    runcmd:
+        - echo 'EC2 public hostname allocated to instance: {{ ds.meta_data.public_hostname }}' > /tmp/instance_metadata
+        - echo 'EC2 availability zone: {{ v1.availability_zone }}' >> /tmp/instance_metadata
+        - curl -X POST -d '{"hostname": "{{ds.meta_data.public_hostname }}", "availability-zone": "{{ v1.availability_zone }}"}' https://example.com
+
+* Custom user script performing different operations based on region::
+
+    ## template: jinja
+    #!/bin/bash
+    {% if v1.region == 'us-east-2' -%}
+    echo 'Installing custom proxies for {{ v1.region }}'
+    sudo apt-get install my-xtra-fast-stack
+    {%- endif %}
+    ...
+
+.. note::
+ Trying to reference jinja variables that don't exist in
+ instance-data.json will result in warnings in ``/var/log/cloud-init.log``
+ and the following string in your rendered user-data:
+ ``CI_MISSING_JINJA_VAR/<your_varname>``.
+
+.. note::
+ To save time designing your user-data for a specific cloud's
+ instance-data.json, use the 'render' cloud-init command on an
+ instance booted on your favorite cloud. See :ref:`cli_devel` for more
+ information.
Datasource API
diff --git a/doc/rtd/topics/format.rst b/doc/rtd/topics/format.rst
index 1b0ff366..15234d21 100644
--- a/doc/rtd/topics/format.rst
+++ b/doc/rtd/topics/format.rst
@@ -1,6 +1,8 @@
-*******
-Formats
-*******
+.. _user_data_formats:
+
+*****************
+User-Data Formats
+*****************
User data that will be acted upon by cloud-init must be in one of the following types.
@@ -65,6 +67,11 @@ Typically used by those who just want to execute a shell script.
Begins with: ``#!`` or ``Content-Type: text/x-shellscript`` when using a MIME archive.
+.. note::
+ New in cloud-init v. 18.4: User-data scripts can also render cloud instance
+ metadata variables using jinja templating. See
+ :ref:`instance_metadata` for more information.
+
Example
-------
@@ -103,12 +110,18 @@ These things include:
- certain ssh keys should be imported
- *and many more...*
-**Note:** The file must be valid yaml syntax.
+.. note::
+  This file must be written in valid YAML syntax.
See the :ref:`yaml_examples` section for a commented set of examples of supported cloud config formats.
Begins with: ``#cloud-config`` or ``Content-Type: text/cloud-config`` when using a MIME archive.
+.. note::
+  New in cloud-init v. 18.4: Cloud config data can also render cloud instance
+ metadata variables using jinja templating. See
+ :ref:`instance_metadata` for more information.
+
Upstart Job
===========
diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py
index 696db8dd..27458271 100644
--- a/tests/cloud_tests/testcases/base.py
+++ b/tests/cloud_tests/testcases/base.py
@@ -168,7 +168,7 @@ class CloudTestCase(unittest.TestCase):
' OS: %s not bionic or newer' % self.os_name)
instance_data = json.loads(out)
self.assertEqual(
- ['ds/user-data'], instance_data['base64-encoded-keys'])
+ ['ds/user_data'], instance_data['base64_encoded_keys'])
ds = instance_data.get('ds', {})
v1_data = instance_data.get('v1', {})
metadata = ds.get('meta-data', {})
@@ -214,8 +214,8 @@ class CloudTestCase(unittest.TestCase):
instance_data = json.loads(out)
v1_data = instance_data.get('v1', {})
self.assertEqual(
- ['ds/user-data', 'ds/vendor-data'],
- sorted(instance_data['base64-encoded-keys']))
+ ['ds/user_data', 'ds/vendor_data'],
+ sorted(instance_data['base64_encoded_keys']))
self.assertEqual('nocloud', v1_data['cloud-name'])
self.assertIsNone(
v1_data['availability-zone'],
@@ -249,7 +249,7 @@ class CloudTestCase(unittest.TestCase):
instance_data = json.loads(out)
v1_data = instance_data.get('v1', {})
self.assertEqual(
- ['ds/user-data'], instance_data['base64-encoded-keys'])
+ ['ds/user_data'], instance_data['base64_encoded_keys'])
self.assertEqual('nocloud', v1_data['cloud-name'])
self.assertIsNone(
v1_data['availability-zone'],
diff --git a/tests/unittests/test_builtin_handlers.py b/tests/unittests/test_builtin_handlers.py
index 9751ed95..abe820e1 100644
--- a/tests/unittests/test_builtin_handlers.py
+++ b/tests/unittests/test_builtin_handlers.py
@@ -2,27 +2,34 @@
"""Tests of the built-in user data handlers."""
+import copy
import os
import shutil
import tempfile
+from textwrap import dedent
-try:
- from unittest import mock
-except ImportError:
- import mock
-from cloudinit.tests import helpers as test_helpers
+from cloudinit.tests.helpers import (
+ FilesystemMockingTestCase, CiTestCase, mock, skipUnlessJinja)
from cloudinit import handlers
from cloudinit import helpers
from cloudinit import util
-from cloudinit.handlers import upstart_job
+from cloudinit.handlers.cloud_config import CloudConfigPartHandler
+from cloudinit.handlers.jinja_template import (
+ JinjaTemplatePartHandler, convert_jinja_instance_data,
+ render_jinja_payload)
+from cloudinit.handlers.shell_script import ShellScriptPartHandler
+from cloudinit.handlers.upstart_job import UpstartJobPartHandler
from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE)
-class TestBuiltins(test_helpers.FilesystemMockingTestCase):
+class TestUpstartJobPartHandler(FilesystemMockingTestCase):
+
+ mpath = 'cloudinit.handlers.upstart_job.'
+
def test_upstart_frequency_no_out(self):
c_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, c_root)
@@ -32,14 +39,13 @@ class TestBuiltins(test_helpers.FilesystemMockingTestCase):
'cloud_dir': c_root,
'upstart_dir': up_root,
})
- freq = PER_ALWAYS
- h = upstart_job.UpstartJobPartHandler(paths)
+ h = UpstartJobPartHandler(paths)
# No files should be written out when
# the frequency is ! per-instance
h.handle_part('', handlers.CONTENT_START,
None, None, None)
h.handle_part('blah', 'text/upstart-job',
- 'test.conf', 'blah', freq)
+ 'test.conf', 'blah', frequency=PER_ALWAYS)
h.handle_part('', handlers.CONTENT_END,
None, None, None)
self.assertEqual(0, len(os.listdir(up_root)))
@@ -48,7 +54,6 @@ class TestBuiltins(test_helpers.FilesystemMockingTestCase):
# files should be written out when frequency is ! per-instance
new_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, new_root)
- freq = PER_INSTANCE
self.patchOS(new_root)
self.patchUtils(new_root)
@@ -56,22 +61,297 @@ class TestBuiltins(test_helpers.FilesystemMockingTestCase):
'upstart_dir': "/etc/upstart",
})
- upstart_job.SUITABLE_UPSTART = True
util.ensure_dir("/run")
util.ensure_dir("/etc/upstart")
- with mock.patch.object(util, 'subp') as mockobj:
- h = upstart_job.UpstartJobPartHandler(paths)
- h.handle_part('', handlers.CONTENT_START,
- None, None, None)
- h.handle_part('blah', 'text/upstart-job',
- 'test.conf', 'blah', freq)
- h.handle_part('', handlers.CONTENT_END,
- None, None, None)
+ with mock.patch(self.mpath + 'SUITABLE_UPSTART', return_value=True):
+ with mock.patch.object(util, 'subp') as m_subp:
+ h = UpstartJobPartHandler(paths)
+ h.handle_part('', handlers.CONTENT_START,
+ None, None, None)
+ h.handle_part('blah', 'text/upstart-job',
+ 'test.conf', 'blah', frequency=PER_INSTANCE)
+ h.handle_part('', handlers.CONTENT_END,
+ None, None, None)
- self.assertEqual(len(os.listdir('/etc/upstart')), 1)
+ self.assertEqual(len(os.listdir('/etc/upstart')), 1)
- mockobj.assert_called_once_with(
+ m_subp.assert_called_once_with(
['initctl', 'reload-configuration'], capture=False)
+
+class TestJinjaTemplatePartHandler(CiTestCase):
+
+ with_logs = True
+
+ mpath = 'cloudinit.handlers.jinja_template.'
+
+ def setUp(self):
+ super(TestJinjaTemplatePartHandler, self).setUp()
+ self.tmp = self.tmp_dir()
+ self.run_dir = os.path.join(self.tmp, 'run_dir')
+ util.ensure_dir(self.run_dir)
+ self.paths = helpers.Paths({
+ 'cloud_dir': self.tmp, 'run_dir': self.run_dir})
+
+ def test_jinja_template_part_handler_defaults(self):
+ """On init, paths are saved and subhandler types are empty."""
+ h = JinjaTemplatePartHandler(self.paths)
+ self.assertEqual(['## template: jinja'], h.prefixes)
+ self.assertEqual(3, h.handler_version)
+ self.assertEqual(self.paths, h.paths)
+ self.assertEqual({}, h.sub_handlers)
+
+ def test_jinja_template_part_handler_looks_up_sub_handler_types(self):
+ """When sub_handlers are passed, init lists types of subhandlers."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ cloudconfig_handler = CloudConfigPartHandler(self.paths)
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler, cloudconfig_handler])
+ self.assertItemsEqual(
+ ['text/cloud-config', 'text/cloud-config-jsonp',
+ 'text/x-shellscript'],
+ h.sub_handlers)
+
+ def test_jinja_template_part_handler_looks_up_subhandler_types(self):
+ """When sub_handlers are passed, init lists types of subhandlers."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ cloudconfig_handler = CloudConfigPartHandler(self.paths)
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler, cloudconfig_handler])
+ self.assertItemsEqual(
+ ['text/cloud-config', 'text/cloud-config-jsonp',
+ 'text/x-shellscript'],
+ h.sub_handlers)
+
+ def test_jinja_template_handle_noop_on_content_signals(self):
+ """Perform no part handling when content type is CONTENT_SIGNALS."""
+ script_handler = ShellScriptPartHandler(self.paths)
+
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler])
+ with mock.patch.object(script_handler, 'handle_part') as m_handle_part:
+ h.handle_part(
+ data='data', ctype=handlers.CONTENT_START, filename='part-1',
+ payload='## template: jinja\n#!/bin/bash\necho himom',
+ frequency='freq', headers='headers')
+ m_handle_part.assert_not_called()
+
+ @skipUnlessJinja()
+ def test_jinja_template_handle_subhandler_v2_with_clean_payload(self):
+ """Call version 2 subhandler.handle_part with stripped payload."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ self.assertEqual(2, script_handler.handler_version)
+
+ # Create required instance-data.json file
+ instance_json = os.path.join(self.run_dir, 'instance-data.json')
+ instance_data = {'topkey': 'echo himom'}
+ util.write_file(instance_json, util.json_dumps(instance_data))
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler])
+ with mock.patch.object(script_handler, 'handle_part') as m_part:
+ # ctype with leading '!' not in handlers.CONTENT_SIGNALS
+ h.handle_part(
+ data='data', ctype="!" + handlers.CONTENT_START,
+ filename='part01',
+ payload='## template: jinja \t \n#!/bin/bash\n{{ topkey }}',
+ frequency='freq', headers='headers')
+ m_part.assert_called_once_with(
+ 'data', '!__begin__', 'part01', '#!/bin/bash\necho himom', 'freq')
+
+ @skipUnlessJinja()
+ def test_jinja_template_handle_subhandler_v3_with_clean_payload(self):
+ """Call version 3 subhandler.handle_part with stripped payload."""
+ cloudcfg_handler = CloudConfigPartHandler(self.paths)
+ self.assertEqual(3, cloudcfg_handler.handler_version)
+
+ # Create required instance-data.json file
+ instance_json = os.path.join(self.run_dir, 'instance-data.json')
+ instance_data = {'topkey': {'sub': 'runcmd: [echo hi]'}}
+ util.write_file(instance_json, util.json_dumps(instance_data))
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[cloudcfg_handler])
+ with mock.patch.object(cloudcfg_handler, 'handle_part') as m_part:
+ # ctype with leading '!' not in handlers.CONTENT_SIGNALS
+ h.handle_part(
+ data='data', ctype="!" + handlers.CONTENT_END,
+ filename='part01',
+ payload='## template: jinja\n#cloud-config\n{{ topkey.sub }}',
+ frequency='freq', headers='headers')
+ m_part.assert_called_once_with(
+ 'data', '!__end__', 'part01', '#cloud-config\nruncmd: [echo hi]',
+ 'freq', 'headers')
+
+ def test_jinja_template_handle_errors_on_missing_instance_data_json(self):
+ """If instance-data is absent, raise an error from handle_part."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler])
+ with self.assertRaises(RuntimeError) as context_manager:
+ h.handle_part(
+ data='data', ctype="!" + handlers.CONTENT_START,
+ filename='part01',
+ payload='## template: jinja \n#!/bin/bash\necho himom',
+ frequency='freq', headers='headers')
+ script_file = os.path.join(script_handler.script_dir, 'part01')
+ self.assertEqual(
+ 'Cannot render jinja template vars. Instance data not yet present'
+ ' at {}/instance-data.json'.format(
+ self.run_dir), str(context_manager.exception))
+ self.assertFalse(
+ os.path.exists(script_file),
+ 'Unexpected file created %s' % script_file)
+
+ @skipUnlessJinja()
+ def test_jinja_template_handle_renders_jinja_content(self):
+ """When present, render jinja variables from instance-data.json."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ instance_json = os.path.join(self.run_dir, 'instance-data.json')
+ instance_data = {'topkey': {'subkey': 'echo himom'}}
+ util.write_file(instance_json, util.json_dumps(instance_data))
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler])
+ h.handle_part(
+ data='data', ctype="!" + handlers.CONTENT_START,
+ filename='part01',
+ payload=(
+ '## template: jinja \n'
+ '#!/bin/bash\n'
+ '{{ topkey.subkey|default("nosubkey") }}'),
+ frequency='freq', headers='headers')
+ script_file = os.path.join(script_handler.script_dir, 'part01')
+ self.assertNotIn(
+ 'Instance data not yet present at {}/instance-data.json'.format(
+ self.run_dir),
+ self.logs.getvalue())
+ self.assertEqual(
+ '#!/bin/bash\necho himom', util.load_file(script_file))
+
+ @skipUnlessJinja()
+ def test_jinja_template_handle_renders_jinja_content_missing_keys(self):
+ """When specified jinja variable is undefined, log a warning."""
+ script_handler = ShellScriptPartHandler(self.paths)
+ instance_json = os.path.join(self.run_dir, 'instance-data.json')
+ instance_data = {'topkey': {'subkey': 'echo himom'}}
+ util.write_file(instance_json, util.json_dumps(instance_data))
+ h = JinjaTemplatePartHandler(
+ self.paths, sub_handlers=[script_handler])
+ h.handle_part(
+ data='data', ctype="!" + handlers.CONTENT_START,
+ filename='part01',
+ payload='## template: jinja \n#!/bin/bash\n{{ goodtry }}',
+ frequency='freq', headers='headers')
+ script_file = os.path.join(script_handler.script_dir, 'part01')
+ self.assertTrue(
+ os.path.exists(script_file),
+ 'Missing expected file %s' % script_file)
+ self.assertIn(
+ "WARNING: Could not render jinja template variables in file"
+ " 'part01': 'goodtry'\n",
+ self.logs.getvalue())
+
+
+class TestConvertJinjaInstanceData(CiTestCase):
+
+ def test_convert_instance_data_hyphens_to_underscores(self):
+ """Replace hyphenated keys with underscores in instance-data."""
+ data = {'hyphenated-key': 'hyphenated-val',
+ 'underscore_delim_key': 'underscore_delimited_val'}
+ expected_data = {'hyphenated_key': 'hyphenated-val',
+ 'underscore_delim_key': 'underscore_delimited_val'}
+ self.assertEqual(
+ expected_data,
+ convert_jinja_instance_data(data=data))
+
+ def test_convert_instance_data_promotes_versioned_keys_to_top_level(self):
+ """Any versioned keys are promoted as top-level keys
+
+ This provides any cloud-init standardized keys up at a top-level to
+        allow ease of reference for users. Instead of v1.availability_zone,
+ the name availability_zone can be used in templates.
+ """
+ data = {'ds': {'dskey1': 1, 'dskey2': 2},
+ 'v1': {'v1key1': 'v1.1'},
+ 'v2': {'v2key1': 'v2.1'}}
+ expected_data = copy.deepcopy(data)
+ expected_data.update({'v1key1': 'v1.1', 'v2key1': 'v2.1'})
+
+ converted_data = convert_jinja_instance_data(data=data)
+ self.assertItemsEqual(
+ ['ds', 'v1', 'v2', 'v1key1', 'v2key1'], converted_data.keys())
+ self.assertEqual(
+ expected_data,
+ converted_data)
+
+ def test_convert_instance_data_most_recent_version_of_promoted_keys(self):
+ """The most-recent versioned key value is promoted to top-level."""
+ data = {'v1': {'key1': 'old v1 key1', 'key2': 'old v1 key2'},
+ 'v2': {'key1': 'newer v2 key1', 'key3': 'newer v2 key3'},
+ 'v3': {'key1': 'newest v3 key1'}}
+ expected_data = copy.deepcopy(data)
+ expected_data.update(
+ {'key1': 'newest v3 key1', 'key2': 'old v1 key2',
+ 'key3': 'newer v2 key3'})
+
+ converted_data = convert_jinja_instance_data(data=data)
+ self.assertEqual(
+ expected_data,
+ converted_data)
+
+ def test_convert_instance_data_decodes_decode_paths(self):
+ """Any decode_paths provided are decoded by convert_instance_data."""
+ data = {'key1': {'subkey1': 'aGkgbW9t'}, 'key2': 'aGkgZGFk'}
+ expected_data = copy.deepcopy(data)
+ expected_data['key1']['subkey1'] = 'hi mom'
+
+ converted_data = convert_jinja_instance_data(
+ data=data, decode_paths=('key1/subkey1',))
+ self.assertEqual(
+ expected_data,
+ converted_data)
+
+
+class TestRenderJinjaPayload(CiTestCase):
+
+ with_logs = True
+
+ @skipUnlessJinja()
+ def test_render_jinja_payload_logs_jinja_vars_on_debug(self):
+ """When debug is True, log jinja varables available."""
+ payload = (
+ '## template: jinja\n#!/bin/sh\necho hi from {{ v1.hostname }}')
+ instance_data = {'v1': {'hostname': 'foo'}, 'instance-id': 'iid'}
+ expected_log = dedent("""\
+ DEBUG: Converted jinja variables
+ {
+ "hostname": "foo",
+ "instance_id": "iid",
+ "v1": {
+ "hostname": "foo"
+ }
+ }
+ """)
+ self.assertEqual(
+ render_jinja_payload(
+ payload=payload, payload_fn='myfile',
+ instance_data=instance_data, debug=True),
+ '#!/bin/sh\necho hi from foo')
+ self.assertEqual(expected_log, self.logs.getvalue())
+
+ @skipUnlessJinja()
+ def test_render_jinja_payload_replaces_missing_variables_and_warns(self):
+ """Warn on missing jinja variables and replace the absent variable."""
+ payload = (
+ '## template: jinja\n#!/bin/sh\necho hi from {{ NOTHERE }}')
+ instance_data = {'v1': {'hostname': 'foo'}, 'instance-id': 'iid'}
+ self.assertEqual(
+ render_jinja_payload(
+ payload=payload, payload_fn='myfile',
+ instance_data=instance_data),
+ '#!/bin/sh\necho hi from CI_MISSING_JINJA_VAR/NOTHERE')
+ expected_log = (
+ 'WARNING: Could not render jinja template variables in file'
+ " 'myfile': 'NOTHERE'")
+ self.assertIn(expected_log, self.logs.getvalue())
+
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_etc_hosts.py b/tests/unittests/test_handler/test_handler_etc_hosts.py
index ced05a8d..d854afcb 100644
--- a/tests/unittests/test_handler/test_handler_etc_hosts.py
+++ b/tests/unittests/test_handler/test_handler_etc_hosts.py
@@ -49,6 +49,7 @@ class TestHostsFile(t_help.FilesystemMockingTestCase):
if '192.168.1.1\tblah.blah.us\tblah' not in contents:
self.assertIsNone('Default etc/hosts content modified')
+ @t_help.skipUnlessJinja()
def test_write_etc_hosts_suse_template(self):
cfg = {
'manage_etc_hosts': 'template',
diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py
index 6fe3659d..0f22e579 100644
--- a/tests/unittests/test_handler/test_handler_ntp.py
+++ b/tests/unittests/test_handler/test_handler_ntp.py
@@ -3,6 +3,7 @@
from cloudinit.config import cc_ntp
from cloudinit.sources import DataSourceNone
from cloudinit import (distros, helpers, cloud, util)
+
from cloudinit.tests.helpers import (
CiTestCase, FilesystemMockingTestCase, mock, skipUnlessJsonSchema)
diff --git a/tests/unittests/test_templating.py b/tests/unittests/test_templating.py
index 20c87efa..c36e6eb0 100644
--- a/tests/unittests/test_templating.py
+++ b/tests/unittests/test_templating.py
@@ -21,6 +21,9 @@ except ImportError:
class TestTemplates(test_helpers.CiTestCase):
+
+ with_logs = True
+
jinja_utf8 = b'It\xe2\x80\x99s not ascii, {{name}}\n'
jinja_utf8_rbob = b'It\xe2\x80\x99s not ascii, bob\n'.decode('utf-8')
@@ -124,6 +127,13 @@ $a,$b'''
self.add_header("jinja", self.jinja_utf8), {"name": "bob"}),
self.jinja_utf8_rbob)
+ def test_jinja_nonascii_render_undefined_variables_to_default_py3(self):
+ """Test py3 jinja render_to_string with undefined variable default."""
+ self.assertEqual(
+ templater.render_string(
+ self.add_header("jinja", self.jinja_utf8), {}),
+ self.jinja_utf8_rbob.replace('bob', 'CI_MISSING_JINJA_VAR/name'))
+
def test_jinja_nonascii_render_to_file(self):
"""Test jinja render_to_file of a filename with non-ascii content."""
tmpl_fn = self.tmp_path("j-render-to-file.template")
@@ -144,5 +154,18 @@ $a,$b'''
result = templater.render_from_file(tmpl_fn, {"name": "bob"})
self.assertEqual(result, self.jinja_utf8_rbob)
+ @test_helpers.skipIfJinja()
+ def test_jinja_warns_on_missing_dep_and_uses_basic_renderer(self):
+ """Test jinja render_from_file will fallback to basic renderer."""
+ tmpl_fn = self.tmp_path("j-render-from-file.template")
+ write_file(tmpl_fn, omode="wb",
+ content=self.add_header(
+ "jinja", self.jinja_utf8).encode('utf-8'))
+ result = templater.render_from_file(tmpl_fn, {"name": "bob"})
+ self.assertEqual(result, self.jinja_utf8.decode())
+ self.assertIn(
+ 'WARNING: Jinja not available as the selected renderer for desired'
+ ' template, reverting to the basic renderer.',
+ self.logs.getvalue())
# vi: ts=4 expandtab