Diffstat (limited to 'cloudinit/cmd/devel')
-rw-r--r--  cloudinit/cmd/devel/__init__.py           |  25
-rw-r--r--  cloudinit/cmd/devel/logs.py               |  59
-rwxr-xr-x  cloudinit/cmd/devel/net_convert.py        | 132
-rw-r--r--  cloudinit/cmd/devel/parser.py             |  23
-rwxr-xr-x  cloudinit/cmd/devel/render.py             |  85
-rw-r--r--  cloudinit/cmd/devel/tests/test_logs.py    |  21
-rw-r--r--  cloudinit/cmd/devel/tests/test_render.py  | 101
7 files changed, 425 insertions(+), 21 deletions(-)
diff --git a/cloudinit/cmd/devel/__init__.py b/cloudinit/cmd/devel/__init__.py
index e69de29b..3ae28b69 100644
--- a/cloudinit/cmd/devel/__init__.py
+++ b/cloudinit/cmd/devel/__init__.py
@@ -0,0 +1,25 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Common cloud-init devel commandline utility functions."""
+
+
+import logging
+
+from cloudinit import log
+from cloudinit.stages import Init
+
+
+def addLogHandlerCLI(logger, log_level):
+ """Add a commandline logging handler to emit messages to stderr."""
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
+ log.setupBasicLogging(log_level, formatter=formatter)
+ return logger
+
+
+def read_cfg_paths():
+ """Return a Paths object based on the system configuration on disk."""
+ init = Init(ds_deps=[])
+ init.read_cfg()
+ return init.paths
+
+# vi: ts=4 expandtab
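The two helpers above are shared by the devel subcommands added below; a minimal sketch of a subcommand handler using them (the handler name and argument shape are illustrative, not part of this patch):

    import logging

    from cloudinit.cmd.devel import addLogHandlerCLI, read_cfg_paths

    LOG = logging.getLogger('example')

    def handle_example_args(name, args):
        # Send WARNING and above to stderr, or DEBUG when --debug was passed.
        addLogHandlerCLI(LOG, logging.DEBUG if args.debug else logging.WARNING)
        # Resolve run_dir from the on-disk system config instead of hard-coding it.
        paths = read_cfg_paths()
        LOG.debug('run_dir resolved to %s', paths.run_dir)
        return 0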
diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
index 35ca478f..df725204 100644
--- a/cloudinit/cmd/devel/logs.py
+++ b/cloudinit/cmd/devel/logs.py
@@ -11,6 +11,7 @@ from cloudinit.temp_utils import tempdir
from datetime import datetime
import os
import shutil
+import sys
CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
@@ -31,6 +32,8 @@ def get_parser(parser=None):
parser = argparse.ArgumentParser(
prog='collect-logs',
description='Collect and tar all cloud-init debug info')
+ parser.add_argument('--verbose', '-v', action='count', default=0,
+ dest='verbosity', help="Be more verbose.")
parser.add_argument(
"--tarfile", '-t', default='cloud-init.tar.gz',
help=('The tarfile to create containing all collected logs.'
@@ -43,17 +46,33 @@ def get_parser(parser=None):
return parser
-def _write_command_output_to_file(cmd, filename):
+def _write_command_output_to_file(cmd, filename, msg, verbosity):
"""Helper which runs a command and writes output or error to filename."""
try:
out, _ = subp(cmd)
except ProcessExecutionError as e:
write_file(filename, str(e))
+ _debug("collecting %s failed.\n" % msg, 1, verbosity)
else:
write_file(filename, out)
+ _debug("collected %s\n" % msg, 1, verbosity)
+ return out
-def collect_logs(tarfile, include_userdata):
+def _debug(msg, level, verbosity):
+ if level <= verbosity:
+ sys.stderr.write(msg)
+
+
+def _collect_file(path, out_dir, verbosity):
+ if os.path.isfile(path):
+ copy(path, out_dir)
+ _debug("collected file: %s\n" % path, 1, verbosity)
+ else:
+ _debug("file %s did not exist\n" % path, 2, verbosity)
+
+
+def collect_logs(tarfile, include_userdata, verbosity=0):
"""Collect all cloud-init logs and tar them up into the provided tarfile.
@param tarfile: The path of the tar-gzipped file to create.
@@ -64,28 +83,46 @@ def collect_logs(tarfile, include_userdata):
log_dir = 'cloud-init-logs-{0}'.format(date)
with tempdir(dir='/tmp') as tmp_dir:
log_dir = os.path.join(tmp_dir, log_dir)
- _write_command_output_to_file(
+ version = _write_command_output_to_file(
+ ['cloud-init', '--version'],
+ os.path.join(log_dir, 'version'),
+ "cloud-init --version", verbosity)
+ dpkg_ver = _write_command_output_to_file(
['dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'],
- os.path.join(log_dir, 'version'))
+ os.path.join(log_dir, 'dpkg-version'),
+ "dpkg version", verbosity)
+ if not version:
+ version = dpkg_ver if dpkg_ver else "not-available"
+ _debug("collected cloud-init version: %s\n" % version, 1, verbosity)
_write_command_output_to_file(
- ['dmesg'], os.path.join(log_dir, 'dmesg.txt'))
+ ['dmesg'], os.path.join(log_dir, 'dmesg.txt'),
+ "dmesg output", verbosity)
_write_command_output_to_file(
- ['journalctl', '-o', 'short-precise'],
- os.path.join(log_dir, 'journal.txt'))
+ ['journalctl', '--boot=0', '-o', 'short-precise'],
+ os.path.join(log_dir, 'journal.txt'),
+ "systemd journal of current boot", verbosity)
+
for log in CLOUDINIT_LOGS:
- copy(log, log_dir)
+ _collect_file(log, log_dir, verbosity)
if include_userdata:
- copy(USER_DATA_FILE, log_dir)
+ _collect_file(USER_DATA_FILE, log_dir, verbosity)
run_dir = os.path.join(log_dir, 'run')
ensure_dir(run_dir)
- shutil.copytree(CLOUDINIT_RUN_DIR, os.path.join(run_dir, 'cloud-init'))
+ if os.path.exists(CLOUDINIT_RUN_DIR):
+ shutil.copytree(CLOUDINIT_RUN_DIR,
+ os.path.join(run_dir, 'cloud-init'))
+ _debug("collected dir %s\n" % CLOUDINIT_RUN_DIR, 1, verbosity)
+ else:
+ _debug("directory '%s' did not exist\n" % CLOUDINIT_RUN_DIR, 1,
+ verbosity)
with chdir(tmp_dir):
subp(['tar', 'czvf', tarfile, log_dir.replace(tmp_dir + '/', '')])
+ sys.stderr.write("Wrote %s\n" % tarfile)
def handle_collect_logs_args(name, args):
"""Handle calls to 'cloud-init collect-logs' as a subcommand."""
- collect_logs(args.tarfile, args.userdata)
+ collect_logs(args.tarfile, args.userdata, args.verbosity)
def main():
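The new verbosity plumbing can be exercised from Python as well as via cloud-init collect-logs -v; a small sketch, with an illustrative tarfile path:

    from cloudinit.cmd.devel import logs

    # Collect logs into /tmp/cloud-init-logs.tar.gz; verbosity=1 matches a
    # single -v on the CLI and reports each collected file on stderr.
    logs.collect_logs('/tmp/cloud-init-logs.tar.gz', include_userdata=False,
                      verbosity=1)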
diff --git a/cloudinit/cmd/devel/net_convert.py b/cloudinit/cmd/devel/net_convert.py
new file mode 100755
index 00000000..a0f58a0a
--- /dev/null
+++ b/cloudinit/cmd/devel/net_convert.py
@@ -0,0 +1,132 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Debug network config format conversions."""
+import argparse
+import json
+import os
+import sys
+import yaml
+
+from cloudinit.sources.helpers import openstack
+from cloudinit.sources import DataSourceAzure as azure
+
+from cloudinit import distros
+from cloudinit.net import eni, netplan, network_state, sysconfig
+from cloudinit import log
+
+NAME = 'net-convert'
+
+
+def get_parser(parser=None):
+ """Build or extend and arg parser for net-convert utility.
+
+ @param parser: Optional existing ArgumentParser instance representing the
+ subcommand which will be extended to support the args of this utility.
+
+ @returns: ArgumentParser with proper argument configuration.
+ """
+ if not parser:
+ parser = argparse.ArgumentParser(prog=NAME, description=__doc__)
+ parser.add_argument("-p", "--network-data", type=open,
+ metavar="PATH", required=True)
+ parser.add_argument("-k", "--kind",
+ choices=['eni', 'network_data.json', 'yaml',
+ 'azure-imds'],
+ required=True)
+ parser.add_argument("-d", "--directory",
+ metavar="PATH",
+ help="directory to place output in",
+ required=True)
+ parser.add_argument("-D", "--distro",
+ choices=[item for sublist in
+ distros.OSFAMILIES.values()
+ for item in sublist],
+ required=True)
+ parser.add_argument("-m", "--mac",
+ metavar="name,mac",
+ action='append',
+ help="interface name to mac mapping")
+ parser.add_argument("--debug", action='store_true',
+ help='enable debug logging to stderr.')
+ parser.add_argument("-O", "--output-kind",
+ choices=['eni', 'netplan', 'sysconfig'],
+ required=True)
+ return parser
+
+
+def handle_args(name, args):
+ if not args.directory.endswith("/"):
+ args.directory += "/"
+
+ if not os.path.isdir(args.directory):
+ os.makedirs(args.directory)
+
+ if args.debug:
+ log.setupBasicLogging(level=log.DEBUG)
+ else:
+ log.setupBasicLogging(level=log.WARN)
+ if args.mac:
+ known_macs = {}
+ for item in args.mac:
+ iface_name, iface_mac = item.split(",", 1)
+ known_macs[iface_mac] = iface_name
+ else:
+ known_macs = None
+
+ net_data = args.network_data.read()
+ if args.kind == "eni":
+ pre_ns = eni.convert_eni_data(net_data)
+ ns = network_state.parse_net_config_data(pre_ns)
+ elif args.kind == "yaml":
+ pre_ns = yaml.load(net_data)
+ if 'network' in pre_ns:
+ pre_ns = pre_ns.get('network')
+ if args.debug:
+ sys.stderr.write('\n'.join(
+ ["Input YAML",
+ yaml.dump(pre_ns, default_flow_style=False, indent=4), ""]))
+ ns = network_state.parse_net_config_data(pre_ns)
+ elif args.kind == 'network_data.json':
+ pre_ns = openstack.convert_net_json(
+ json.loads(net_data), known_macs=known_macs)
+ ns = network_state.parse_net_config_data(pre_ns)
+ elif args.kind == 'azure-imds':
+ pre_ns = azure.parse_network_config(json.loads(net_data))
+ ns = network_state.parse_net_config_data(pre_ns)
+
+ if not ns:
+ raise RuntimeError("No valid network_state object created from"
+ " input data")
+
+ if args.debug:
+ sys.stderr.write('\n'.join([
+ "", "Internal State",
+ yaml.dump(ns, default_flow_style=False, indent=4), ""]))
+ distro_cls = distros.fetch(args.distro)
+ distro = distro_cls(args.distro, {}, None)
+ config = {}
+ if args.output_kind == "eni":
+ r_cls = eni.Renderer
+ config = distro.renderer_configs.get('eni')
+ elif args.output_kind == "netplan":
+ r_cls = netplan.Renderer
+ config = distro.renderer_configs.get('netplan')
+ else:
+ r_cls = sysconfig.Renderer
+ config = distro.renderer_configs.get('sysconfig')
+
+ r = r_cls(config=config)
+ sys.stderr.write(''.join([
+ "Read input format '%s' from '%s'.\n" % (
+ args.kind, args.network_data.name),
+ "Wrote output format '%s' to '%s'\n" % (
+ args.output_kind, args.directory)]) + "\n")
+ r.render_network_state(network_state=ns, target=args.directory)
+
+
+if __name__ == '__main__':
+ args = get_parser().parse_args()
+ handle_args(NAME, args)
+
+
+# vi: ts=4 expandtab
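A hedged example of driving the new net-convert utility through its parser from Python; the file names, distro, and output directory are illustrative, and the input file must already exist since --network-data uses type=open:

    from cloudinit.cmd.devel import net_convert

    # Convert an OpenStack network_data.json into netplan config under ./out/.
    parser = net_convert.get_parser()
    args = parser.parse_args([
        '--network-data', 'network_data.json',
        '--kind', 'network_data.json',
        '--distro', 'ubuntu',
        '--directory', 'out',
        '--output-kind', 'netplan'])
    net_convert.handle_args(net_convert.NAME, args)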
diff --git a/cloudinit/cmd/devel/parser.py b/cloudinit/cmd/devel/parser.py
index acacc4ed..99a234ce 100644
--- a/cloudinit/cmd/devel/parser.py
+++ b/cloudinit/cmd/devel/parser.py
@@ -5,8 +5,10 @@
"""Define 'devel' subcommand argument parsers to include in cloud-init cmd."""
import argparse
-from cloudinit.config.schema import (
- get_parser as schema_parser, handle_schema_args)
+from cloudinit.config import schema
+
+from . import net_convert
+from . import render
def get_parser(parser=None):
@@ -17,10 +19,17 @@ def get_parser(parser=None):
subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
subparsers.required = True
- parser_schema = subparsers.add_parser(
- 'schema', help='Validate cloud-config files or document schema')
- # Construct schema subcommand parser
- schema_parser(parser_schema)
- parser_schema.set_defaults(action=('schema', handle_schema_args))
+ subcmds = [
+ ('schema', 'Validate cloud-config files for document schema',
+ schema.get_parser, schema.handle_schema_args),
+ (net_convert.NAME, net_convert.__doc__,
+ net_convert.get_parser, net_convert.handle_args),
+ (render.NAME, render.__doc__,
+ render.get_parser, render.handle_args)
+ ]
+ for (subcmd, helpmsg, get_parser, handler) in subcmds:
+ parser = subparsers.add_parser(subcmd, help=helpmsg)
+ get_parser(parser)
+ parser.set_defaults(action=(subcmd, handler))
return parser
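The (subcmd, handler) tuple stored via set_defaults(action=...) is what the top-level command dispatches on; a simplified sketch of that dispatch, not the actual cloud-init main module:

    import sys

    from cloudinit.cmd.devel import parser as devel_parser

    def main():
        args = devel_parser.get_parser().parse_args()
        subcommand, handler = args.action
        return handler(subcommand, args)

    if __name__ == '__main__':
        sys.exit(main())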
diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py
new file mode 100755
index 00000000..2ba6b681
--- /dev/null
+++ b/cloudinit/cmd/devel/render.py
@@ -0,0 +1,85 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Debug jinja template rendering of user-data."""
+
+import argparse
+import os
+import sys
+
+from cloudinit.handlers.jinja_template import render_jinja_payload_from_file
+from cloudinit import log
+from cloudinit.sources import INSTANCE_JSON_FILE
+from . import addLogHandlerCLI, read_cfg_paths
+
+NAME = 'render'
+DEFAULT_INSTANCE_DATA = '/run/cloud-init/instance-data.json'
+
+LOG = log.getLogger(NAME)
+
+
+def get_parser(parser=None):
+ """Build or extend and arg parser for jinja render utility.
+
+ @param parser: Optional existing ArgumentParser instance representing the
+ subcommand which will be extended to support the args of this utility.
+
+ @returns: ArgumentParser with proper argument configuration.
+ """
+ if not parser:
+ parser = argparse.ArgumentParser(prog=NAME, description=__doc__)
+ parser.add_argument(
+ 'user_data', type=str, help='Path to the user-data file to render')
+ parser.add_argument(
+ '-i', '--instance-data', type=str,
+ help=('Optional path to instance-data.json file. Defaults to'
+ ' /run/cloud-init/instance-data.json'))
+ parser.add_argument('-d', '--debug', action='store_true', default=False,
+ help='Add verbose messages during template render')
+ return parser
+
+
+def handle_args(name, args):
+ """Render the provided user-data template file using instance-data values.
+
+ Also set up CLI log handlers to report to stderr since this is a development
+ utility which should be run by a human on the CLI.
+
+ @return 0 on success, 1 on failure.
+ """
+ addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING)
+ if not args.instance_data:
+ paths = read_cfg_paths()
+ instance_data_fn = os.path.join(
+ paths.run_dir, INSTANCE_JSON_FILE)
+ else:
+ instance_data_fn = args.instance_data
+ if not os.path.exists(instance_data_fn):
+ LOG.error('Missing instance-data.json file: %s', instance_data_fn)
+ return 1
+ try:
+ with open(args.user_data) as stream:
+ user_data = stream.read()
+ except IOError:
+ LOG.error('Missing user-data file: %s', args.user_data)
+ return 1
+ rendered_payload = render_jinja_payload_from_file(
+ payload=user_data, payload_fn=args.user_data,
+ instance_data_file=instance_data_fn,
+ debug=True if args.debug else False)
+ if not rendered_payload:
+ LOG.error('Unable to render user-data file: %s', args.user_data)
+ return 1
+ sys.stdout.write(rendered_payload)
+ return 0
+
+
+def main():
+ args = get_parser().parse_args()
+ return handle_args(NAME, args)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
+
+
+# vi: ts=4 expandtab
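Rendering a jinja user-data template can also be done from Python, mirroring what the new tests below do; the paths here are illustrative:

    from collections import namedtuple

    from cloudinit.cmd.devel import render

    Args = namedtuple('Args', 'user_data instance_data debug')
    # Render /tmp/user-data against the instance-data JSON and write the
    # result to stdout; returns 0 on success, 1 on failure.
    exit_code = render.handle_args(
        render.NAME,
        Args(user_data='/tmp/user-data',
             instance_data='/run/cloud-init/instance-data.json',
             debug=False))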
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
index dc4947cc..98b47560 100644
--- a/cloudinit/cmd/devel/tests/test_logs.py
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -4,6 +4,7 @@ from cloudinit.cmd.devel import logs
from cloudinit.util import ensure_dir, load_file, subp, write_file
from cloudinit.tests.helpers import FilesystemMockingTestCase, wrap_and_call
from datetime import datetime
+import mock
import os
@@ -27,11 +28,13 @@ class TestCollectLogs(FilesystemMockingTestCase):
date = datetime.utcnow().date().strftime('%Y-%m-%d')
date_logdir = 'cloud-init-logs-{0}'.format(date)
+ version_out = '/usr/bin/cloud-init 18.2fake\n'
expected_subp = {
('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
'0.7fake\n',
+ ('cloud-init', '--version'): version_out,
('dmesg',): 'dmesg-out\n',
- ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+ ('journalctl', '--boot=0', '-o', 'short-precise'): 'journal-out\n',
('tar', 'czvf', output_tarfile, date_logdir): ''
}
@@ -44,9 +47,12 @@ class TestCollectLogs(FilesystemMockingTestCase):
subp(cmd) # Pass through tar cmd so we can check output
return expected_subp[cmd_tuple], ''
+ fake_stderr = mock.MagicMock()
+
wrap_and_call(
'cloudinit.cmd.devel.logs',
{'subp': {'side_effect': fake_subp},
+ 'sys.stderr': {'new': fake_stderr},
'CLOUDINIT_LOGS': {'new': [log1, log2]},
'CLOUDINIT_RUN_DIR': {'new': self.run_dir}},
logs.collect_logs, output_tarfile, include_userdata=False)
@@ -55,7 +61,9 @@ class TestCollectLogs(FilesystemMockingTestCase):
out_logdir = self.tmp_path(date_logdir, self.new_root)
self.assertEqual(
'0.7fake\n',
- load_file(os.path.join(out_logdir, 'version')))
+ load_file(os.path.join(out_logdir, 'dpkg-version')))
+ self.assertEqual(version_out,
+ load_file(os.path.join(out_logdir, 'version')))
self.assertEqual(
'cloud-init-log',
load_file(os.path.join(out_logdir, 'cloud-init.log')))
@@ -72,6 +80,7 @@ class TestCollectLogs(FilesystemMockingTestCase):
'results',
load_file(
os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
+ fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
def test_collect_logs_includes_optional_userdata(self):
"""collect-logs include userdata when --include-userdata is set."""
@@ -88,11 +97,13 @@ class TestCollectLogs(FilesystemMockingTestCase):
date = datetime.utcnow().date().strftime('%Y-%m-%d')
date_logdir = 'cloud-init-logs-{0}'.format(date)
+ version_out = '/usr/bin/cloud-init 18.2fake\n'
expected_subp = {
('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
'0.7fake',
+ ('cloud-init', '--version'): version_out,
('dmesg',): 'dmesg-out\n',
- ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+ ('journalctl', '--boot=0', '-o', 'short-precise'): 'journal-out\n',
('tar', 'czvf', output_tarfile, date_logdir): ''
}
@@ -105,9 +116,12 @@ class TestCollectLogs(FilesystemMockingTestCase):
subp(cmd) # Pass through tar cmd so we can check output
return expected_subp[cmd_tuple], ''
+ fake_stderr = mock.MagicMock()
+
wrap_and_call(
'cloudinit.cmd.devel.logs',
{'subp': {'side_effect': fake_subp},
+ 'sys.stderr': {'new': fake_stderr},
'CLOUDINIT_LOGS': {'new': [log1, log2]},
'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
'USER_DATA_FILE': {'new': userdata}},
@@ -118,3 +132,4 @@ class TestCollectLogs(FilesystemMockingTestCase):
self.assertEqual(
'user-data',
load_file(os.path.join(out_logdir, 'user-data.txt')))
+ fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
diff --git a/cloudinit/cmd/devel/tests/test_render.py b/cloudinit/cmd/devel/tests/test_render.py
new file mode 100644
index 00000000..fc5d2c0d
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/test_render.py
@@ -0,0 +1,101 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from six import StringIO
+import os
+
+from collections import namedtuple
+from cloudinit.cmd.devel import render
+from cloudinit.helpers import Paths
+from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja
+from cloudinit.util import ensure_dir, write_file
+
+
+class TestRender(CiTestCase):
+
+ with_logs = True
+
+ args = namedtuple('renderargs', 'user_data instance_data debug')
+
+ def setUp(self):
+ super(TestRender, self).setUp()
+ self.tmp = self.tmp_dir()
+
+ def test_handle_args_error_on_missing_user_data(self):
+ """When user_data file path does not exist, log an error."""
+ absent_file = self.tmp_path('user-data', dir=self.tmp)
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{}')
+ args = self.args(
+ user_data=absent_file, instance_data=instance_data, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'Missing user-data file: %s' % absent_file,
+ self.logs.getvalue())
+
+ def test_handle_args_error_on_missing_instance_data(self):
+ """When instance_data file path does not exist, log an error."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ absent_file = self.tmp_path('instance-data', dir=self.tmp)
+ args = self.args(
+ user_data=user_data, instance_data=absent_file, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'Missing instance-data.json file: %s' % absent_file,
+ self.logs.getvalue())
+
+ def test_handle_args_defaults_instance_data(self):
+ """When no instance_data argument, default to configured run_dir."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ run_dir = self.tmp_path('run_dir', dir=self.tmp)
+ ensure_dir(run_dir)
+ paths = Paths({'run_dir': run_dir})
+ self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
+ self.m_paths.return_value = paths
+ args = self.args(
+ user_data=user_data, instance_data=None, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
+ self.assertIn(
+ 'Missing instance-data.json file: %s' % json_file,
+ self.logs.getvalue())
+
+ @skipUnlessJinja()
+ def test_handle_args_renders_instance_data_vars_in_template(self):
+ """If user_data file is a jinja template render instance-data vars."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{"my-var": "jinja worked"}')
+ args = self.args(
+ user_data=user_data, instance_data=instance_data, debug=True)
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_console_err:
+ with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
+ self.assertEqual(0, render.handle_args('anyname', args))
+ self.assertIn(
+ 'DEBUG: Converted jinja variables\n{', self.logs.getvalue())
+ self.assertIn(
+ 'DEBUG: Converted jinja variables\n{', m_console_err.getvalue())
+ self.assertEqual('rendering: jinja worked', m_stdout.getvalue())
+
+ @skipUnlessJinja()
+ def test_handle_args_warns_and_gives_up_on_invalid_jinja_operation(self):
+ """If user_data file has invalid jinja operations log warnings."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ write_file(user_data, '##template: jinja\nrendering: {{ my-var }}')
+ instance_data = self.tmp_path('instance-data', dir=self.tmp)
+ write_file(instance_data, '{"my-var": "jinja worked"}')
+ args = self.args(
+ user_data=user_data, instance_data=instance_data, debug=True)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ self.assertEqual(1, render.handle_args('anyname', args))
+ self.assertIn(
+ 'WARNING: Ignoring jinja template for %s: Undefined jinja'
+ ' variable: "my-var". Jinja tried subtraction. Perhaps you meant'
+ ' "my_var"?' % user_data,
+ self.logs.getvalue())
+
+# vi: ts=4 expandtab