Diffstat (limited to 'cloudinit/cmd/devel')
-rw-r--r--  cloudinit/cmd/devel/logs.py               | 31
-rwxr-xr-x  cloudinit/cmd/devel/net_convert.py        | 15
-rwxr-xr-x  cloudinit/cmd/devel/render.py             | 35
-rw-r--r--  cloudinit/cmd/devel/tests/test_logs.py    | 43
-rw-r--r--  cloudinit/cmd/devel/tests/test_render.py  | 45
5 files changed, 138 insertions, 31 deletions
diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
index df725204..4c086b51 100644
--- a/cloudinit/cmd/devel/logs.py
+++ b/cloudinit/cmd/devel/logs.py
@@ -5,14 +5,16 @@
"""Define 'collect-logs' utility and handler to include in cloud-init cmd."""
import argparse
-from cloudinit.util import (
- ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file)
-from cloudinit.temp_utils import tempdir
from datetime import datetime
import os
import shutil
import sys
+from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
+from cloudinit.temp_utils import tempdir
+from cloudinit.util import (
+ ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file)
+
CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
CLOUDINIT_RUN_DIR = '/run/cloud-init'
@@ -46,6 +48,13 @@ def get_parser(parser=None):
return parser
+def _copytree_ignore_sensitive_files(curdir, files):
+ """Return a list of files to ignore if we are non-root"""
+ if os.getuid() == 0:
+ return ()
+ return (INSTANCE_JSON_SENSITIVE_FILE,) # Ignore root-permissioned files
+
+
def _write_command_output_to_file(cmd, filename, msg, verbosity):
"""Helper which runs a command and writes output or error to filename."""
try:
@@ -78,6 +87,11 @@ def collect_logs(tarfile, include_userdata, verbosity=0):
@param tarfile: The path of the tar-gzipped file to create.
@param include_userdata: Boolean, true means include user-data.
"""
+ if include_userdata and os.getuid() != 0:
+ sys.stderr.write(
+ "To include userdata, root user is required."
+ " Try sudo cloud-init collect-logs\n")
+ return 1
tarfile = os.path.abspath(tarfile)
date = datetime.utcnow().date().strftime('%Y-%m-%d')
log_dir = 'cloud-init-logs-{0}'.format(date)
@@ -110,7 +124,8 @@ def collect_logs(tarfile, include_userdata, verbosity=0):
ensure_dir(run_dir)
if os.path.exists(CLOUDINIT_RUN_DIR):
shutil.copytree(CLOUDINIT_RUN_DIR,
- os.path.join(run_dir, 'cloud-init'))
+ os.path.join(run_dir, 'cloud-init'),
+ ignore=_copytree_ignore_sensitive_files)
_debug("collected dir %s\n" % CLOUDINIT_RUN_DIR, 1, verbosity)
else:
_debug("directory '%s' did not exist\n" % CLOUDINIT_RUN_DIR, 1,
@@ -118,21 +133,21 @@ def collect_logs(tarfile, include_userdata, verbosity=0):
with chdir(tmp_dir):
subp(['tar', 'czvf', tarfile, log_dir.replace(tmp_dir + '/', '')])
sys.stderr.write("Wrote %s\n" % tarfile)
+ return 0
def handle_collect_logs_args(name, args):
"""Handle calls to 'cloud-init collect-logs' as a subcommand."""
- collect_logs(args.tarfile, args.userdata, args.verbosity)
+ return collect_logs(args.tarfile, args.userdata, args.verbosity)
def main():
"""Tool to collect and tar all cloud-init related logs."""
parser = get_parser()
- handle_collect_logs_args('collect-logs', parser.parse_args())
- return 0
+ return handle_collect_logs_args('collect-logs', parser.parse_args())
if __name__ == '__main__':
- main()
+ sys.exit(main())
# vi: ts=4 expandtab
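
The new _copytree_ignore_sensitive_files helper plugs into the ignore= hook of
shutil.copytree: the callback is invoked once per visited directory with that
directory's path and its entry names, and returns the names to skip. A minimal
standalone sketch of the same pattern, assuming an illustrative sensitive
filename and source/destination paths rather than the cloud-init constants:

    import os
    import shutil

    SENSITIVE_FILE = 'instance-data-sensitive.json'  # illustrative name

    def ignore_sensitive(curdir, names):
        """Return the subset of names copytree should skip in curdir."""
        if os.getuid() == 0:
            return ()  # root can read everything; skip nothing
        return tuple(n for n in names if n == SENSITIVE_FILE)

    # copytree calls the ignore callback for every directory it copies
    shutil.copytree('/run/cloud-init', '/tmp/collected/cloud-init',
                    ignore=ignore_sensitive)
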
diff --git a/cloudinit/cmd/devel/net_convert.py b/cloudinit/cmd/devel/net_convert.py
index a0f58a0a..1ad7e0bd 100755
--- a/cloudinit/cmd/devel/net_convert.py
+++ b/cloudinit/cmd/devel/net_convert.py
@@ -9,6 +9,7 @@ import yaml
from cloudinit.sources.helpers import openstack
from cloudinit.sources import DataSourceAzure as azure
+from cloudinit.sources import DataSourceOVF as ovf
from cloudinit import distros
from cloudinit.net import eni, netplan, network_state, sysconfig
@@ -31,7 +32,7 @@ def get_parser(parser=None):
metavar="PATH", required=True)
parser.add_argument("-k", "--kind",
choices=['eni', 'network_data.json', 'yaml',
- 'azure-imds'],
+ 'azure-imds', 'vmware-imc'],
required=True)
parser.add_argument("-d", "--directory",
metavar="PATH",
@@ -76,7 +77,6 @@ def handle_args(name, args):
net_data = args.network_data.read()
if args.kind == "eni":
pre_ns = eni.convert_eni_data(net_data)
- ns = network_state.parse_net_config_data(pre_ns)
elif args.kind == "yaml":
pre_ns = yaml.load(net_data)
if 'network' in pre_ns:
@@ -85,15 +85,16 @@ def handle_args(name, args):
sys.stderr.write('\n'.join(
["Input YAML",
yaml.dump(pre_ns, default_flow_style=False, indent=4), ""]))
- ns = network_state.parse_net_config_data(pre_ns)
elif args.kind == 'network_data.json':
pre_ns = openstack.convert_net_json(
json.loads(net_data), known_macs=known_macs)
- ns = network_state.parse_net_config_data(pre_ns)
elif args.kind == 'azure-imds':
pre_ns = azure.parse_network_config(json.loads(net_data))
- ns = network_state.parse_net_config_data(pre_ns)
+ elif args.kind == 'vmware-imc':
+ config = ovf.Config(ovf.ConfigFile(args.network_data.name))
+ pre_ns = ovf.get_network_config_from_conf(config, False)
+ ns = network_state.parse_net_config_data(pre_ns)
if not ns:
raise RuntimeError("No valid network_state object created from"
"input data")
@@ -111,6 +112,10 @@ def handle_args(name, args):
elif args.output_kind == "netplan":
r_cls = netplan.Renderer
config = distro.renderer_configs.get('netplan')
+ # don't run netplan generate/apply
+ config['postcmds'] = False
+ # trim leading slash
+ config['netplan_path'] = config['netplan_path'][1:]
else:
r_cls = sysconfig.Renderer
config = distro.renderer_configs.get('sysconfig')
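
The two netplan additions above keep net_convert side-effect free: setting
config['postcmds'] = False stops the renderer from running netplan
generate/apply, and trimming the leading slash from netplan_path matters
because os.path.join discards everything before an absolute second argument,
so the rendered file would otherwise land outside the chosen output directory.
A small illustration (the netplan path shown is an assumed example value):

    import os

    output_dir = '/tmp/rendered-config'
    netplan_path = '/etc/netplan/50-cloud-init.yaml'  # assumed example value

    # an absolute second argument makes os.path.join ignore output_dir entirely
    print(os.path.join(output_dir, netplan_path))      # /etc/netplan/50-cloud-init.yaml
    print(os.path.join(output_dir, netplan_path[1:]))  # /tmp/rendered-config/etc/netplan/50-cloud-init.yaml
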
diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py
index 2ba6b681..1bc22406 100755
--- a/cloudinit/cmd/devel/render.py
+++ b/cloudinit/cmd/devel/render.py
@@ -8,11 +8,10 @@ import sys
from cloudinit.handlers.jinja_template import render_jinja_payload_from_file
from cloudinit import log
-from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE
from . import addLogHandlerCLI, read_cfg_paths
NAME = 'render'
-DEFAULT_INSTANCE_DATA = '/run/cloud-init/instance-data.json'
LOG = log.getLogger(NAME)
@@ -47,12 +46,22 @@ def handle_args(name, args):
@return 0 on success, 1 on failure.
"""
addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING)
- if not args.instance_data:
- paths = read_cfg_paths()
- instance_data_fn = os.path.join(
- paths.run_dir, INSTANCE_JSON_FILE)
- else:
+ if args.instance_data:
instance_data_fn = args.instance_data
+ else:
+ paths = read_cfg_paths()
+ uid = os.getuid()
+ redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE)
+ if uid == 0:
+ instance_data_fn = os.path.join(
+ paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE)
+ if not os.path.exists(instance_data_fn):
+ LOG.warning(
+ 'Missing root-readable %s. Using redacted %s instead.',
+ instance_data_fn, redacted_data_fn)
+ instance_data_fn = redacted_data_fn
+ else:
+ instance_data_fn = redacted_data_fn
if not os.path.exists(instance_data_fn):
LOG.error('Missing instance-data.json file: %s', instance_data_fn)
return 1
@@ -62,10 +71,14 @@ def handle_args(name, args):
except IOError:
LOG.error('Missing user-data file: %s', args.user_data)
return 1
- rendered_payload = render_jinja_payload_from_file(
- payload=user_data, payload_fn=args.user_data,
- instance_data_file=instance_data_fn,
- debug=True if args.debug else False)
+ try:
+ rendered_payload = render_jinja_payload_from_file(
+ payload=user_data, payload_fn=args.user_data,
+ instance_data_file=instance_data_fn,
+ debug=True if args.debug else False)
+ except RuntimeError as e:
+ LOG.error('Cannot render from instance data: %s', str(e))
+ return 1
if not rendered_payload:
LOG.error('Unable to render user-data file: %s', args.user_data)
return 1
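
The selection logic added to render.py boils down to: non-root callers always
read the redacted instance-data file, while root prefers the sensitive file
and falls back, with a warning, when it is absent. A standalone sketch of that
decision, with illustrative filenames standing in for the INSTANCE_JSON_*
constants:

    import os

    def pick_instance_data(run_dir,
                           redacted_name='instance-data.json',
                           sensitive_name='instance-data-sensitive.json'):
        """Choose which instance-data file to render from."""
        redacted = os.path.join(run_dir, redacted_name)
        if os.getuid() != 0:
            return redacted  # non-root callers only get redacted data
        sensitive = os.path.join(run_dir, sensitive_name)
        # root prefers the sensitive file but falls back when it is missing
        return sensitive if os.path.exists(sensitive) else redacted
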
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
index 98b47560..4951797b 100644
--- a/cloudinit/cmd/devel/tests/test_logs.py
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -1,13 +1,17 @@
# This file is part of cloud-init. See LICENSE file for license information.
-from cloudinit.cmd.devel import logs
-from cloudinit.util import ensure_dir, load_file, subp, write_file
-from cloudinit.tests.helpers import FilesystemMockingTestCase, wrap_and_call
from datetime import datetime
-import mock
import os
+from six import StringIO
+
+from cloudinit.cmd.devel import logs
+from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
+from cloudinit.tests.helpers import (
+ FilesystemMockingTestCase, mock, wrap_and_call)
+from cloudinit.util import ensure_dir, load_file, subp, write_file
+@mock.patch('cloudinit.cmd.devel.logs.os.getuid')
class TestCollectLogs(FilesystemMockingTestCase):
def setUp(self):
@@ -15,14 +19,29 @@ class TestCollectLogs(FilesystemMockingTestCase):
self.new_root = self.tmp_dir()
self.run_dir = self.tmp_path('run', self.new_root)
- def test_collect_logs_creates_tarfile(self):
+ def test_collect_logs_with_userdata_requires_root_user(self, m_getuid):
+ """collect-logs errors when non-root user collects userdata ."""
+ m_getuid.return_value = 100 # non-root
+ output_tarfile = self.tmp_path('logs.tgz')
+ with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
+ self.assertEqual(
+ 1, logs.collect_logs(output_tarfile, include_userdata=True))
+ self.assertEqual(
+ 'To include userdata, root user is required.'
+ ' Try sudo cloud-init collect-logs\n',
+ m_stderr.getvalue())
+
+ def test_collect_logs_creates_tarfile(self, m_getuid):
"""collect-logs creates a tarfile with all related cloud-init info."""
+ m_getuid.return_value = 100
log1 = self.tmp_path('cloud-init.log', self.new_root)
write_file(log1, 'cloud-init-log')
log2 = self.tmp_path('cloud-init-output.log', self.new_root)
write_file(log2, 'cloud-init-output-log')
ensure_dir(self.run_dir)
write_file(self.tmp_path('results.json', self.run_dir), 'results')
+ write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+ 'sensitive')
output_tarfile = self.tmp_path('logs.tgz')
date = datetime.utcnow().date().strftime('%Y-%m-%d')
@@ -59,6 +78,11 @@ class TestCollectLogs(FilesystemMockingTestCase):
# unpack the tarfile and check file contents
subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
out_logdir = self.tmp_path(date_logdir, self.new_root)
+ self.assertFalse(
+ os.path.exists(
+ os.path.join(out_logdir, 'run', 'cloud-init',
+ INSTANCE_JSON_SENSITIVE_FILE)),
+ 'Unexpected file found: %s' % INSTANCE_JSON_SENSITIVE_FILE)
self.assertEqual(
'0.7fake\n',
load_file(os.path.join(out_logdir, 'dpkg-version')))
@@ -82,8 +106,9 @@ class TestCollectLogs(FilesystemMockingTestCase):
os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
- def test_collect_logs_includes_optional_userdata(self):
+ def test_collect_logs_includes_optional_userdata(self, m_getuid):
"""collect-logs include userdata when --include-userdata is set."""
+ m_getuid.return_value = 0
log1 = self.tmp_path('cloud-init.log', self.new_root)
write_file(log1, 'cloud-init-log')
log2 = self.tmp_path('cloud-init-output.log', self.new_root)
@@ -92,6 +117,8 @@ class TestCollectLogs(FilesystemMockingTestCase):
write_file(userdata, 'user-data')
ensure_dir(self.run_dir)
write_file(self.tmp_path('results.json', self.run_dir), 'results')
+ write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+ 'sensitive')
output_tarfile = self.tmp_path('logs.tgz')
date = datetime.utcnow().date().strftime('%Y-%m-%d')
@@ -132,4 +159,8 @@ class TestCollectLogs(FilesystemMockingTestCase):
self.assertEqual(
'user-data',
load_file(os.path.join(out_logdir, 'user-data.txt')))
+ self.assertEqual(
+ 'sensitive',
+ load_file(os.path.join(out_logdir, 'run', 'cloud-init',
+ INSTANCE_JSON_SENSITIVE_FILE)))
fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
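
The tests above lean on mock.patch applied as a class decorator: it patches
os.getuid for every test_* method on the class and hands the resulting mock to
each method as an extra positional argument. A minimal self-contained example
of the same pattern (test names and return values are illustrative):

    import os
    import unittest
    from unittest import mock

    @mock.patch('os.getuid')
    class ExampleGetuidTests(unittest.TestCase):

        def test_simulated_non_root(self, m_getuid):
            m_getuid.return_value = 100  # pretend we are an unprivileged user
            self.assertEqual(100, os.getuid())

        def test_simulated_root(self, m_getuid):
            m_getuid.return_value = 0  # pretend we are root
            self.assertEqual(0, os.getuid())

    if __name__ == '__main__':
        unittest.main()
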
diff --git a/cloudinit/cmd/devel/tests/test_render.py b/cloudinit/cmd/devel/tests/test_render.py
index fc5d2c0d..988bba03 100644
--- a/cloudinit/cmd/devel/tests/test_render.py
+++ b/cloudinit/cmd/devel/tests/test_render.py
@@ -6,7 +6,7 @@ import os
from collections import namedtuple
from cloudinit.cmd.devel import render
from cloudinit.helpers import Paths
-from cloudinit.sources import INSTANCE_JSON_FILE
+from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE
from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja
from cloudinit.util import ensure_dir, write_file
@@ -63,6 +63,49 @@ class TestRender(CiTestCase):
'Missing instance-data.json file: %s' % json_file,
self.logs.getvalue())
+ def test_handle_args_root_fallback_from_sensitive_instance_data(self):
+ """When root user defaults to sensitive.json."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ run_dir = self.tmp_path('run_dir', dir=self.tmp)
+ ensure_dir(run_dir)
+ paths = Paths({'run_dir': run_dir})
+ self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
+ self.m_paths.return_value = paths
+ args = self.args(
+ user_data=user_data, instance_data=None, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ with mock.patch('os.getuid') as m_getuid:
+ m_getuid.return_value = 0
+ self.assertEqual(1, render.handle_args('anyname', args))
+ json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
+ json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
+ self.assertIn(
+ 'WARNING: Missing root-readable %s. Using redacted %s' % (
+ json_sensitive, json_file), self.logs.getvalue())
+ self.assertIn(
+ 'ERROR: Missing instance-data.json file: %s' % json_file,
+ self.logs.getvalue())
+
+ def test_handle_args_root_uses_sensitive_instance_data(self):
+ """When root user, and no instance-data arg, use sensitive.json."""
+ user_data = self.tmp_path('user-data', dir=self.tmp)
+ write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
+ run_dir = self.tmp_path('run_dir', dir=self.tmp)
+ ensure_dir(run_dir)
+ json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
+ write_file(json_sensitive, '{"my-var": "jinja worked"}')
+ paths = Paths({'run_dir': run_dir})
+ self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
+ self.m_paths.return_value = paths
+ args = self.args(
+ user_data=user_data, instance_data=None, debug=False)
+ with mock.patch('sys.stderr', new_callable=StringIO):
+ with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
+ with mock.patch('os.getuid') as m_getuid:
+ m_getuid.return_value = 0
+ self.assertEqual(0, render.handle_args('anyname', args))
+ self.assertIn('rendering: jinja worked', m_stdout.getvalue())
+
@skipUnlessJinja()
def test_handle_args_renders_instance_data_vars_in_template(self):
"""If user_data file is a jinja template render instance-data vars."""