Diffstat (limited to 'cloudinit/cmd')
-rwxr-xr-x  cloudinit/cmd/cloud_id.py | 90
-rw-r--r--  cloudinit/cmd/devel/logs.py | 31
-rwxr-xr-x  cloudinit/cmd/devel/net_convert.py | 15
-rwxr-xr-x  cloudinit/cmd/devel/render.py | 35
-rw-r--r--  cloudinit/cmd/devel/tests/test_logs.py | 43
-rw-r--r--  cloudinit/cmd/devel/tests/test_render.py | 45
-rw-r--r--  cloudinit/cmd/main.py | 20
-rw-r--r--  cloudinit/cmd/query.py | 36
-rw-r--r--  cloudinit/cmd/tests/test_cloud_id.py | 127
-rw-r--r--  cloudinit/cmd/tests/test_query.py | 76
10 files changed, 454 insertions, 64 deletions
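
The largest addition is the new cloud-id subcommand (cloudinit/cmd/cloud_id.py, the first file in the diff below). As a rough illustration only, not part of the patch, the new module can be driven programmatically through the get_parser()/handle_args() pair and the --long/--instance-data options the patch defines; the default instance-data path comes from the patch itself:

    # Sketch of exercising the new cloud-id module added by this patch.
    import sys

    from cloudinit.cmd import cloud_id

    parser = cloud_id.get_parser()
    args = parser.parse_args(
        ['--long', '--instance-data', '/run/cloud-init/instance-data.json'])
    # handle_args() prints the canonical cloud-id (tab-delimited with the
    # region when --long is given) and returns 0 on success, 1 on error.
    sys.exit(cloud_id.handle_args('cloud-id', args))
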
diff --git a/cloudinit/cmd/cloud_id.py b/cloudinit/cmd/cloud_id.py new file mode 100755 index 00000000..97608921 --- /dev/null +++ b/cloudinit/cmd/cloud_id.py @@ -0,0 +1,90 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""Commandline utility to list the canonical cloud-id for an instance.""" + +import argparse +import json +import sys + +from cloudinit.sources import ( + INSTANCE_JSON_FILE, METADATA_UNKNOWN, canonical_cloud_id) + +DEFAULT_INSTANCE_JSON = '/run/cloud-init/%s' % INSTANCE_JSON_FILE + +NAME = 'cloud-id' + + +def get_parser(parser=None): + """Build or extend an arg parser for the cloud-id utility. + + @param parser: Optional existing ArgumentParser instance representing the + query subcommand which will be extended to support the args of + this utility. + + @returns: ArgumentParser with proper argument configuration. + """ + if not parser: + parser = argparse.ArgumentParser( + prog=NAME, + description='Report the canonical cloud-id for this instance') + parser.add_argument( + '-j', '--json', action='store_true', default=False, + help='Report all standardized cloud-id information as json.') + parser.add_argument( + '-l', '--long', action='store_true', default=False, + help='Report extended cloud-id information as tab-delimited string.') + parser.add_argument( + '-i', '--instance-data', type=str, default=DEFAULT_INSTANCE_JSON, + help=('Path to instance-data.json file. Default is %s' % + DEFAULT_INSTANCE_JSON)) + return parser + + +def error(msg): + sys.stderr.write('ERROR: %s\n' % msg) + return 1 + + +def handle_args(name, args): + """Handle calls to 'cloud-id' cli. + + Print the canonical cloud-id on which the instance is running. + + @return: 0 on success, 1 otherwise. + """ + try: + instance_data = json.load(open(args.instance_data)) + except IOError: + return error( + "File not found '%s'. Provide a path to instance data json file" + ' using --instance-data' % args.instance_data) + except ValueError as e: + return error( + "File '%s' is not valid json. 
%s" % (args.instance_data, e)) + v1 = instance_data.get('v1', {}) + cloud_id = canonical_cloud_id( + v1.get('cloud_name', METADATA_UNKNOWN), + v1.get('region', METADATA_UNKNOWN), + v1.get('platform', METADATA_UNKNOWN)) + if args.json: + v1['cloud_id'] = cloud_id + response = json.dumps( # Pretty, sorted json + v1, indent=1, sort_keys=True, separators=(',', ': ')) + elif args.long: + response = '%s\t%s' % (cloud_id, v1.get('region', METADATA_UNKNOWN)) + else: + response = cloud_id + sys.stdout.write('%s\n' % response) + return 0 + + +def main(): + """Tool to query specific instance-data values.""" + parser = get_parser() + sys.exit(handle_args(NAME, parser.parse_args())) + + +if __name__ == '__main__': + main() + +# vi: ts=4 expandtab diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py index df725204..4c086b51 100644 --- a/cloudinit/cmd/devel/logs.py +++ b/cloudinit/cmd/devel/logs.py @@ -5,14 +5,16 @@ """Define 'collect-logs' utility and handler to include in cloud-init cmd.""" import argparse -from cloudinit.util import ( - ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file) -from cloudinit.temp_utils import tempdir from datetime import datetime import os import shutil import sys +from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE +from cloudinit.temp_utils import tempdir +from cloudinit.util import ( + ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file) + CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log'] CLOUDINIT_RUN_DIR = '/run/cloud-init' @@ -46,6 +48,13 @@ def get_parser(parser=None): return parser +def _copytree_ignore_sensitive_files(curdir, files): + """Return a list of files to ignore if we are non-root""" + if os.getuid() == 0: + return () + return (INSTANCE_JSON_SENSITIVE_FILE,) # Ignore root-permissioned files + + def _write_command_output_to_file(cmd, filename, msg, verbosity): """Helper which runs a command and writes output or error to filename.""" try: @@ -78,6 +87,11 @@ def collect_logs(tarfile, include_userdata, verbosity=0): @param tarfile: The path of the tar-gzipped file to create. @param include_userdata: Boolean, true means include user-data. """ + if include_userdata and os.getuid() != 0: + sys.stderr.write( + "To include userdata, root user is required." 
+ " Try sudo cloud-init collect-logs\n") + return 1 tarfile = os.path.abspath(tarfile) date = datetime.utcnow().date().strftime('%Y-%m-%d') log_dir = 'cloud-init-logs-{0}'.format(date) @@ -110,7 +124,8 @@ def collect_logs(tarfile, include_userdata, verbosity=0): ensure_dir(run_dir) if os.path.exists(CLOUDINIT_RUN_DIR): shutil.copytree(CLOUDINIT_RUN_DIR, - os.path.join(run_dir, 'cloud-init')) + os.path.join(run_dir, 'cloud-init'), + ignore=_copytree_ignore_sensitive_files) _debug("collected dir %s\n" % CLOUDINIT_RUN_DIR, 1, verbosity) else: _debug("directory '%s' did not exist\n" % CLOUDINIT_RUN_DIR, 1, @@ -118,21 +133,21 @@ def collect_logs(tarfile, include_userdata, verbosity=0): with chdir(tmp_dir): subp(['tar', 'czvf', tarfile, log_dir.replace(tmp_dir + '/', '')]) sys.stderr.write("Wrote %s\n" % tarfile) + return 0 def handle_collect_logs_args(name, args): """Handle calls to 'cloud-init collect-logs' as a subcommand.""" - collect_logs(args.tarfile, args.userdata, args.verbosity) + return collect_logs(args.tarfile, args.userdata, args.verbosity) def main(): """Tool to collect and tar all cloud-init related logs.""" parser = get_parser() - handle_collect_logs_args('collect-logs', parser.parse_args()) - return 0 + return handle_collect_logs_args('collect-logs', parser.parse_args()) if __name__ == '__main__': - main() + sys.exit(main()) # vi: ts=4 expandtab diff --git a/cloudinit/cmd/devel/net_convert.py b/cloudinit/cmd/devel/net_convert.py index a0f58a0a..1ad7e0bd 100755 --- a/cloudinit/cmd/devel/net_convert.py +++ b/cloudinit/cmd/devel/net_convert.py @@ -9,6 +9,7 @@ import yaml from cloudinit.sources.helpers import openstack from cloudinit.sources import DataSourceAzure as azure +from cloudinit.sources import DataSourceOVF as ovf from cloudinit import distros from cloudinit.net import eni, netplan, network_state, sysconfig @@ -31,7 +32,7 @@ def get_parser(parser=None): metavar="PATH", required=True) parser.add_argument("-k", "--kind", choices=['eni', 'network_data.json', 'yaml', - 'azure-imds'], + 'azure-imds', 'vmware-imc'], required=True) parser.add_argument("-d", "--directory", metavar="PATH", @@ -76,7 +77,6 @@ def handle_args(name, args): net_data = args.network_data.read() if args.kind == "eni": pre_ns = eni.convert_eni_data(net_data) - ns = network_state.parse_net_config_data(pre_ns) elif args.kind == "yaml": pre_ns = yaml.load(net_data) if 'network' in pre_ns: @@ -85,15 +85,16 @@ def handle_args(name, args): sys.stderr.write('\n'.join( ["Input YAML", yaml.dump(pre_ns, default_flow_style=False, indent=4), ""])) - ns = network_state.parse_net_config_data(pre_ns) elif args.kind == 'network_data.json': pre_ns = openstack.convert_net_json( json.loads(net_data), known_macs=known_macs) - ns = network_state.parse_net_config_data(pre_ns) elif args.kind == 'azure-imds': pre_ns = azure.parse_network_config(json.loads(net_data)) - ns = network_state.parse_net_config_data(pre_ns) + elif args.kind == 'vmware-imc': + config = ovf.Config(ovf.ConfigFile(args.network_data.name)) + pre_ns = ovf.get_network_config_from_conf(config, False) + ns = network_state.parse_net_config_data(pre_ns) if not ns: raise RuntimeError("No valid network_state object created from" "input data") @@ -111,6 +112,10 @@ def handle_args(name, args): elif args.output_kind == "netplan": r_cls = netplan.Renderer config = distro.renderer_configs.get('netplan') + # don't run netplan generate/apply + config['postcmds'] = False + # trim leading slash + config['netplan_path'] = config['netplan_path'][1:] else: r_cls = 
sysconfig.Renderer config = distro.renderer_configs.get('sysconfig') diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py index 2ba6b681..1bc22406 100755 --- a/cloudinit/cmd/devel/render.py +++ b/cloudinit/cmd/devel/render.py @@ -8,11 +8,10 @@ import sys from cloudinit.handlers.jinja_template import render_jinja_payload_from_file from cloudinit import log -from cloudinit.sources import INSTANCE_JSON_FILE +from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE from . import addLogHandlerCLI, read_cfg_paths NAME = 'render' -DEFAULT_INSTANCE_DATA = '/run/cloud-init/instance-data.json' LOG = log.getLogger(NAME) @@ -47,12 +46,22 @@ def handle_args(name, args): @return 0 on success, 1 on failure. """ addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING) - if not args.instance_data: - paths = read_cfg_paths() - instance_data_fn = os.path.join( - paths.run_dir, INSTANCE_JSON_FILE) - else: + if args.instance_data: instance_data_fn = args.instance_data + else: + paths = read_cfg_paths() + uid = os.getuid() + redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE) + if uid == 0: + instance_data_fn = os.path.join( + paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE) + if not os.path.exists(instance_data_fn): + LOG.warning( + 'Missing root-readable %s. Using redacted %s instead.', + instance_data_fn, redacted_data_fn) + instance_data_fn = redacted_data_fn + else: + instance_data_fn = redacted_data_fn if not os.path.exists(instance_data_fn): LOG.error('Missing instance-data.json file: %s', instance_data_fn) return 1 @@ -62,10 +71,14 @@ def handle_args(name, args): except IOError: LOG.error('Missing user-data file: %s', args.user_data) return 1 - rendered_payload = render_jinja_payload_from_file( - payload=user_data, payload_fn=args.user_data, - instance_data_file=instance_data_fn, - debug=True if args.debug else False) + try: + rendered_payload = render_jinja_payload_from_file( + payload=user_data, payload_fn=args.user_data, + instance_data_file=instance_data_fn, + debug=True if args.debug else False) + except RuntimeError as e: + LOG.error('Cannot render from instance data: %s', str(e)) + return 1 if not rendered_payload: LOG.error('Unable to render user-data file: %s', args.user_data) return 1 diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py index 98b47560..4951797b 100644 --- a/cloudinit/cmd/devel/tests/test_logs.py +++ b/cloudinit/cmd/devel/tests/test_logs.py @@ -1,13 +1,17 @@ # This file is part of cloud-init. See LICENSE file for license information. 
-from cloudinit.cmd.devel import logs -from cloudinit.util import ensure_dir, load_file, subp, write_file -from cloudinit.tests.helpers import FilesystemMockingTestCase, wrap_and_call from datetime import datetime -import mock import os +from six import StringIO + +from cloudinit.cmd.devel import logs +from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE +from cloudinit.tests.helpers import ( + FilesystemMockingTestCase, mock, wrap_and_call) +from cloudinit.util import ensure_dir, load_file, subp, write_file +@mock.patch('cloudinit.cmd.devel.logs.os.getuid') class TestCollectLogs(FilesystemMockingTestCase): def setUp(self): @@ -15,14 +19,29 @@ class TestCollectLogs(FilesystemMockingTestCase): self.new_root = self.tmp_dir() self.run_dir = self.tmp_path('run', self.new_root) - def test_collect_logs_creates_tarfile(self): + def test_collect_logs_with_userdata_requires_root_user(self, m_getuid): + """collect-logs errors when non-root user collects userdata .""" + m_getuid.return_value = 100 # non-root + output_tarfile = self.tmp_path('logs.tgz') + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: + self.assertEqual( + 1, logs.collect_logs(output_tarfile, include_userdata=True)) + self.assertEqual( + 'To include userdata, root user is required.' + ' Try sudo cloud-init collect-logs\n', + m_stderr.getvalue()) + + def test_collect_logs_creates_tarfile(self, m_getuid): """collect-logs creates a tarfile with all related cloud-init info.""" + m_getuid.return_value = 100 log1 = self.tmp_path('cloud-init.log', self.new_root) write_file(log1, 'cloud-init-log') log2 = self.tmp_path('cloud-init-output.log', self.new_root) write_file(log2, 'cloud-init-output-log') ensure_dir(self.run_dir) write_file(self.tmp_path('results.json', self.run_dir), 'results') + write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir), + 'sensitive') output_tarfile = self.tmp_path('logs.tgz') date = datetime.utcnow().date().strftime('%Y-%m-%d') @@ -59,6 +78,11 @@ class TestCollectLogs(FilesystemMockingTestCase): # unpack the tarfile and check file contents subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root]) out_logdir = self.tmp_path(date_logdir, self.new_root) + self.assertFalse( + os.path.exists( + os.path.join(out_logdir, 'run', 'cloud-init', + INSTANCE_JSON_SENSITIVE_FILE)), + 'Unexpected file found: %s' % INSTANCE_JSON_SENSITIVE_FILE) self.assertEqual( '0.7fake\n', load_file(os.path.join(out_logdir, 'dpkg-version'))) @@ -82,8 +106,9 @@ class TestCollectLogs(FilesystemMockingTestCase): os.path.join(out_logdir, 'run', 'cloud-init', 'results.json'))) fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile) - def test_collect_logs_includes_optional_userdata(self): + def test_collect_logs_includes_optional_userdata(self, m_getuid): """collect-logs include userdata when --include-userdata is set.""" + m_getuid.return_value = 0 log1 = self.tmp_path('cloud-init.log', self.new_root) write_file(log1, 'cloud-init-log') log2 = self.tmp_path('cloud-init-output.log', self.new_root) @@ -92,6 +117,8 @@ class TestCollectLogs(FilesystemMockingTestCase): write_file(userdata, 'user-data') ensure_dir(self.run_dir) write_file(self.tmp_path('results.json', self.run_dir), 'results') + write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir), + 'sensitive') output_tarfile = self.tmp_path('logs.tgz') date = datetime.utcnow().date().strftime('%Y-%m-%d') @@ -132,4 +159,8 @@ class TestCollectLogs(FilesystemMockingTestCase): self.assertEqual( 'user-data', load_file(os.path.join(out_logdir, 
'user-data.txt'))) + self.assertEqual( + 'sensitive', + load_file(os.path.join(out_logdir, 'run', 'cloud-init', + INSTANCE_JSON_SENSITIVE_FILE))) fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile) diff --git a/cloudinit/cmd/devel/tests/test_render.py b/cloudinit/cmd/devel/tests/test_render.py index fc5d2c0d..988bba03 100644 --- a/cloudinit/cmd/devel/tests/test_render.py +++ b/cloudinit/cmd/devel/tests/test_render.py @@ -6,7 +6,7 @@ import os from collections import namedtuple from cloudinit.cmd.devel import render from cloudinit.helpers import Paths -from cloudinit.sources import INSTANCE_JSON_FILE +from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja from cloudinit.util import ensure_dir, write_file @@ -63,6 +63,49 @@ class TestRender(CiTestCase): 'Missing instance-data.json file: %s' % json_file, self.logs.getvalue()) + def test_handle_args_root_fallback_from_sensitive_instance_data(self): + """When root user defaults to sensitive.json.""" + user_data = self.tmp_path('user-data', dir=self.tmp) + run_dir = self.tmp_path('run_dir', dir=self.tmp) + ensure_dir(run_dir) + paths = Paths({'run_dir': run_dir}) + self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths') + self.m_paths.return_value = paths + args = self.args( + user_data=user_data, instance_data=None, debug=False) + with mock.patch('sys.stderr', new_callable=StringIO): + with mock.patch('os.getuid') as m_getuid: + m_getuid.return_value = 0 + self.assertEqual(1, render.handle_args('anyname', args)) + json_file = os.path.join(run_dir, INSTANCE_JSON_FILE) + json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE) + self.assertIn( + 'WARNING: Missing root-readable %s. 
Using redacted %s' % ( + json_sensitive, json_file), self.logs.getvalue()) + self.assertIn( + 'ERROR: Missing instance-data.json file: %s' % json_file, + self.logs.getvalue()) + + def test_handle_args_root_uses_sensitive_instance_data(self): + """When root user, and no instance-data arg, use sensitive.json.""" + user_data = self.tmp_path('user-data', dir=self.tmp) + write_file(user_data, '##template: jinja\nrendering: {{ my_var }}') + run_dir = self.tmp_path('run_dir', dir=self.tmp) + ensure_dir(run_dir) + json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE) + write_file(json_sensitive, '{"my-var": "jinja worked"}') + paths = Paths({'run_dir': run_dir}) + self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths') + self.m_paths.return_value = paths + args = self.args( + user_data=user_data, instance_data=None, debug=False) + with mock.patch('sys.stderr', new_callable=StringIO): + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with mock.patch('os.getuid') as m_getuid: + m_getuid.return_value = 0 + self.assertEqual(0, render.handle_args('anyname', args)) + self.assertIn('rendering: jinja worked', m_stdout.getvalue()) + @skipUnlessJinja() def test_handle_args_renders_instance_data_vars_in_template(self): """If user_data file is a jinja template render instance-data vars.""" diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py index 5a437020..933c019a 100644 --- a/cloudinit/cmd/main.py +++ b/cloudinit/cmd/main.py @@ -41,7 +41,7 @@ from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, from cloudinit import atomic_helper from cloudinit.config import cc_set_hostname -from cloudinit.dhclient_hook import LogDhclient +from cloudinit import dhclient_hook # Welcome message template @@ -586,12 +586,6 @@ def main_single(name, args): return 0 -def dhclient_hook(name, args): - record = LogDhclient(args) - record.check_hooks_dir() - record.record() - - def status_wrapper(name, args, data_d=None, link_d=None): if data_d is None: data_d = os.path.normpath("/var/lib/cloud/data") @@ -795,15 +789,9 @@ def main(sysv_args=None): 'query', help='Query standardized instance metadata from the command line.') - parser_dhclient = subparsers.add_parser('dhclient-hook', - help=('run the dhclient hook' - 'to record network info')) - parser_dhclient.add_argument("net_action", - help=('action taken on the interface')) - parser_dhclient.add_argument("net_interface", - help=('the network interface being acted' - ' upon')) - parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook)) + parser_dhclient = subparsers.add_parser( + dhclient_hook.NAME, help=dhclient_hook.__doc__) + dhclient_hook.get_parser(parser_dhclient) parser_features = subparsers.add_parser('features', help=('list defined features')) diff --git a/cloudinit/cmd/query.py b/cloudinit/cmd/query.py index 7d2d4fe4..1d888b9d 100644 --- a/cloudinit/cmd/query.py +++ b/cloudinit/cmd/query.py @@ -3,6 +3,7 @@ """Query standardized instance metadata from the command line.""" import argparse +from errno import EACCES import os import six import sys @@ -79,27 +80,38 @@ def handle_args(name, args): uid = os.getuid() if not all([args.instance_data, args.user_data, args.vendor_data]): paths = read_cfg_paths() - if not args.instance_data: + if args.instance_data: + instance_data_fn = args.instance_data + else: + redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE) if uid == 0: - default_json_fn = INSTANCE_JSON_SENSITIVE_FILE + sensitive_data_fn = os.path.join( + paths.run_dir, 
INSTANCE_JSON_SENSITIVE_FILE) + if os.path.exists(sensitive_data_fn): + instance_data_fn = sensitive_data_fn + else: + LOG.warning( + 'Missing root-readable %s. Using redacted %s instead.', + sensitive_data_fn, redacted_data_fn) + instance_data_fn = redacted_data_fn else: - default_json_fn = INSTANCE_JSON_FILE # World readable - instance_data_fn = os.path.join(paths.run_dir, default_json_fn) + instance_data_fn = redacted_data_fn + if args.user_data: + user_data_fn = args.user_data else: - instance_data_fn = args.instance_data - if not args.user_data: user_data_fn = os.path.join(paths.instance_link, 'user-data.txt') + if args.vendor_data: + vendor_data_fn = args.vendor_data else: - user_data_fn = args.user_data - if not args.vendor_data: vendor_data_fn = os.path.join(paths.instance_link, 'vendor-data.txt') - else: - vendor_data_fn = args.vendor_data try: instance_json = util.load_file(instance_data_fn) - except IOError: - LOG.error('Missing instance-data.json file: %s', instance_data_fn) + except (IOError, OSError) as e: + if e.errno == EACCES: + LOG.error("No read permission on '%s'. Try sudo", instance_data_fn) + else: + LOG.error('Missing instance-data file: %s', instance_data_fn) return 1 instance_data = util.load_json(instance_json) diff --git a/cloudinit/cmd/tests/test_cloud_id.py b/cloudinit/cmd/tests/test_cloud_id.py new file mode 100644 index 00000000..73738170 --- /dev/null +++ b/cloudinit/cmd/tests/test_cloud_id.py @@ -0,0 +1,127 @@ +# This file is part of cloud-init. See LICENSE file for license information. + +"""Tests for cloud-id command line utility.""" + +from cloudinit import util +from collections import namedtuple +from six import StringIO + +from cloudinit.cmd import cloud_id + +from cloudinit.tests.helpers import CiTestCase, mock + + +class TestCloudId(CiTestCase): + + args = namedtuple('cloudidargs', ('instance_data json long')) + + def setUp(self): + super(TestCloudId, self).setUp() + self.tmp = self.tmp_dir() + self.instance_data = self.tmp_path('instance-data.json', dir=self.tmp) + + def test_cloud_id_arg_parser_defaults(self): + """Validate the argument defaults when not provided by the end-user.""" + cmd = ['cloud-id'] + with mock.patch('sys.argv', cmd): + args = cloud_id.get_parser().parse_args() + self.assertEqual( + '/run/cloud-init/instance-data.json', + args.instance_data) + self.assertEqual(False, args.long) + self.assertEqual(False, args.json) + + def test_cloud_id_arg_parse_overrides(self): + """Override argument defaults by specifying values for each param.""" + util.write_file(self.instance_data, '{}') + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long', + '--json'] + with mock.patch('sys.argv', cmd): + args = cloud_id.get_parser().parse_args() + self.assertEqual(self.instance_data, args.instance_data) + self.assertEqual(True, args.long) + self.assertEqual(True, args.json) + + def test_cloud_id_missing_instance_data_json(self): + """Exit error when the provided instance-data.json does not exist.""" + cmd = ['cloud-id', '--instance-data', self.instance_data] + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(1, context_manager.exception.code) + self.assertIn( + "ERROR: File not found '%s'" % self.instance_data, + m_stderr.getvalue()) + + def test_cloud_id_non_json_instance_data(self): + """Exit error when the provided instance-data.json is not json.""" + cmd = ['cloud-id', '--instance-data', 
self.instance_data] + util.write_file(self.instance_data, '{') + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(1, context_manager.exception.code) + self.assertIn( + "ERROR: File '%s' is not valid json." % self.instance_data, + m_stderr.getvalue()) + + def test_cloud_id_from_cloud_name_in_instance_data(self): + """Report canonical cloud-id from cloud_name in instance-data.""" + util.write_file( + self.instance_data, + '{"v1": {"cloud_name": "mycloud", "region": "somereg"}}') + cmd = ['cloud-id', '--instance-data', self.instance_data] + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(0, context_manager.exception.code) + self.assertEqual("mycloud\n", m_stdout.getvalue()) + + def test_cloud_id_long_name_from_instance_data(self): + """Report long cloud-id format from cloud_name and region.""" + util.write_file( + self.instance_data, + '{"v1": {"cloud_name": "mycloud", "region": "somereg"}}') + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long'] + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(0, context_manager.exception.code) + self.assertEqual("mycloud\tsomereg\n", m_stdout.getvalue()) + + def test_cloud_id_lookup_from_instance_data_region(self): + """Report discovered canonical cloud_id when region lookup matches.""" + util.write_file( + self.instance_data, + '{"v1": {"cloud_name": "aws", "region": "cn-north-1",' + ' "platform": "ec2"}}') + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long'] + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(0, context_manager.exception.code) + self.assertEqual("aws-china\tcn-north-1\n", m_stdout.getvalue()) + + def test_cloud_id_lookup_json_instance_data_adds_cloud_id_to_json(self): + """Report v1 instance-data content with cloud_id when --json set.""" + util.write_file( + self.instance_data, + '{"v1": {"cloud_name": "unknown", "region": "dfw",' + ' "platform": "openstack", "public_ssh_keys": []}}') + expected = util.json_dumps({ + 'cloud_id': 'openstack', 'cloud_name': 'unknown', + 'platform': 'openstack', 'public_ssh_keys': [], 'region': 'dfw'}) + cmd = ['cloud-id', '--instance-data', self.instance_data, '--json'] + with mock.patch('sys.argv', cmd): + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with self.assertRaises(SystemExit) as context_manager: + cloud_id.main() + self.assertEqual(0, context_manager.exception.code) + self.assertEqual(expected + '\n', m_stdout.getvalue()) + +# vi: ts=4 expandtab diff --git a/cloudinit/cmd/tests/test_query.py b/cloudinit/cmd/tests/test_query.py index fb87c6ab..28738b1e 100644 --- a/cloudinit/cmd/tests/test_query.py +++ b/cloudinit/cmd/tests/test_query.py @@ -1,5 +1,6 @@ # This file is part of cloud-init. See LICENSE file for license information. 
+import errno from six import StringIO from textwrap import dedent import os @@ -7,7 +8,8 @@ import os from collections import namedtuple from cloudinit.cmd import query from cloudinit.helpers import Paths -from cloudinit.sources import REDACT_SENSITIVE_VALUE, INSTANCE_JSON_FILE +from cloudinit.sources import ( + REDACT_SENSITIVE_VALUE, INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE) from cloudinit.tests.helpers import CiTestCase, mock from cloudinit.util import ensure_dir, write_file @@ -50,10 +52,28 @@ class TestQuery(CiTestCase): with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: self.assertEqual(1, query.handle_args('anyname', args)) self.assertIn( - 'ERROR: Missing instance-data.json file: %s' % absent_fn, + 'ERROR: Missing instance-data file: %s' % absent_fn, self.logs.getvalue()) self.assertIn( - 'ERROR: Missing instance-data.json file: %s' % absent_fn, + 'ERROR: Missing instance-data file: %s' % absent_fn, + m_stderr.getvalue()) + + def test_handle_args_error_when_no_read_permission_instance_data(self): + """When instance_data file is unreadable, log an error.""" + noread_fn = self.tmp_path('unreadable', dir=self.tmp) + write_file(noread_fn, 'thou shall not pass') + args = self.args( + debug=False, dump_all=True, format=None, instance_data=noread_fn, + list_keys=False, user_data='ud', vendor_data='vd', varname=None) + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: + with mock.patch('cloudinit.cmd.query.util.load_file') as m_load: + m_load.side_effect = OSError(errno.EACCES, 'Not allowed') + self.assertEqual(1, query.handle_args('anyname', args)) + self.assertIn( + "ERROR: No read permission on '%s'. Try sudo" % noread_fn, + self.logs.getvalue()) + self.assertIn( + "ERROR: No read permission on '%s'. Try sudo" % noread_fn, m_stderr.getvalue()) def test_handle_args_defaults_instance_data(self): @@ -70,12 +90,58 @@ class TestQuery(CiTestCase): self.assertEqual(1, query.handle_args('anyname', args)) json_file = os.path.join(run_dir, INSTANCE_JSON_FILE) self.assertIn( - 'ERROR: Missing instance-data.json file: %s' % json_file, + 'ERROR: Missing instance-data file: %s' % json_file, self.logs.getvalue()) self.assertIn( - 'ERROR: Missing instance-data.json file: %s' % json_file, + 'ERROR: Missing instance-data file: %s' % json_file, m_stderr.getvalue()) + def test_handle_args_root_fallsback_to_instance_data(self): + """When no instance_data argument, root falls back to redacted json.""" + args = self.args( + debug=False, dump_all=True, format=None, instance_data=None, + list_keys=False, user_data=None, vendor_data=None, varname=None) + run_dir = self.tmp_path('run_dir', dir=self.tmp) + ensure_dir(run_dir) + paths = Paths({'run_dir': run_dir}) + self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths') + self.m_paths.return_value = paths + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: + with mock.patch('os.getuid') as m_getuid: + m_getuid.return_value = 0 + self.assertEqual(1, query.handle_args('anyname', args)) + json_file = os.path.join(run_dir, INSTANCE_JSON_FILE) + sensitive_file = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE) + self.assertIn( + 'WARNING: Missing root-readable %s. Using redacted %s instead.' 
% ( + sensitive_file, json_file), + m_stderr.getvalue()) + + def test_handle_args_root_uses_instance_sensitive_data(self): + """When no instance_data argument, root uses semsitive json.""" + user_data = self.tmp_path('user-data', dir=self.tmp) + vendor_data = self.tmp_path('vendor-data', dir=self.tmp) + write_file(user_data, 'ud') + write_file(vendor_data, 'vd') + run_dir = self.tmp_path('run_dir', dir=self.tmp) + sensitive_file = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE) + write_file(sensitive_file, '{"my-var": "it worked"}') + ensure_dir(run_dir) + paths = Paths({'run_dir': run_dir}) + self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths') + self.m_paths.return_value = paths + args = self.args( + debug=False, dump_all=True, format=None, instance_data=None, + list_keys=False, user_data=vendor_data, vendor_data=vendor_data, + varname=None) + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: + with mock.patch('os.getuid') as m_getuid: + m_getuid.return_value = 0 + self.assertEqual(0, query.handle_args('anyname', args)) + self.assertEqual( + '{\n "my_var": "it worked",\n "userdata": "vd",\n ' + '"vendordata": "vd"\n}\n', m_stdout.getvalue()) + def test_handle_args_dumps_all_instance_data(self): """When --all is specified query will dump all instance data vars.""" write_file(self.instance_data, '{"my-var": "it worked"}') |
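
Taken together, the query.py and devel/render.py changes above implement the same privilege-aware choice of instance-data file when no --instance-data path is supplied. Below is a minimal sketch of that selection logic under the assumption of a hypothetical helper name; pick_instance_data() is not part of the patch and exists only for illustration:

    # Sketch of the instance-data file selection this patch adds to
    # 'cloud-init query' and 'cloud-init devel render'.
    import os

    from cloudinit.sources import (
        INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE)


    def pick_instance_data(run_dir):
        """Hypothetical helper mirroring the patched behaviour."""
        redacted = os.path.join(run_dir, INSTANCE_JSON_FILE)
        if os.getuid() != 0:
            return redacted  # non-root users only read the redacted file
        sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
        if os.path.exists(sensitive):
            return sensitive  # root prefers the root-readable sensitive file
        # Root falls back to the redacted file; the patch logs a warning here.
        return redacted
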