author    Chad Smith <chad.smith@canonical.com>  2017-09-12 10:27:07 -0600
committer Scott Moser <smoser@brickies.net>      2017-09-15 15:46:02 -0400
commit    e626966ee7d339b53d2c8b14a8f2ff8e3fe892ee (patch)
tree      bba6a1a7d8d31b1fc790b7bff1ca94183c278b78 /cloudinit/cmd/devel
parent    da1db792b2721d94ef85df8c136e78012c49c6e5 (diff)
cmdline: add collect-logs subcommand.
Add a new collect-logs subcommand to the cloud-init CLI. This script will collect all logs pertinent to a cloud-init run and store them in a compressed tar-gzipped file. This tarfile can be attached to any cloud-init bug filed in order to aid in bug triage and resolution.

A cloudinit.apport module is also added that allows apport interaction. Here is an example bug filed via ubuntu-bug cloud-init: LP: #1716975

Once the apport launcher is packaged in cloud-init, bugs can be filed against cloud-init with the following command: ubuntu-bug cloud-init

LP: #1607345
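For illustration only (not part of this commit), the new helper can also be driven directly from Python. This minimal sketch assumes the module added by this change is installed and that the caller can read /var/log and /run/cloud-init (typically root):

    # Minimal usage sketch, assuming this commit's module is installed.
    # Reading /var/log/cloud-init*.log and /run/cloud-init normally needs root.
    from cloudinit.cmd.devel import logs

    # Write ./cloud-init.tar.gz; user-data is excluded by default because it
    # may contain sensitive information.
    logs.collect_logs('cloud-init.tar.gz', include_userdata=False)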
Diffstat (limited to 'cloudinit/cmd/devel')
-rw-r--r--  cloudinit/cmd/devel/logs.py              101
-rw-r--r--  cloudinit/cmd/devel/tests/__init__.py      0
-rw-r--r--  cloudinit/cmd/devel/tests/test_logs.py   120
3 files changed, 221 insertions(+), 0 deletions(-)
diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
new file mode 100644
index 00000000..35ca478f
--- /dev/null
+++ b/cloudinit/cmd/devel/logs.py
@@ -0,0 +1,101 @@
+# Copyright (C) 2017 Canonical Ltd.
+#
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Define 'collect-logs' utility and handler to include in cloud-init cmd."""
+
+import argparse
+from cloudinit.util import (
+    ProcessExecutionError, chdir, copy, ensure_dir, subp, write_file)
+from cloudinit.temp_utils import tempdir
+from datetime import datetime
+import os
+import shutil
+
+
+CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
+CLOUDINIT_RUN_DIR = '/run/cloud-init'
+USER_DATA_FILE = '/var/lib/cloud/instance/user-data.txt' # Optional
+
+
+def get_parser(parser=None):
+    """Build or extend an arg parser for the collect-logs utility.
+
+    @param parser: Optional existing ArgumentParser instance representing the
+        collect-logs subcommand which will be extended to support the args of
+        this utility.
+
+    @returns: ArgumentParser with proper argument configuration.
+    """
+    if not parser:
+        parser = argparse.ArgumentParser(
+            prog='collect-logs',
+            description='Collect and tar all cloud-init debug info')
+    parser.add_argument(
+        "--tarfile", '-t', default='cloud-init.tar.gz',
+        help=('The tarfile to create containing all collected logs.'
+              ' Default: cloud-init.tar.gz'))
+    parser.add_argument(
+        "--include-userdata", '-u', default=False, action='store_true',
+        dest='userdata', help=(
+            'Optionally include user-data from {0} which could contain'
+            ' sensitive information.'.format(USER_DATA_FILE)))
+    return parser
+
+
+def _write_command_output_to_file(cmd, filename):
+    """Helper which runs a command and writes output or error to filename."""
+    try:
+        out, _ = subp(cmd)
+    except ProcessExecutionError as e:
+        write_file(filename, str(e))
+    else:
+        write_file(filename, out)
+
+
+def collect_logs(tarfile, include_userdata):
+    """Collect all cloud-init logs and tar them up into the provided tarfile.
+
+    @param tarfile: The path of the tar-gzipped file to create.
+    @param include_userdata: Boolean, true means include user-data.
+    """
+    tarfile = os.path.abspath(tarfile)
+    date = datetime.utcnow().date().strftime('%Y-%m-%d')
+    log_dir = 'cloud-init-logs-{0}'.format(date)
+    with tempdir(dir='/tmp') as tmp_dir:
+        log_dir = os.path.join(tmp_dir, log_dir)
+        _write_command_output_to_file(
+            ['dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'],
+            os.path.join(log_dir, 'version'))
+        _write_command_output_to_file(
+            ['dmesg'], os.path.join(log_dir, 'dmesg.txt'))
+        _write_command_output_to_file(
+            ['journalctl', '-o', 'short-precise'],
+            os.path.join(log_dir, 'journal.txt'))
+        for log in CLOUDINIT_LOGS:
+            copy(log, log_dir)
+        if include_userdata:
+            copy(USER_DATA_FILE, log_dir)
+        run_dir = os.path.join(log_dir, 'run')
+        ensure_dir(run_dir)
+        shutil.copytree(CLOUDINIT_RUN_DIR, os.path.join(run_dir, 'cloud-init'))
+        with chdir(tmp_dir):
+            subp(['tar', 'czvf', tarfile, log_dir.replace(tmp_dir + '/', '')])
+
+
+def handle_collect_logs_args(name, args):
+    """Handle calls to 'cloud-init collect-logs' as a subcommand."""
+    collect_logs(args.tarfile, args.userdata)
+
+
+def main():
+    """Tool to collect and tar all cloud-init related logs."""
+    parser = get_parser()
+    handle_collect_logs_args('collect-logs', parser.parse_args())
+    return 0
+
+
+if __name__ == '__main__':
+    main()
+
+# vi: ts=4 expandtab
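The optional parser argument to get_parser is what lets this module back both the standalone tool and a CLI subcommand. A hedged sketch of how a parent parser could wire it in follows; the parent parser and the action default used here are illustrative, not taken from this diff:

    # Illustrative wiring sketch; the parent parser below is hypothetical.
    import argparse

    from cloudinit.cmd.devel import logs

    parser = argparse.ArgumentParser(prog='cloud-init')
    subparsers = parser.add_subparsers(dest='subcommand')
    logs_parser = subparsers.add_parser(
        'collect-logs', help='Collect and tar all cloud-init debug info')
    logs.get_parser(parser=logs_parser)  # adds --tarfile / --include-userdata
    logs_parser.set_defaults(action=('collect-logs',
                                     logs.handle_collect_logs_args))

    args = parser.parse_args(['collect-logs', '--tarfile', 'debug.tar.gz'])
    name, handler = args.action
    handler(name, args)  # runs collect_logs('debug.tar.gz', include_userdata=False)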
diff --git a/cloudinit/cmd/devel/tests/__init__.py b/cloudinit/cmd/devel/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/__init__.py
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
new file mode 100644
index 00000000..dc4947cc
--- /dev/null
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -0,0 +1,120 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+
+from cloudinit.cmd.devel import logs
+from cloudinit.util import ensure_dir, load_file, subp, write_file
+from cloudinit.tests.helpers import FilesystemMockingTestCase, wrap_and_call
+from datetime import datetime
+import os
+
+
+class TestCollectLogs(FilesystemMockingTestCase):
+
+    def setUp(self):
+        super(TestCollectLogs, self).setUp()
+        self.new_root = self.tmp_dir()
+        self.run_dir = self.tmp_path('run', self.new_root)
+
+    def test_collect_logs_creates_tarfile(self):
+        """collect-logs creates a tarfile with all related cloud-init info."""
+        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        write_file(log1, 'cloud-init-log')
+        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        write_file(log2, 'cloud-init-output-log')
+        ensure_dir(self.run_dir)
+        write_file(self.tmp_path('results.json', self.run_dir), 'results')
+        output_tarfile = self.tmp_path('logs.tgz')
+
+        date = datetime.utcnow().date().strftime('%Y-%m-%d')
+        date_logdir = 'cloud-init-logs-{0}'.format(date)
+
+        expected_subp = {
+            ('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
+                '0.7fake\n',
+            ('dmesg',): 'dmesg-out\n',
+            ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+            ('tar', 'czvf', output_tarfile, date_logdir): ''
+        }
+
+        def fake_subp(cmd):
+            cmd_tuple = tuple(cmd)
+            if cmd_tuple not in expected_subp:
+                raise AssertionError(
+                    'Unexpected command provided to subp: {0}'.format(cmd))
+            if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
+                subp(cmd)  # Pass through tar cmd so we can check output
+            return expected_subp[cmd_tuple], ''
+
+        wrap_and_call(
+            'cloudinit.cmd.devel.logs',
+            {'subp': {'side_effect': fake_subp},
+             'CLOUDINIT_LOGS': {'new': [log1, log2]},
+             'CLOUDINIT_RUN_DIR': {'new': self.run_dir}},
+            logs.collect_logs, output_tarfile, include_userdata=False)
+        # unpack the tarfile and check file contents
+        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
+        out_logdir = self.tmp_path(date_logdir, self.new_root)
+        self.assertEqual(
+            '0.7fake\n',
+            load_file(os.path.join(out_logdir, 'version')))
+        self.assertEqual(
+            'cloud-init-log',
+            load_file(os.path.join(out_logdir, 'cloud-init.log')))
+        self.assertEqual(
+            'cloud-init-output-log',
+            load_file(os.path.join(out_logdir, 'cloud-init-output.log')))
+        self.assertEqual(
+            'dmesg-out\n',
+            load_file(os.path.join(out_logdir, 'dmesg.txt')))
+        self.assertEqual(
+            'journal-out\n',
+            load_file(os.path.join(out_logdir, 'journal.txt')))
+        self.assertEqual(
+            'results',
+            load_file(
+                os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
+
+    def test_collect_logs_includes_optional_userdata(self):
+        """collect-logs includes user-data when --include-userdata is set."""
+        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        write_file(log1, 'cloud-init-log')
+        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        write_file(log2, 'cloud-init-output-log')
+        userdata = self.tmp_path('user-data.txt', self.new_root)
+        write_file(userdata, 'user-data')
+        ensure_dir(self.run_dir)
+        write_file(self.tmp_path('results.json', self.run_dir), 'results')
+        output_tarfile = self.tmp_path('logs.tgz')
+
+        date = datetime.utcnow().date().strftime('%Y-%m-%d')
+        date_logdir = 'cloud-init-logs-{0}'.format(date)
+
+        expected_subp = {
+            ('dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'):
+                '0.7fake',
+            ('dmesg',): 'dmesg-out\n',
+            ('journalctl', '-o', 'short-precise'): 'journal-out\n',
+            ('tar', 'czvf', output_tarfile, date_logdir): ''
+        }
+
+        def fake_subp(cmd):
+            cmd_tuple = tuple(cmd)
+            if cmd_tuple not in expected_subp:
+                raise AssertionError(
+                    'Unexpected command provided to subp: {0}'.format(cmd))
+            if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
+                subp(cmd)  # Pass through tar cmd so we can check output
+            return expected_subp[cmd_tuple], ''
+
+        wrap_and_call(
+            'cloudinit.cmd.devel.logs',
+            {'subp': {'side_effect': fake_subp},
+             'CLOUDINIT_LOGS': {'new': [log1, log2]},
+             'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
+             'USER_DATA_FILE': {'new': userdata}},
+            logs.collect_logs, output_tarfile, include_userdata=True)
+        # unpack the tarfile and check file contents
+        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
+        out_logdir = self.tmp_path(date_logdir, self.new_root)
+        self.assertEqual(
+            'user-data',
+            load_file(os.path.join(out_logdir, 'user-data.txt')))
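In these tests, wrap_and_call is the cloud-init test helper that patches the named attributes on cloudinit.cmd.devel.logs for the duration of a single call. As a rough standard-library equivalent of the patching above (an assumption about the helper's behaviour, shown only to clarify what the tests mock; it reuses the test method's local names):

    # Rough stdlib equivalent of the wrap_and_call(...) usage above; this is
    # an assumption about the helper's behaviour, not code from this change.
    from unittest import mock

    with mock.patch.object(logs, 'subp', side_effect=fake_subp), \
            mock.patch.object(logs, 'CLOUDINIT_LOGS', new=[log1, log2]), \
            mock.patch.object(logs, 'CLOUDINIT_RUN_DIR', new=self.run_dir):
        logs.collect_logs(output_tarfile, include_userdata=False)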