From 87ebd91af8c230b8f1deed8e734297e40397eea0 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 7 Jun 2012 12:31:49 -0700 Subject: Move binaries to an actual binary dir. --- bin/cloud-init-cfg.py | 115 ++++++++++++++++++++++++ bin/cloud-init-query.py | 56 ++++++++++++ bin/cloud-init.py | 229 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 400 insertions(+) create mode 100755 bin/cloud-init-cfg.py create mode 100755 bin/cloud-init-query.py create mode 100755 bin/cloud-init.py (limited to 'bin') diff --git a/bin/cloud-init-cfg.py b/bin/cloud-init-cfg.py new file mode 100755 index 00000000..3a475c1c --- /dev/null +++ b/bin/cloud-init-cfg.py @@ -0,0 +1,115 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2009-2010 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import sys +import cloudinit +import cloudinit.util as util +import cloudinit.CloudConfig as CC +import logging +import os + + +def Usage(out=sys.stdout): + out.write("Usage: %s name\n" % sys.argv[0]) + + +def main(): + # expect to be called with + # name [ freq [ args ] + # run the cloud-config job 'name' at with given args + # or + # read cloud config jobs from config (builtin -> system) + # and run all in order + + util.close_stdin() + + modename = "config" + + if len(sys.argv) < 2: + Usage(sys.stderr) + sys.exit(1) + if sys.argv[1] == "all": + name = "all" + if len(sys.argv) > 2: + modename = sys.argv[2] + else: + freq = None + run_args = [] + name = sys.argv[1] + if len(sys.argv) > 2: + freq = sys.argv[2] + if freq == "None": + freq = None + if len(sys.argv) > 3: + run_args = sys.argv[3:] + + cfg_path = cloudinit.get_ipath_cur("cloud_config") + cfg_env_name = cloudinit.cfg_env_name + if cfg_env_name in os.environ: + cfg_path = os.environ[cfg_env_name] + + cloud = cloudinit.CloudInit(ds_deps=[]) # ds_deps=[], get only cached + try: + cloud.get_data_source() + except cloudinit.DataSourceNotFoundException as e: + # there was no datasource found, theres nothing to do + sys.exit(0) + + cc = CC.CloudConfig(cfg_path, cloud) + + try: + (outfmt, errfmt) = CC.get_output_cfg(cc.cfg, modename) + CC.redirect_output(outfmt, errfmt) + except Exception as e: + err("Failed to get and set output config: %s\n" % e) + + cloudinit.logging_set_from_cfg(cc.cfg) + log = logging.getLogger() + log.info("cloud-init-cfg %s" % sys.argv[1:]) + + module_list = [] + if name == "all": + modlist_cfg_name = "cloud_%s_modules" % modename + module_list = CC.read_cc_modules(cc.cfg, modlist_cfg_name) + if not len(module_list): + err("no modules to run in cloud_config [%s]" % modename, log) + sys.exit(0) + else: + module_list.append([name, freq] + run_args) + + failures = CC.run_cc_modules(cc, module_list, log) + if len(failures): + err("errors running cloud_config [%s]: %s" % (modename, failures), log) + sys.exit(len(failures)) + + +def err(msg, log=None): + if log: + log.error(msg) + 
sys.stderr.write(msg + "\n") + + +def fail(msg, log=None): + err(msg, log) + sys.exit(1) + +if __name__ == '__main__': + main() diff --git a/bin/cloud-init-query.py b/bin/cloud-init-query.py new file mode 100755 index 00000000..856cf462 --- /dev/null +++ b/bin/cloud-init-query.py @@ -0,0 +1,56 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2009-2010 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import sys +import cloudinit +import cloudinit.CloudConfig + + +def Usage(out=sys.stdout): + out.write("Usage: %s name\n" % sys.argv[0]) + + +def main(): + # expect to be called with name of item to fetch + if len(sys.argv) != 2: + Usage(sys.stderr) + sys.exit(1) + + cfg_path = cloudinit.get_ipath_cur("cloud_config") + cc = cloudinit.CloudConfig.CloudConfig(cfg_path) + data = { + 'user_data': cc.cloud.get_userdata(), + 'user_data_raw': cc.cloud.get_userdata_raw(), + 'instance_id': cc.cloud.get_instance_id(), + } + + name = sys.argv[1].replace('-', '_') + + if name not in data: + sys.stderr.write("unknown name '%s'. Known values are:\n %s\n" % + (sys.argv[1], ' '.join(data.keys()))) + sys.exit(1) + + print data[name] + sys.exit(0) + +if __name__ == '__main__': + main() diff --git a/bin/cloud-init.py b/bin/cloud-init.py new file mode 100755 index 00000000..2acea3f8 --- /dev/null +++ b/bin/cloud-init.py @@ -0,0 +1,229 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2009-2010 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import subprocess +import sys + +import cloudinit +import cloudinit.util as util +import cloudinit.CloudConfig as CC +import cloudinit.DataSource as ds +import cloudinit.netinfo as netinfo +import time +import traceback +import logging +import errno +import os + + +def warn(wstr): + sys.stderr.write("WARN:%s" % wstr) + + +def main(): + util.close_stdin() + + cmds = ("start", "start-local") + deps = {"start": (ds.DEP_FILESYSTEM, ds.DEP_NETWORK), + "start-local": (ds.DEP_FILESYSTEM, )} + + cmd = "" + if len(sys.argv) > 1: + cmd = sys.argv[1] + + cfg_path = None + if len(sys.argv) > 2: + # this is really for debugging only + # but you can invoke on development system with ./config/cloud.cfg + cfg_path = sys.argv[2] + + if not cmd in cmds: + sys.stderr.write("bad command %s. 
use one of %s\n" % (cmd, cmds)) + sys.exit(1) + + now = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime()) + try: + uptimef = open("/proc/uptime") + uptime = uptimef.read().split(" ")[0] + uptimef.close() + except IOError as e: + warn("unable to open /proc/uptime\n") + uptime = "na" + + cmdline_msg = None + cmdline_exc = None + if cmd == "start": + target = "%s.d/%s" % (cloudinit.system_config, + "91_kernel_cmdline_url.cfg") + if os.path.exists(target): + cmdline_msg = "cmdline: %s existed" % target + else: + cmdline = util.get_cmdline() + try: + (key, url, content) = cloudinit.get_cmdline_url( + cmdline=cmdline) + if key and content: + util.write_file(target, content, mode=0600) + cmdline_msg = ("cmdline: wrote %s from %s, %s" % + (target, key, url)) + elif key: + cmdline_msg = ("cmdline: %s, %s had no cloud-config" % + (key, url)) + except Exception: + cmdline_exc = ("cmdline: '%s' raised exception\n%s" % + (cmdline, traceback.format_exc())) + warn(cmdline_exc) + + try: + cfg = cloudinit.get_base_cfg(cfg_path) + except Exception as e: + warn("Failed to get base config. falling back to builtin: %s\n" % e) + try: + cfg = cloudinit.get_builtin_cfg() + except Exception as e: + warn("Unable to load builtin config\n") + raise + + try: + (outfmt, errfmt) = CC.get_output_cfg(cfg, "init") + CC.redirect_output(outfmt, errfmt) + except Exception as e: + warn("Failed to get and set output config: %s\n" % e) + + cloudinit.logging_set_from_cfg(cfg) + log = logging.getLogger() + + if cmdline_exc: + log.debug(cmdline_exc) + elif cmdline_msg: + log.debug(cmdline_msg) + + try: + cloudinit.initfs() + except Exception as e: + warn("failed to initfs, likely bad things to come: %s\n" % str(e)) + + nonet_path = "%s/%s" % (cloudinit.get_cpath("data"), "no-net") + + if cmd == "start": + print netinfo.debug_info() + + stop_files = (cloudinit.get_ipath_cur("obj_pkl"), nonet_path) + # if starting as the network start, there are cases + # where everything is already done for us, and it makes + # most sense to exit early and silently + for f in stop_files: + try: + fp = open(f, "r") + fp.close() + except: + continue + + log.debug("no need for cloud-init start to run (%s)\n", f) + sys.exit(0) + elif cmd == "start-local": + # cache is not instance specific, so it has to be purged + # but we want 'start' to benefit from a cache if + # a previous start-local populated one + manclean = util.get_cfg_option_bool(cfg, 'manual_cache_clean', False) + if manclean: + log.debug("not purging cache, manual_cache_clean = True") + cloudinit.purge_cache(not manclean) + + try: + os.unlink(nonet_path) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + msg = "cloud-init %s running: %s. 
up %s seconds" % (cmd, now, uptime) + sys.stderr.write(msg + "\n") + sys.stderr.flush() + + log.info(msg) + + cloud = cloudinit.CloudInit(ds_deps=deps[cmd]) + + try: + cloud.get_data_source() + except cloudinit.DataSourceNotFoundException as e: + sys.stderr.write("no instance data found in %s\n" % cmd) + sys.exit(0) + + # set this as the current instance + cloud.set_cur_instance() + + # store the metadata + cloud.update_cache() + + msg = "found data source: %s" % cloud.datasource + sys.stderr.write(msg + "\n") + log.debug(msg) + + # parse the user data (ec2-run-userdata.py) + try: + ran = cloud.sem_and_run("consume_userdata", cloudinit.per_instance, + cloud.consume_userdata, [cloudinit.per_instance], False) + if not ran: + cloud.consume_userdata(cloudinit.per_always) + except: + warn("consuming user data failed!\n") + raise + + cfg_path = cloudinit.get_ipath_cur("cloud_config") + cc = CC.CloudConfig(cfg_path, cloud) + + # if the output config changed, update output and err + try: + outfmt_orig = outfmt + errfmt_orig = errfmt + (outfmt, errfmt) = CC.get_output_cfg(cc.cfg, "init") + if outfmt_orig != outfmt or errfmt_orig != errfmt: + warn("stdout, stderr changing to (%s,%s)" % (outfmt, errfmt)) + CC.redirect_output(outfmt, errfmt) + except Exception as e: + warn("Failed to get and set output config: %s\n" % e) + + # send the cloud-config ready event + cc_path = cloudinit.get_ipath_cur('cloud_config') + cc_ready = cc.cfg.get("cc_ready_cmd", + ['initctl', 'emit', 'cloud-config', + '%s=%s' % (cloudinit.cfg_env_name, cc_path)]) + if cc_ready: + if isinstance(cc_ready, str): + cc_ready = ['sh', '-c', cc_ready] + subprocess.Popen(cc_ready).communicate() + + module_list = CC.read_cc_modules(cc.cfg, "cloud_init_modules") + + failures = [] + if len(module_list): + failures = CC.run_cc_modules(cc, module_list, log) + else: + msg = "no cloud_init_modules to run" + sys.stderr.write(msg + "\n") + log.debug(msg) + sys.exit(0) + + sys.exit(len(failures)) + +if __name__ == '__main__': + main() -- cgit v1.2.3 From 9daaba92fbef013768cc7c601df012ef2eb49686 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Mon, 18 Jun 2012 17:33:36 -0700 Subject: 1. Initial work on a unified program that will call into the new libraries. Main features: 1. Use of argparse + subcommands to handle 'action' specific actions a. Actions being 'query', 'init', 'final', 'config' !WIP! --- bin/cloud-init2.py | 183 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 183 insertions(+) create mode 100755 bin/cloud-init2.py (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py new file mode 100755 index 00000000..6663379f --- /dev/null +++ b/bin/cloud-init2.py @@ -0,0 +1,183 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2012 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012 Yahoo! Inc. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# Author: Joshua Harlow +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +import argparse +import os +import traceback +import sys + +# This is more just for running from the bin folder +possible_topdir = os.path.normpath(os.path.join(os.path.abspath( + sys.argv[0]), os.pardir, os.pardir)) +if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): + sys.path.insert(0, possible_topdir) + + +from cloudinit import log as logging +from cloudinit import netinfo +from cloudinit import settings +from cloudinit import sources +from cloudinit import stages +from cloudinit import util +from cloudinit import version + + +# Things u can query on +QUERY_DATA_TYPES = [ + 'data', + 'data_raw', + 'instance_id', +] + +LOG = logging.getLogger(__name__) + + +def read_write_cmdline_url(target_fn): + if not os.path.exists(target_fn): + try: + (key, url, content) = util.get_cmdline_url() + except: + util.logexc(LOG, "Failed fetching command line url") + return + try: + if key and content: + util.write_file(target_fn, content, mode=0600) + LOG.info(("Wrote to %s with contents of command line" + " url %s (len=%s)"), target_fn, url, len(content)) + elif key and not content: + LOG.info(("Command line key %s with url" + " %s had no contents"), key, url) + except: + util.logexc(LOG, "Failed writing url content to %s", target_fn) + + +def main_init(args): + deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] + if args.local: + deps = [sources.DEP_FILESYSTEM] + + cfg_path = None + if args.file: + # Already opened so lets just pass that along + # since it would of broke if it couldn't have + # read that file + cfg_path = str(args.file.name) + + if not args.local: + # What is this for?? + root_name = "%s.d" % (settings.CLOUD_CONFIG) + target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") + read_write_cmdline_url(target_fn) + + # Cloud-init 'init' stage is broken up into the following stages + # 1. Ensure that the init object fetches its config without errors + # 2. Setup logging/output redirections with resultant config (if any) + # 3. Initialize the cloud-init filesystem + # 4. Check if we can stop early by looking for various files + # 5. Fetch the datasource + # 6. Consume the userdata (handlers get activated here) + # 7. Adjust any subsequent logging/output redirections + # 8. Run the transforms for the 'init' stage + # 9. Done! 
+ now = util.time_rfc2822() + uptime = util.uptime() + init = stages.Init(deps) + # Stage 1 + init.read_cfg() + # Stage 2 + try: + util.fixup_output(init.cfg, 'init') + except: + util.logexc(LOG, "Failed to setup output redirection") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug("Logging being reset, this logger may no longer be active shortly") + logging.resetLogging() + logging.setupLogging(init.cfg) + # Stage 3 + try: + init.initialize() + except Exception as e: + util.logexc(LOG, "Failed to initialize, likely bad things to come: %s", e) + # Stage 4 + path_helper = init.paths + if not args.local: + nonet_path = "%s/%s" % (cloudinit.get_cpath("data"), "no-net") + +def main_config(args): + pass + + +def main_final(args): + pass + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--version', '-v', action='version', + version='%(prog)s ' + (version.version_string())) + parser.add_argument('--file', '-f', action='store', + help='additional configuration file to include', + type=argparse.FileType('rb')) + parser.add_argument('--debug', '-d', action='store_true', + help='show additional pre-action logging', + default=False) + subparsers = parser.add_subparsers() + + # Each action and its suboptions (if any) + parser_init = subparsers.add_parser('init', help='initializes cloud-init and performs \'init\' transforms') + parser_init.add_argument("--local", '-l', action='store_true', + help="start in local mode", default=False) + parser_init.set_defaults(action='init') # This is used so that we can know which action is selected + + parser_config = subparsers.add_parser('config', help='performs cloud-init \'config\' transforms') + parser_config.set_defaults(action='config') + + parser_final = subparsers.add_parser('final', help='performs cloud-init \'final\' transforms') + parser_final.set_defaults(action='final') + + parser_query = subparsers.add_parser('query', help='query information stored in cloud-init') + parser_query.add_argument("--name", action="store", + help="item name to query on", + required=True, + choices=QUERY_DATA_TYPES) + parser_query.set_defaults(action='query') + args = parser.parse_args() + + # Setup basic logging to start (until reinitialized) + if args.debug: + logging.setupBasicLogging() + + stage_name = args.action + stage_mp = { + 'init': main_init, + 'config': main_config, + 'final': main_final, + } + func = stage_mp.get(stage_name) + return func(args) + + +if __name__ == '__main__': + sys.exit(main()) + -- cgit v1.2.3 From 4c56faf8d36e684f07340bcf9e37a48508c12a5a Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 11:07:05 -0700 Subject: 1. Continue working on main 'init' method 2. Add the ability to take in multiple extra config files (mainly for debugging) 3. Move 'simple' welcome message to this file 4. Only setup initial basic logging if '-d' option is provided. 
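Item 2 above is implemented by switching -f/--file to argparse's append action with a FileType type, so each repeated flag opens the named file and appends the handle to args.files. The following standalone sketch (illustrative only, not part of the patch; collect_paths is a made-up helper name) shows the resulting behaviour:

    import argparse
    import os

    def collect_paths(files):
        # args.files is None when -f was never passed, otherwise it is a list
        # of already-opened file objects; downstream code only needs the names.
        return [f.name for f in (files or [])]

    parser = argparse.ArgumentParser()
    parser.add_argument('--file', '-f', action='append', dest='files',
                        help='additional yaml configuration files to use',
                        type=argparse.FileType('rb'))
    # os.devnull is used only so this example runs anywhere.
    args = parser.parse_args(['-f', os.devnull, '-f', os.devnull])
    print(collect_paths(args.files))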
--- bin/cloud-init2.py | 142 +++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 100 insertions(+), 42 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 6663379f..0e6a75da 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -32,12 +32,12 @@ possible_topdir = os.path.normpath(os.path.join(os.path.abspath( if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): sys.path.insert(0, possible_topdir) - from cloudinit import log as logging from cloudinit import netinfo from cloudinit import settings from cloudinit import sources from cloudinit import stages +from cloudinit import templater from cloudinit import util from cloudinit import version @@ -49,58 +49,58 @@ QUERY_DATA_TYPES = [ 'instance_id', ] -LOG = logging.getLogger(__name__) - - -def read_write_cmdline_url(target_fn): - if not os.path.exists(target_fn): - try: - (key, url, content) = util.get_cmdline_url() - except: - util.logexc(LOG, "Failed fetching command line url") - return - try: - if key and content: - util.write_file(target_fn, content, mode=0600) - LOG.info(("Wrote to %s with contents of command line" - " url %s (len=%s)"), target_fn, url, len(content)) - elif key and not content: - LOG.info(("Command line key %s with url" - " %s had no contents"), key, url) - except: - util.logexc(LOG, "Failed writing url content to %s", target_fn) - - -def main_init(args): +LOG = logging.getLogger() + + +def warn(wstr): + sys.stderr.write("WARN: %s\n" % (wstr)) + + +def welcome(action): + msg = ("Cloud-init v. {{version}} running '{{action}}' at " + "{{timestamp}}. Up {{uptime}} seconds.") + tpl_params = { + 'version': version.version_string(), + 'uptime': util.uptime(), + 'timestamp': util.time_rfc2822(), + 'action': action, + } + welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) + sys.stderr.write("%s\n" % (welcome_msg)) + sys.stderr.flush() + LOG.info(welcome_msg) + + +def main_init(name, args): deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] if args.local: deps = [sources.DEP_FILESYSTEM] - cfg_path = None - if args.file: + cfg_extra_paths = [] + if args.files: # Already opened so lets just pass that along # since it would of broke if it couldn't have # read that file - cfg_path = str(args.file.name) + for f in args.files: + cfg_extra_paths.append(f.name) if not args.local: - # What is this for?? + # TODO: What is this for?? root_name = "%s.d" % (settings.CLOUD_CONFIG) target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") - read_write_cmdline_url(target_fn) - - # Cloud-init 'init' stage is broken up into the following stages + util.read_write_cmdline_url(target_fn) + + # Cloud-init 'init' stage is broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors # 2. Setup logging/output redirections with resultant config (if any) # 3. Initialize the cloud-init filesystem # 4. Check if we can stop early by looking for various files # 5. Fetch the datasource - # 6. Consume the userdata (handlers get activated here) - # 7. Adjust any subsequent logging/output redirections - # 8. Run the transforms for the 'init' stage - # 9. Done! - now = util.time_rfc2822() - uptime = util.uptime() + # 6. Connect to the current instance location + update the cache + # 7. Consume the userdata (handlers get activated here) + # 8. Adjust any subsequent logging/output redirections + # 9. Run the transforms for the 'init' stage + # 10. Done! 
init = stages.Init(deps) # Stage 1 init.read_cfg() @@ -122,13 +122,70 @@ def main_init(args): # Stage 4 path_helper = init.paths if not args.local: - nonet_path = "%s/%s" % (cloudinit.get_cpath("data"), "no-net") + sys.stderr.write("%s\n" % (netinfo.debug_info())) + LOG.debug(("Checking to see if files that we need already" + " exist from a previous run that would allow us" + " to stop early.")) + stop_files = [ + os.path.join(path_helper.get_cpath("data"), "no-net"), + path_helper.get_ipath_cur("obj_pkl"), + ] + existing_files = [] + for fn in stop_files: + try: + c = util.load_file(fn) + if len(c): + existing_files.append((fn, len(c))) + except Exception as e: + pass + if existing_files: + LOG.debug("Exiting early due to the existence of %s", existing_files) + return 0 + else: + # The cache is not instance specific, so it has to be purged + # but we want 'start' to benefit from a cache if + # a previous start-local populated one... + manual_clean = util.get_cfg_option_bool(init.cfg, + 'manual_cache_clean', False) + if manual_clean: + LOG.debug("Not purging instance link, manual cleaning enabled") + init.purge_cache(False) + else: + init.purge_cache() + # Delete the non-net file as well + util.del_fie(os.path.join(path_helper.get_cpath("data"), "no-net")) + # Stage 5 + welcome(name) + try: + init.fetch() + except sources.DataSourceNotFoundException as e: + util.logexc(LOG, "No instance datasource found") + warn("No instance datasource found: %s" % (e)) + # TODO: Return 0 or 1?? + return 1 + # Stage 6 + iid = init.instancify() + LOG.debug("%s will now be targeting instance id: %s", name, iid) + init.update() + # Stage 7 + try: + (ran, _results) = init.cloudify().run('consume_userdata', + init.consume, + args=[settings.PER_INSTANCE], + freq=settings.PER_INSTANCE) + if not ran: + init.consume(settings.ALWAYS) + except Exception as e: + warn("Consuming user data failed: %s" % (e)) + raise + # Stage 8 + -def main_config(args): +def main_config(name, args): pass -def main_final(args): +def main_final(name, args): pass @@ -136,7 +193,8 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + (version.version_string())) - parser.add_argument('--file', '-f', action='store', + parser.add_argument('--file', '-f', action='append', + dest='files', help='additional configuration file to include', type=argparse.FileType('rb')) parser.add_argument('--debug', '-d', action='store_true', @@ -175,7 +233,7 @@ def main(): 'final': main_final, } func = stage_mp.get(stage_name) - return func(args) + return func(stage_name, args) if __name__ == '__main__': -- cgit v1.2.3 From 02a8f1487d51dc8dda4ea61830420b225029268b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 11:17:08 -0700 Subject: Passed along the extra config paths --- bin/cloud-init2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 0e6a75da..e1757af4 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -103,7 +103,7 @@ def main_init(name, args): # 10. Done! init = stages.Init(deps) # Stage 1 - init.read_cfg() + init.read_cfg(cfg_extra_paths) # Stage 2 try: util.fixup_output(init.cfg, 'init') -- cgit v1.2.3 From ce396ee2fcbe2eeac22e76ea1fc5b6aa01e4f298 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 12:16:51 -0700 Subject: Small logging changes. 
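The edits below stop interpolating the caught exception into the message ("%s" % e) and rely on util.logexc to record it. The real cloudinit.util.logexc is not shown in this series; the helper sketched here is only an assumed shape, to show why a plain message is enough once the logging call captures the traceback itself:

    import logging

    logging.basicConfig()
    LOG = logging.getLogger(__name__)

    def logexc(log, msg):
        # exc_info=True tells the logging module to append the active
        # exception and its traceback to the record.
        log.warning(msg, exc_info=True)

    try:
        raise IOError("redirection failed")
    except IOError:
        logexc(LOG, "Failed to setup output redirection!")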
--- bin/cloud-init2.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index e1757af4..d80ee1ec 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -106,9 +106,9 @@ def main_init(name, args): init.read_cfg(cfg_extra_paths) # Stage 2 try: - util.fixup_output(init.cfg, 'init') + util.fixup_output(init.cfg, name) except: - util.logexc(LOG, "Failed to setup output redirection") + util.logexc(LOG, "Failed to setup output redirection!") if args.debug: # Reset so that all the debug handlers are closed out LOG.debug("Logging being reset, this logger may no longer be active shortly") @@ -118,7 +118,7 @@ def main_init(name, args): try: init.initialize() except Exception as e: - util.logexc(LOG, "Failed to initialize, likely bad things to come: %s", e) + util.logexc(LOG, "Failed to initialize, likely bad things to come!") # Stage 4 path_helper = init.paths if not args.local: @@ -159,8 +159,7 @@ def main_init(name, args): try: init.fetch() except sources.DataSourceNotFoundException as e: - util.logexc(LOG, "No instance datasource found") - warn("No instance datasource found: %s" % (e)) + util.logexc(LOG, "No instance datasource found!") # TODO: Return 0 or 1?? return 1 # Stage 6 @@ -176,8 +175,8 @@ def main_init(name, args): if not ran: init.consume(settings.ALWAYS) except Exception as e: - warn("Consuming user data failed: %s" % (e)) - raise + util.logexc(LOG, "Consuming user data failed!") + return 1 # Stage 8 -- cgit v1.2.3 From 51e28788a7f26c112587144b8c2165952d6f3ac7 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 16:00:17 -0700 Subject: Get the 'init' stage working. --- bin/cloud-init2.py | 74 +++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 18 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index d80ee1ec..1f938f01 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -42,6 +42,9 @@ from cloudinit import util from cloudinit import version +# Transform section template +TR_TPL = "cloud_%s_modules" + # Things u can query on QUERY_DATA_TYPES = [ 'data', @@ -98,26 +101,31 @@ def main_init(name, args): # 5. Fetch the datasource # 6. Connect to the current instance location + update the cache # 7. Consume the userdata (handlers get activated here) - # 8. Adjust any subsequent logging/output redirections - # 9. Run the transforms for the 'init' stage - # 10. Done! + # 8. Construct the transform object + # 9. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 10. Run the transforms for the 'init' stage + # 11. Done! 
init = stages.Init(deps) # Stage 1 init.read_cfg(cfg_extra_paths) # Stage 2 + outfmt = None + errfmt = None try: - util.fixup_output(init.cfg, name) + (outfmt, errfmt) = util.fixup_output(init.cfg, name) except: util.logexc(LOG, "Failed to setup output redirection!") if args.debug: # Reset so that all the debug handlers are closed out - LOG.debug("Logging being reset, this logger may no longer be active shortly") + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) logging.resetLogging() logging.setupLogging(init.cfg) # Stage 3 try: init.initialize() - except Exception as e: + except Exception: util.logexc(LOG, "Failed to initialize, likely bad things to come!") # Stage 4 path_helper = init.paths @@ -136,10 +144,11 @@ def main_init(name, args): c = util.load_file(fn) if len(c): existing_files.append((fn, len(c))) - except Exception as e: + except Exception: pass if existing_files: - LOG.debug("Exiting early due to the existence of %s", existing_files) + LOG.debug("Exiting early due to the existence of %s files", + existing_files) return 0 else: # The cache is not instance specific, so it has to be purged @@ -153,7 +162,7 @@ def main_init(name, args): else: init.purge_cache() # Delete the non-net file as well - util.del_fie(os.path.join(path_helper.get_cpath("data"), "no-net")) + util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) # Stage 5 welcome(name) try: @@ -173,18 +182,38 @@ def main_init(name, args): args=[settings.PER_INSTANCE], freq=settings.PER_INSTANCE) if not ran: - init.consume(settings.ALWAYS) + init.consume(settings.PER_ALWAYS) except Exception as e: util.logexc(LOG, "Consuming user data failed!") return 1 # Stage 8 - + tr = stages.Transforms(init, cfg_extra_paths) + # Stage 9 - TODO is this really needed?? 
+ try: + outfmt_orig = outfmt + errfmt_orig = errfmt + (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) + if outfmt_orig != outfmt or errfmt_orig != errfmt: + LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) + (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to adjust output redirection!") + # Stage 10 + section_name = TR_TPL % (name) + (ran_am, failures) = tr.run(section_name) + if not ran_am: + msg = "No %s transforms to run under section %s" % (name, section_name) + sys.stderr.write("%s\n" % (msg)) + LOG.debug(msg) + return 0 + return len(failures) + -def main_config(name, args): +def main_config(_name, _args): pass -def main_final(name, args): +def main_final(_name, _args): pass @@ -202,18 +231,27 @@ def main(): subparsers = parser.add_subparsers() # Each action and its suboptions (if any) - parser_init = subparsers.add_parser('init', help='initializes cloud-init and performs \'init\' transforms') + parser_init = subparsers.add_parser('init', + help=('initializes cloud-init and' + ' performs \'init\' transforms')) parser_init.add_argument("--local", '-l', action='store_true', help="start in local mode", default=False) - parser_init.set_defaults(action='init') # This is used so that we can know which action is selected + # This is used so that we can know which action is selected + parser_init.set_defaults(action='init') - parser_config = subparsers.add_parser('config', help='performs cloud-init \'config\' transforms') + parser_config = subparsers.add_parser('config', + help=('performs cloud-init ' + '\'config\' transforms')) parser_config.set_defaults(action='config') - parser_final = subparsers.add_parser('final', help='performs cloud-init \'final\' transforms') + parser_final = subparsers.add_parser('final', + help=('performs cloud-init ' + '\'final\' transforms')) parser_final.set_defaults(action='final') - parser_query = subparsers.add_parser('query', help='query information stored in cloud-init') + parser_query = subparsers.add_parser('query', + help=('query information stored ' + 'in cloud-init')) parser_query.add_argument("--name", action="store", help="item name to query on", required=True, -- cgit v1.2.3 From a07ff5c4be32ae12aea2cc916c010c3b2b22f1b5 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 19:07:07 -0700 Subject: 1. Use the help message to show the default 2. Add a mode option in to the 'config' subparser that allows the user to affect the transform selection phase 3. Some tiny pylint warnings. --- bin/cloud-init2.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 1f938f01..10cf4614 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -167,7 +167,7 @@ def main_init(name, args): welcome(name) try: init.fetch() - except sources.DataSourceNotFoundException as e: + except sources.DataSourceNotFoundException: util.logexc(LOG, "No instance datasource found!") # TODO: Return 0 or 1?? 
return 1 @@ -223,25 +223,36 @@ def main(): version='%(prog)s ' + (version.version_string())) parser.add_argument('--file', '-f', action='append', dest='files', - help='additional configuration file to include', + help=('additional yaml configuration' + ' files to use'), type=argparse.FileType('rb')) - parser.add_argument('--debug', '-d', action='store_true', - help='show additional pre-action logging', + parser.add_argument('--debug', '-d', action='store_true', + help=('show additional pre-action' + ' logging (default: %(default)s)'), default=False) subparsers = parser.add_subparsers() + # Possible mode names + mode_names = ('init', 'config', 'final') + # Each action and its suboptions (if any) parser_init = subparsers.add_parser('init', help=('initializes cloud-init and' ' performs \'init\' transforms')) parser_init.add_argument("--local", '-l', action='store_true', - help="start in local mode", default=False) + help="start in local mode (default: %(default)s)", + default=False) # This is used so that we can know which action is selected parser_init.set_defaults(action='init') parser_config = subparsers.add_parser('config', help=('performs cloud-init ' '\'config\' transforms')) + parser_config.add_argument("--mode", '-m', action='store', + help=("transform configuration name " + "to use (default: %(default)s)"), + default='config', + choices=mode_names) parser_config.set_defaults(action='config') parser_final = subparsers.add_parser('final', -- cgit v1.2.3 From 693c90e7efc0f1bf722ec52f70653010b10aad8b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 20:57:43 -0700 Subject: 1. Move the running of transforms to a common function that returns how a useful exit code 2. Add in the single transform running stub, which seems to be a feature of the previous cloud-init-cfg, making it its own action now 3. Adding in the 'config' and 'final' actions, which now both go through the same entrypoint function to setup the initial object and then fire off the needed transforms. 4. Cleanup of the argparsing code to handle the above cases. --- bin/cloud-init2.py | 150 +++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 110 insertions(+), 40 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 10cf4614..010a1011 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -74,6 +74,20 @@ def welcome(action): LOG.info(welcome_msg) +def run_transforms(tr, action_name, section): + full_section_name = TR_TPL % (section) + (ran_am, failures) = tr.run(full_section_name) + if not ran_am: + msg = ("No '%s' transforms to run" + " under section '%s'") % (action_name, full_section_name) + sys.stderr.write("%s\n" % (msg)) + LOG.debug(msg) + return 0 + else: + LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) + return len(failures) + + def main_init(name, args): deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] if args.local: @@ -106,6 +120,7 @@ def main_init(name, args): # the transform objects configuration # 10. Run the transforms for the 'init' stage # 11. Done! 
+ welcome(name) init = stages.Init(deps) # Stage 1 init.read_cfg(cfg_extra_paths) @@ -113,6 +128,8 @@ def main_init(name, args): outfmt = None errfmt = None try: + LOG.debug("Closing stdin") + util.close_stdin() (outfmt, errfmt) = util.fixup_output(init.cfg, name) except: util.logexc(LOG, "Failed to setup output redirection!") @@ -164,7 +181,6 @@ def main_init(name, args): # Delete the non-net file as well util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) # Stage 5 - welcome(name) try: init.fetch() except sources.DataSourceNotFoundException: @@ -199,26 +215,73 @@ def main_init(name, args): except: util.logexc(LOG, "Failed to adjust output redirection!") # Stage 10 - section_name = TR_TPL % (name) - (ran_am, failures) = tr.run(section_name) - if not ran_am: - msg = "No %s transforms to run under section %s" % (name, section_name) - sys.stderr.write("%s\n" % (msg)) - LOG.debug(msg) - return 0 - return len(failures) + return run_transforms(tr, name, name) + + +def main_transform(_action_name, args): + name = args.mode + i_cfgs = [] + if args.files: + for fh in args.files: + i_cfgs.append(fh.name) + # Cloud-init transform stages are broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 3. Construct the transform object + # 4. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 5. Run the transforms for the given stage name + # 6. Done! + welcome(name) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(i_cfgs) + # Stage 2 + try: + ds = init.fetch() + except sources.DataSourceNotFoundException: + # There was no datasource found, theres nothing to do + util.logexc(LOG, 'Can not apply stage %s, no datasource found', name) + return 1 + # Stage 3 + tr_cfgs = list(i_cfgs) + cc_cfg = init.paths.get_ipath_cur('cloud_config') + if settings.CFG_ENV_NAME in os.environ: + cc_cfg = os.environ[settings.CFG_ENV_NAME] + if cc_cfg and os.path.exists(cc_cfg): + tr_cfgs.append(cc_cfg) + tr = stages.Transforms(init, tr_cfgs) + # Stage 4 + try: + LOG.debug("Closing stdin") + util.close_stdin() + (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(cc.cfg) + # Stage 5 + return run_transforms(tr, name, name) -def main_config(_name, _args): +def main_query(_name, _args): pass -def main_final(_name, _args): +def main_single(_name, _args): pass def main(): parser = argparse.ArgumentParser() + + # Top level args parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + (version.version_string())) parser.add_argument('--file', '-f', action='append', @@ -232,56 +295,63 @@ def main(): default=False) subparsers = parser.add_subparsers() - # Possible mode names - mode_names = ('init', 'config', 'final') - - # Each action and its suboptions (if any) + # Each action and its sub-options (if any) parser_init = subparsers.add_parser('init', help=('initializes cloud-init and' - ' performs \'init\' transforms')) + ' performs initial transforms')) parser_init.add_argument("--local", '-l', action='store_true', help="start in local mode (default: 
%(default)s)", default=False) - # This is used so that we can know which action is selected - parser_init.set_defaults(action='init') + # This is used so that we can know which action is selected + + # the functor to use to run this subcommand + parser_init.set_defaults(action=('init', main_init)) - parser_config = subparsers.add_parser('config', - help=('performs cloud-init ' - '\'config\' transforms')) - parser_config.add_argument("--mode", '-m', action='store', + # These settings are used for the 'config' and 'final' stages + parser_tr = subparsers.add_parser('transform', + help=('performs transforms ' + 'using a given configuration key')) + parser_tr.add_argument("--mode", '-m', action='store', help=("transform configuration name " "to use (default: %(default)s)"), default='config', - choices=mode_names) - parser_config.set_defaults(action='config') - - parser_final = subparsers.add_parser('final', - help=('performs cloud-init ' - '\'final\' transforms')) - parser_final.set_defaults(action='final') + choices=('config', 'final')) + parser_tr.set_defaults(action=('transform', main_transform)) + # These settings are used when you want to query information + # stored in the cloud-init data objects/directories/files parser_query = subparsers.add_parser('query', help=('query information stored ' 'in cloud-init')) - parser_query.add_argument("--name", action="store", + parser_query.add_argument("--name", '-n', action="store", help="item name to query on", required=True, choices=QUERY_DATA_TYPES) - parser_query.set_defaults(action='query') + parser_query.set_defaults(action=('query', main_query)) + + # This subcommand allows you to run a single transform + parser_single = subparsers.add_parser('single', + help=('run a single transform ')) + parser_single.set_defaults(action=('single', main_single)) + parser_single.add_argument("--name", '-n', action="store", + help="transform name to run", + required=True) + parser_single.add_argument("--frequency", action="store", + help=("frequency of " + " the transform (default: %(default)s)"), + required=False, + default=settings.PER_ALWAYS, + choices=settings.FREQUENCIES) + parser_single.set_defaults(action=('single', main_single)) + + args = parser.parse_args() - + # Setup basic logging to start (until reinitialized) if args.debug: logging.setupBasicLogging() - stage_name = args.action - stage_mp = { - 'init': main_init, - 'config': main_config, - 'final': main_final, - } - func = stage_mp.get(stage_name) - return func(stage_name, args) + (name, functor) = args.action + return functor(name, args) if __name__ == '__main__': -- cgit v1.2.3 From 28e0ecf1b7c99a783bdba901544fa9c1a8e37f65 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 21:01:34 -0700 Subject: Pylint cleanups. 
--- bin/cloud-init2.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 010a1011..bb32da45 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -23,7 +23,6 @@ import argparse import os -import traceback import sys # This is more just for running from the bin folder @@ -199,7 +198,7 @@ def main_init(name, args): freq=settings.PER_INSTANCE) if not ran: init.consume(settings.PER_ALWAYS) - except Exception as e: + except Exception: util.logexc(LOG, "Consuming user data failed!") return 1 # Stage 8 @@ -240,7 +239,7 @@ def main_transform(_action_name, args): init.read_cfg(i_cfgs) # Stage 2 try: - ds = init.fetch() + init.fetch() except sources.DataSourceNotFoundException: # There was no datasource found, theres nothing to do util.logexc(LOG, 'Can not apply stage %s, no datasource found', name) @@ -257,7 +256,7 @@ def main_transform(_action_name, args): try: LOG.debug("Closing stdin") util.close_stdin() - (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + util.fixup_output(tr.cfg, name) except: util.logexc(LOG, "Failed to setup output redirection!") if args.debug: @@ -265,7 +264,7 @@ def main_transform(_action_name, args): LOG.debug(("Logging being reset, this logger may no" " longer be active shortly")) logging.resetLogging() - logging.setupLogging(cc.cfg) + logging.setupLogging(tr.cfg) # Stage 5 return run_transforms(tr, name, name) -- cgit v1.2.3 From 013d5734d2193d81502d1f73480cd49569a82dc8 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 21:46:01 -0700 Subject: Get single module running mode mostly working (still needs a little work). --- bin/cloud-init2.py | 93 ++++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 70 insertions(+), 23 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index bb32da45..1f8d45bd 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -73,9 +73,22 @@ def welcome(action): LOG.info(welcome_msg) -def run_transforms(tr, action_name, section): +def extract_fns(args): + # Files are already opened so lets just pass that along + # since it would of broke if it couldn't have + # read that file already... + fn_cfgs = [] + if args.files: + for fh in args.files: + # The realpath is more useful in logging + # so lets resolve to that... + fn_cfgs.append(os.path.realpath(fh.name)) + return fn_cfgs + + +def run_transform_section(tr, action_name, section): full_section_name = TR_TPL % (section) - (ran_am, failures) = tr.run(full_section_name) + (ran_am, failures) = tr.run_section(full_section_name) if not ran_am: msg = ("No '%s' transforms to run" " under section '%s'") % (action_name, full_section_name) @@ -92,14 +105,6 @@ def main_init(name, args): if args.local: deps = [sources.DEP_FILESYSTEM] - cfg_extra_paths = [] - if args.files: - # Already opened so lets just pass that along - # since it would of broke if it couldn't have - # read that file - for f in args.files: - cfg_extra_paths.append(f.name) - if not args.local: # TODO: What is this for?? 
root_name = "%s.d" % (settings.CLOUD_CONFIG) @@ -122,7 +127,7 @@ def main_init(name, args): welcome(name) init = stages.Init(deps) # Stage 1 - init.read_cfg(cfg_extra_paths) + init.read_cfg(extract_fns(args)) # Stage 2 outfmt = None errfmt = None @@ -201,8 +206,8 @@ def main_init(name, args): except Exception: util.logexc(LOG, "Consuming user data failed!") return 1 - # Stage 8 - tr = stages.Transforms(init, cfg_extra_paths) + # Stage 8 - TODO - do we really need to re-extract our configs? + tr = stages.Transforms(init, extract_fns(args)) # Stage 9 - TODO is this really needed?? try: outfmt_orig = outfmt @@ -214,15 +219,11 @@ def main_init(name, args): except: util.logexc(LOG, "Failed to adjust output redirection!") # Stage 10 - return run_transforms(tr, name, name) + return run_transform_section(tr, name, name) def main_transform(_action_name, args): name = args.mode - i_cfgs = [] - if args.files: - for fh in args.files: - i_cfgs.append(fh.name) # Cloud-init transform stages are broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors # 2. Get the datasource from the init object, if it does @@ -236,7 +237,7 @@ def main_transform(_action_name, args): welcome(name) init = stages.Init(ds_deps=[]) # Stage 1 - init.read_cfg(i_cfgs) + init.read_cfg(extract_fns(args)) # Stage 2 try: init.fetch() @@ -245,7 +246,7 @@ def main_transform(_action_name, args): util.logexc(LOG, 'Can not apply stage %s, no datasource found', name) return 1 # Stage 3 - tr_cfgs = list(i_cfgs) + tr_cfgs = extract_fns(args) cc_cfg = init.paths.get_ipath_cur('cloud_config') if settings.CFG_ENV_NAME in os.environ: cc_cfg = os.environ[settings.CFG_ENV_NAME] @@ -266,15 +267,57 @@ def main_transform(_action_name, args): logging.resetLogging() logging.setupLogging(tr.cfg) # Stage 5 - return run_transforms(tr, name, name) + return run_transform_section(tr, name, name) def main_query(_name, _args): pass -def main_single(_name, _args): - pass +def main_single(name, args): + # Cloud-init single stage is broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Check to see if we can find the transform name + # in the 'init', 'final', 'config' stages, if not bail + # 3. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 4. Construct the transform object + # 5. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 6. Run the single transform + # 7. Done! 
+ transform_name = args.name + st_name = "%s:%s" % (name, transform_name) + welcome(st_name) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(extract_fns(args)) + tr = stages.Transforms(init, extract_fns(args)) + where_look = [ + TR_TPL % ('init'), + TR_TPL % ('config'), + TR_TPL % ('final'), + ] + found_at = tr.find_transform(transform_name, where_look) + if not found_at: + msg = ("No known transform named %s " + "in sections %s") % (transform_name, where_look) + sys.stderr.write("%s\n" % (msg)) + LOG.warn(msg) + return 1 + else: + LOG.debug("Found transform %s in section/s: %s", + transform_name, found_at) + LOG.debug("Selecting section %s as its run section.", found_at[0]) + (_run_am, failures) = tr.run_single(transform_name, found_at[0], + args.transform_args, + args.frequency) + if failures: + LOG.debug("Ran %s but it failed", transform_name) + return 1 + else: + return 0 def main(): @@ -340,6 +383,10 @@ def main(): required=False, default=settings.PER_ALWAYS, choices=settings.FREQUENCIES) + parser_single.add_argument("transform_args", nargs="*", + metavar='argument', + help=('any additional arguments to' + ' pass to this transform')) parser_single.set_defaults(action=('single', main_single)) -- cgit v1.2.3 From af7a789c7d99f6ea461c14c22f3cb1959572a103 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 12:02:17 -0700 Subject: Make the single run mode setup the output redirection and logging accordingly --- bin/cloud-init2.py | 68 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 42 insertions(+), 26 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py index 1f8d45bd..135e4114 100755 --- a/bin/cloud-init2.py +++ b/bin/cloud-init2.py @@ -54,10 +54,6 @@ QUERY_DATA_TYPES = [ LOG = logging.getLogger() -def warn(wstr): - sys.stderr.write("WARN: %s\n" % (wstr)) - - def welcome(action): msg = ("Cloud-init v. {{version}} running '{{action}}' at " "{{timestamp}}. Up {{uptime}} seconds.") @@ -202,6 +198,8 @@ def main_init(name, args): args=[settings.PER_INSTANCE], freq=settings.PER_INSTANCE) if not ran: + # Just consume anything that is set to run per + # always if nothing ran in the per instance section init.consume(settings.PER_ALWAYS) except Exception: util.logexc(LOG, "Consuming user data failed!") @@ -217,12 +215,12 @@ def main_init(name, args): LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) (outfmt, errfmt) = util.fixup_output(tr.cfg, name) except: - util.logexc(LOG, "Failed to adjust output redirection!") + util.logexc(LOG, "Failed to re-adjust output redirection!") # Stage 10 return run_transform_section(tr, name, name) -def main_transform(_action_name, args): +def main_transform(action_name, args): name = args.mode # Cloud-init transform stages are broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors @@ -234,7 +232,7 @@ def main_transform(_action_name, args): # the transform objects configuration # 5. Run the transforms for the given stage name # 6. Done! 
- welcome(name) + welcome("%s:%s" % (action_name, name)) init = stages.Init(ds_deps=[]) # Stage 1 init.read_cfg(extract_fns(args)) @@ -243,7 +241,7 @@ def main_transform(_action_name, args): init.fetch() except sources.DataSourceNotFoundException: # There was no datasource found, theres nothing to do - util.logexc(LOG, 'Can not apply stage %s, no datasource found', name) + util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) return 1 # Stage 3 tr_cfgs = extract_fns(args) @@ -287,34 +285,52 @@ def main_single(name, args): # the transform objects configuration # 6. Run the single transform # 7. Done! - transform_name = args.name - st_name = "%s:%s" % (name, transform_name) - welcome(st_name) + tr_name = args.name + welcome("%s:%s" % (name, tr_name)) init = stages.Init(ds_deps=[]) # Stage 1 init.read_cfg(extract_fns(args)) tr = stages.Transforms(init, extract_fns(args)) - where_look = [ - TR_TPL % ('init'), - TR_TPL % ('config'), - TR_TPL % ('final'), - ] - found_at = tr.find_transform(transform_name, where_look) + where_look_mp = { + TR_TPL % ('init'): 'init', + TR_TPL % ('config'): 'config', + TR_TPL % ('final'): 'final', + } + where_look = list(where_look_mp.keys()) + found_at = tr.find_transform(tr_name, where_look) if not found_at: msg = ("No known transform named %s " - "in sections %s") % (transform_name, where_look) - sys.stderr.write("%s\n" % (msg)) + "in sections (%s)") % (tr_name, ", ".join(where_look)) LOG.warn(msg) return 1 else: - LOG.debug("Found transform %s in section/s: %s", - transform_name, found_at) - LOG.debug("Selecting section %s as its run section.", found_at[0]) - (_run_am, failures) = tr.run_single(transform_name, found_at[0], - args.transform_args, - args.frequency) + LOG.debug("Found transform %s in sections: %s", + tr_name, found_at) + sect_name = found_at[0] + LOG.debug("Selecting section %s as its 'source' section.", sect_name) + tr_args = args.transform_args + if tr_args: + LOG.debug("Using passed in arguments %s", tr_args) + tr_freq = args.frequency + if tr_freq: + LOG.debug("Using passed in frequency %s", tr_freq) + try: + LOG.debug("Closing stdin") + util.close_stdin() + # This seems to use the short name, instead of the long name + util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(tr.cfg) + (_run_am, failures) = tr.run_single(tr_name, sect_name, + tr_args, tr_freq) if failures: - LOG.debug("Ran %s but it failed", transform_name) + LOG.debug("Ran %s but it failed", tr_name) return 1 else: return 0 -- cgit v1.2.3 From 5c3d91a0154c71a974ba34a37bbc37d4284bc774 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 12:10:20 -0700 Subject: Removed the older binaries, now supplemented by a unified binary. 
--- bin/cloud-init-cfg.py | 115 ---------- bin/cloud-init-query.py | 56 ----- bin/cloud-init.py | 576 ++++++++++++++++++++++++++++++++---------------- bin/cloud-init2.py | 421 ----------------------------------- 4 files changed, 384 insertions(+), 784 deletions(-) delete mode 100755 bin/cloud-init-cfg.py delete mode 100755 bin/cloud-init-query.py delete mode 100755 bin/cloud-init2.py (limited to 'bin') diff --git a/bin/cloud-init-cfg.py b/bin/cloud-init-cfg.py deleted file mode 100755 index 3a475c1c..00000000 --- a/bin/cloud-init-cfg.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/python -# vi: ts=4 expandtab -# -# Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Scott Moser -# Author: Juerg Haefliger -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import sys -import cloudinit -import cloudinit.util as util -import cloudinit.CloudConfig as CC -import logging -import os - - -def Usage(out=sys.stdout): - out.write("Usage: %s name\n" % sys.argv[0]) - - -def main(): - # expect to be called with - # name [ freq [ args ] - # run the cloud-config job 'name' at with given args - # or - # read cloud config jobs from config (builtin -> system) - # and run all in order - - util.close_stdin() - - modename = "config" - - if len(sys.argv) < 2: - Usage(sys.stderr) - sys.exit(1) - if sys.argv[1] == "all": - name = "all" - if len(sys.argv) > 2: - modename = sys.argv[2] - else: - freq = None - run_args = [] - name = sys.argv[1] - if len(sys.argv) > 2: - freq = sys.argv[2] - if freq == "None": - freq = None - if len(sys.argv) > 3: - run_args = sys.argv[3:] - - cfg_path = cloudinit.get_ipath_cur("cloud_config") - cfg_env_name = cloudinit.cfg_env_name - if cfg_env_name in os.environ: - cfg_path = os.environ[cfg_env_name] - - cloud = cloudinit.CloudInit(ds_deps=[]) # ds_deps=[], get only cached - try: - cloud.get_data_source() - except cloudinit.DataSourceNotFoundException as e: - # there was no datasource found, theres nothing to do - sys.exit(0) - - cc = CC.CloudConfig(cfg_path, cloud) - - try: - (outfmt, errfmt) = CC.get_output_cfg(cc.cfg, modename) - CC.redirect_output(outfmt, errfmt) - except Exception as e: - err("Failed to get and set output config: %s\n" % e) - - cloudinit.logging_set_from_cfg(cc.cfg) - log = logging.getLogger() - log.info("cloud-init-cfg %s" % sys.argv[1:]) - - module_list = [] - if name == "all": - modlist_cfg_name = "cloud_%s_modules" % modename - module_list = CC.read_cc_modules(cc.cfg, modlist_cfg_name) - if not len(module_list): - err("no modules to run in cloud_config [%s]" % modename, log) - sys.exit(0) - else: - module_list.append([name, freq] + run_args) - - failures = CC.run_cc_modules(cc, module_list, log) - if len(failures): - err("errors running cloud_config [%s]: %s" % (modename, failures), log) - sys.exit(len(failures)) - - -def err(msg, log=None): - if log: - log.error(msg) - sys.stderr.write(msg + "\n") - - -def fail(msg, log=None): - err(msg, log) - sys.exit(1) - -if __name__ == '__main__': - main() diff 
--git a/bin/cloud-init-query.py b/bin/cloud-init-query.py deleted file mode 100755 index 856cf462..00000000 --- a/bin/cloud-init-query.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/python -# vi: ts=4 expandtab -# -# Copyright (C) 2009-2010 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Scott Moser -# Author: Juerg Haefliger -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import sys -import cloudinit -import cloudinit.CloudConfig - - -def Usage(out=sys.stdout): - out.write("Usage: %s name\n" % sys.argv[0]) - - -def main(): - # expect to be called with name of item to fetch - if len(sys.argv) != 2: - Usage(sys.stderr) - sys.exit(1) - - cfg_path = cloudinit.get_ipath_cur("cloud_config") - cc = cloudinit.CloudConfig.CloudConfig(cfg_path) - data = { - 'user_data': cc.cloud.get_userdata(), - 'user_data_raw': cc.cloud.get_userdata_raw(), - 'instance_id': cc.cloud.get_instance_id(), - } - - name = sys.argv[1].replace('-', '_') - - if name not in data: - sys.stderr.write("unknown name '%s'. Known values are:\n %s\n" % - (sys.argv[1], ' '.join(data.keys()))) - sys.exit(1) - - print data[name] - sys.exit(0) - -if __name__ == '__main__': - main() diff --git a/bin/cloud-init.py b/bin/cloud-init.py index 2acea3f8..99ecdaf9 100755 --- a/bin/cloud-init.py +++ b/bin/cloud-init.py @@ -1,11 +1,13 @@ #!/usr/bin/python # vi: ts=4 expandtab # -# Copyright (C) 2009-2010 Canonical Ltd. +# Copyright (C) 2012 Canonical Ltd. # Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012 Yahoo! Inc. # # Author: Scott Moser # Author: Juerg Haefliger +# Author: Joshua Harlow # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -19,211 +21,401 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import subprocess -import sys - -import cloudinit -import cloudinit.util as util -import cloudinit.CloudConfig as CC -import cloudinit.DataSource as ds -import cloudinit.netinfo as netinfo -import time -import traceback -import logging -import errno +import argparse import os +import sys - -def warn(wstr): - sys.stderr.write("WARN:%s" % wstr) - - -def main(): - util.close_stdin() - - cmds = ("start", "start-local") - deps = {"start": (ds.DEP_FILESYSTEM, ds.DEP_NETWORK), - "start-local": (ds.DEP_FILESYSTEM, )} - - cmd = "" - if len(sys.argv) > 1: - cmd = sys.argv[1] - - cfg_path = None - if len(sys.argv) > 2: - # this is really for debugging only - # but you can invoke on development system with ./config/cloud.cfg - cfg_path = sys.argv[2] - - if not cmd in cmds: - sys.stderr.write("bad command %s. 
use one of %s\n" % (cmd, cmds)) - sys.exit(1) - - now = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime()) - try: - uptimef = open("/proc/uptime") - uptime = uptimef.read().split(" ")[0] - uptimef.close() - except IOError as e: - warn("unable to open /proc/uptime\n") - uptime = "na" - - cmdline_msg = None - cmdline_exc = None - if cmd == "start": - target = "%s.d/%s" % (cloudinit.system_config, - "91_kernel_cmdline_url.cfg") - if os.path.exists(target): - cmdline_msg = "cmdline: %s existed" % target - else: - cmdline = util.get_cmdline() - try: - (key, url, content) = cloudinit.get_cmdline_url( - cmdline=cmdline) - if key and content: - util.write_file(target, content, mode=0600) - cmdline_msg = ("cmdline: wrote %s from %s, %s" % - (target, key, url)) - elif key: - cmdline_msg = ("cmdline: %s, %s had no cloud-config" % - (key, url)) - except Exception: - cmdline_exc = ("cmdline: '%s' raised exception\n%s" % - (cmdline, traceback.format_exc())) - warn(cmdline_exc) - - try: - cfg = cloudinit.get_base_cfg(cfg_path) - except Exception as e: - warn("Failed to get base config. falling back to builtin: %s\n" % e) - try: - cfg = cloudinit.get_builtin_cfg() - except Exception as e: - warn("Unable to load builtin config\n") - raise - +# This is more just for running from the bin folder +possible_topdir = os.path.normpath(os.path.join(os.path.abspath( + sys.argv[0]), os.pardir, os.pardir)) +if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): + sys.path.insert(0, possible_topdir) + +from cloudinit import log as logging +from cloudinit import netinfo +from cloudinit import settings +from cloudinit import sources +from cloudinit import stages +from cloudinit import templater +from cloudinit import util +from cloudinit import version + + +# Transform section template +TR_TPL = "cloud_%s_modules" + +# Things u can query on +QUERY_DATA_TYPES = [ + 'data', + 'data_raw', + 'instance_id', +] + +LOG = logging.getLogger() + + +def welcome(action): + msg = ("Cloud-init v. {{version}} running '{{action}}' at " + "{{timestamp}}. Up {{uptime}} seconds.") + tpl_params = { + 'version': version.version_string(), + 'uptime': util.uptime(), + 'timestamp': util.time_rfc2822(), + 'action': action, + } + welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) + sys.stderr.write("%s\n" % (welcome_msg)) + sys.stderr.flush() + LOG.info(welcome_msg) + + +def extract_fns(args): + # Files are already opened so lets just pass that along + # since it would of broke if it couldn't have + # read that file already... + fn_cfgs = [] + if args.files: + for fh in args.files: + # The realpath is more useful in logging + # so lets resolve to that... + fn_cfgs.append(os.path.realpath(fh.name)) + return fn_cfgs + + +def run_transform_section(tr, action_name, section): + full_section_name = TR_TPL % (section) + (ran_am, failures) = tr.run_section(full_section_name) + if not ran_am: + msg = ("No '%s' transforms to run" + " under section '%s'") % (action_name, full_section_name) + sys.stderr.write("%s\n" % (msg)) + LOG.debug(msg) + return 0 + else: + LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) + return len(failures) + + +def main_init(name, args): + deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] + if args.local: + deps = [sources.DEP_FILESYSTEM] + + if not args.local: + # TODO: What is this for?? 
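# NOTE: the answer to the TODO above appears to be the kernel command line
# cloud-config URL support that the removed block further up implemented by
# hand: any cloud-config fetched from a URL named on the kernel command line
# is written into <cloud.cfg>.d/91_kernel_cmdline_url.cfg before a datasource
# is searched for. A rough sketch of what a helper like
# util.read_write_cmdline_url() is assumed to do, mirroring that removed
# logic (names and signature here are illustrative, not the actual util
# implementation):
#
#   def read_write_cmdline_url(target_fn):
#       (key, url, content) = get_cmdline_url(cmdline=get_cmdline())
#       if key and content:
#           write_file(target_fn, content, mode=0600)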
+ root_name = "%s.d" % (settings.CLOUD_CONFIG) + target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") + util.read_write_cmdline_url(target_fn) + + # Cloud-init 'init' stage is broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Setup logging/output redirections with resultant config (if any) + # 3. Initialize the cloud-init filesystem + # 4. Check if we can stop early by looking for various files + # 5. Fetch the datasource + # 6. Connect to the current instance location + update the cache + # 7. Consume the userdata (handlers get activated here) + # 8. Construct the transform object + # 9. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 10. Run the transforms for the 'init' stage + # 11. Done! + welcome(name) + init = stages.Init(deps) + # Stage 1 + init.read_cfg(extract_fns(args)) + # Stage 2 + outfmt = None + errfmt = None try: - (outfmt, errfmt) = CC.get_output_cfg(cfg, "init") - CC.redirect_output(outfmt, errfmt) - except Exception as e: - warn("Failed to get and set output config: %s\n" % e) - - cloudinit.logging_set_from_cfg(cfg) - log = logging.getLogger() - - if cmdline_exc: - log.debug(cmdline_exc) - elif cmdline_msg: - log.debug(cmdline_msg) - + LOG.debug("Closing stdin") + util.close_stdin() + (outfmt, errfmt) = util.fixup_output(init.cfg, name) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(init.cfg) + # Stage 3 try: - cloudinit.initfs() - except Exception as e: - warn("failed to initfs, likely bad things to come: %s\n" % str(e)) - - nonet_path = "%s/%s" % (cloudinit.get_cpath("data"), "no-net") - - if cmd == "start": - print netinfo.debug_info() - - stop_files = (cloudinit.get_ipath_cur("obj_pkl"), nonet_path) - # if starting as the network start, there are cases - # where everything is already done for us, and it makes - # most sense to exit early and silently - for f in stop_files: + init.initialize() + except Exception: + util.logexc(LOG, "Failed to initialize, likely bad things to come!") + # Stage 4 + path_helper = init.paths + if not args.local: + sys.stderr.write("%s\n" % (netinfo.debug_info())) + LOG.debug(("Checking to see if files that we need already" + " exist from a previous run that would allow us" + " to stop early.")) + stop_files = [ + os.path.join(path_helper.get_cpath("data"), "no-net"), + path_helper.get_ipath_cur("obj_pkl"), + ] + existing_files = [] + for fn in stop_files: try: - fp = open(f, "r") - fp.close() - except: - continue - - log.debug("no need for cloud-init start to run (%s)\n", f) - sys.exit(0) - elif cmd == "start-local": - # cache is not instance specific, so it has to be purged + c = util.load_file(fn) + if len(c): + existing_files.append((fn, len(c))) + except Exception: + pass + if existing_files: + LOG.debug("Exiting early due to the existence of %s files", + existing_files) + return 0 + else: + # The cache is not instance specific, so it has to be purged # but we want 'start' to benefit from a cache if - # a previous start-local populated one - manclean = util.get_cfg_option_bool(cfg, 'manual_cache_clean', False) - if manclean: - log.debug("not purging cache, manual_cache_clean = True") - cloudinit.purge_cache(not manclean) - - try: - os.unlink(nonet_path) - except OSError as e: - if 
e.errno != errno.ENOENT: - raise - - msg = "cloud-init %s running: %s. up %s seconds" % (cmd, now, uptime) - sys.stderr.write(msg + "\n") - sys.stderr.flush() - - log.info(msg) - - cloud = cloudinit.CloudInit(ds_deps=deps[cmd]) - + # a previous start-local populated one... + manual_clean = util.get_cfg_option_bool(init.cfg, + 'manual_cache_clean', False) + if manual_clean: + LOG.debug("Not purging instance link, manual cleaning enabled") + init.purge_cache(False) + else: + init.purge_cache() + # Delete the non-net file as well + util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) + # Stage 5 try: - cloud.get_data_source() - except cloudinit.DataSourceNotFoundException as e: - sys.stderr.write("no instance data found in %s\n" % cmd) - sys.exit(0) - - # set this as the current instance - cloud.set_cur_instance() - - # store the metadata - cloud.update_cache() - - msg = "found data source: %s" % cloud.datasource - sys.stderr.write(msg + "\n") - log.debug(msg) - - # parse the user data (ec2-run-userdata.py) + init.fetch() + except sources.DataSourceNotFoundException: + util.logexc(LOG, "No instance datasource found!") + # TODO: Return 0 or 1?? + return 1 + # Stage 6 + iid = init.instancify() + LOG.debug("%s will now be targeting instance id: %s", name, iid) + init.update() + # Stage 7 try: - ran = cloud.sem_and_run("consume_userdata", cloudinit.per_instance, - cloud.consume_userdata, [cloudinit.per_instance], False) + (ran, _results) = init.cloudify().run('consume_userdata', + init.consume, + args=[settings.PER_INSTANCE], + freq=settings.PER_INSTANCE) if not ran: - cloud.consume_userdata(cloudinit.per_always) - except: - warn("consuming user data failed!\n") - raise - - cfg_path = cloudinit.get_ipath_cur("cloud_config") - cc = CC.CloudConfig(cfg_path, cloud) - - # if the output config changed, update output and err + # Just consume anything that is set to run per + # always if nothing ran in the per instance section + init.consume(settings.PER_ALWAYS) + except Exception: + util.logexc(LOG, "Consuming user data failed!") + return 1 + # Stage 8 - TODO - do we really need to re-extract our configs? + tr = stages.Transforms(init, extract_fns(args)) + # Stage 9 - TODO is this really needed?? try: outfmt_orig = outfmt errfmt_orig = errfmt - (outfmt, errfmt) = CC.get_output_cfg(cc.cfg, "init") + (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) if outfmt_orig != outfmt or errfmt_orig != errfmt: - warn("stdout, stderr changing to (%s,%s)" % (outfmt, errfmt)) - CC.redirect_output(outfmt, errfmt) - except Exception as e: - warn("Failed to get and set output config: %s\n" % e) - - # send the cloud-config ready event - cc_path = cloudinit.get_ipath_cur('cloud_config') - cc_ready = cc.cfg.get("cc_ready_cmd", - ['initctl', 'emit', 'cloud-config', - '%s=%s' % (cloudinit.cfg_env_name, cc_path)]) - if cc_ready: - if isinstance(cc_ready, str): - cc_ready = ['sh', '-c', cc_ready] - subprocess.Popen(cc_ready).communicate() - - module_list = CC.read_cc_modules(cc.cfg, "cloud_init_modules") - - failures = [] - if len(module_list): - failures = CC.run_cc_modules(cc, module_list, log) + LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) + (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to re-adjust output redirection!") + # Stage 10 + return run_transform_section(tr, name, name) + + +def main_transform(action_name, args): + name = args.mode + # Cloud-init transform stages are broken up into the following sub-stages + # 1. 
Ensure that the init object fetches its config without errors + # 2. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 3. Construct the transform object + # 4. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 5. Run the transforms for the given stage name + # 6. Done! + welcome("%s:%s" % (action_name, name)) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(extract_fns(args)) + # Stage 2 + try: + init.fetch() + except sources.DataSourceNotFoundException: + # There was no datasource found, theres nothing to do + util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) + return 1 + # Stage 3 + tr_cfgs = extract_fns(args) + cc_cfg = init.paths.get_ipath_cur('cloud_config') + if settings.CFG_ENV_NAME in os.environ: + cc_cfg = os.environ[settings.CFG_ENV_NAME] + if cc_cfg and os.path.exists(cc_cfg): + tr_cfgs.append(cc_cfg) + tr = stages.Transforms(init, tr_cfgs) + # Stage 4 + try: + LOG.debug("Closing stdin") + util.close_stdin() + util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(tr.cfg) + # Stage 5 + return run_transform_section(tr, name, name) + + +def main_query(name, _args): + raise NotImplementedError("Action '%s' is not currently implemented" % (name)) + + +def main_single(name, args): + # Cloud-init single stage is broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Check to see if we can find the transform name + # in the 'init', 'final', 'config' stages, if not bail + # 3. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 4. Construct the transform object + # 5. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 6. Run the single transform + # 7. Done! 
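# For illustration, this entry point is reached through the argument parser
# built in main() further below, roughly as:
#
#   cloud-init.py single --name <transform-name> [--frequency <freq>] [args...]
#
# The named transform is looked up in the cloud_init_modules,
# cloud_config_modules and cloud_final_modules sections, and any trailing
# arguments are handed through to the transform itself (the command line
# shown here is a sketch, not taken from the patch).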
+ tr_name = args.name + welcome("%s:%s" % (name, tr_name)) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(extract_fns(args)) + tr = stages.Transforms(init, extract_fns(args)) + where_look_mp = { + TR_TPL % ('init'): 'init', + TR_TPL % ('config'): 'config', + TR_TPL % ('final'): 'final', + } + where_look = list(where_look_mp.keys()) + found_at = tr.find_transform(tr_name, where_look) + if not found_at: + msg = ("No known transform named %s " + "in sections (%s)") % (tr_name, ", ".join(where_look)) + LOG.warn(msg) + return 1 else: - msg = "no cloud_init_modules to run" - sys.stderr.write(msg + "\n") - log.debug(msg) - sys.exit(0) + LOG.debug("Found transform %s in sections: %s", + tr_name, found_at) + sect_name = found_at[0] + LOG.debug("Selecting section %s as its 'source' section.", sect_name) + tr_args = args.transform_args + if tr_args: + LOG.debug("Using passed in arguments %s", tr_args) + tr_freq = args.frequency + if tr_freq: + LOG.debug("Using passed in frequency %s", tr_freq) + try: + LOG.debug("Closing stdin") + util.close_stdin() + # This seems to use the short name, instead of the long name + util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(tr.cfg) + (_run_am, failures) = tr.run_single(tr_name, sect_name, + tr_args, tr_freq) + if failures: + LOG.debug("Ran %s but it failed", tr_name) + return 1 + else: + return 0 + + +def main(): + parser = argparse.ArgumentParser() + + # Top level args + parser.add_argument('--version', '-v', action='version', + version='%(prog)s ' + (version.version_string())) + parser.add_argument('--file', '-f', action='append', + dest='files', + help=('additional yaml configuration' + ' files to use'), + type=argparse.FileType('rb')) + parser.add_argument('--debug', '-d', action='store_true', + help=('show additional pre-action' + ' logging (default: %(default)s)'), + default=False) + subparsers = parser.add_subparsers() + + # Each action and its sub-options (if any) + parser_init = subparsers.add_parser('init', + help=('initializes cloud-init and' + ' performs initial transforms')) + parser_init.add_argument("--local", '-l', action='store_true', + help="start in local mode (default: %(default)s)", + default=False) + # This is used so that we can know which action is selected + + # the functor to use to run this subcommand + parser_init.set_defaults(action=('init', main_init)) + + # These settings are used for the 'config' and 'final' stages + parser_tr = subparsers.add_parser('transform', + help=('performs transforms ' + 'using a given configuration key')) + parser_tr.add_argument("--mode", '-m', action='store', + help=("transform configuration name " + "to use (default: %(default)s)"), + default='config', + choices=('config', 'final')) + parser_tr.set_defaults(action=('transform', main_transform)) + + # These settings are used when you want to query information + # stored in the cloud-init data objects/directories/files + parser_query = subparsers.add_parser('query', + help=('query information stored ' + 'in cloud-init')) + parser_query.add_argument("--name", '-n', action="store", + help="item name to query on", + required=True, + choices=QUERY_DATA_TYPES) + parser_query.set_defaults(action=('query', main_query)) + + # This subcommand allows you to run a single 
transform + parser_single = subparsers.add_parser('single', + help=('run a single transform ')) + parser_single.set_defaults(action=('single', main_single)) + parser_single.add_argument("--name", '-n', action="store", + help="transform name to run", + required=True) + parser_single.add_argument("--frequency", action="store", + help=("frequency of " + " the transform (default: %(default)s)"), + required=False, + default=settings.PER_ALWAYS, + choices=settings.FREQUENCIES) + parser_single.add_argument("transform_args", nargs="*", + metavar='argument', + help=('any additional arguments to' + ' pass to this transform')) + parser_single.set_defaults(action=('single', main_single)) + + + args = parser.parse_args() + + # Setup basic logging to start (until reinitialized) + if args.debug: + logging.setupBasicLogging() + + (name, functor) = args.action + return functor(name, args) - sys.exit(len(failures)) if __name__ == '__main__': - main() + sys.exit(main()) + diff --git a/bin/cloud-init2.py b/bin/cloud-init2.py deleted file mode 100755 index 135e4114..00000000 --- a/bin/cloud-init2.py +++ /dev/null @@ -1,421 +0,0 @@ -#!/usr/bin/python -# vi: ts=4 expandtab -# -# Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# Copyright (C) 2012 Yahoo! Inc. -# -# Author: Scott Moser -# Author: Juerg Haefliger -# Author: Joshua Harlow -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import argparse -import os -import sys - -# This is more just for running from the bin folder -possible_topdir = os.path.normpath(os.path.join(os.path.abspath( - sys.argv[0]), os.pardir, os.pardir)) -if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): - sys.path.insert(0, possible_topdir) - -from cloudinit import log as logging -from cloudinit import netinfo -from cloudinit import settings -from cloudinit import sources -from cloudinit import stages -from cloudinit import templater -from cloudinit import util -from cloudinit import version - - -# Transform section template -TR_TPL = "cloud_%s_modules" - -# Things u can query on -QUERY_DATA_TYPES = [ - 'data', - 'data_raw', - 'instance_id', -] - -LOG = logging.getLogger() - - -def welcome(action): - msg = ("Cloud-init v. {{version}} running '{{action}}' at " - "{{timestamp}}. Up {{uptime}} seconds.") - tpl_params = { - 'version': version.version_string(), - 'uptime': util.uptime(), - 'timestamp': util.time_rfc2822(), - 'action': action, - } - welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) - sys.stderr.write("%s\n" % (welcome_msg)) - sys.stderr.flush() - LOG.info(welcome_msg) - - -def extract_fns(args): - # Files are already opened so lets just pass that along - # since it would of broke if it couldn't have - # read that file already... - fn_cfgs = [] - if args.files: - for fh in args.files: - # The realpath is more useful in logging - # so lets resolve to that... 
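# (Aside: the bin/cloud-init2.py removed here is essentially the same program
# that this patch writes into bin/cloud-init.py above; the most visible
# difference is that its main_query() was still a bare pass instead of
# raising NotImplementedError.)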
- fn_cfgs.append(os.path.realpath(fh.name)) - return fn_cfgs - - -def run_transform_section(tr, action_name, section): - full_section_name = TR_TPL % (section) - (ran_am, failures) = tr.run_section(full_section_name) - if not ran_am: - msg = ("No '%s' transforms to run" - " under section '%s'") % (action_name, full_section_name) - sys.stderr.write("%s\n" % (msg)) - LOG.debug(msg) - return 0 - else: - LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) - return len(failures) - - -def main_init(name, args): - deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] - if args.local: - deps = [sources.DEP_FILESYSTEM] - - if not args.local: - # TODO: What is this for?? - root_name = "%s.d" % (settings.CLOUD_CONFIG) - target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") - util.read_write_cmdline_url(target_fn) - - # Cloud-init 'init' stage is broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Setup logging/output redirections with resultant config (if any) - # 3. Initialize the cloud-init filesystem - # 4. Check if we can stop early by looking for various files - # 5. Fetch the datasource - # 6. Connect to the current instance location + update the cache - # 7. Consume the userdata (handlers get activated here) - # 8. Construct the transform object - # 9. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 10. Run the transforms for the 'init' stage - # 11. Done! - welcome(name) - init = stages.Init(deps) - # Stage 1 - init.read_cfg(extract_fns(args)) - # Stage 2 - outfmt = None - errfmt = None - try: - LOG.debug("Closing stdin") - util.close_stdin() - (outfmt, errfmt) = util.fixup_output(init.cfg, name) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(init.cfg) - # Stage 3 - try: - init.initialize() - except Exception: - util.logexc(LOG, "Failed to initialize, likely bad things to come!") - # Stage 4 - path_helper = init.paths - if not args.local: - sys.stderr.write("%s\n" % (netinfo.debug_info())) - LOG.debug(("Checking to see if files that we need already" - " exist from a previous run that would allow us" - " to stop early.")) - stop_files = [ - os.path.join(path_helper.get_cpath("data"), "no-net"), - path_helper.get_ipath_cur("obj_pkl"), - ] - existing_files = [] - for fn in stop_files: - try: - c = util.load_file(fn) - if len(c): - existing_files.append((fn, len(c))) - except Exception: - pass - if existing_files: - LOG.debug("Exiting early due to the existence of %s files", - existing_files) - return 0 - else: - # The cache is not instance specific, so it has to be purged - # but we want 'start' to benefit from a cache if - # a previous start-local populated one... - manual_clean = util.get_cfg_option_bool(init.cfg, - 'manual_cache_clean', False) - if manual_clean: - LOG.debug("Not purging instance link, manual cleaning enabled") - init.purge_cache(False) - else: - init.purge_cache() - # Delete the non-net file as well - util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) - # Stage 5 - try: - init.fetch() - except sources.DataSourceNotFoundException: - util.logexc(LOG, "No instance datasource found!") - # TODO: Return 0 or 1?? 
- return 1 - # Stage 6 - iid = init.instancify() - LOG.debug("%s will now be targeting instance id: %s", name, iid) - init.update() - # Stage 7 - try: - (ran, _results) = init.cloudify().run('consume_userdata', - init.consume, - args=[settings.PER_INSTANCE], - freq=settings.PER_INSTANCE) - if not ran: - # Just consume anything that is set to run per - # always if nothing ran in the per instance section - init.consume(settings.PER_ALWAYS) - except Exception: - util.logexc(LOG, "Consuming user data failed!") - return 1 - # Stage 8 - TODO - do we really need to re-extract our configs? - tr = stages.Transforms(init, extract_fns(args)) - # Stage 9 - TODO is this really needed?? - try: - outfmt_orig = outfmt - errfmt_orig = errfmt - (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) - if outfmt_orig != outfmt or errfmt_orig != errfmt: - LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) - (outfmt, errfmt) = util.fixup_output(tr.cfg, name) - except: - util.logexc(LOG, "Failed to re-adjust output redirection!") - # Stage 10 - return run_transform_section(tr, name, name) - - -def main_transform(action_name, args): - name = args.mode - # Cloud-init transform stages are broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Get the datasource from the init object, if it does - # not exist then that means the main_init stage never - # worked, and thus this stage can not run. - # 3. Construct the transform object - # 4. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 5. Run the transforms for the given stage name - # 6. Done! - welcome("%s:%s" % (action_name, name)) - init = stages.Init(ds_deps=[]) - # Stage 1 - init.read_cfg(extract_fns(args)) - # Stage 2 - try: - init.fetch() - except sources.DataSourceNotFoundException: - # There was no datasource found, theres nothing to do - util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) - return 1 - # Stage 3 - tr_cfgs = extract_fns(args) - cc_cfg = init.paths.get_ipath_cur('cloud_config') - if settings.CFG_ENV_NAME in os.environ: - cc_cfg = os.environ[settings.CFG_ENV_NAME] - if cc_cfg and os.path.exists(cc_cfg): - tr_cfgs.append(cc_cfg) - tr = stages.Transforms(init, tr_cfgs) - # Stage 4 - try: - LOG.debug("Closing stdin") - util.close_stdin() - util.fixup_output(tr.cfg, name) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(tr.cfg) - # Stage 5 - return run_transform_section(tr, name, name) - - -def main_query(_name, _args): - pass - - -def main_single(name, args): - # Cloud-init single stage is broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Check to see if we can find the transform name - # in the 'init', 'final', 'config' stages, if not bail - # 3. Get the datasource from the init object, if it does - # not exist then that means the main_init stage never - # worked, and thus this stage can not run. - # 4. Construct the transform object - # 5. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 6. Run the single transform - # 7. Done! 
- tr_name = args.name - welcome("%s:%s" % (name, tr_name)) - init = stages.Init(ds_deps=[]) - # Stage 1 - init.read_cfg(extract_fns(args)) - tr = stages.Transforms(init, extract_fns(args)) - where_look_mp = { - TR_TPL % ('init'): 'init', - TR_TPL % ('config'): 'config', - TR_TPL % ('final'): 'final', - } - where_look = list(where_look_mp.keys()) - found_at = tr.find_transform(tr_name, where_look) - if not found_at: - msg = ("No known transform named %s " - "in sections (%s)") % (tr_name, ", ".join(where_look)) - LOG.warn(msg) - return 1 - else: - LOG.debug("Found transform %s in sections: %s", - tr_name, found_at) - sect_name = found_at[0] - LOG.debug("Selecting section %s as its 'source' section.", sect_name) - tr_args = args.transform_args - if tr_args: - LOG.debug("Using passed in arguments %s", tr_args) - tr_freq = args.frequency - if tr_freq: - LOG.debug("Using passed in frequency %s", tr_freq) - try: - LOG.debug("Closing stdin") - util.close_stdin() - # This seems to use the short name, instead of the long name - util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(tr.cfg) - (_run_am, failures) = tr.run_single(tr_name, sect_name, - tr_args, tr_freq) - if failures: - LOG.debug("Ran %s but it failed", tr_name) - return 1 - else: - return 0 - - -def main(): - parser = argparse.ArgumentParser() - - # Top level args - parser.add_argument('--version', '-v', action='version', - version='%(prog)s ' + (version.version_string())) - parser.add_argument('--file', '-f', action='append', - dest='files', - help=('additional yaml configuration' - ' files to use'), - type=argparse.FileType('rb')) - parser.add_argument('--debug', '-d', action='store_true', - help=('show additional pre-action' - ' logging (default: %(default)s)'), - default=False) - subparsers = parser.add_subparsers() - - # Each action and its sub-options (if any) - parser_init = subparsers.add_parser('init', - help=('initializes cloud-init and' - ' performs initial transforms')) - parser_init.add_argument("--local", '-l', action='store_true', - help="start in local mode (default: %(default)s)", - default=False) - # This is used so that we can know which action is selected + - # the functor to use to run this subcommand - parser_init.set_defaults(action=('init', main_init)) - - # These settings are used for the 'config' and 'final' stages - parser_tr = subparsers.add_parser('transform', - help=('performs transforms ' - 'using a given configuration key')) - parser_tr.add_argument("--mode", '-m', action='store', - help=("transform configuration name " - "to use (default: %(default)s)"), - default='config', - choices=('config', 'final')) - parser_tr.set_defaults(action=('transform', main_transform)) - - # These settings are used when you want to query information - # stored in the cloud-init data objects/directories/files - parser_query = subparsers.add_parser('query', - help=('query information stored ' - 'in cloud-init')) - parser_query.add_argument("--name", '-n', action="store", - help="item name to query on", - required=True, - choices=QUERY_DATA_TYPES) - parser_query.set_defaults(action=('query', main_query)) - - # This subcommand allows you to run a single transform - parser_single = subparsers.add_parser('single', - help=('run a single transform ')) - 
parser_single.set_defaults(action=('single', main_single)) - parser_single.add_argument("--name", '-n', action="store", - help="transform name to run", - required=True) - parser_single.add_argument("--frequency", action="store", - help=("frequency of " - " the transform (default: %(default)s)"), - required=False, - default=settings.PER_ALWAYS, - choices=settings.FREQUENCIES) - parser_single.add_argument("transform_args", nargs="*", - metavar='argument', - help=('any additional arguments to' - ' pass to this transform')) - parser_single.set_defaults(action=('single', main_single)) - - - args = parser.parse_args() - - # Setup basic logging to start (until reinitialized) - if args.debug: - logging.setupBasicLogging() - - (name, functor) = args.action - return functor(name, args) - - -if __name__ == '__main__': - sys.exit(main()) - -- cgit v1.2.3 From 806251d1c1541d4f14a8f7ef7557633ae13c70ea Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 12:12:53 -0700 Subject: Add message about query not being implemented yet. --- bin/cloud-init.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'bin') diff --git a/bin/cloud-init.py b/bin/cloud-init.py index 99ecdaf9..8fb3a740 100755 --- a/bin/cloud-init.py +++ b/bin/cloud-init.py @@ -269,7 +269,8 @@ def main_transform(action_name, args): def main_query(name, _args): - raise NotImplementedError("Action '%s' is not currently implemented" % (name)) + raise NotImplementedError(("Action '%s' is not" + " currently implemented") % (name)) def main_single(name, args): -- cgit v1.2.3 From eeb7328ddb83e26b1163d2d40590498a971397c5 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 12:22:43 -0700 Subject: Remove the .py extension for this new main program binary. --- bin/cloud-init | 422 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ bin/cloud-init.py | 422 ------------------------------------------------------ 2 files changed, 422 insertions(+), 422 deletions(-) create mode 100755 bin/cloud-init delete mode 100755 bin/cloud-init.py (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init new file mode 100755 index 00000000..8fb3a740 --- /dev/null +++ b/bin/cloud-init @@ -0,0 +1,422 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2012 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2012 Yahoo! Inc. +# +# Author: Scott Moser +# Author: Juerg Haefliger +# Author: Joshua Harlow +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +import argparse +import os +import sys + +# This is more just for running from the bin folder +possible_topdir = os.path.normpath(os.path.join(os.path.abspath( + sys.argv[0]), os.pardir, os.pardir)) +if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): + sys.path.insert(0, possible_topdir) + +from cloudinit import log as logging +from cloudinit import netinfo +from cloudinit import settings +from cloudinit import sources +from cloudinit import stages +from cloudinit import templater +from cloudinit import util +from cloudinit import version + + +# Transform section template +TR_TPL = "cloud_%s_modules" + +# Things u can query on +QUERY_DATA_TYPES = [ + 'data', + 'data_raw', + 'instance_id', +] + +LOG = logging.getLogger() + + +def welcome(action): + msg = ("Cloud-init v. {{version}} running '{{action}}' at " + "{{timestamp}}. Up {{uptime}} seconds.") + tpl_params = { + 'version': version.version_string(), + 'uptime': util.uptime(), + 'timestamp': util.time_rfc2822(), + 'action': action, + } + welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) + sys.stderr.write("%s\n" % (welcome_msg)) + sys.stderr.flush() + LOG.info(welcome_msg) + + +def extract_fns(args): + # Files are already opened so lets just pass that along + # since it would of broke if it couldn't have + # read that file already... + fn_cfgs = [] + if args.files: + for fh in args.files: + # The realpath is more useful in logging + # so lets resolve to that... + fn_cfgs.append(os.path.realpath(fh.name)) + return fn_cfgs + + +def run_transform_section(tr, action_name, section): + full_section_name = TR_TPL % (section) + (ran_am, failures) = tr.run_section(full_section_name) + if not ran_am: + msg = ("No '%s' transforms to run" + " under section '%s'") % (action_name, full_section_name) + sys.stderr.write("%s\n" % (msg)) + LOG.debug(msg) + return 0 + else: + LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) + return len(failures) + + +def main_init(name, args): + deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] + if args.local: + deps = [sources.DEP_FILESYSTEM] + + if not args.local: + # TODO: What is this for?? + root_name = "%s.d" % (settings.CLOUD_CONFIG) + target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") + util.read_write_cmdline_url(target_fn) + + # Cloud-init 'init' stage is broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Setup logging/output redirections with resultant config (if any) + # 3. Initialize the cloud-init filesystem + # 4. Check if we can stop early by looking for various files + # 5. Fetch the datasource + # 6. Connect to the current instance location + update the cache + # 7. Consume the userdata (handlers get activated here) + # 8. Construct the transform object + # 9. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 10. Run the transforms for the 'init' stage + # 11. Done! 
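# The welcome() call below renders the banner template defined near the top
# of this file; with made-up values filled in it produces a line along the
# lines of:
#
#   Cloud-init v. 0.7.0 running 'init' at Wed, 20 Jun 2012 12:22:43 +0000. Up 4.25 seconds.
#
# which is written to stderr and also logged (version, timestamp and uptime
# here are purely illustrative).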
+ welcome(name) + init = stages.Init(deps) + # Stage 1 + init.read_cfg(extract_fns(args)) + # Stage 2 + outfmt = None + errfmt = None + try: + LOG.debug("Closing stdin") + util.close_stdin() + (outfmt, errfmt) = util.fixup_output(init.cfg, name) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(init.cfg) + # Stage 3 + try: + init.initialize() + except Exception: + util.logexc(LOG, "Failed to initialize, likely bad things to come!") + # Stage 4 + path_helper = init.paths + if not args.local: + sys.stderr.write("%s\n" % (netinfo.debug_info())) + LOG.debug(("Checking to see if files that we need already" + " exist from a previous run that would allow us" + " to stop early.")) + stop_files = [ + os.path.join(path_helper.get_cpath("data"), "no-net"), + path_helper.get_ipath_cur("obj_pkl"), + ] + existing_files = [] + for fn in stop_files: + try: + c = util.load_file(fn) + if len(c): + existing_files.append((fn, len(c))) + except Exception: + pass + if existing_files: + LOG.debug("Exiting early due to the existence of %s files", + existing_files) + return 0 + else: + # The cache is not instance specific, so it has to be purged + # but we want 'start' to benefit from a cache if + # a previous start-local populated one... + manual_clean = util.get_cfg_option_bool(init.cfg, + 'manual_cache_clean', False) + if manual_clean: + LOG.debug("Not purging instance link, manual cleaning enabled") + init.purge_cache(False) + else: + init.purge_cache() + # Delete the non-net file as well + util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) + # Stage 5 + try: + init.fetch() + except sources.DataSourceNotFoundException: + util.logexc(LOG, "No instance datasource found!") + # TODO: Return 0 or 1?? + return 1 + # Stage 6 + iid = init.instancify() + LOG.debug("%s will now be targeting instance id: %s", name, iid) + init.update() + # Stage 7 + try: + (ran, _results) = init.cloudify().run('consume_userdata', + init.consume, + args=[settings.PER_INSTANCE], + freq=settings.PER_INSTANCE) + if not ran: + # Just consume anything that is set to run per + # always if nothing ran in the per instance section + init.consume(settings.PER_ALWAYS) + except Exception: + util.logexc(LOG, "Consuming user data failed!") + return 1 + # Stage 8 - TODO - do we really need to re-extract our configs? + tr = stages.Transforms(init, extract_fns(args)) + # Stage 9 - TODO is this really needed?? + try: + outfmt_orig = outfmt + errfmt_orig = errfmt + (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) + if outfmt_orig != outfmt or errfmt_orig != errfmt: + LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) + (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to re-adjust output redirection!") + # Stage 10 + return run_transform_section(tr, name, name) + + +def main_transform(action_name, args): + name = args.mode + # Cloud-init transform stages are broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 3. Construct the transform object + # 4. 
Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 5. Run the transforms for the given stage name + # 6. Done! + welcome("%s:%s" % (action_name, name)) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(extract_fns(args)) + # Stage 2 + try: + init.fetch() + except sources.DataSourceNotFoundException: + # There was no datasource found, theres nothing to do + util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) + return 1 + # Stage 3 + tr_cfgs = extract_fns(args) + cc_cfg = init.paths.get_ipath_cur('cloud_config') + if settings.CFG_ENV_NAME in os.environ: + cc_cfg = os.environ[settings.CFG_ENV_NAME] + if cc_cfg and os.path.exists(cc_cfg): + tr_cfgs.append(cc_cfg) + tr = stages.Transforms(init, tr_cfgs) + # Stage 4 + try: + LOG.debug("Closing stdin") + util.close_stdin() + util.fixup_output(tr.cfg, name) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(tr.cfg) + # Stage 5 + return run_transform_section(tr, name, name) + + +def main_query(name, _args): + raise NotImplementedError(("Action '%s' is not" + " currently implemented") % (name)) + + +def main_single(name, args): + # Cloud-init single stage is broken up into the following sub-stages + # 1. Ensure that the init object fetches its config without errors + # 2. Check to see if we can find the transform name + # in the 'init', 'final', 'config' stages, if not bail + # 3. Get the datasource from the init object, if it does + # not exist then that means the main_init stage never + # worked, and thus this stage can not run. + # 4. Construct the transform object + # 5. Adjust any subsequent logging/output redirections using + # the transform objects configuration + # 6. Run the single transform + # 7. Done! 
+ tr_name = args.name + welcome("%s:%s" % (name, tr_name)) + init = stages.Init(ds_deps=[]) + # Stage 1 + init.read_cfg(extract_fns(args)) + tr = stages.Transforms(init, extract_fns(args)) + where_look_mp = { + TR_TPL % ('init'): 'init', + TR_TPL % ('config'): 'config', + TR_TPL % ('final'): 'final', + } + where_look = list(where_look_mp.keys()) + found_at = tr.find_transform(tr_name, where_look) + if not found_at: + msg = ("No known transform named %s " + "in sections (%s)") % (tr_name, ", ".join(where_look)) + LOG.warn(msg) + return 1 + else: + LOG.debug("Found transform %s in sections: %s", + tr_name, found_at) + sect_name = found_at[0] + LOG.debug("Selecting section %s as its 'source' section.", sect_name) + tr_args = args.transform_args + if tr_args: + LOG.debug("Using passed in arguments %s", tr_args) + tr_freq = args.frequency + if tr_freq: + LOG.debug("Using passed in frequency %s", tr_freq) + try: + LOG.debug("Closing stdin") + util.close_stdin() + # This seems to use the short name, instead of the long name + util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(tr.cfg) + (_run_am, failures) = tr.run_single(tr_name, sect_name, + tr_args, tr_freq) + if failures: + LOG.debug("Ran %s but it failed", tr_name) + return 1 + else: + return 0 + + +def main(): + parser = argparse.ArgumentParser() + + # Top level args + parser.add_argument('--version', '-v', action='version', + version='%(prog)s ' + (version.version_string())) + parser.add_argument('--file', '-f', action='append', + dest='files', + help=('additional yaml configuration' + ' files to use'), + type=argparse.FileType('rb')) + parser.add_argument('--debug', '-d', action='store_true', + help=('show additional pre-action' + ' logging (default: %(default)s)'), + default=False) + subparsers = parser.add_subparsers() + + # Each action and its sub-options (if any) + parser_init = subparsers.add_parser('init', + help=('initializes cloud-init and' + ' performs initial transforms')) + parser_init.add_argument("--local", '-l', action='store_true', + help="start in local mode (default: %(default)s)", + default=False) + # This is used so that we can know which action is selected + + # the functor to use to run this subcommand + parser_init.set_defaults(action=('init', main_init)) + + # These settings are used for the 'config' and 'final' stages + parser_tr = subparsers.add_parser('transform', + help=('performs transforms ' + 'using a given configuration key')) + parser_tr.add_argument("--mode", '-m', action='store', + help=("transform configuration name " + "to use (default: %(default)s)"), + default='config', + choices=('config', 'final')) + parser_tr.set_defaults(action=('transform', main_transform)) + + # These settings are used when you want to query information + # stored in the cloud-init data objects/directories/files + parser_query = subparsers.add_parser('query', + help=('query information stored ' + 'in cloud-init')) + parser_query.add_argument("--name", '-n', action="store", + help="item name to query on", + required=True, + choices=QUERY_DATA_TYPES) + parser_query.set_defaults(action=('query', main_query)) + + # This subcommand allows you to run a single transform + parser_single = subparsers.add_parser('single', + help=('run a single transform ')) + 
parser_single.set_defaults(action=('single', main_single)) + parser_single.add_argument("--name", '-n', action="store", + help="transform name to run", + required=True) + parser_single.add_argument("--frequency", action="store", + help=("frequency of " + " the transform (default: %(default)s)"), + required=False, + default=settings.PER_ALWAYS, + choices=settings.FREQUENCIES) + parser_single.add_argument("transform_args", nargs="*", + metavar='argument', + help=('any additional arguments to' + ' pass to this transform')) + parser_single.set_defaults(action=('single', main_single)) + + + args = parser.parse_args() + + # Setup basic logging to start (until reinitialized) + if args.debug: + logging.setupBasicLogging() + + (name, functor) = args.action + return functor(name, args) + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/bin/cloud-init.py b/bin/cloud-init.py deleted file mode 100755 index 8fb3a740..00000000 --- a/bin/cloud-init.py +++ /dev/null @@ -1,422 +0,0 @@ -#!/usr/bin/python -# vi: ts=4 expandtab -# -# Copyright (C) 2012 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# Copyright (C) 2012 Yahoo! Inc. -# -# Author: Scott Moser -# Author: Juerg Haefliger -# Author: Joshua Harlow -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import argparse -import os -import sys - -# This is more just for running from the bin folder -possible_topdir = os.path.normpath(os.path.join(os.path.abspath( - sys.argv[0]), os.pardir, os.pardir)) -if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): - sys.path.insert(0, possible_topdir) - -from cloudinit import log as logging -from cloudinit import netinfo -from cloudinit import settings -from cloudinit import sources -from cloudinit import stages -from cloudinit import templater -from cloudinit import util -from cloudinit import version - - -# Transform section template -TR_TPL = "cloud_%s_modules" - -# Things u can query on -QUERY_DATA_TYPES = [ - 'data', - 'data_raw', - 'instance_id', -] - -LOG = logging.getLogger() - - -def welcome(action): - msg = ("Cloud-init v. {{version}} running '{{action}}' at " - "{{timestamp}}. Up {{uptime}} seconds.") - tpl_params = { - 'version': version.version_string(), - 'uptime': util.uptime(), - 'timestamp': util.time_rfc2822(), - 'action': action, - } - welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) - sys.stderr.write("%s\n" % (welcome_msg)) - sys.stderr.flush() - LOG.info(welcome_msg) - - -def extract_fns(args): - # Files are already opened so lets just pass that along - # since it would of broke if it couldn't have - # read that file already... - fn_cfgs = [] - if args.files: - for fh in args.files: - # The realpath is more useful in logging - # so lets resolve to that... 
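# (Aside: this deletion of bin/cloud-init.py pairs with the creation of
# bin/cloud-init above; both sides carry the same blob (index 8fb3a740), so
# the change is a straight rename that only drops the .py extension, as the
# commit message states.)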
- fn_cfgs.append(os.path.realpath(fh.name)) - return fn_cfgs - - -def run_transform_section(tr, action_name, section): - full_section_name = TR_TPL % (section) - (ran_am, failures) = tr.run_section(full_section_name) - if not ran_am: - msg = ("No '%s' transforms to run" - " under section '%s'") % (action_name, full_section_name) - sys.stderr.write("%s\n" % (msg)) - LOG.debug(msg) - return 0 - else: - LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) - return len(failures) - - -def main_init(name, args): - deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] - if args.local: - deps = [sources.DEP_FILESYSTEM] - - if not args.local: - # TODO: What is this for?? - root_name = "%s.d" % (settings.CLOUD_CONFIG) - target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") - util.read_write_cmdline_url(target_fn) - - # Cloud-init 'init' stage is broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Setup logging/output redirections with resultant config (if any) - # 3. Initialize the cloud-init filesystem - # 4. Check if we can stop early by looking for various files - # 5. Fetch the datasource - # 6. Connect to the current instance location + update the cache - # 7. Consume the userdata (handlers get activated here) - # 8. Construct the transform object - # 9. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 10. Run the transforms for the 'init' stage - # 11. Done! - welcome(name) - init = stages.Init(deps) - # Stage 1 - init.read_cfg(extract_fns(args)) - # Stage 2 - outfmt = None - errfmt = None - try: - LOG.debug("Closing stdin") - util.close_stdin() - (outfmt, errfmt) = util.fixup_output(init.cfg, name) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(init.cfg) - # Stage 3 - try: - init.initialize() - except Exception: - util.logexc(LOG, "Failed to initialize, likely bad things to come!") - # Stage 4 - path_helper = init.paths - if not args.local: - sys.stderr.write("%s\n" % (netinfo.debug_info())) - LOG.debug(("Checking to see if files that we need already" - " exist from a previous run that would allow us" - " to stop early.")) - stop_files = [ - os.path.join(path_helper.get_cpath("data"), "no-net"), - path_helper.get_ipath_cur("obj_pkl"), - ] - existing_files = [] - for fn in stop_files: - try: - c = util.load_file(fn) - if len(c): - existing_files.append((fn, len(c))) - except Exception: - pass - if existing_files: - LOG.debug("Exiting early due to the existence of %s files", - existing_files) - return 0 - else: - # The cache is not instance specific, so it has to be purged - # but we want 'start' to benefit from a cache if - # a previous start-local populated one... - manual_clean = util.get_cfg_option_bool(init.cfg, - 'manual_cache_clean', False) - if manual_clean: - LOG.debug("Not purging instance link, manual cleaning enabled") - init.purge_cache(False) - else: - init.purge_cache() - # Delete the non-net file as well - util.del_file(os.path.join(path_helper.get_cpath("data"), "no-net")) - # Stage 5 - try: - init.fetch() - except sources.DataSourceNotFoundException: - util.logexc(LOG, "No instance datasource found!") - # TODO: Return 0 or 1?? 
- return 1 - # Stage 6 - iid = init.instancify() - LOG.debug("%s will now be targeting instance id: %s", name, iid) - init.update() - # Stage 7 - try: - (ran, _results) = init.cloudify().run('consume_userdata', - init.consume, - args=[settings.PER_INSTANCE], - freq=settings.PER_INSTANCE) - if not ran: - # Just consume anything that is set to run per - # always if nothing ran in the per instance section - init.consume(settings.PER_ALWAYS) - except Exception: - util.logexc(LOG, "Consuming user data failed!") - return 1 - # Stage 8 - TODO - do we really need to re-extract our configs? - tr = stages.Transforms(init, extract_fns(args)) - # Stage 9 - TODO is this really needed?? - try: - outfmt_orig = outfmt - errfmt_orig = errfmt - (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) - if outfmt_orig != outfmt or errfmt_orig != errfmt: - LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) - (outfmt, errfmt) = util.fixup_output(tr.cfg, name) - except: - util.logexc(LOG, "Failed to re-adjust output redirection!") - # Stage 10 - return run_transform_section(tr, name, name) - - -def main_transform(action_name, args): - name = args.mode - # Cloud-init transform stages are broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Get the datasource from the init object, if it does - # not exist then that means the main_init stage never - # worked, and thus this stage can not run. - # 3. Construct the transform object - # 4. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 5. Run the transforms for the given stage name - # 6. Done! - welcome("%s:%s" % (action_name, name)) - init = stages.Init(ds_deps=[]) - # Stage 1 - init.read_cfg(extract_fns(args)) - # Stage 2 - try: - init.fetch() - except sources.DataSourceNotFoundException: - # There was no datasource found, theres nothing to do - util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) - return 1 - # Stage 3 - tr_cfgs = extract_fns(args) - cc_cfg = init.paths.get_ipath_cur('cloud_config') - if settings.CFG_ENV_NAME in os.environ: - cc_cfg = os.environ[settings.CFG_ENV_NAME] - if cc_cfg and os.path.exists(cc_cfg): - tr_cfgs.append(cc_cfg) - tr = stages.Transforms(init, tr_cfgs) - # Stage 4 - try: - LOG.debug("Closing stdin") - util.close_stdin() - util.fixup_output(tr.cfg, name) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(tr.cfg) - # Stage 5 - return run_transform_section(tr, name, name) - - -def main_query(name, _args): - raise NotImplementedError(("Action '%s' is not" - " currently implemented") % (name)) - - -def main_single(name, args): - # Cloud-init single stage is broken up into the following sub-stages - # 1. Ensure that the init object fetches its config without errors - # 2. Check to see if we can find the transform name - # in the 'init', 'final', 'config' stages, if not bail - # 3. Get the datasource from the init object, if it does - # not exist then that means the main_init stage never - # worked, and thus this stage can not run. - # 4. Construct the transform object - # 5. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 6. Run the single transform - # 7. Done! 
- tr_name = args.name - welcome("%s:%s" % (name, tr_name)) - init = stages.Init(ds_deps=[]) - # Stage 1 - init.read_cfg(extract_fns(args)) - tr = stages.Transforms(init, extract_fns(args)) - where_look_mp = { - TR_TPL % ('init'): 'init', - TR_TPL % ('config'): 'config', - TR_TPL % ('final'): 'final', - } - where_look = list(where_look_mp.keys()) - found_at = tr.find_transform(tr_name, where_look) - if not found_at: - msg = ("No known transform named %s " - "in sections (%s)") % (tr_name, ", ".join(where_look)) - LOG.warn(msg) - return 1 - else: - LOG.debug("Found transform %s in sections: %s", - tr_name, found_at) - sect_name = found_at[0] - LOG.debug("Selecting section %s as its 'source' section.", sect_name) - tr_args = args.transform_args - if tr_args: - LOG.debug("Using passed in arguments %s", tr_args) - tr_freq = args.frequency - if tr_freq: - LOG.debug("Using passed in frequency %s", tr_freq) - try: - LOG.debug("Closing stdin") - util.close_stdin() - # This seems to use the short name, instead of the long name - util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(tr.cfg) - (_run_am, failures) = tr.run_single(tr_name, sect_name, - tr_args, tr_freq) - if failures: - LOG.debug("Ran %s but it failed", tr_name) - return 1 - else: - return 0 - - -def main(): - parser = argparse.ArgumentParser() - - # Top level args - parser.add_argument('--version', '-v', action='version', - version='%(prog)s ' + (version.version_string())) - parser.add_argument('--file', '-f', action='append', - dest='files', - help=('additional yaml configuration' - ' files to use'), - type=argparse.FileType('rb')) - parser.add_argument('--debug', '-d', action='store_true', - help=('show additional pre-action' - ' logging (default: %(default)s)'), - default=False) - subparsers = parser.add_subparsers() - - # Each action and its sub-options (if any) - parser_init = subparsers.add_parser('init', - help=('initializes cloud-init and' - ' performs initial transforms')) - parser_init.add_argument("--local", '-l', action='store_true', - help="start in local mode (default: %(default)s)", - default=False) - # This is used so that we can know which action is selected + - # the functor to use to run this subcommand - parser_init.set_defaults(action=('init', main_init)) - - # These settings are used for the 'config' and 'final' stages - parser_tr = subparsers.add_parser('transform', - help=('performs transforms ' - 'using a given configuration key')) - parser_tr.add_argument("--mode", '-m', action='store', - help=("transform configuration name " - "to use (default: %(default)s)"), - default='config', - choices=('config', 'final')) - parser_tr.set_defaults(action=('transform', main_transform)) - - # These settings are used when you want to query information - # stored in the cloud-init data objects/directories/files - parser_query = subparsers.add_parser('query', - help=('query information stored ' - 'in cloud-init')) - parser_query.add_argument("--name", '-n', action="store", - help="item name to query on", - required=True, - choices=QUERY_DATA_TYPES) - parser_query.set_defaults(action=('query', main_query)) - - # This subcommand allows you to run a single transform - parser_single = subparsers.add_parser('single', - help=('run a single transform ')) - 
parser_single.set_defaults(action=('single', main_single)) - parser_single.add_argument("--name", '-n', action="store", - help="transform name to run", - required=True) - parser_single.add_argument("--frequency", action="store", - help=("frequency of " - " the transform (default: %(default)s)"), - required=False, - default=settings.PER_ALWAYS, - choices=settings.FREQUENCIES) - parser_single.add_argument("transform_args", nargs="*", - metavar='argument', - help=('any additional arguments to' - ' pass to this transform')) - parser_single.set_defaults(action=('single', main_single)) - - - args = parser.parse_args() - - # Setup basic logging to start (until reinitialized) - if args.debug: - logging.setupBasicLogging() - - (name, functor) = args.action - return functor(name, args) - - -if __name__ == '__main__': - sys.exit(main()) - -- cgit v1.2.3 From 2d4b57a6d12227ad03658e4b5d812c6fdca42049 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 16:30:30 -0700 Subject: 1. Use the name modules where transforms was used 2. Reflect the move back to config 'modules' in the other cli options 3. Have the single mode not need to lookup the module but use the general import path --- bin/cloud-init | 180 +++++++++++++++++++++++++++------------------------------ 1 file changed, 84 insertions(+), 96 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 8fb3a740..032d5f39 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -25,12 +25,19 @@ import argparse import os import sys -# This is more just for running from the bin folder +# This is more just for running from the bin folder so that +# cloud-init binary can find the cloudinit module possible_topdir = os.path.normpath(os.path.join(os.path.abspath( sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): sys.path.insert(0, possible_topdir) +# This is so config modules can be found +if os.path.exists(os.path.join(possible_topdir, + "cloudinit", 'config', "__init__.py")): + sys.path.insert(0, os.path.join(possible_topdir, 'cloudinit', 'config')) + + from cloudinit import log as logging from cloudinit import netinfo from cloudinit import settings @@ -41,8 +48,8 @@ from cloudinit import util from cloudinit import version -# Transform section template -TR_TPL = "cloud_%s_modules" +# Module section template +MOD_SECTION_TPL = "cloud_%s_modules" # Things u can query on QUERY_DATA_TYPES = [ @@ -68,7 +75,6 @@ def welcome(action): sys.stderr.flush() LOG.info(welcome_msg) - def extract_fns(args): # Files are already opened so lets just pass that along # since it would of broke if it couldn't have @@ -82,17 +88,17 @@ def extract_fns(args): return fn_cfgs -def run_transform_section(tr, action_name, section): - full_section_name = TR_TPL % (section) - (ran_am, failures) = tr.run_section(full_section_name) +def run_module_section(mods, action_name, section): + full_section_name = MOD_SECTION_TPL % (section) + (ran_am, failures) = mods.run_section(full_section_name) if not ran_am: - msg = ("No '%s' transforms to run" + msg = ("No '%s' modules to run" " under section '%s'") % (action_name, full_section_name) sys.stderr.write("%s\n" % (msg)) LOG.debug(msg) return 0 else: - LOG.debug("Ran %s transforms with %s failures", ran_am, len(failures)) + LOG.debug("Ran %s modules with %s failures", ran_am, len(failures)) return len(failures) @@ -115,10 +121,10 @@ def main_init(name, args): # 5. Fetch the datasource # 6. 
Connect to the current instance location + update the cache # 7. Consume the userdata (handlers get activated here) - # 8. Construct the transform object + # 8. Construct the modules object # 9. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 10. Run the transforms for the 'init' stage + # the modules objects configuration + # 10. Run the modules for the 'init' stage # 11. Done! welcome(name) init = stages.Init(deps) @@ -205,32 +211,32 @@ def main_init(name, args): util.logexc(LOG, "Consuming user data failed!") return 1 # Stage 8 - TODO - do we really need to re-extract our configs? - tr = stages.Transforms(init, extract_fns(args)) + mods = stages.Modules(init, extract_fns(args)) # Stage 9 - TODO is this really needed?? try: outfmt_orig = outfmt errfmt_orig = errfmt - (outfmt, errfmt) = util.get_output_cfg(tr.cfg, name) + (outfmt, errfmt) = util.get_output_cfg(mods.cfg, name) if outfmt_orig != outfmt or errfmt_orig != errfmt: LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt) - (outfmt, errfmt) = util.fixup_output(tr.cfg, name) + (outfmt, errfmt) = util.fixup_output(mods.cfg, name) except: util.logexc(LOG, "Failed to re-adjust output redirection!") # Stage 10 - return run_transform_section(tr, name, name) + return run_module_section(mods, name, name) -def main_transform(action_name, args): +def main_modules(action_name, args): name = args.mode - # Cloud-init transform stages are broken up into the following sub-stages + # Cloud-init 'modules' stages are broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors # 2. Get the datasource from the init object, if it does # not exist then that means the main_init stage never # worked, and thus this stage can not run. - # 3. Construct the transform object + # 3. Construct the modules object # 4. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 5. Run the transforms for the given stage name + # the modules objects configuration + # 5. Run the modules for the given stage name # 6. Done! welcome("%s:%s" % (action_name, name)) init = stages.Init(ds_deps=[]) @@ -244,18 +250,18 @@ def main_transform(action_name, args): util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) return 1 # Stage 3 - tr_cfgs = extract_fns(args) + mod_cfgs = extract_fns(args) cc_cfg = init.paths.get_ipath_cur('cloud_config') if settings.CFG_ENV_NAME in os.environ: cc_cfg = os.environ[settings.CFG_ENV_NAME] if cc_cfg and os.path.exists(cc_cfg): - tr_cfgs.append(cc_cfg) - tr = stages.Transforms(init, tr_cfgs) + mod_cfgs.append(cc_cfg) + mods = stages.Modules(init, mod_cfgs) # Stage 4 try: LOG.debug("Closing stdin") util.close_stdin() - util.fixup_output(tr.cfg, name) + util.fixup_output(mods.cfg, name) except: util.logexc(LOG, "Failed to setup output redirection!") if args.debug: @@ -263,9 +269,9 @@ def main_transform(action_name, args): LOG.debug(("Logging being reset, this logger may no" " longer be active shortly")) logging.resetLogging() - logging.setupLogging(tr.cfg) + logging.setupLogging(mods.cfg) # Stage 5 - return run_transform_section(tr, name, name) + return run_module_section(mods, name, name) def main_query(name, _args): @@ -276,65 +282,49 @@ def main_query(name, _args): def main_single(name, args): # Cloud-init single stage is broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors - # 2. 
Check to see if we can find the transform name - # in the 'init', 'final', 'config' stages, if not bail - # 3. Get the datasource from the init object, if it does - # not exist then that means the main_init stage never - # worked, and thus this stage can not run. - # 4. Construct the transform object - # 5. Adjust any subsequent logging/output redirections using - # the transform objects configuration - # 6. Run the single transform - # 7. Done! - tr_name = args.name - welcome("%s:%s" % (name, tr_name)) + # 2. Construct the modules object + # 3. Adjust any subsequent logging/output redirections using + # the modules objects configuration + # 4. Run the single module + # 5. Done! + mod_name = args.name + welcome("%s:%s" % (name, mod_name)) init = stages.Init(ds_deps=[]) # Stage 1 init.read_cfg(extract_fns(args)) - tr = stages.Transforms(init, extract_fns(args)) - where_look_mp = { - TR_TPL % ('init'): 'init', - TR_TPL % ('config'): 'config', - TR_TPL % ('final'): 'final', - } - where_look = list(where_look_mp.keys()) - found_at = tr.find_transform(tr_name, where_look) - if not found_at: - msg = ("No known transform named %s " - "in sections (%s)") % (tr_name, ", ".join(where_look)) - LOG.warn(msg) + mods = stages.Modules(init, extract_fns(args)) + mod_args = args.module_args + if mod_args: + LOG.debug("Using passed in arguments %s", mod_args) + mod_freq = args.frequency + if mod_freq: + LOG.debug("Using passed in frequency %s", mod_freq) + # Stage 3 + try: + LOG.debug("Closing stdin") + util.close_stdin() + util.fixup_output(mods.cfg, None) + except: + util.logexc(LOG, "Failed to setup output redirection!") + if args.debug: + # Reset so that all the debug handlers are closed out + LOG.debug(("Logging being reset, this logger may no" + " longer be active shortly")) + logging.resetLogging() + logging.setupLogging(mods.cfg) + # Stage 4 + try: + (_run_am, failures) = mods.run_single(mod_name, + mod_args, + mod_freq) + except ImportError: + util.logexc(LOG, "Failed at importing module %s", mod_name) + return 1 + if failures: + LOG.debug("Ran %s but it failed", mod_name) return 1 else: - LOG.debug("Found transform %s in sections: %s", - tr_name, found_at) - sect_name = found_at[0] - LOG.debug("Selecting section %s as its 'source' section.", sect_name) - tr_args = args.transform_args - if tr_args: - LOG.debug("Using passed in arguments %s", tr_args) - tr_freq = args.frequency - if tr_freq: - LOG.debug("Using passed in frequency %s", tr_freq) - try: - LOG.debug("Closing stdin") - util.close_stdin() - # This seems to use the short name, instead of the long name - util.fixup_output(tr.cfg, where_look_mp.get(sect_name)) - except: - util.logexc(LOG, "Failed to setup output redirection!") - if args.debug: - # Reset so that all the debug handlers are closed out - LOG.debug(("Logging being reset, this logger may no" - " longer be active shortly")) - logging.resetLogging() - logging.setupLogging(tr.cfg) - (_run_am, failures) = tr.run_single(tr_name, sect_name, - tr_args, tr_freq) - if failures: - LOG.debug("Ran %s but it failed", tr_name) - return 1 - else: - return 0 + return 0 def main(): @@ -357,7 +347,7 @@ def main(): # Each action and its sub-options (if any) parser_init = subparsers.add_parser('init', help=('initializes cloud-init and' - ' performs initial transforms')) + ' performs initial modules')) parser_init.add_argument("--local", '-l', action='store_true', help="start in local mode (default: %(default)s)", default=False) @@ -366,15 +356,15 @@ def main(): 
parser_init.set_defaults(action=('init', main_init)) # These settings are used for the 'config' and 'final' stages - parser_tr = subparsers.add_parser('transform', - help=('performs transforms ' + parser_mod = subparsers.add_parser('modules', + help=('activates modules ' 'using a given configuration key')) - parser_tr.add_argument("--mode", '-m', action='store', - help=("transform configuration name " + parser_mod.add_argument("--mode", '-m', action='store', + help=("module configuration name " "to use (default: %(default)s)"), default='config', - choices=('config', 'final')) - parser_tr.set_defaults(action=('transform', main_transform)) + choices=('init', 'config', 'final')) + parser_mod.set_defaults(action=('modules', main_modules)) # These settings are used when you want to query information # stored in the cloud-init data objects/directories/files @@ -387,23 +377,21 @@ def main(): choices=QUERY_DATA_TYPES) parser_query.set_defaults(action=('query', main_query)) - # This subcommand allows you to run a single transform + # This subcommand allows you to run a single module parser_single = subparsers.add_parser('single', - help=('run a single transform ')) + help=('run a single module ')) parser_single.set_defaults(action=('single', main_single)) parser_single.add_argument("--name", '-n', action="store", - help="transform name to run", + help="module name to run", required=True) parser_single.add_argument("--frequency", action="store", - help=("frequency of " - " the transform (default: %(default)s)"), + help=("frequency of the module"), required=False, - default=settings.PER_ALWAYS, choices=settings.FREQUENCIES) - parser_single.add_argument("transform_args", nargs="*", + parser_single.add_argument("module_args", nargs="*", metavar='argument', help=('any additional arguments to' - ' pass to this transform')) + ' pass to this module')) parser_single.set_defaults(action=('single', main_single)) -- cgit v1.2.3 From ec4bdc4fb8d8d3a8f8b4f498eb47eac740485ede Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 17:13:55 -0700 Subject: Massive pylint + pep8 fixups! 
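
The bulk of this cleanup renames module-level constants to UPPER_CASE (the spelling pylint's invalid-name check and PEP 8 expect), removes trailing whitespace and stray blank lines, and realigns wrapped arguments. A minimal sketch of the constant-rename pattern, as it appears in the cc_chef.py hunk below:

    # before: lowercase module-level constant, flagged by pylint
    ruby_version_default = "1.8"

    # after: module-level constants are spelled in UPPER_CASE
    RUBY_VERSION_DEFAULT = "1.8"

The same rename is applied to reject_cmd, final_message_def, helper_tool, key2file and the other module-level names touched in this patch.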
--- bin/cloud-init | 20 +++++++-------- cloudinit/cloud.py | 3 ++- cloudinit/config/__init__.py | 2 +- cloudinit/config/cc_chef.py | 14 +++++------ cloudinit/config/cc_disable_ec2_metadata.py | 4 +-- cloudinit/config/cc_final_message.py | 8 +++--- cloudinit/config/cc_foo.py | 8 +++--- cloudinit/config/cc_keys_to_console.py | 8 +++--- cloudinit/config/cc_landscape.py | 2 +- cloudinit/config/cc_mcollective.py | 4 +-- cloudinit/config/cc_mounts.py | 13 +++++----- cloudinit/config/cc_phone_home.py | 11 +++++--- cloudinit/config/cc_puppet.py | 2 +- cloudinit/config/cc_resizefs.py | 10 ++++---- cloudinit/config/cc_salt_minion.py | 2 +- cloudinit/config/cc_scripts_per_boot.py | 6 ++--- cloudinit/config/cc_scripts_per_instance.py | 6 ++--- cloudinit/config/cc_scripts_per_once.py | 6 ++--- cloudinit/config/cc_scripts_user.py | 6 ++--- cloudinit/config/cc_set_passwords.py | 4 +-- cloudinit/config/cc_ssh.py | 39 +++++++++++++++-------------- cloudinit/distros/__init__.py | 1 - cloudinit/distros/rhel.py | 14 +++++------ cloudinit/distros/ubuntu.py | 6 ++--- cloudinit/handlers/__init__.py | 8 +++--- cloudinit/helpers.py | 6 ++--- cloudinit/log.py | 2 -- cloudinit/settings.py | 2 +- cloudinit/sources/DataSourceCloudStack.py | 2 +- cloudinit/sources/DataSourceConfigDrive.py | 2 +- cloudinit/sources/DataSourceEc2.py | 6 ++--- cloudinit/sources/DataSourceMAAS.py | 1 + cloudinit/sources/DataSourceNoCloud.py | 2 +- cloudinit/ssh_util.py | 5 ++-- cloudinit/stages.py | 6 ++--- cloudinit/url_helper.py | 14 +++++------ cloudinit/user_data.py | 28 +++++++++------------ cloudinit/util.py | 37 ++++++++++++++------------- 38 files changed, 159 insertions(+), 161 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 032d5f39..c1788ef4 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -75,6 +75,7 @@ def welcome(action): sys.stderr.flush() LOG.info(welcome_msg) + def extract_fns(args): # Files are already opened so lets just pass that along # since it would of broke if it couldn't have @@ -329,11 +330,11 @@ def main_single(name, args): def main(): parser = argparse.ArgumentParser() - + # Top level args - parser.add_argument('--version', '-v', action='version', + parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + (version.version_string())) - parser.add_argument('--file', '-f', action='append', + parser.add_argument('--file', '-f', action='append', dest='files', help=('additional yaml configuration' ' files to use'), @@ -345,18 +346,18 @@ def main(): subparsers = parser.add_subparsers() # Each action and its sub-options (if any) - parser_init = subparsers.add_parser('init', + parser_init = subparsers.add_parser('init', help=('initializes cloud-init and' ' performs initial modules')) parser_init.add_argument("--local", '-l', action='store_true', help="start in local mode (default: %(default)s)", default=False) - # This is used so that we can know which action is selected + + # This is used so that we can know which action is selected + # the functor to use to run this subcommand parser_init.set_defaults(action=('init', main_init)) # These settings are used for the 'config' and 'final' stages - parser_mod = subparsers.add_parser('modules', + parser_mod = subparsers.add_parser('modules', help=('activates modules ' 'using a given configuration key')) parser_mod.add_argument("--mode", '-m', action='store', @@ -368,7 +369,7 @@ def main(): # These settings are used when you want to query information # stored in the cloud-init data objects/directories/files - 
parser_query = subparsers.add_parser('query', + parser_query = subparsers.add_parser('query', help=('query information stored ' 'in cloud-init')) parser_query.add_argument("--name", '-n', action="store", @@ -378,7 +379,7 @@ def main(): parser_query.set_defaults(action=('query', main_query)) # This subcommand allows you to run a single module - parser_single = subparsers.add_parser('single', + parser_single = subparsers.add_parser('single', help=('run a single module ')) parser_single.set_defaults(action=('single', main_single)) parser_single.add_argument("--name", '-n', action="store", @@ -394,10 +395,10 @@ def main(): ' pass to this module')) parser_single.set_defaults(action=('single', main_single)) - args = parser.parse_args() # Setup basic logging to start (until reinitialized) + # iff in debug mode... if args.debug: logging.setupBasicLogging() @@ -407,4 +408,3 @@ def main(): if __name__ == '__main__': sys.exit(main()) - diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py index 90679202..6cdcb76a 100644 --- a/cloudinit/cloud.py +++ b/cloudinit/cloud.py @@ -38,6 +38,7 @@ LOG = logging.getLogger(__name__) # as providing a backwards compatible object that can be maintained # while the stages/other objects can be worked on independently... + class Cloud(object): def __init__(self, datasource, paths, cfg, distro, runners): self.datasource = datasource @@ -71,7 +72,7 @@ class Cloud(object): # The rest of thes are just useful proxies def get_userdata(self): return self.datasource.get_userdata() - + def get_instance_id(self): return self.datasource.get_instance_id() diff --git a/cloudinit/config/__init__.py b/cloudinit/config/__init__.py index 74e2f275..02e32462 100644 --- a/cloudinit/config/__init__.py +++ b/cloudinit/config/__init__.py @@ -25,7 +25,7 @@ from cloudinit import log as logging LOG = logging.getLogger(__name__) -# This prefix is used to make it less +# This prefix is used to make it less # of a change that when importing # we will not find something else with the same # name in the lookup path... 
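
The prefix mentioned in the comment above is presumably the cc_ seen on the config module filenames (cc_chef.py, cc_mounts.py, cc_ssh.py and so on in the hunks that follow). A minimal sketch of the idea, using a hypothetical helper name rather than cloud-init's actual lookup code:

    import importlib

    MOD_PREFIX = 'cc_'  # assumed value, matching the on-disk module names

    def load_config_module(name):
        # hypothetical illustration: 'chef' -> cloudinit.config.cc_chef;
        # the prefix keeps a config module named 'chef' or 'mounts' from
        # shadowing an unrelated module of the same name on sys.path
        return importlib.import_module('cloudinit.config.' + MOD_PREFIX + name)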
diff --git a/cloudinit/config/cc_chef.py b/cloudinit/config/cc_chef.py index 4e8ef346..74af2a7e 100644 --- a/cloudinit/config/cc_chef.py +++ b/cloudinit/config/cc_chef.py @@ -24,7 +24,7 @@ import os from cloudinit import templater from cloudinit import util -ruby_version_default = "1.8" +RUBY_VERSION_DEFAULT = "1.8" def handle(name, cfg, cloud, log, _args): @@ -38,11 +38,11 @@ def handle(name, cfg, cloud, log, _args): # Ensure the chef directories we use exist c_dirs = [ - '/etc/chef', - '/var/log/chef', - '/var/lib/chef', - '/var/cache/chef', - '/var/backups/chef', + '/etc/chef', + '/var/log/chef', + '/var/lib/chef', + '/var/cache/chef', + '/var/backups/chef', '/var/run/chef', ] for d in c_dirs: @@ -92,7 +92,7 @@ def handle(name, cfg, cloud, log, _args): # this will install and run the chef-client from gems chef_version = util.get_cfg_option_str(chef_cfg, 'version', None) ruby_version = util.get_cfg_option_str(chef_cfg, 'ruby_version', - ruby_version_default) + RUBY_VERSION_DEFAULT) install_chef_from_gems(cloud.distro, ruby_version, chef_version) # and finally, run chef-client log.debug('Running chef-client') diff --git a/cloudinit/config/cc_disable_ec2_metadata.py b/cloudinit/config/cc_disable_ec2_metadata.py index c7d26029..62cca7cc 100644 --- a/cloudinit/config/cc_disable_ec2_metadata.py +++ b/cloudinit/config/cc_disable_ec2_metadata.py @@ -24,13 +24,13 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -reject_cmd = ['route', 'add', '-host', '169.254.169.254', 'reject'] +REJECT_CMD = ['route', 'add', '-host', '169.254.169.254', 'reject'] def handle(name, cfg, _cloud, log, _args): disabled = util.get_cfg_option_bool(cfg, "disable_ec2_metadata", False) if disabled: - util.subp(reject_cmd) + util.subp(REJECT_CMD) else: log.debug(("Skipping transform named %s," " disabling the ec2 route not enabled"), name) diff --git a/cloudinit/config/cc_final_message.py b/cloudinit/config/cc_final_message.py index c257b6d0..fd59aa1e 100644 --- a/cloudinit/config/cc_final_message.py +++ b/cloudinit/config/cc_final_message.py @@ -28,7 +28,7 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -final_message_def = ("Cloud-init v. {{version}} finished at {{timestamp}}." +FINAL_MESSAGE_DEF = ("Cloud-init v. {{version}} finished at {{timestamp}}." " Up {{uptime}} seconds.") @@ -39,21 +39,21 @@ def handle(_name, cfg, cloud, log, args): msg_in = args[0] else: msg_in = util.get_cfg_option_str(cfg, "final_message") - + if not msg_in: template_fn = cloud.get_template_filename('final_message') if template_fn: msg_in = util.load_file(template_fn) if not msg_in: - msg_in = final_message_def + msg_in = FINAL_MESSAGE_DEF uptime = util.uptime() ts = util.time_rfc2822() cver = version.version_string() try: subs = { - 'uptime': uptime, + 'uptime': uptime, 'timestamp': ts, 'version': cver, } diff --git a/cloudinit/config/cc_foo.py b/cloudinit/config/cc_foo.py index 99135704..e81e7faa 100644 --- a/cloudinit/config/cc_foo.py +++ b/cloudinit/config/cc_foo.py @@ -30,19 +30,19 @@ from cloudinit.settings import PER_INSTANCE # as well as any datasource provided configuration # c) A cloud object that can be used to access various # datasource and paths for the given distro and data provided -# by the various datasource instance types. +# by the various datasource instance types. # d) A argument list that may or may not be empty to this module. 
# Typically those are from module configuration where the module # is defined with some extra configuration that will eventually # be translated from yaml into arguments to this module. # 2. A optional 'frequency' that defines how often this module should be ran. -# Typically one of PER_INSTANCE, PER_ALWAYS, PER_ONCE. If not -# provided PER_INSTANCE will be assumed. +# Typically one of PER_INSTANCE, PER_ALWAYS, PER_ONCE. If not +# provided PER_INSTANCE will be assumed. # See settings.py for these constants. # 3. A optional 'distros' array/set/tuple that defines the known distros # this module will work with (if not all of them). This is used to write # a warning out if a module is being ran on a untested distribution for -# informational purposes. If non existent all distros are assumed and +# informational purposes. If non existent all distros are assumed and # no warning occurs. frequency = PER_INSTANCE diff --git a/cloudinit/config/cc_keys_to_console.py b/cloudinit/config/cc_keys_to_console.py index 40758198..a8fb3ba7 100644 --- a/cloudinit/config/cc_keys_to_console.py +++ b/cloudinit/config/cc_keys_to_console.py @@ -26,13 +26,13 @@ from cloudinit import util frequency = PER_INSTANCE # This is a tool that cloud init provides -helper_tool = '/usr/lib/cloud-init/write-ssh-key-fingerprints' +HELPER_TOOL = '/usr/lib/cloud-init/write-ssh-key-fingerprints' def handle(name, cfg, cloud, log, _args): - if not os.path.exists(helper_tool): + if not os.path.exists(HELPER_TOOL): log.warn(("Unable to activate transform %s," - " helper tool not found at %s"), name, helper_tool) + " helper tool not found at %s"), name, HELPER_TOOL) return fp_blacklist = util.get_cfg_option_list(cfg, @@ -42,7 +42,7 @@ def handle(name, cfg, cloud, log, _args): ["ssh-dss"]) try: - cmd = [helper_tool] + cmd = [HELPER_TOOL] cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py index 29ce41b9..599276a7 100644 --- a/cloudinit/config/cc_landscape.py +++ b/cloudinit/config/cc_landscape.py @@ -62,7 +62,7 @@ def handle(name, cfg, cloud, log, _args): ls_cloudcfg = cfg.get("landscape", {}) if not isinstance(ls_cloudcfg, dict): - raise Exception(("'landscape' key existed in config," + raise Exception(("'landscape' key existed in config," " but not a dictionary type," " is a %s instead"), util.obj_name(ls_cloudcfg)) diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py index 4cec6494..ba5e13ca 100644 --- a/cloudinit/config/cc_mcollective.py +++ b/cloudinit/config/cc_mcollective.py @@ -52,7 +52,7 @@ def handle(name, cfg, cloud, log, _args): # It doesn't contain any sections so just add one temporarily # Use a hash id based off the contents, # just incase of conflicts... (try to not have any...) - # This is so that an error won't occur when reading (and no + # This is so that an error won't occur when reading (and no # sections exist in the file) section_tpl = "[nullsection_%s]" attempts = 0 @@ -85,7 +85,7 @@ def handle(name, cfg, cloud, log, _args): # the previous server.cfg and create our new one old_fn = "%s.old" % (server_cfg_fn) util.rename(server_cfg_fn, old_fn) - # Now we got the whole file, write to disk except the section + # Now we got the whole file, write to disk except the section # we added so that config parser won't error out when trying to read. # Note below, that we've just used ConfigParser because it generally # works. 
Below, we remove the initial 'nullsection' header. diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 700fbc44..ab097c2a 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -24,10 +24,10 @@ import re from cloudinit import util -# shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 -shortname_filter = r"^[x]{0,1}[shv]d[a-z][0-9]*$" -shortname = re.compile(shortname_filter) -ws = re.compile("[%s]+" % (whitespace)) +# Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 +SHORTNAME_FILTER = r"^[x]{0,1}[shv]d[a-z][0-9]*$" +SHORTNAME = re.compile(SHORTNAME_FILTER) +WS = re.compile("[%s]+" % (whitespace)) def is_mdname(name): @@ -55,7 +55,6 @@ def handle(_name, cfg, cloud, log, _args): if "mounts" in cfg: cfgmnt = cfg["mounts"] - for i in range(len(cfgmnt)): # skip something that wasn't a list if not isinstance(cfgmnt[i], list): @@ -85,7 +84,7 @@ def handle(_name, cfg, cloud, log, _args): cfgmnt[i][0] = renamed log.debug("Mapped metadata name %s to %s", startname, renamed) else: - if shortname.match(startname): + if SHORTNAME.match(startname): renamed = "/dev/%s" % startname log.debug("Mapped shortname name %s to %s", startname, renamed) cfgmnt[i][0] = renamed @@ -171,7 +170,7 @@ def handle(_name, cfg, cloud, log, _args): fstab = util.load_file(cloud.paths.join(True, "/etc/fstab")) for line in fstab.splitlines(): try: - toks = ws.split(line) + toks = WS.split(line) if toks[3].find(comment) != -1: continue except: diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index a8752527..dcb07b66 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -26,8 +26,13 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -post_list_all = ['pub_key_dsa', 'pub_key_rsa', 'pub_key_ecdsa', - 'instance_id', 'hostname'] +POST_LIST_ALL = [ + 'pub_key_dsa', + 'pub_key_rsa', + 'pub_key_ecdsa', + 'instance_id', + 'hostname' +] # phone_home: @@ -63,7 +68,7 @@ def handle(name, cfg, cloud, log, args): " is not an integer, using %s instead"), tries) if post_list == "all": - post_list = post_list_all + post_list = POST_LIST_ALL all_keys = {} all_keys['instance_id'] = cloud.get_instance_id() diff --git a/cloudinit/config/cc_puppet.py b/cloudinit/config/cc_puppet.py index 5fb88bf2..5154efba 100644 --- a/cloudinit/config/cc_puppet.py +++ b/cloudinit/config/cc_puppet.py @@ -63,7 +63,7 @@ def handle(name, cfg, cloud, log, _args): util.ensure_dir(pp_ssl_dir, 0771) util.chownbyid(pp_ssl_dir, pwd.getpwnam('puppet').pw_uid, 0) - pp_ssl_certs = cloud.paths.join(False, + pp_ssl_certs = cloud.paths.join(False, '/var/lib/puppet/ssl/certs/') util.ensure_dir(pp_ssl_certs) util.chownbyid(pp_ssl_certs, diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index 1690094a..c019989e 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -27,7 +27,7 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -resize_fs_prefixes_cmds = [ +RESIZE_FS_PREFIXES_CMDS = [ ('ext', 'resize2fs'), ('xfs', 'xfs_growfs'), ] @@ -89,16 +89,16 @@ def handle(name, cfg, cloud, log, args): # occurs this temporary file will still benefit from # auto deletion tfh.unlink_now() - + st_dev = nodeify_path(devpth, resize_what, log) fs_type = get_fs_type(st_dev, devpth, log) if not fs_type: log.warn("Could not determine filesystem type of %s", resize_what) return - + resizer = None fstype_lc = fs_type.lower() - for (pfix, root_cmd) 
in resize_fs_prefixes_cmds: + for (pfix, root_cmd) in RESIZE_FS_PREFIXES_CMDS: if fstype_lc.startswith(pfix): resizer = root_cmd break @@ -112,7 +112,7 @@ def handle(name, cfg, cloud, log, args): resize_cmd = [resizer, devpth] if resize_root == "noblock": - # Fork to a child that will run + # Fork to a child that will run # the resize command util.fork_cb(do_resize, resize_cmd, log) # Don't delete the file now in the parent diff --git a/cloudinit/config/cc_salt_minion.py b/cloudinit/config/cc_salt_minion.py index 16f5286d..986e6db6 100644 --- a/cloudinit/config/cc_salt_minion.py +++ b/cloudinit/config/cc_salt_minion.py @@ -32,7 +32,7 @@ def handle(name, cfg, cloud, log, _args): # Start by installing the salt package ... cloud.distro.install_packages(["salt"]) - + # Ensure we can configure files at the right dir config_dir = salt_cfg.get("config_dir", '/etc/salt') config_dir = cloud.paths.join(False, config_dir) diff --git a/cloudinit/config/cc_scripts_per_boot.py b/cloudinit/config/cc_scripts_per_boot.py index 364e1d02..d3c47442 100644 --- a/cloudinit/config/cc_scripts_per_boot.py +++ b/cloudinit/config/cc_scripts_per_boot.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -script_subdir = 'per-boot' +SCRIPT_SUBDIR = 'per-boot' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_per_instance.py b/cloudinit/config/cc_scripts_per_instance.py index d75ab47d..8e428ac2 100644 --- a/cloudinit/config/cc_scripts_per_instance.py +++ b/cloudinit/config/cc_scripts_per_instance.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -script_subdir = 'per-instance' +SCRIPT_SUBDIR = 'per-instance' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_per_once.py b/cloudinit/config/cc_scripts_per_once.py index 80f8c325..e7a29a44 100644 --- a/cloudinit/config/cc_scripts_per_once.py +++ b/cloudinit/config/cc_scripts_per_once.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_ONCE frequency = PER_ONCE -script_subdir = 'per-once' +SCRIPT_SUBDIR = 'per-once' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_user.py b/cloudinit/config/cc_scripts_user.py index f4fe3a2a..1ff05aae 100644 --- 
a/cloudinit/config/cc_scripts_user.py +++ b/cloudinit/config/cc_scripts_user.py @@ -26,17 +26,17 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -script_subdir = 'scripts' +SCRIPT_SUBDIR = 'scripts' def handle(name, _cfg, cloud, log, _args): # This is written to by the user data handlers # Ie, any custom shell scripts that come down # go here... - runparts_path = os.path.join(cloud.get_ipath_cur(), script_subdir) + runparts_path = os.path.join(cloud.get_ipath_cur(), SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index e7049f22..ce17f357 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -25,7 +25,7 @@ from cloudinit import util from string import letters, digits # pylint: disable=W0402 # We are removing certain 'painful' letters/numbers -pw_set = (letters.translate(None, 'loLOI') + +PW_SET = (letters.translate(None, 'loLOI') + digits.translate(None, '01')) @@ -148,4 +148,4 @@ def handle(_name, cfg, cloud, log, args): def rand_user_password(pwlen=9): - return util.rand_str(pwlen, select_from=pw_set) + return util.rand_str(pwlen, select_from=PW_SET) diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index e5e99560..4019ae90 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -24,11 +24,11 @@ import glob from cloudinit import util from cloudinit import ssh_util -DISABLE_ROOT_OPTS = ( "no-port-forwarding,no-agent-forwarding," -"no-X11-forwarding,command=\"echo \'Please login as the user \\\"$USER\\\" " +DISABLE_ROOT_OPTS = ("no-port-forwarding,no-agent-forwarding," +"no-X11-forwarding,command=\"echo \'Please login as the user \\\"$USER\\\" " "rather than the user \\\"root\\\".\';echo;sleep 10\"") -key2file = { +KEY_2_FILE = { "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600), "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644), "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600), @@ -37,15 +37,17 @@ key2file = { "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644), } -priv2pub = { - 'rsa_private': 'rsa_public', +PRIV_2_PUB = { + 'rsa_private': 'rsa_public', 'dsa_private': 'dsa_public', 'ecdsa_private': 'ecdsa_public', } -key_gen_tpl = 'o=$(ssh-keygen -yf "%s") && echo "$o" root@localhost > "%s"' +KEY_GEN_TPL = 'o=$(ssh-keygen -yf "%s") && echo "$o" root@localhost > "%s"' -generate_keys = ['rsa', 'dsa', 'ecdsa'] +GENERATE_KEY_NAMES = ['rsa', 'dsa', 'ecdsa'] + +KEY_FILE_TPL = '/etc/ssh/ssh_host_%s_key' def handle(_name, cfg, cloud, log, _args): @@ -58,21 +60,21 @@ def handle(_name, cfg, cloud, log, _args): util.del_file(f) except: util.logexc(log, "Failed deleting key file %s", f) - + if "ssh_keys" in cfg: # if there are keys in cloud-config, use them for (key, val) in cfg["ssh_keys"].iteritems(): - if key in key2file: - tgt_fn = key2file[key][0] - tgt_perms = key2file[key][1] + if key in KEY_2_FILE: + tgt_fn = KEY_2_FILE[key][0] + tgt_perms = KEY_2_FILE[key][1] util.write_file(cloud.paths.join(False, tgt_fn), val, tgt_perms) - for (priv, pub) in priv2pub.iteritems(): + for (priv, pub) in PRIV_2_PUB.iteritems(): if pub in cfg['ssh_keys'] or not priv in cfg['ssh_keys']: continue - pair = (key2file[priv][0], key2file[pub][0]) - cmd = ['sh', '-xc', key_gen_tpl % pair] + pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0]) + cmd = ['sh', '-xc', KEY_GEN_TPL % 
pair] try: # TODO: Is this guard needed? with util.SeLinuxGuard("/etc/ssh", recursive=True): @@ -84,12 +86,11 @@ def handle(_name, cfg, cloud, log, _args): else: # if not, generate them genkeys = util.get_cfg_option_list(cfg, - 'ssh_genkeytypes', - generate_keys) + 'ssh_genkeytypes', + GENERATE_KEY_NAMES) for keytype in genkeys: - keyfile = '/etc/ssh/ssh_host_%s_key' % (keytype) - keyfile = cloud.paths.join(False, keyfile) - util.ensure_dir(os.path.dirname(keyfile)) + keyfile = cloud.paths.join(False, KEY_FILE_TPL % (keytype)) + util.ensure_dir(os.path.dirname(keyfile)) if not os.path.exists(keyfile): cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile] try: diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 45dd85ec..25a60c52 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -157,4 +157,3 @@ def fetch(distro_name, mods=(__name__, )): % (distro_name)) distro_cls = getattr(mod, 'Distro') return distro_cls - diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index b67ae5b8..5cbefa6e 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -35,7 +35,7 @@ class Distro(distros.Distro): def __init__(self, name, cfg, paths): distros.Distro.__init__(self, name, cfg, paths) - + def install_packages(self, pkglist): self.package_command('install', pkglist) @@ -210,12 +210,12 @@ class Distro(distros.Distro): def package_command(self, command, args=None): cmd = ['yum'] # If enabled, then yum will be tolerant of errors on the command line - # with regard to packages. - # For example: if you request to install foo, bar and baz and baz is + # with regard to packages. + # For example: if you request to install foo, bar and baz and baz is # installed; yum won't error out complaining that baz is already - # installed. + # installed. cmd.append("-t") - # Determines whether or not yum prompts for confirmation + # Determines whether or not yum prompts for confirmation # of critical actions. We don't want to prompt... cmd.append("-y") cmd.append(command) @@ -223,8 +223,8 @@ class Distro(distros.Distro): cmd.extend(args) # Allow the output of this to flow outwards (ie not be captured) util.subp(cmd, capture=False) - - + + # This is a util function to translate a ubuntu /etc/network/interfaces 'blob' # to a rhel equiv. that can then be written to /etc/sysconfig/network-scripts/ # TODO remove when we have python-netcf active... diff --git a/cloudinit/distros/ubuntu.py b/cloudinit/distros/ubuntu.py index 5a1b572e..fd7b7b8d 100644 --- a/cloudinit/distros/ubuntu.py +++ b/cloudinit/distros/ubuntu.py @@ -36,11 +36,11 @@ class Distro(distros.Distro): def __init__(self, name, cfg, paths): distros.Distro.__init__(self, name, cfg, paths) - # This will be used to restrict certain + # This will be used to restrict certain # calls from repeatly happening (when they # should only happen say once per instance...) 
self._runner = helpers.Runners(paths) - + def install_packages(self, pkglist): self._update_package_sources() self.package_command('install', pkglist) @@ -131,4 +131,4 @@ class Distro(distros.Distro): def _update_package_sources(self): self._runner.run("update-sources", self.package_command, - ["update"], freq=PER_INSTANCE) \ No newline at end of file + ["update"], freq=PER_INSTANCE) diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index c6f2119c..d52b1cba 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -104,7 +104,7 @@ def run_part(mod, data, ctype, filename, payload, frequency): except: mod_ver = 1 try: - LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", + LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, ctype, filename, mod_ver, frequency) if mod_ver >= 2: # Treat as v. 2 which does get a frequency @@ -114,7 +114,7 @@ def run_part(mod, data, ctype, filename, payload, frequency): mod.handle_part(data, ctype, filename, payload) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" - " with frequency %s"), + " with frequency %s"), mod, ctype, filename, mod_ver, frequency) @@ -178,7 +178,7 @@ def walker_callback(pdata, ctype, filename, payload): payload, pdata['frequency']) -# Callback is a function that will be called with +# Callback is a function that will be called with # (data, content_type, filename, payload) def walk(msg, callback, data): partnum = 0 @@ -226,5 +226,3 @@ def type_from_starts_with(payload, default=None): if payload_lc.startswith(text): return INCLUSION_TYPES_MAP[text] return default - - diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 45633e0f..4447d1ee 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -84,7 +84,7 @@ class FileSemaphores(object): try: util.del_dir(self.sem_path) except (IOError, OSError): - util.logexc(LOG, "Failed deleting semaphore directory %s", + util.logexc(LOG, "Failed deleting semaphore directory %s", self.sem_path) def _acquire(self, name, freq): @@ -212,7 +212,7 @@ class Paths(object): self.cfgs = path_cfgs # Populate all the initial paths self.cloud_dir = self.join(False, - path_cfgs.get('cloud_dir', + path_cfgs.get('cloud_dir', '/var/lib/cloud')) self.instance_link = os.path.join(self.cloud_dir, 'instance') self.boot_finished = os.path.join(self.instance_link, "boot-finished") @@ -237,7 +237,7 @@ class Paths(object): # Set when a datasource becomes active self.datasource = ds - # joins the paths but also appends a read + # joins the paths but also appends a read # or write root if available def join(self, read_only, *paths): if read_only: diff --git a/cloudinit/log.py b/cloudinit/log.py index 478946f8..fc1428a2 100644 --- a/cloudinit/log.py +++ b/cloudinit/log.py @@ -20,7 +20,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . - import logging import logging.handlers import logging.config @@ -53,7 +52,6 @@ def setupBasicLogging(): root.setLevel(DEBUG) - def setupLogging(cfg=None): # See if the config provides any logging conf... 
if not cfg: diff --git a/cloudinit/settings.py b/cloudinit/settings.py index 8a1eaeb3..fac9b862 100644 --- a/cloudinit/settings.py +++ b/cloudinit/settings.py @@ -47,7 +47,7 @@ CFG_BUILTIN = { 'paths': { 'cloud_dir': '/var/lib/cloud', 'templates_dir': '/etc/cloud/templates/', - }, + }, 'distro': 'ubuntu', }, } diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index b1817654..83c577e6 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -121,7 +121,7 @@ class DataSourceCloudStack(sources.DataSource): None, self.metadata_address) self.metadata = boto_utils.get_instance_metadata(self.api_ver, self.metadata_address) - LOG.debug("Crawl of metadata service took %s seconds", + LOG.debug("Crawl of metadata service took %s seconds", int(time.time() - start_time)) return True except Exception: diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 5da1ffea..9905dad4 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -36,7 +36,7 @@ CFG_DRIVE_FILES = [ "meta.js", ] DEFAULT_METADATA = { - "instance-id": DEFAULT_IID, + "instance-id": DEFAULT_IID, "dsmode": DEFAULT_MODE, } CFG_DRIVE_DEV_ENV = 'CLOUD_INIT_CONFIG_DRIVE_DEVICE' diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 55447102..0598dfa2 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -38,7 +38,7 @@ DEF_MD_URL = "http://169.254.169.254" DEF_MD_VERSION = '2009-04-04' # Default metadata urls that will be used if none are provided -# They will be checked for 'resolveability' and some of the +# They will be checked for 'resolveability' and some of the # following may be discarded if they do not resolve DEF_MD_URLS = [DEF_MD_URL, "http://instance-data:8773"] @@ -69,7 +69,7 @@ class DataSourceEc2(sources.DataSource): None, self.metadata_address) self.metadata = boto_utils.get_instance_metadata(self.api_ver, self.metadata_address) - LOG.debug("Crawl of metadata service took %s seconds", + LOG.debug("Crawl of metadata service took %s seconds", int(time.time() - start_time)) return True except Exception: @@ -201,7 +201,7 @@ class DataSourceEc2(sources.DataSource): return None # Example: - # 'block-device-mapping': + # 'block-device-mapping': # {'ami': '/dev/sda1', # 'ephemeral0': '/dev/sdb', # 'root': '/dev/sda1'} diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index bb8fbac1..104e7a54 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -251,6 +251,7 @@ datasources = [ (DataSourceMAAS, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), ] + # Return a list of data sources that match this set of dependencies def get_datasource_list(depends): return sources.list_from_depends(depends, datasources) diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 2b016d1c..8499a97c 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -154,7 +154,7 @@ class DataSourceNoCloud(sources.DataSource): (self.dsmode in ("local", seeded_interfaces))): LOG.info("Updating network interfaces from %s", self) self.distro.apply_network(md['network-interfaces']) - + if md['dsmode'] == self.dsmode: self.seed = ",".join(found) self.metadata = md diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index fef3d40f..45dd5535 100644 
--- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -103,10 +103,10 @@ class AuthKeyLineParser(object): elif curc == '"': quoted = not quoted i = i + 1 - + options = ent[0:i] options_lst = [] - + # Now use a csv parser to pull the options # out of the above string that we just found an endpoint for. # @@ -211,7 +211,6 @@ def update_authorized_keys(fname, keys): def setup_user_keys(keys, user, key_prefix, paths): - # Make sure the users .ssh dir is setup accordingly pwent = pwd.getpwnam(user) ssh_dir = os.path.join(pwent.pw_dir, '.ssh') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ae6e2de5..84a965c2 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -287,7 +287,7 @@ class Init(object): def cloudify(self): # Form the needed options to cloudify our members - return cloud.Cloud(self.datasource, + return cloud.Cloud(self.datasource, self.paths, self.cfg, self.distro, helpers.Runners(self.paths)) @@ -318,7 +318,7 @@ class Init(object): def consume(self, frequency=PER_INSTANCE): cdir = self.paths.get_cpath("handlers") idir = self.paths.get_ipath("handlers") - + # Add the path to the plugins dir to the top of our list for import # instance dir should be read before cloud-dir if cdir and cdir not in sys.path: @@ -417,7 +417,7 @@ class Modules(object): except: util.logexc(LOG, ("Failed loading of datasource" " config object from %s"), self.datasource) - + if self.base_cfg: t_cfgs.append(self.base_cfg) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 1c583eba..223278ce 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -47,11 +47,11 @@ class UrlResponse(object): @property def contents(self): return self._contents - + @property def headers(self): return self._headers - + def __str__(self): if not self.contents: return '' @@ -66,7 +66,7 @@ class UrlResponse(object): return True else: return False - + def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None): @@ -89,8 +89,8 @@ def readurl(url, data=None, timeout=None, excepts = [] LOG.info(("Attempting to open '%s' with %s attempts" - " (%s retries, timeout=%s) to be performed"), - url, attempts, retries, timeout) + " (%s retries, timeout=%s) to be performed"), + url, attempts, retries, timeout) open_args = {} if timeout is not None: open_args['timeout'] = int(timeout) @@ -112,7 +112,7 @@ def readurl(url, data=None, timeout=None, excepts.append(e) except urllib2.URLError as e: # This can be a message string or - # another exception instance + # another exception instance # (socket.error for remote URLs, OSError for local URLs). 
if (isinstance(e.reason, (OSError)) and e.reason.errno == errno.ENOENT): @@ -128,7 +128,7 @@ def readurl(url, data=None, timeout=None, # Didn't work out LOG.warn("Failed reading from %s after %s attempts", url, attempts) - + # It must of errored at least once for code # to get here so re-raise the last error LOG.debug("%s errors occured, re-raising the last one", len(excepts)) diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index b7902d44..4babb8e5 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -65,33 +65,33 @@ class UserDataProcessor(object): # multipart/* are just containers if part.get_content_maintype() == 'multipart': continue - + ctype = None ctype_orig = part.get_content_type() payload = part.get_payload(decode=True) - + if not ctype_orig: ctype_orig = UNDEF_TYPE - + if ctype_orig in TYPE_NEEDED: ctype = handlers.type_from_starts_with(payload) - + if ctype is None: ctype = ctype_orig - + if ctype in INCLUDE_TYPES: self._do_include(payload, append_msg) continue - + if ctype in ARCHIVE_TYPES: self._explode_archive(payload, append_msg) continue - + if 'Content-Type' in base_msg: base_msg.replace_header('Content-Type', ctype) else: base_msg['Content-Type'] = ctype - + self._attach_part(append_msg, part) def _get_include_once_filename(self, entry): @@ -108,8 +108,8 @@ class UserDataProcessor(object): lc_line = line.lower() if lc_line.startswith("#include-once"): line = line[len("#include-once"):].lstrip() - # Every following include will now - # not be refetched.... but will be + # Every following include will now + # not be refetched.... but will be # re-read from a local urlcache (if it worked) include_once_on = True elif lc_line.startswith("#include"): @@ -190,10 +190,10 @@ class UserDataProcessor(object): """ if ATTACHMENT_FIELD not in outer_msg: outer_msg[ATTACHMENT_FIELD] = '0' - + if new_count is not None: outer_msg.replace_header(ATTACHMENT_FIELD, str(new_count)) - + fetched_count = 0 try: fetched_count = int(outer_msg.get(ATTACHMENT_FIELD)) @@ -234,7 +234,3 @@ def convert_string(raw_data, headers=None): msg = MIMEBase(maintype, subtype, *headers) msg.set_payload(data) return msg - - - - diff --git a/cloudinit/util.py b/cloudinit/util.py index 91d20a76..56c01fab 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -35,7 +35,7 @@ import pwd import random import shutil import socket -import string # pylint: disable=W0402 +import string # pylint: disable=W0402 import subprocess import sys import tempfile @@ -153,13 +153,15 @@ def SilentTemporaryFile(**kwargs): # file to unlink has been unlinked elsewhere.. 
LOG.debug("Created temporary file %s", fh.name) fh.unlink = del_file - # Add a new method that will unlink + + # Add a new method that will unlink # right 'now' but still lets the exit # method attempt to remove it (which will # not throw due to our del file being quiet # about files that are not there) def unlink_now(): fh.unlink(fh.name) + setattr(fh, 'unlink_now', unlink_now) return fh @@ -199,7 +201,7 @@ def is_false_str(val, addons=None): def translate_bool(val, addons=None): if not val: - # This handles empty lists and false and + # This handles empty lists and false and # other things that python believes are false return False # If its already a boolean skip @@ -214,7 +216,6 @@ def rand_str(strlen=32, select_from=None): return "".join([random.choice(select_from) for _x in range(0, strlen)]) - def read_conf(fname): try: return load_yaml(load_file(fname), default={}) @@ -275,7 +276,7 @@ def is_ipv4(instr): def merge_base_cfg(cfgfile, cfg_builtin=None): syscfg = read_conf_with_confd(cfgfile) - + kern_contents = read_cc_from_cmdline() kerncfg = {} if kern_contents: @@ -575,7 +576,7 @@ def load_yaml(blob, default=None, allowed=(dict,)): try: blob = str(blob) LOG.debug(("Attempting to load yaml from string " - "of length %s with allowed root types %s"), + "of length %s with allowed root types %s"), len(blob), allowed) converted = yaml.load(blob) if not isinstance(converted, allowed): @@ -625,7 +626,7 @@ def read_conf_d(confd): # remove anything not ending in '.cfg' confs = [f for f in confs if f.endswith(".cfg")] - + # remove anything not a file confs = [f for f in confs if os.path.isfile(os.path.join(confd, f))] @@ -726,9 +727,9 @@ def get_fqdn_from_hosts(hostname, filename="/etc/hosts"): """ For each host a single line should be present with the following information: - - IP_address canonical_hostname [aliases...] - + + IP_address canonical_hostname [aliases...] + Fields of the entry are separated by any number of blanks and/or tab characters. Text from a "#" character until the end of the line is a comment, and is ignored. Host names may contain only alphanumeric @@ -747,7 +748,7 @@ def get_fqdn_from_hosts(hostname, filename="/etc/hosts"): if not line: continue - # If there there is less than 3 entries + # If there there is less than 3 entries # (IP_address, canonical_hostname, alias) # then ignore this line toks = line.split() @@ -829,7 +830,7 @@ def close_stdin(): os.dup2(fp.fileno(), sys.stdin.fileno()) -def find_devs_with(criteria=None, oformat='device', +def find_devs_with(criteria=None, oformat='device', tag=None, no_cache=False, path=None): """ find devices matching given criteria (via blkid) @@ -841,23 +842,23 @@ def find_devs_with(criteria=None, oformat='device', blk_id_cmd = ['blkid'] options = [] if criteria: - # Search for block devices with tokens named NAME that + # Search for block devices with tokens named NAME that # have the value 'value' and display any devices which are found. # Common values for NAME include TYPE, LABEL, and UUID. # If there are no devices specified on the command line, - # all block devices will be searched; otherwise, + # all block devices will be searched; otherwise, # only search the devices specified by the user. options.append("-t%s" % (criteria)) if tag: # For each (specified) device, show only the tags that match tag. options.append("-s%s" % (tag)) if no_cache: - # If you want to start with a clean cache - # (i.e. don't report devices previously scanned + # If you want to start with a clean cache + # (i.e. 
don't report devices previously scanned # but not necessarily available at this time), specify /dev/null. options.extend(["-c", "/dev/null"]) if oformat: - # Display blkid's output using the specified format. + # Display blkid's output using the specified format. # The format parameter may be: # full, value, list, device, udev, export options.append('-o%s' % (oformat)) @@ -1104,7 +1105,7 @@ def mounts(): (dev, mp, fstype, opts, _freq, _passno) = mpline.split() except: continue - # If the name of the mount point contains spaces these + # If the name of the mount point contains spaces these # can be escaped as '\040', so undo that.. mp = mp.replace("\\040", " ") mounted[dev] = { -- cgit v1.2.3 From d0ff82b71e315d49f49cbbd8e6a7740036973a4a Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 23:39:46 -0700 Subject: No need for this now that we have the imports done correctly --- bin/cloud-init | 6 ------ 1 file changed, 6 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index c1788ef4..c4633bdf 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -32,12 +32,6 @@ possible_topdir = os.path.normpath(os.path.join(os.path.abspath( if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): sys.path.insert(0, possible_topdir) -# This is so config modules can be found -if os.path.exists(os.path.join(possible_topdir, - "cloudinit", 'config', "__init__.py")): - sys.path.insert(0, os.path.join(possible_topdir, 'cloudinit', 'config')) - - from cloudinit import log as logging from cloudinit import netinfo from cloudinit import settings -- cgit v1.2.3 From 85f9913e10e55bad037ea70a205c40ff169b7540 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 23:57:30 -0700 Subject: 1. Shorten the passed in name for frequency (and expand it later) 2. 
For single modules, if it doesn't run, print a warning and exit with a return code of 1 --- bin/cloud-init | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index c4633bdf..a2f15c4b 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -52,6 +52,13 @@ QUERY_DATA_TYPES = [ 'instance_id', ] +# Frequency shortname to full name +FREQ_SHORT_NAMES = { + 'instance': settings.PER_INSTANCE, + 'always': settings.PER_ALWAYS, + 'once': settings.PER_ONCE, +} + LOG = logging.getLogger() @@ -294,6 +301,7 @@ def main_single(name, args): mod_freq = args.frequency if mod_freq: LOG.debug("Using passed in frequency %s", mod_freq) + mod_freq = FREQ_SHORT_NAMES.get(mod_freq) # Stage 3 try: LOG.debug("Closing stdin") @@ -308,15 +316,14 @@ def main_single(name, args): logging.resetLogging() logging.setupLogging(mods.cfg) # Stage 4 - try: - (_run_am, failures) = mods.run_single(mod_name, - mod_args, - mod_freq) - except ImportError: - util.logexc(LOG, "Failed at importing module %s", mod_name) - return 1 + (run_am, failures) = mods.run_single(mod_name, + mod_args, + mod_freq) if failures: - LOG.debug("Ran %s but it failed", mod_name) + LOG.warn("Ran %s but it failed!", mod_name) + return 1 + elif run_am == 0: + LOG.warn("Did not run %s, does it exist?", mod_name) return 1 else: return 0 @@ -382,7 +389,7 @@ def main(): parser_single.add_argument("--frequency", action="store", help=("frequency of the module"), required=False, - choices=settings.FREQUENCIES) + choices=list(FREQ_SHORT_NAMES.keys())) parser_single.add_argument("module_args", nargs="*", metavar='argument', help=('any additional arguments to' -- cgit v1.2.3 From f1cab0d88cbcfa7eaa698db7dcc252bb6543d6c0 Mon Sep 17 00:00:00 2001 From: harlowja Date: Thu, 21 Jun 2012 08:38:12 -0700 Subject: 1. Move all info() logging methods to debug() 2. Adjust comment on sources list from depends 3. 
For the /etc/timezone 'writing', add a header that says created by cloud-init --- bin/cloud-init | 3 +-- cloudinit/distros/__init__.py | 6 +++--- cloudinit/distros/ubuntu.py | 6 +++++- cloudinit/helpers.py | 2 +- cloudinit/sources/DataSourceCloudStack.py | 2 +- cloudinit/sources/DataSourceConfigDrive.py | 6 ++---- cloudinit/sources/DataSourceEc2.py | 2 +- cloudinit/sources/DataSourceMAAS.py | 2 +- cloudinit/sources/DataSourceNoCloud.py | 2 +- cloudinit/sources/__init__.py | 22 +++++++++++----------- cloudinit/url_helper.py | 14 +++++++------- 11 files changed, 34 insertions(+), 33 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index a2f15c4b..d193272e 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -73,8 +73,7 @@ def welcome(action): } welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) sys.stderr.write("%s\n" % (welcome_msg)) - sys.stderr.flush() - LOG.info(welcome_msg) + LOG.debug(welcome_msg) def extract_fns(args): diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index e0ef6ee0..6325257c 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -133,14 +133,14 @@ class Distro(object): raise NotImplementedError("Unknown interface action %s" % (action)) cmd = IFACE_ACTIONS[action] try: - LOG.info("Attempting to run %s interface action using command %s", - action, cmd) + LOG.debug("Attempting to run %s interface action using command %s", + action, cmd) (_out, err) = util.subp(cmd) if len(err): LOG.warn("Running %s resulted in stderr output: %s", cmd, err) return True except util.ProcessExecutionError: - util.logexc(LOG, "Running %s failed", cmd) + util.logexc(LOG, "Running interface command %s failed", cmd) return False diff --git a/cloudinit/distros/ubuntu.py b/cloudinit/distros/ubuntu.py index fd7b7b8d..15af2e7f 100644 --- a/cloudinit/distros/ubuntu.py +++ b/cloudinit/distros/ubuntu.py @@ -112,7 +112,11 @@ class Distro(distros.Distro): if not os.path.isfile(tz_file): raise Exception(("Invalid timezone %s," " no file found at %s") % (tz, tz_file)) - tz_contents = "%s\n" % tz + tz_lines = [ + "# Created by cloud-init", + str(tz), + ] + tz_contents = "\n".join(tz_lines) tz_fn = self._paths.join(False, "/etc/timezone") util.write_file(tz_fn, tz_contents) util.copy(tz_file, self._paths.join(False, "/etc/localtime")) diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index b6974f3c..6751f4a5 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -161,7 +161,7 @@ class Runners(object): if not args: args = [] if sem.has_run(name, freq): - LOG.info("%s already ran (freq=%s)", name, freq) + LOG.debug("%s already ran (freq=%s)", name, freq) return (False, None) with sem.lock(name, freq, clear_on_fail) as lk: if not lk: diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index 83c577e6..751bef4f 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -98,7 +98,7 @@ class DataSourceCloudStack(sources.DataSource): timeout=timeout, status_cb=LOG.warn) if url: - LOG.info("Using metadata source: '%s'", url) + LOG.debug("Using metadata source: '%s'", url) else: LOG.critical(("Giving up on waiting for the metadata from %s" " after %s seconds"), diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 9905dad4..320dd1d1 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -87,10 +87,8 @@ class 
DataSourceConfigDrive(sources.DataSource): # Update interfaces and ifup only on the local datasource # this way the DataSourceConfigDriveNet doesn't do it also. if 'network-interfaces' in md and self.dsmode == "local": - if md['dsmode'] == "pass": - LOG.info("Updating network interfaces from configdrive") - else: - LOG.debug("Updating network interfaces from configdrive") + LOG.debug("Updating network interfaces from config drive (%s)", + md['dsmode']) self.distro.apply_network(md['network-interfaces']) self.seed = found diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 0598dfa2..cb460de1 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -169,7 +169,7 @@ class DataSourceEc2(sources.DataSource): timeout=timeout, status_cb=LOG.warn) if url: - LOG.info("Using metadata source: '%s'", url2base[url]) + LOG.debug("Using metadata source: '%s'", url2base[url]) else: LOG.critical("Giving up on md from %s after %s seconds", urls, int(time.time() - start_time)) diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index 104e7a54..22c90b7c 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -128,7 +128,7 @@ class DataSourceMAAS(sources.DataSource): headers_cb=self.md_headers) if url: - LOG.info("Using metadata source: '%s'", url) + LOG.debug("Using metadata source: '%s'", url) else: LOG.critical("Giving up on md from %s after %i seconds", urls, int(time.time() - starttime)) diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 8499a97c..bed500a2 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -152,7 +152,7 @@ class DataSourceNoCloud(sources.DataSource): # ('local' for NoCloud, 'net' for NoCloudNet') if ('network-interfaces' in md and (self.dsmode in ("local", seeded_interfaces))): - LOG.info("Updating network interfaces from %s", self) + LOG.debug("Updating network interfaces from %s", self) self.distro.apply_network(md['network-interfaces']) if md['dsmode'] == self.dsmode: diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 42e924b0..b25724a5 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -166,7 +166,7 @@ class DataSource(object): def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): ds_list = list_sources(cfg_list, ds_deps, pkg_list) ds_names = [util.obj_name(f) for f in ds_list] - LOG.info("Searching for data source in: %s", ds_names) + LOG.debug("Searching for data source in: %s", ds_names) for cls in ds_list: try: @@ -188,9 +188,9 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): # Return an ordered list of classes that match (if any) def list_sources(cfg_list, depends, pkg_list): src_list = [] - LOG.info(("Looking for for data source in: %s," - " via packages %s that matches dependencies %s"), - cfg_list, pkg_list, depends) + LOG.debug(("Looking for for data source in: %s," + " via packages %s that matches dependencies %s"), + cfg_list, pkg_list, depends) for ds_name in cfg_list: if not ds_name.startswith(DS_PREFIX): ds_name = '%s%s' % (DS_PREFIX, ds_name) @@ -207,17 +207,17 @@ def list_sources(cfg_list, depends, pkg_list): return src_list -# depends is a list of dependencies (DEP_FILESYSTEM) -# dslist is a list of 2 item lists -# dslist = [ +# 'depends' is a list of dependencies (DEP_FILESYSTEM) +# ds_list is a list of 2 item lists +# ds_list = 
[ # ( class, ( depends-that-this-class-needs ) ) # } -# it returns a list of 'class' that matched these deps exactly -# it is a helper function for DataSourceCollections -def list_from_depends(depends, dslist): +# It returns a list of 'class' that matched these deps exactly +# It mainly is a helper function for DataSourceCollections +def list_from_depends(depends, ds_list): ret_list = [] depset = set(depends) - for (cls, deps) in dslist: + for (cls, deps) in ds_list: if depset == set(deps): ret_list.append(cls) return ret_list diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 223278ce..dbf72392 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -88,8 +88,8 @@ def readurl(url, data=None, timeout=None, attempts = retries + 1 excepts = [] - LOG.info(("Attempting to open '%s' with %s attempts" - " (%s retries, timeout=%s) to be performed"), + LOG.debug(("Attempting to open '%s' with %s attempts" + " (%s retries, timeout=%s) to be performed"), url, attempts, retries, timeout) open_args = {} if timeout is not None: @@ -105,8 +105,8 @@ def readurl(url, data=None, timeout=None, headers = {} if rh.headers: headers = dict(rh.headers) - LOG.info("Read from %s (%s, %sb) after %s attempts", - url, status, len(content), (i + 1)) + LOG.debug("Read from %s (%s, %sb) after %s attempts", + url, status, len(content), (i + 1)) return UrlResponse(status, content, headers) except urllib2.HTTPError as e: excepts.append(e) @@ -165,7 +165,7 @@ def wait_for_url(urls, max_wait=None, timeout=None, start_time = time.time() def log_status_cb(msg): - LOG.info(msg) + LOG.debug(msg) if status_cb is None: status_cb = log_status_cb @@ -219,8 +219,8 @@ def wait_for_url(urls, max_wait=None, timeout=None, break loop_n = loop_n + 1 - LOG.info("Please wait %s seconds while we wait to try again", - sleep_time) + LOG.debug("Please wait %s seconds while we wait to try again", + sleep_time) time.sleep(sleep_time) return False -- cgit v1.2.3 From 264ff30c0f4424fe48d2bd70c71b68bb9e5afc59 Mon Sep 17 00:00:00 2001 From: harlowja Date: Thu, 21 Jun 2012 23:57:52 -0700 Subject: Ensure that nothing was ran by checking the total count --- bin/cloud-init | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index d193272e..aff8f967 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -92,7 +92,8 @@ def extract_fns(args): def run_module_section(mods, action_name, section): full_section_name = MOD_SECTION_TPL % (section) (ran_am, failures) = mods.run_section(full_section_name) - if not ran_am: + total_attempted = ran_am + len(failures) + if total_attempted == 0: msg = ("No '%s' modules to run" " under section '%s'") % (action_name, full_section_name) sys.stderr.write("%s\n" % (msg)) -- cgit v1.2.3 From 2a2dc725670f8ff3c6024332302ff9c718ff27f2 Mon Sep 17 00:00:00 2001 From: harlowja Date: Fri, 22 Jun 2012 00:04:07 -0700 Subject: 1. Return which modules ran from the run module function 2. 
Use that list in the main binary & adjust related comparisions --- bin/cloud-init | 16 +++++++++------- cloudinit/stages.py | 10 ++++++---- 2 files changed, 15 insertions(+), 11 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index aff8f967..68c7ba76 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -91,8 +91,8 @@ def extract_fns(args): def run_module_section(mods, action_name, section): full_section_name = MOD_SECTION_TPL % (section) - (ran_am, failures) = mods.run_section(full_section_name) - total_attempted = ran_am + len(failures) + (which_ran, failures) = mods.run_section(full_section_name) + total_attempted = len(which_ran) + len(failures) if total_attempted == 0: msg = ("No '%s' modules to run" " under section '%s'") % (action_name, full_section_name) @@ -100,7 +100,8 @@ def run_module_section(mods, action_name, section): LOG.debug(msg) return 0 else: - LOG.debug("Ran %s modules with %s failures", ran_am, len(failures)) + LOG.debug("Ran %s modules with %s failures", + len(which_ran), len(failures)) return len(failures) @@ -316,16 +317,17 @@ def main_single(name, args): logging.resetLogging() logging.setupLogging(mods.cfg) # Stage 4 - (run_am, failures) = mods.run_single(mod_name, - mod_args, - mod_freq) + (which_ran, failures) = mods.run_single(mod_name, + mod_args, + mod_freq) if failures: LOG.warn("Ran %s but it failed!", mod_name) return 1 - elif run_am == 0: + elif not which_ran: LOG.warn("Did not run %s, does it exist?", mod_name) return 1 else: + # Guess it worked return 0 diff --git a/cloudinit/stages.py b/cloudinit/stages.py index cf5e6924..9481db83 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -517,10 +517,12 @@ class Modules(object): return mostly_mods def _run_modules(self, mostly_mods): - failures = [] d_name = self.init.distro.name cc = self.init.cloudify() - am_ran = 0 + # Return which ones ran + # and which ones failed + the exception of why it failed + failures = [] + which_ran = [] for (mod, name, freq, args) in mostly_mods: try: # Try the modules frequency, otherwise fallback to a known one @@ -540,14 +542,14 @@ class Modules(object): func_args = [name, self.cfg, cc, config.LOG, args] # Mark it as having started running - am_ran += 1 + which_ran.append(name) # This name will affect the semaphore name created run_name = "config-%s" % (name) cc.run(run_name, mod.handle, func_args, freq=freq) except Exception as e: util.logexc(LOG, "Running %s (%s) failed", name, mod) failures.append((name, e)) - return (am_ran, failures) + return (which_ran, failures) def run_single(self, mod_name, args=None, freq=None): # Form the users module 'specs' -- cgit v1.2.3 From c8c8c09890a21540bf3c9c0212e6e2c01aecc4db Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 27 Jun 2012 17:07:42 -0700 Subject: No longer need to do this special config appending, the config merger class handles this. 
--- bin/cloud-init | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 68c7ba76..0b879876 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -253,13 +253,7 @@ def main_modules(action_name, args): util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) return 1 # Stage 3 - mod_cfgs = extract_fns(args) - cc_cfg = init.paths.get_ipath_cur('cloud_config') - if settings.CFG_ENV_NAME in os.environ: - cc_cfg = os.environ[settings.CFG_ENV_NAME] - if cc_cfg and os.path.exists(cc_cfg): - mod_cfgs.append(cc_cfg) - mods = stages.Modules(init, mod_cfgs) + mods = stages.Modules(init, extract_fns(args)) # Stage 4 try: LOG.debug("Closing stdin") -- cgit v1.2.3 From cb7274fa1ded413b0c5a19152ddf6e791aba98cf Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 28 Jun 2012 15:13:19 -0700 Subject: 1. Update with smosers code review and comments (and put some of those comments into the files) 2. Rename consume() to consume_userdata() as it helps in figuring out what this does. 3. Fixup the tests due to #2 --- bin/cloud-init | 49 ++++++++++++++++++++++++++++------------ cloudinit/stages.py | 19 ++++++++-------- tests/unittests/test_userdata.py | 10 ++++---- 3 files changed, 49 insertions(+), 29 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 0b879876..d3ef092f 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -34,13 +34,15 @@ if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")): from cloudinit import log as logging from cloudinit import netinfo -from cloudinit import settings from cloudinit import sources from cloudinit import stages from cloudinit import templater from cloudinit import util from cloudinit import version +from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, + CLOUD_CONFIG) + # Module section template MOD_SECTION_TPL = "cloud_%s_modules" @@ -54,9 +56,9 @@ QUERY_DATA_TYPES = [ # Frequency shortname to full name FREQ_SHORT_NAMES = { - 'instance': settings.PER_INSTANCE, - 'always': settings.PER_ALWAYS, - 'once': settings.PER_ONCE, + 'instance': PER_INSTANCE, + 'always': PER_ALWAYS, + 'once': PER_ONCE, } LOG = logging.getLogger() @@ -111,8 +113,15 @@ def main_init(name, args): deps = [sources.DEP_FILESYSTEM] if not args.local: - # TODO: What is this for?? - root_name = "%s.d" % (settings.CLOUD_CONFIG) + # See doc/kernel-cmdline.txt + # + # This is used in maas datasource, in "ephemeral" (read-only root) + # environment where the instance netboots to iscsi ro root. + # and the entity that controls the pxe config has to configure + # the maas datasource. + # + # Could be used elsewhere, only works on network based (not local). + root_name = "%s.d" % (CLOUD_CONFIG) target_fn = os.path.join(root_name, "91_kernel_cmdline_url.cfg") util.read_write_cmdline_url(target_fn) @@ -194,22 +203,34 @@ def main_init(name, args): init.fetch() except sources.DataSourceNotFoundException: util.logexc(LOG, "No instance datasource found!") - # TODO: Return 0 or 1?? - return 1 + # In the case of cloud-init (net mode) it is a bit + # more likely that the user would consider it + # failure if nothing was found. When using + # upstart it will also mentions job failure + # in console log if exit code is != 0. + if args.local: + return 0 + else: + return 1 # Stage 6 iid = init.instancify() LOG.debug("%s will now be targeting instance id: %s", name, iid) init.update() # Stage 7 try: + # Attempt to consume the data per instance. 
+ # This may run user-data handlers and/or perform + # url downloads and such as needed. (ran, _results) = init.cloudify().run('consume_userdata', - init.consume, - args=[settings.PER_INSTANCE], - freq=settings.PER_INSTANCE) + init.consume_userdata, + args=[PER_INSTANCE], + freq=PER_INSTANCE) if not ran: - # Just consume anything that is set to run per - # always if nothing ran in the per instance section - init.consume(settings.PER_ALWAYS) + # Just consume anything that is set to run per-always + # if nothing ran in the per-instance code + # + # TODO: should this always happen?? (even if the above runs?) + init.consume_userdata(PER_ALWAYS) except Exception: util.logexc(LOG, "Consuming user data failed!") return 1 diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 47f6e188..6689e4c9 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -192,13 +192,13 @@ class Init(object): cfg_list = self.cfg.get('datasource_list') or [] return (cfg_list, pkg_list) - def _get_data_source(self, local_only=False): + def _get_data_source(self): if self.datasource: return self.datasource ds = self._restore_from_cache() if ds: LOG.debug("Restored from cache, datasource: %s", ds) - if not ds and not local_only: + if not ds: (cfg_list, pkg_list) = self._get_datasources() # Deep copy so that user-data handlers can not modify # (which will affect user-data handlers down the line...) @@ -209,11 +209,10 @@ class Init(object): cfg_list, pkg_list) LOG.debug("Loaded datasource %s - %s", dsname, ds) - if ds: - self.datasource = ds - # Ensure we adjust our path members datasource - # now that we have one (thus allowing ipath to be used) - self.paths.datasource = ds + self.datasource = ds + # Ensure we adjust our path members datasource + # now that we have one (thus allowing ipath to be used) + self.paths.datasource = ds return ds def _get_instance_subdirs(self): @@ -275,8 +274,8 @@ class Init(object): "%s\n" % (previous_iid)) return iid - def fetch(self, local_only=False): - return self._get_data_source(local_only) + def fetch(self): + return self._get_data_source() def instancify(self): return self._reflect_cur_instance() @@ -312,7 +311,7 @@ class Init(object): ] return def_handlers - def consume(self, frequency=PER_INSTANCE): + def consume_userdata(self, frequency=PER_INSTANCE): cdir = self.paths.get_cpath("handlers") idir = self._get_ipath("handlers") diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py index eeddde7d..861642b6 100644 --- a/tests/unittests/test_userdata.py +++ b/tests/unittests/test_userdata.py @@ -68,7 +68,7 @@ class TestConsumeUserData(MockerTestCase): log_file = self.capture_log(logging.WARNING) ci.fetch() - ci.consume() + ci.consume_userdata() self.assertIn( "Unhandled non-multipart (text/x-not-multipart) userdata:", log_file.getvalue()) @@ -85,7 +85,7 @@ class TestConsumeUserData(MockerTestCase): log_file = self.capture_log(logging.WARNING) ci.fetch() - ci.consume() + ci.consume_userdata() self.assertIn( "Unhandled unknown content-type (text/plain)", log_file.getvalue()) @@ -104,7 +104,7 @@ class TestConsumeUserData(MockerTestCase): log_file = self.capture_log(logging.WARNING) ci.fetch() - ci.consume() + ci.consume_userdata() self.assertEqual("", log_file.getvalue()) def test_mime_text_x_shellscript(self): @@ -122,7 +122,7 @@ class TestConsumeUserData(MockerTestCase): log_file = self.capture_log(logging.WARNING) ci.fetch() - ci.consume() + ci.consume_userdata() self.assertEqual("", log_file.getvalue()) def test_mime_text_plain_shell(self): @@ -140,5 
+140,5 @@ class TestConsumeUserData(MockerTestCase): log_file = self.capture_log(logging.WARNING) ci.fetch() - ci.consume() + ci.consume_userdata() self.assertEqual("", log_file.getvalue()) -- cgit v1.2.3 From d56da066da89cd5ca89cb52e1a99e1d3e0a570d9 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 29 Jun 2012 11:41:15 -0700 Subject: Add a message about why the second consume_userdata occurs since its not very obvious. --- bin/cloud-init | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index d3ef092f..025195d4 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -229,7 +229,8 @@ def main_init(name, args): # Just consume anything that is set to run per-always # if nothing ran in the per-instance code # - # TODO: should this always happen?? (even if the above runs?) + # See: https://bugs.launchpad.net/bugs/819507 for a little + # reason behind this... init.consume_userdata(PER_ALWAYS) except Exception: util.logexc(LOG, "Consuming user data failed!") -- cgit v1.2.3 From 3795d9efbddc04ebdc193b414b3a73c36660e81a Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 29 Jun 2012 13:27:11 -0700 Subject: Fix the datasource not being fetched in single running mode. --- bin/cloud-init | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 025195d4..e00913e3 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -301,16 +301,27 @@ def main_query(name, _args): def main_single(name, args): # Cloud-init single stage is broken up into the following sub-stages # 1. Ensure that the init object fetches its config without errors - # 2. Construct the modules object - # 3. Adjust any subsequent logging/output redirections using + # 2. Attempt to fetch the datasource (warn if it doesn't work) + # 3. Construct the modules object + # 4. Adjust any subsequent logging/output redirections using # the modules objects configuration - # 4. Run the single module - # 5. Done! + # 5. Run the single module + # 6. Done! mod_name = args.name welcome("%s:%s" % (name, mod_name)) init = stages.Init(ds_deps=[]) # Stage 1 init.read_cfg(extract_fns(args)) + # Stage 2 + try: + init.fetch() + except sources.DataSourceNotFoundException: + # There was no datasource found, + # that might be bad (or ok) depending on + # the module being ran (so continue on) + util.logexc(LOG, ("Failed to fetch your datasource," + " likely bad things to come!")) + # Stage 3 mods = stages.Modules(init, extract_fns(args)) mod_args = args.module_args if mod_args: @@ -319,7 +330,7 @@ def main_single(name, args): if mod_freq: LOG.debug("Using passed in frequency %s", mod_freq) mod_freq = FREQ_SHORT_NAMES.get(mod_freq) - # Stage 3 + # Stage 4 try: LOG.debug("Closing stdin") util.close_stdin() @@ -332,7 +343,7 @@ def main_single(name, args): " longer be active shortly")) logging.resetLogging() logging.setupLogging(mods.cfg) - # Stage 4 + # Stage 5 (which_ran, failures) = mods.run_single(mod_name, mod_args, mod_freq) -- cgit v1.2.3 From b4c9e36721965d6b15a35c1c4b035e3656dd547e Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 29 Jun 2012 13:44:31 -0700 Subject: Add a force option which will allow users to continue running when no datasource is found. 
--- bin/cloud-init | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index e00913e3..0330cb2b 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -202,16 +202,18 @@ def main_init(name, args): try: init.fetch() except sources.DataSourceNotFoundException: - util.logexc(LOG, "No instance datasource found!") + util.logexc(LOG, ("No instance datasource found!" + " Likely bad things to come!")) # In the case of cloud-init (net mode) it is a bit # more likely that the user would consider it # failure if nothing was found. When using # upstart it will also mentions job failure # in console log if exit code is != 0. - if args.local: - return 0 - else: - return 1 + if not args.force: + if args.local: + return 0 + else: + return 1 # Stage 6 iid = init.instancify() LOG.debug("%s will now be targeting instance id: %s", name, iid) @@ -272,8 +274,11 @@ def main_modules(action_name, args): init.fetch() except sources.DataSourceNotFoundException: # There was no datasource found, theres nothing to do - util.logexc(LOG, 'Can not apply stage %s, no datasource found!', name) - return 1 + util.logexc(LOG, ('Can not apply stage %s, ' + 'no datasource found!' + " Likely bad things to come!"), name) + if not args.force: + return 1 # Stage 3 mods = stages.Modules(init, extract_fns(args)) # Stage 4 @@ -321,6 +326,8 @@ def main_single(name, args): # the module being ran (so continue on) util.logexc(LOG, ("Failed to fetch your datasource," " likely bad things to come!")) + if not args.force: + return 1 # Stage 3 mods = stages.Modules(init, extract_fns(args)) mod_args = args.module_args @@ -373,6 +380,11 @@ def main(): help=('show additional pre-action' ' logging (default: %(default)s)'), default=False) + parser.add_argument('--force', action='store_true', + help=('force running even if no datasource is' + ' found (use at your own risk)'), + dest='force', + default=False) subparsers = parser.add_subparsers() # Each action and its sub-options (if any) -- cgit v1.2.3 From cc23412d7a4841667179d70571689bb41a167d32 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 29 Jun 2012 14:42:06 -0700 Subject: If logging hasn't been enabled via '--debug' at least log the exceptions that occur to stderr. --- bin/cloud-init | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 0330cb2b..22901e15 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -24,6 +24,7 @@ import argparse import os import sys +import traceback # This is more just for running from the bin folder so that # cloud-init binary can find the cloudinit module @@ -64,6 +65,18 @@ FREQ_SHORT_NAMES = { LOG = logging.getLogger() +# Used for when a logger may not be active +# and we still want to print exceptions... +def print_exc(msg=''): + if msg: + sys.stderr.write("%s\n" % (msg)) + sys.stderr.write('-' * 60) + sys.stderr.write("\n") + traceback.print_exc(file=sys.stderr) + sys.stderr.write('-' * 60) + sys.stderr.write("\n") + + def welcome(action): msg = ("Cloud-init v. {{version}} running '{{action}}' at " "{{timestamp}}. 
Up {{uptime}} seconds.") @@ -151,6 +164,7 @@ def main_init(name, args): (outfmt, errfmt) = util.fixup_output(init.cfg, name) except: util.logexc(LOG, "Failed to setup output redirection!") + print_exc("Failed to setup output redirection!") if args.debug: # Reset so that all the debug handlers are closed out LOG.debug(("Logging being reset, this logger may no" @@ -277,6 +291,9 @@ def main_modules(action_name, args): util.logexc(LOG, ('Can not apply stage %s, ' 'no datasource found!' " Likely bad things to come!"), name) + print_exc(('Can not apply stage %s, ' + 'no datasource found!' + " Likely bad things to come!") % (name)) if not args.force: return 1 # Stage 3 @@ -326,6 +343,8 @@ def main_single(name, args): # the module being ran (so continue on) util.logexc(LOG, ("Failed to fetch your datasource," " likely bad things to come!")) + print_exc(("Failed to fetch your datasource," + " likely bad things to come!")) if not args.force: return 1 # Stage 3 -- cgit v1.2.3 From aa9189e89ec101fba8ca36919a7c800f8b28d5c0 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sun, 1 Jul 2012 00:19:49 -0700 Subject: Add a multi log function that can write to stderr, console and a log debug, useful in certain cases --- bin/cloud-init | 4 +--- cloudinit/config/cc_final_message.py | 6 +----- cloudinit/config/cc_keys_to_console.py | 6 +++--- cloudinit/util.py | 11 +++++++++++ 4 files changed, 16 insertions(+), 11 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index 22901e15..fce4fe8c 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -86,9 +86,7 @@ def welcome(action): 'timestamp': util.time_rfc2822(), 'action': action, } - welcome_msg = "%s" % (templater.render_string(msg, tpl_params)) - sys.stderr.write("%s\n" % (welcome_msg)) - LOG.debug(welcome_msg) + util.multi_log("%s\n" % (templater.render_string(msg, tpl_params))) def extract_fns(args): diff --git a/cloudinit/config/cc_final_message.py b/cloudinit/config/cc_final_message.py index fd59aa1e..711e7b5b 100644 --- a/cloudinit/config/cc_final_message.py +++ b/cloudinit/config/cc_final_message.py @@ -18,8 +18,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import sys - from cloudinit import templater from cloudinit import util from cloudinit import version @@ -57,9 +55,7 @@ def handle(_name, cfg, cloud, log, args): 'timestamp': ts, 'version': cver, } - # Use stdout, stderr or the logger?? 
- content = templater.render_string(msg_in, subs) - sys.stderr.write("%s\n" % (content)) + util.multi_log("%s\n" % (templater.render_string(msg_in, subs))) except Exception: util.logexc(log, "Failed to render final message template") diff --git a/cloudinit/config/cc_keys_to_console.py b/cloudinit/config/cc_keys_to_console.py index d4c877f7..da7d5219 100644 --- a/cloudinit/config/cc_keys_to_console.py +++ b/cloudinit/config/cc_keys_to_console.py @@ -29,7 +29,7 @@ frequency = PER_INSTANCE HELPER_TOOL = '/usr/lib/cloud-init/write-ssh-key-fingerprints' -def handle(name, cfg, cloud, log, _args): +def handle(name, cfg, _cloud, log, _args): if not os.path.exists(HELPER_TOOL): log.warn(("Unable to activate module %s," " helper tool not found at %s"), name, HELPER_TOOL) @@ -46,7 +46,7 @@ def handle(name, cfg, cloud, log, _args): cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) - util.write_file(cloud.paths.join(False, '/dev/console'), stdout) + util.multi_log("%s\n" % (stdout.strip()), stderr=False) except: - log.warn("Writing keys to /dev/console failed!") + log.warn("Writing keys to the system console failed!") raise diff --git a/cloudinit/util.py b/cloudinit/util.py index 0c592656..e6219d66 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -271,6 +271,17 @@ def find_modules(root_dir): return entries +def multi_log(text, console=True, stderr=True, log=None): + if stderr: + sys.stderr.write(text) + if console: + with open('/dev/console', 'wb') as wfh: + wfh.write(text) + wfh.flush() + if log: + log.debug(text) + + def is_ipv4(instr): """ determine if input string is a ipv4 address. return boolean""" toks = instr.split('.') -- cgit v1.2.3 From 879946c26f005f5ae5c2bbdd537beb295d7f4773 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Mon, 2 Jul 2012 10:56:22 -0700 Subject: 1. Move the welcome message template string to a constant at the top of the module 2. Fix the usage of multi_log to log to only one of the places (for now) 3. Update comment about multi-log and why write_file isn't used in this case --- bin/cloud-init | 11 ++++++++--- cloudinit/config/cc_final_message.py | 3 ++- cloudinit/config/cc_keys_to_console.py | 3 ++- cloudinit/util.py | 7 +++++-- 4 files changed, 17 insertions(+), 7 deletions(-) (limited to 'bin') diff --git a/bin/cloud-init b/bin/cloud-init index fce4fe8c..c7863db1 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -45,6 +45,10 @@ from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, CLOUD_CONFIG) +# Pretty little welcome message template +WELCOME_MSG_TPL = ("Cloud-init v. {{version}} running '{{action}}' at " + "{{timestamp}}. Up {{uptime}} seconds.") + # Module section template MOD_SECTION_TPL = "cloud_%s_modules" @@ -56,6 +60,7 @@ QUERY_DATA_TYPES = [ ] # Frequency shortname to full name +# (so users don't have to remember the full name...) FREQ_SHORT_NAMES = { 'instance': PER_INSTANCE, 'always': PER_ALWAYS, @@ -78,15 +83,15 @@ def print_exc(msg=''): def welcome(action): - msg = ("Cloud-init v. {{version}} running '{{action}}' at " - "{{timestamp}}. 
Up {{uptime}} seconds.") tpl_params = { 'version': version.version_string(), 'uptime': util.uptime(), 'timestamp': util.time_rfc2822(), 'action': action, } - util.multi_log("%s\n" % (templater.render_string(msg, tpl_params))) + tpl_msg = templater.render_string(WELCOME_MSG_TPL, tpl_params) + util.multi_log("%s\n" % (tpl_msg), + console=False, stderr=True) def extract_fns(args): diff --git a/cloudinit/config/cc_final_message.py b/cloudinit/config/cc_final_message.py index 711e7b5b..b1caca47 100644 --- a/cloudinit/config/cc_final_message.py +++ b/cloudinit/config/cc_final_message.py @@ -55,7 +55,8 @@ def handle(_name, cfg, cloud, log, args): 'timestamp': ts, 'version': cver, } - util.multi_log("%s\n" % (templater.render_string(msg_in, subs))) + util.multi_log("%s\n" % (templater.render_string(msg_in, subs)), + console=False, stderr=True) except Exception: util.logexc(log, "Failed to render final message template") diff --git a/cloudinit/config/cc_keys_to_console.py b/cloudinit/config/cc_keys_to_console.py index da7d5219..ed7af690 100644 --- a/cloudinit/config/cc_keys_to_console.py +++ b/cloudinit/config/cc_keys_to_console.py @@ -46,7 +46,8 @@ def handle(name, cfg, _cloud, log, _args): cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) - util.multi_log("%s\n" % (stdout.strip()), stderr=False) + util.multi_log("%s\n" % (stdout.strip()), + stderr=False, console=True) except: log.warn("Writing keys to the system console failed!") raise diff --git a/cloudinit/util.py b/cloudinit/util.py index 6b23a0ee..4c29432b 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -275,15 +275,18 @@ def find_modules(root_dir): return entries -def multi_log(text, console=True, stderr=True, log=None): +def multi_log(text, console=True, stderr=True, + log=None, log_level=logging.DEBUG): if stderr: sys.stderr.write(text) if console: + # Don't use the write_file since + # this might be 'sensitive' info (not debug worthy?) with open('/dev/console', 'wb') as wfh: wfh.write(text) wfh.flush() if log: - log.debug(text) + log.log(log_level, text) def is_ipv4(instr): -- cgit v1.2.3