From 9c762cc3fa13782397a15bd3c68e9c62a3cba689 Mon Sep 17 00:00:00 2001 From: Shraddha Pandhe Date: Fri, 6 Dec 2013 11:16:17 -0800 Subject: This is a new debug module thats supposed to print out some information about the instance being created. The module can be included at any stage of the process - init/config/final LP: #1258619 --- cloudinit/config/cc_debug.py | 82 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 cloudinit/config/cc_debug.py (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py new file mode 100644 index 00000000..86c61d68 --- /dev/null +++ b/cloudinit/config/cc_debug.py @@ -0,0 +1,82 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2013 Yahoo! Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from StringIO import StringIO + +from cloudinit import util + +import copy + +import yaml + + +def _format_yaml(obj): + try: + formatted = yaml.safe_dump(obj, + line_break="\n", + indent=4, + explicit_start=True, + explicit_end=True, + default_flow_style=False) + return formatted.strip() + except: + return "???" + + +def _make_header(text): + header = StringIO() + header.write("-" * 80) + header.write("\n") + header.write(text.center(80, ' ')) + header.write("\n") + header.write("-" * 80) + header.write("\n") + return header.getvalue() + + +def handle(name, cfg, cloud, log, _args): + verbose = util.get_cfg_option_bool(cfg, 'verbose', default=True) + if not verbose: + log.debug(("Skipping module named %s," + " verbose printing disabled"), name) + return + # Clean out some keys that we just don't care about showing... + dump_cfg = copy.deepcopy(cfg) + for k in ['log_cfgs']: + dump_cfg.pop(k, None) + all_keys = list(dump_cfg.keys()) + for k in all_keys: + if k.startswith("_"): + dump_cfg.pop(k, None) + # Now dump it... 
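    # Illustrative aside (not part of this commit): the code below collects
    # one text report and then re-emits it line by line with a "ci-info: "
    # prefix, so the console ends up with output roughly like:
    #
    #   ci-info: ------------------------------ Config ------------------------------
    #   ci-info: ---
    #   ci-info: locale: en_US.UTF-8
    #   ci-info: ...
    #   ci-info: ----------------------------- MetaData -----------------------------
    #
    # The keys and values shown are hypothetical; the real output depends on
    # the instance's cloud-config and metadata.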
+ to_print = StringIO() + to_print.write(_make_header("Config")) + to_print.write(_format_yaml(dump_cfg)) + to_print.write("\n") + to_print.write(_make_header("MetaData")) + to_print.write(_format_yaml(cloud.datasource.metadata)) + to_print.write("\n") + to_print.write(_make_header("Misc")) + to_print.write("Datasource: %s\n" % (util.obj_name(cloud.datasource))) + to_print.write("Distro: %s\n" % (util.obj_name(cloud.distro))) + to_print.write("Hostname: %s\n" % (cloud.get_hostname(True))) + to_print.write("Instance ID: %s\n" % (cloud.get_instance_id())) + to_print.write("Locale: %s\n" % (cloud.get_locale())) + to_print.write("Launch IDX: %s\n" % (cloud.launch_index)) + contents = to_print.getvalue() + for line in contents.splitlines(): + line = "ci-info: %s\n" % (line) + util.multi_log(line, console=True, stderr=False) -- cgit v1.2.3 From c0d263968d93e44fbaaa98d284690ac93a6ce024 Mon Sep 17 00:00:00 2001 From: Shraddha Pandhe Date: Wed, 11 Dec 2013 14:21:18 -0800 Subject: bug: 1258619 added namespace for config options, output file, commandline argument for output file --- cloudinit/config/cc_debug.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index 86c61d68..85dc5c58 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -15,11 +15,9 @@ # along with this program. If not, see . from StringIO import StringIO - from cloudinit import util - +from cloudinit import type_utils import copy - import yaml @@ -47,12 +45,17 @@ def _make_header(text): return header.getvalue() -def handle(name, cfg, cloud, log, _args): - verbose = util.get_cfg_option_bool(cfg, 'verbose', default=True) +def handle(name, cfg, cloud, log, args): + verbose = util.get_cfg_by_path(cfg, ('debug','verbose'), default=True) if not verbose: log.debug(("Skipping module named %s," " verbose printing disabled"), name) return + out_file = None + if args: + out_file = args[0] + else: + out_file = util.get_cfg_by_path(cfg, ('debug','output')) # Clean out some keys that we just don't care about showing... 
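    # Illustrative aside (not part of this patch): the ('debug', 'verbose')
    # and ('debug', 'output') lookups above walk a nested section of the
    # merged cloud-config dict, roughly:
    #
    #     cfg = {'debug': {'verbose': False,
    #                      'output': '/var/log/cloud-debug.log'}}
    #     util.get_cfg_by_path(cfg, ('debug', 'verbose'), default=True)  # -> False
    #     util.get_cfg_by_path(cfg, ('debug', 'output'))                 # -> the path
    #
    # The key names come from the diff above; the values and the log path are
    # made-up examples.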
dump_cfg = copy.deepcopy(cfg) for k in ['log_cfgs']: @@ -70,8 +73,8 @@ def handle(name, cfg, cloud, log, _args): to_print.write(_format_yaml(cloud.datasource.metadata)) to_print.write("\n") to_print.write(_make_header("Misc")) - to_print.write("Datasource: %s\n" % (util.obj_name(cloud.datasource))) - to_print.write("Distro: %s\n" % (util.obj_name(cloud.distro))) + to_print.write("Datasource: %s\n" % (type_utils.obj_name(cloud.datasource))) + to_print.write("Distro: %s\n" % (type_utils.obj_name(cloud.distro))) to_print.write("Hostname: %s\n" % (cloud.get_hostname(True))) to_print.write("Instance ID: %s\n" % (cloud.get_instance_id())) to_print.write("Locale: %s\n" % (cloud.get_locale())) @@ -79,4 +82,7 @@ def handle(name, cfg, cloud, log, _args): contents = to_print.getvalue() for line in contents.splitlines(): line = "ci-info: %s\n" % (line) - util.multi_log(line, console=True, stderr=False) + if out_file: + util.write_file(out_file, line, 0644, "a") + else: + util.multi_log(line, console=True, stderr=False) -- cgit v1.2.3 From 7c6f2de4f7f66874d6c9131c04cb84637955e5ce Mon Sep 17 00:00:00 2001 From: Shraddha Pandhe Date: Wed, 11 Dec 2013 15:41:14 -0800 Subject: essage --- cloudinit/config/cc_debug.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index 85dc5c58..a3d99da8 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -67,10 +67,10 @@ def handle(name, cfg, cloud, log, args): # Now dump it... to_print = StringIO() to_print.write(_make_header("Config")) - to_print.write(_format_yaml(dump_cfg)) + to_print.write(util.yaml_dumps(dump_cfg)) to_print.write("\n") to_print.write(_make_header("MetaData")) - to_print.write(_format_yaml(cloud.datasource.metadata)) + to_print.write(util.yaml_dumps(cloud.datasource.metadata)) to_print.write("\n") to_print.write(_make_header("Misc")) to_print.write("Datasource: %s\n" % (type_utils.obj_name(cloud.datasource))) @@ -80,9 +80,11 @@ def handle(name, cfg, cloud, log, args): to_print.write("Locale: %s\n" % (cloud.get_locale())) to_print.write("Launch IDX: %s\n" % (cloud.launch_index)) contents = to_print.getvalue() + content_to_file = [] for line in contents.splitlines(): line = "ci-info: %s\n" % (line) - if out_file: - util.write_file(out_file, line, 0644, "a") - else: - util.multi_log(line, console=True, stderr=False) + content_to_file.append(line) + if out_file: + util.write_file(out_file, "".join(content_to_file), 0644, "w") + else: + util.multi_log("".join(content_to_file), console=True, stderr=False) -- cgit v1.2.3 From bce8220f1688af1e155661bfd57e73fe23c42522 Mon Sep 17 00:00:00 2001 From: Shraddha Pandhe Date: Wed, 11 Dec 2013 15:51:40 -0800 Subject: Removed method _format_yaml --- cloudinit/config/cc_debug.py | 13 ------------- 1 file changed, 13 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index a3d99da8..971af71b 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -21,19 +21,6 @@ import copy import yaml -def _format_yaml(obj): - try: - formatted = yaml.safe_dump(obj, - line_break="\n", - indent=4, - explicit_start=True, - explicit_end=True, - default_flow_style=False) - return formatted.strip() - except: - return "???" 
- - def _make_header(text): header = StringIO() header.write("-" * 80) -- cgit v1.2.3 From 67ede943d3a02878061be2314300644c636b14f8 Mon Sep 17 00:00:00 2001 From: Shraddha Pandhe Date: Wed, 11 Dec 2013 15:56:08 -0800 Subject: Removed yaml import --- cloudinit/config/cc_debug.py | 1 - 1 file changed, 1 deletion(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index 971af71b..c0a447cb 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -18,7 +18,6 @@ from StringIO import StringIO from cloudinit import util from cloudinit import type_utils import copy -import yaml def _make_header(text): -- cgit v1.2.3 From 5414797f1b9286ddbe82c8637dfff74cf352b967 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 12 Dec 2013 11:47:53 -0500 Subject: fix pep8 and pylint warnings This fixes warnings raised by: ./tools/run-pep8 cloudinit/config/cc_debug.py ./tools/run-pylint cloudinit/config/cc_debug.py --- cloudinit/config/cc_debug.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index c0a447cb..55fdf2dd 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -32,7 +32,7 @@ def _make_header(text): def handle(name, cfg, cloud, log, args): - verbose = util.get_cfg_by_path(cfg, ('debug','verbose'), default=True) + verbose = util.get_cfg_by_path(cfg, ('debug', 'verbose'), default=True) if not verbose: log.debug(("Skipping module named %s," " verbose printing disabled"), name) @@ -41,7 +41,7 @@ def handle(name, cfg, cloud, log, args): if args: out_file = args[0] else: - out_file = util.get_cfg_by_path(cfg, ('debug','output')) + out_file = util.get_cfg_by_path(cfg, ('debug', 'output')) # Clean out some keys that we just don't care about showing... dump_cfg = copy.deepcopy(cfg) for k in ['log_cfgs']: @@ -59,7 +59,8 @@ def handle(name, cfg, cloud, log, args): to_print.write(util.yaml_dumps(cloud.datasource.metadata)) to_print.write("\n") to_print.write(_make_header("Misc")) - to_print.write("Datasource: %s\n" % (type_utils.obj_name(cloud.datasource))) + to_print.write("Datasource: %s\n" % + (type_utils.obj_name(cloud.datasource))) to_print.write("Distro: %s\n" % (type_utils.obj_name(cloud.distro))) to_print.write("Hostname: %s\n" % (cloud.get_hostname(True))) to_print.write("Instance ID: %s\n" % (cloud.get_instance_id())) -- cgit v1.2.3 From fd5231ae771cd3b87c26ac2b0839fb672bf0acee Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 12 Dec 2013 11:50:55 -0500 Subject: be verbose explicitly if run from cmdline. Let the command line (or module args) that set outfile explicitly override a config'd value of 'verbose'. Ie, if /etc/cloud/cloud.cfg.d/my.cfg had: debug: verbose: False but the user ran: cloud-init single --frequency=always --name=debug output.txt Then they probably wanted to have the debug in output.txt even though verbose was configured to False. 
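(Illustrative note, not part of the original commit message:) after this
change the module reads its options from a 'debug' section of the merged
cloud-config, for example:

  debug:
    verbose: true
    output: /var/log/cloud-init-debug.log

while a positional module argument (the "output.txt" in the command above)
both selects the output file and forces verbose printing on, overriding the
configured value. The log path shown here is only an example, not a default
shipped with cloud-init.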
--- cloudinit/config/cc_debug.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index 55fdf2dd..cfd31fa1 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -33,15 +33,17 @@ def _make_header(text): def handle(name, cfg, cloud, log, args): verbose = util.get_cfg_by_path(cfg, ('debug', 'verbose'), default=True) - if not verbose: - log.debug(("Skipping module named %s," - " verbose printing disabled"), name) - return - out_file = None if args: + # if args are provided (from cmdline) then explicitly set verbose out_file = args[0] + verbose = True else: out_file = util.get_cfg_by_path(cfg, ('debug', 'output')) + + if not verbose: + log.debug(("Skipping module named %s," + " verbose printing disabled"), name) + return # Clean out some keys that we just don't care about showing... dump_cfg = copy.deepcopy(cfg) for k in ['log_cfgs']: -- cgit v1.2.3 From a5727fe1477c9cc4288d1ac41f70bd1ab7d7928a Mon Sep 17 00:00:00 2001 From: Ben Howard Date: Wed, 8 Jan 2014 17:16:24 -0700 Subject: Significant re-working of the userdata handling and introduction of vendordata. Vendordata is a datasource provided userdata-like blob that is parsed similiarly to userdata, execept at the user's pleasure. cloudinit/config/cc_scripts_vendor.py: added vendor script cloud config cloudinit/config/cc_vendor_scripts_per_boot.py: added vendor per boot cloud config cloudinit/config/cc_vendor_scripts_per_instance.py: added vendor per instance vendor cloud config cloudinit/config/cc_vendor_scripts_per_once.py: added per once vendor cloud config script doc/examples/cloud-config-vendor-data.txt: documentation of vendor-data examples doc/vendordata.txt: documentation of vendordata for vendors (RENAMED) tests/unittests/test_userdata.py => tests/unittests/test_userdata.py TO: tests/unittests/test_userdata.py => tests/unittests/test_data.py: userdata test cases are not expanded to confirm superiority over vendor data. bin/cloud-init: change instances of 'consume_userdata' to 'consume_data' cloudinit/handlers/cloud_config.py: Added vendor script handling to default cloud-config modules cloudinit/handlers/shell_script.py: Added ability to change the path key to support vendor provided 'vendor-scripts'. Defaults to 'script'. cloudinit/helpers.py: - Changed ConfigMerger to include handling of vendordata. - Changed helpers to include paths for vendordata. cloudinit/sources/__init__.py: Added functions for helping vendordata - get_vendordata_raw(): returns vendordata unprocessed - get_vendordata(): returns vendordata through userdata processor - has_vendordata(): indicator if vendordata is present - consume_vendordata(): datasource directive for indicating explict user approval of vendordata consumption. Defaults to 'false' cloudinit/stages.py: Re-jiggered for handling of vendordata - _initial_subdirs(): added vendor script definition - update(): added self._store_vendordata() - [ADDED] _store_vendordata(): store vendordata - _get_default_handlers(): modified to allow for filtering which handlers will run against vendordata - [ADDED] _do_handlers(): moved logic from consume_userdata to _do_handlers(). This allows _consume_vendordata() and _consume_userdata() to use the same code path. 
- [RENAMED] consume_userdata() to _consume_userdata() - [ADDED] _consume_vendordata() for handling vendordata - run after userdata to get user cloud-config - uses ConfigMerger to get the configuration from the instance perspective about whether or not to use vendordata - [ADDED] consume_data() to call _consume_{user,vendor}data cloudinit/util.py: - [ADDED] get_nested_option_as_list() used by cc_vendor* for getting a nested value from a dict and returned as a list - runparts(): added 'exe_prefix' for running exe with a prefix, used by cc_vendor* config/cloud.cfg: Added vendor script execution as default tests/unittests/test_runs/test_merge_run.py: changed consume_userdata() to consume_data() tests/unittests/test_runs/test_simple_run.py: changed consume_userdata() to consume_data() --- bin/cloud-init | 6 +- cloudinit/config/cc_scripts_vendor.py | 44 ++ cloudinit/config/cc_vendor_scripts_per_boot.py | 43 ++ cloudinit/config/cc_vendor_scripts_per_instance.py | 43 ++ cloudinit/config/cc_vendor_scripts_per_once.py | 43 ++ cloudinit/handlers/cloud_config.py | 2 + cloudinit/handlers/shell_script.py | 2 + cloudinit/helpers.py | 29 +- cloudinit/sources/__init__.py | 28 +- cloudinit/stages.py | 158 ++++++- cloudinit/user_data.py | 6 +- cloudinit/util.py | 30 +- config/cloud.cfg | 4 + doc/examples/cloud-config-vendor-data.txt | 16 + doc/vendordata.txt | 93 ++++ tests/unittests/test_data.py | 505 +++++++++++++++++++++ tests/unittests/test_runs/test_merge_run.py | 4 +- tests/unittests/test_runs/test_simple_run.py | 4 +- tests/unittests/test_userdata.py | 308 ------------- 19 files changed, 1024 insertions(+), 344 deletions(-) create mode 100644 cloudinit/config/cc_scripts_vendor.py create mode 100644 cloudinit/config/cc_vendor_scripts_per_boot.py create mode 100644 cloudinit/config/cc_vendor_scripts_per_instance.py create mode 100644 cloudinit/config/cc_vendor_scripts_per_once.py create mode 100644 doc/examples/cloud-config-vendor-data.txt create mode 100644 doc/vendordata.txt create mode 100644 tests/unittests/test_data.py delete mode 100644 tests/unittests/test_userdata.py (limited to 'cloudinit/config') diff --git a/bin/cloud-init b/bin/cloud-init index b4f9fd07..80a1df05 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -261,8 +261,8 @@ def main_init(name, args): # Attempt to consume the data per instance. # This may run user-data handlers and/or perform # url downloads and such as needed. - (ran, _results) = init.cloudify().run('consume_userdata', - init.consume_userdata, + (ran, _results) = init.cloudify().run('consume_data', + init.consume_data, args=[PER_INSTANCE], freq=PER_INSTANCE) if not ran: @@ -271,7 +271,7 @@ def main_init(name, args): # # See: https://bugs.launchpad.net/bugs/819507 for a little # reason behind this... - init.consume_userdata(PER_ALWAYS) + init.consume_data(PER_ALWAYS) except Exception: util.logexc(LOG, "Consuming user data failed!") return 1 diff --git a/cloudinit/config/cc_scripts_vendor.py b/cloudinit/config/cc_scripts_vendor.py new file mode 100644 index 00000000..5809a4ba --- /dev/null +++ b/cloudinit/config/cc_scripts_vendor.py @@ -0,0 +1,44 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2011-2014 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Ben Howard +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os + +from cloudinit import util + +from cloudinit.settings import PER_INSTANCE + +frequency = PER_INSTANCE + +SCRIPT_SUBDIR = 'vendor' + + +def handle(name, _cfg, cloud, log, _args): + # This is written to by the user data handlers + # Ie, any custom shell scripts that come down + # go here... + runparts_path = os.path.join(cloud.get_ipath_cur(), 'scripts', + SCRIPT_SUBDIR) + try: + util.runparts(runparts_path) + except: + log.warn("Failed to run module %s (%s in %s)", + name, SCRIPT_SUBDIR, runparts_path) + raise diff --git a/cloudinit/config/cc_vendor_scripts_per_boot.py b/cloudinit/config/cc_vendor_scripts_per_boot.py new file mode 100644 index 00000000..80446e99 --- /dev/null +++ b/cloudinit/config/cc_vendor_scripts_per_boot.py @@ -0,0 +1,43 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2011-2014 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Ben Howard +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os + +from cloudinit import util + +from cloudinit.settings import PER_ALWAYS + +frequency = PER_ALWAYS + +SCRIPT_SUBDIR = 'per-boot' + + +def handle(name, cfg, cloud, log, _args): + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', + SCRIPT_SUBDIR) + vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', + 'prefix') + try: + util.runparts(runparts_path, exe_prefix=vendor_prefix) + except: + log.warn("Failed to run module %s (%s in %s)", + name, SCRIPT_SUBDIR, runparts_path) + raise diff --git a/cloudinit/config/cc_vendor_scripts_per_instance.py b/cloudinit/config/cc_vendor_scripts_per_instance.py new file mode 100644 index 00000000..2d27a0c4 --- /dev/null +++ b/cloudinit/config/cc_vendor_scripts_per_instance.py @@ -0,0 +1,43 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2011-2014 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Ben Howard +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +import os + +from cloudinit import util + +from cloudinit.settings import PER_INSTANCE + +frequency = PER_INSTANCE + +SCRIPT_SUBDIR = 'per-instance' + + +def handle(name, cfg, cloud, log, _args): + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', + SCRIPT_SUBDIR) + vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', + 'prefix') + try: + util.runparts(runparts_path, exe_prefix=vendor_prefix) + except: + log.warn("Failed to run module %s (%s in %s)", + name, SCRIPT_SUBDIR, runparts_path) + raise diff --git a/cloudinit/config/cc_vendor_scripts_per_once.py b/cloudinit/config/cc_vendor_scripts_per_once.py new file mode 100644 index 00000000..ad3e13c8 --- /dev/null +++ b/cloudinit/config/cc_vendor_scripts_per_once.py @@ -0,0 +1,43 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2011-2014 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Ben Howard +# Author: Juerg Haefliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os + +from cloudinit import util + +from cloudinit.settings import PER_ONCE + +frequency = PER_ONCE + +SCRIPT_SUBDIR = 'per-once' + + +def handle(name, cfg, cloud, log, _args): + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', + SCRIPT_SUBDIR) + vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', + 'prefix') + try: + util.runparts(runparts_path, exe_prefix=vendor_prefix) + except: + log.warn("Failed to run module %s (%s in %s)", + name, SCRIPT_SUBDIR, runparts_path) + raise diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 34a73115..4232700f 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -66,6 +66,8 @@ class CloudConfigPartHandler(handlers.Handler): handlers.Handler.__init__(self, PER_ALWAYS, version=3) self.cloud_buf = None self.cloud_fn = paths.get_ipath("cloud_config") + if 'cloud_config_path' in _kwargs: + self.cloud_fn = paths.get_ipath(_kwargs["cloud_config_path"]) self.file_names = [] def list_types(self): diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py index 62289d98..30c1ed89 100644 --- a/cloudinit/handlers/shell_script.py +++ b/cloudinit/handlers/shell_script.py @@ -36,6 +36,8 @@ class ShellScriptPartHandler(handlers.Handler): def __init__(self, paths, **_kwargs): handlers.Handler.__init__(self, PER_ALWAYS) self.script_dir = paths.get_ipath_cur('scripts') + if 'script_path' in _kwargs: + self.script_dir = paths.get_ipath_cur(_kwargs['script_path']) def list_types(self): return [ diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index e5eac6a7..f9da697c 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -200,11 +200,13 @@ class Runners(object): class ConfigMerger(object): def __init__(self, paths=None, datasource=None, - additional_fns=None, base_cfg=None): + additional_fns=None, base_cfg=None, + include_vendor=True): self._paths = paths self._ds = datasource self._fns = additional_fns 
self._base_cfg = base_cfg + self._include_vendor = include_vendor # Created on first use self._cfg = None @@ -237,13 +239,19 @@ class ConfigMerger(object): # a configuration file to use when running... if not self._paths: return i_cfgs - cc_fn = self._paths.get_ipath_cur('cloud_config') - if cc_fn and os.path.isfile(cc_fn): - try: - i_cfgs.append(util.read_conf(cc_fn)) - except: - util.logexc(LOG, 'Failed loading of cloud-config from %s', - cc_fn) + + cc_paths = ['cloud_config'] + if self._include_vendor: + cc_paths.append('vendor_cloud_config') + + for cc_p in cc_paths: + cc_fn = self._paths.get_ipath_cur(cc_p) + if cc_fn and os.path.isfile(cc_fn): + try: + i_cfgs.append(util.read_conf(cc_fn)) + except: + util.logexc(LOG, 'Failed loading of cloud-config from %s', + cc_fn) return i_cfgs def _read_cfg(self): @@ -331,13 +339,18 @@ class Paths(object): self.lookups = { "handlers": "handlers", "scripts": "scripts", + "vendor_scripts": "scripts/vendor", "sem": "sem", "boothooks": "boothooks", "userdata_raw": "user-data.txt", "userdata": "user-data.txt.i", "obj_pkl": "obj.pkl", "cloud_config": "cloud-config.txt", + "vendor_cloud_config": "vendor-cloud-config.txt", "data": "data", + "vendordata_raw": "vendor-data.txt", + "vendordata": "vendor-data.txt.i", + "mergedvendoruser": "vendor-user-data.txt", } # Set when a datasource becomes active self.datasource = ds diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 7dc1fbde..a7c7993f 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -53,6 +53,8 @@ class DataSource(object): self.userdata = None self.metadata = None self.userdata_raw = None + self.vendordata = None + self.vendordata_raw = None # find the datasource config name. # remove 'DataSource' from classname on front, and remove 'Net' on end. @@ -77,9 +79,28 @@ class DataSource(object): if self.userdata is None: self.userdata = self.ud_proc.process(self.get_userdata_raw()) if apply_filter: - return self._filter_userdata(self.userdata) + return self._filter_xdata(self.userdata) return self.userdata + def get_vendordata(self, apply_filter=False): + if self.vendordata is None: + self.vendordata = self.ud_proc.process(self.get_vendordata_raw()) + if apply_filter: + return self._filter_xdata(self.vendordata) + return self.vendordata + + def has_vendordata(self): + if self.vendordata_raw is not None: + return True + return False + + def consume_vendordata(self): + """ + The datasource may allow for consumption of vendordata, but only + when the datasource has allowed it. The default is false. 
+ """ + return False + @property def launch_index(self): if not self.metadata: @@ -88,7 +109,7 @@ class DataSource(object): return self.metadata['launch-index'] return None - def _filter_userdata(self, processed_ud): + def _filter_xdata(self, processed_ud): filters = [ launch_index.Filter(util.safe_int(self.launch_index)), ] @@ -104,6 +125,9 @@ class DataSource(object): def get_userdata_raw(self): return self.userdata_raw + def get_vendordata_raw(self): + return self.vendordata_raw + # the data sources' config_obj is a cloud-config formated # object that came to it from ways other than cloud-config # because cloud-config content would be handled elsewhere diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 07c55802..043b3257 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -26,7 +26,8 @@ import copy import os import sys -from cloudinit.settings import (PER_INSTANCE, FREQUENCIES, CLOUD_CONFIG) +from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES, + CLOUD_CONFIG) from cloudinit import handlers @@ -123,6 +124,10 @@ class Init(object): os.path.join(c_dir, 'scripts', 'per-instance'), os.path.join(c_dir, 'scripts', 'per-once'), os.path.join(c_dir, 'scripts', 'per-boot'), + os.path.join(c_dir, 'scripts', 'vendor'), + os.path.join(c_dir, 'scripts', 'vendor', 'per-boot'), + os.path.join(c_dir, 'scripts', 'vendor', 'per-instance'), + os.path.join(c_dir, 'scripts', 'vendor', 'per-once'), os.path.join(c_dir, 'seed'), os.path.join(c_dir, 'instances'), os.path.join(c_dir, 'handlers'), @@ -319,6 +324,7 @@ class Init(object): if not self._write_to_cache(): return self._store_userdata() + self._store_vendordata() def _store_userdata(self): raw_ud = "%s" % (self.datasource.get_userdata_raw()) @@ -326,21 +332,62 @@ class Init(object): processed_ud = "%s" % (self.datasource.get_userdata()) util.write_file(self._get_ipath('userdata'), processed_ud, 0600) - def _default_userdata_handlers(self): + def _store_vendordata(self): + raw_vd = "%s" % (self.datasource.get_vendordata_raw()) + util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0600) + processed_vd = "%s" % (self.datasource.get_vendordata()) + util.write_file(self._get_ipath('vendordata'), processed_vd, 0600) + + def _get_default_handlers(self, user_data=False, vendor_data=False, + excluded=None): opts = { 'paths': self.paths, 'datasource': self.datasource, } + + def conditional_get(cls, mod): + cls_name = cls.__name__.split('.')[-1] + _mod = getattr(cls, mod) + if not excluded: + return _mod(**opts) + + if cls_name not in excluded: + _mod = getattr(cls, mod) + return _mod(**opts) + # TODO(harlowja) Hmmm, should we dynamically import these?? 
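    # Descriptive aside (not in the original diff): conditional_get() above
    # returns an instance of the named handler class from module 'cls' unless
    # that module's short name (e.g. 'cloud_config', 'shell_script',
    # 'boot_hook') appears in 'excluded'.  The vendor-data path later passes
    # the user's vendor_data 'no_run' list in as 'excluded', which is how a
    # user can keep particular handlers from running against vendor-data.
    # The example names are taken from the handler module names elsewhere in
    # the tree, not from this diff.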
def_handlers = [ - cc_part.CloudConfigPartHandler(**opts), - ss_part.ShellScriptPartHandler(**opts), - bh_part.BootHookPartHandler(**opts), - up_part.UpstartJobPartHandler(**opts), + conditional_get(bh_part, 'BootHookPartHandler'), + conditional_get(up_part, 'UpstartJobPartHandler'), ] - return def_handlers - def consume_userdata(self, frequency=PER_INSTANCE): + # Add in the shell script part handler + if user_data: + def_handlers.extend([ + conditional_get(cc_part, 'CloudConfigPartHandler'), + conditional_get(ss_part, 'ShellScriptPartHandler')]) + + # This changes the path for the vendor script execution + if vendor_data: + opts['script_path'] = "vendor_scripts" + opts['cloud_config_path'] = "vendor_cloud_config" + def_handlers.extend([ + conditional_get(cc_part, 'CloudConfigPartHandler'), + conditional_get(ss_part, 'ShellScriptPartHandler')]) + + return [x for x in def_handlers if x is not None] + + def _default_userdata_handlers(self): + return self._get_default_handlers(user_data=True) + + def _default_vendordata_handlers(self, excluded=None): + return self._get_default_handlers(vendor_data=True, excluded=excluded) + + def _do_handlers(self, data_msg, c_handlers_list, frequency): + """ + Generalized handlers suitable for use with either vendordata + or userdata + """ cdir = self.paths.get_cpath("handlers") idir = self._get_ipath("handlers") @@ -352,12 +399,6 @@ class Init(object): if d and d not in sys.path: sys.path.insert(0, d) - # Ensure datasource fetched before activation (just incase) - user_data_msg = self.datasource.get_userdata(True) - - # This keeps track of all the active handlers - c_handlers = helpers.ContentHandlers() - def register_handlers_in_dir(path): # Attempts to register any handler modules under the given path. if not path or not os.path.isdir(path): @@ -382,13 +423,16 @@ class Init(object): util.logexc(LOG, "Failed to register handler from %s", fname) + # This keeps track of all the active handlers + c_handlers = helpers.ContentHandlers() + # Add any handlers in the cloud-dir register_handlers_in_dir(cdir) # Register any other handlers that come from the default set. This # is done after the cloud-dir handlers so that the cdir modules can # take over the default user-data handler content-types. - for mod in self._default_userdata_handlers(): + for mod in c_handlers_list: types = c_handlers.register(mod, overwrite=False) if types: LOG.debug("Added default handler for %s from %s", types, mod) @@ -420,7 +464,7 @@ class Init(object): # names... 'handlercount': 0, } - handlers.walk(user_data_msg, handlers.walker_callback, + handlers.walk(data_msg, handlers.walker_callback, data=part_data) def finalize_handlers(): @@ -442,6 +486,12 @@ class Init(object): finally: finalize_handlers() + def consume_data(self, frequency=PER_INSTANCE): + # Consume the userdata first, because we need want to let the part + # handlers run first (for merging stuff) + self._consume_userdata(frequency) + self._consume_vendordata(frequency) + # Perform post-consumption adjustments so that # modules that run during the init stage reflect # this consumed set. @@ -453,6 +503,82 @@ class Init(object): # objects before the load of the userdata happened, # this is expected. + def _consume_vendordata(self, frequency=PER_ALWAYS): + """ + Consume the vendordata and run the part handlers on it + """ + if not self.datasource.has_vendordata(): + LOG.info("datasource did not provide vendor data") + return + + # User-data should have been consumed first. If it has, then we can + # read it and simply parse it. 
This means that the datasource can + # define if the vendordata can be consumed too....i.e this method + # gives us a lot of flexibility. + _cc_merger = helpers.ConfigMerger(paths=self._paths, + datasource=self.datasource, + additional_fns=[], + base_cfg=self.cfg, + include_vendor=False) + _cc = _cc_merger.cfg + + if not self.datasource.consume_vendordata(): + if not isinstance(_cc, dict): + LOG.info(("userdata does explicitly allow vendordata " + "consumption")) + return + + if 'vendor_data' not in _cc: + LOG.info(("no 'vendor_data' directive found in the" + "conf files. Skipping consumption of vendordata")) + return + + # This allows for the datasource to signal explicit conditions when + # when the user has opted in to user-data + if self.datasource.consume_vendordata(): + LOG.info(("datasource has indicated that vendordata that user" + " opted-in via another channel")) + + vdc = _cc.get('vendor_data') + no_handlers = None + if isinstance(vdc, dict): + enabled = vdc.get('enabled') + no_handlers = vdc.get('no_run') + + if enabled is None: + LOG.info("vendordata will not be consumed: user has not opted-in") + return + elif util.is_false(enabled): + LOG.info("user has requested NO vendordata consumption") + return + + LOG.info("vendor data will be consumed") + + # Ensure vendordata source fetched before activation (just incase) + vendor_data_msg = self.datasource.get_vendordata(True) + + # This keeps track of all the active handlers, while excluding what the + # users doesn't want run, i.e. boot_hook, cloud_config, shell_script + c_handlers_list = self._default_vendordata_handlers( + excluded=no_handlers) + + # Run the handlers + self._do_handlers(vendor_data_msg, c_handlers_list, frequency) + + def _consume_userdata(self, frequency=PER_INSTANCE): + """ + Consume the userdata and run the part handlers + """ + + # Ensure datasource fetched before activation (just incase) + user_data_msg = self.datasource.get_userdata(True) + + # This keeps track of all the active handlers + c_handlers_list = self._default_userdata_handlers() + + # Run the handlers + self._do_handlers(user_data_msg, c_handlers_list, frequency) + class Modules(object): def __init__(self, init, cfg_files=None): diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index d49ea094..3032ef70 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -88,7 +88,11 @@ class UserDataProcessor(object): def process(self, blob): accumulating_msg = MIMEMultipart() - self._process_msg(convert_string(blob), accumulating_msg) + if isinstance(blob, list): + for b in blob: + self._process_msg(convert_string(b), accumulating_msg) + else: + self._process_msg(convert_string(blob), accumulating_msg) return accumulating_msg def _process_msg(self, base_msg, append_msg): diff --git a/cloudinit/util.py b/cloudinit/util.py index a8ddb390..b69e2bb0 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -606,7 +606,7 @@ def del_dir(path): shutil.rmtree(path) -def runparts(dirp, skip_no_exist=True): +def runparts(dirp, skip_no_exist=True, exe_prefix=None): if skip_no_exist and not os.path.isdir(dirp): return @@ -617,7 +617,10 @@ def runparts(dirp, skip_no_exist=True): if os.path.isfile(exe_path) and os.access(exe_path, os.X_OK): attempted.append(exe_path) try: - subp([exe_path], capture=False) + exe_cmd = exe_prefix + if isinstance(exe_prefix, list): + exe_cmd.extend(exe_path) + subp([exe_cmd], capture=False) except ProcessExecutionError as e: logexc(LOG, "Failed running %s [%s]", exe_path, e.exit_code) failed.append(e) @@ -1847,3 
+1850,26 @@ def expand_dotted_devname(dotted): return toks else: return (dotted, None) + + +def get_nested_option_as_list(dct, first, second): + """ + Return a nested option from a dict as a list + """ + if not isinstance(dct, dict): + raise TypeError("get_nested_option_as_list only works with dicts") + root = dct.get(first) + if not isinstance(root, dict): + return None + + token = root.get(second) + if isinstance(token, list): + return token + elif isinstance(token, dict): + ret_list = [] + for k, v in dct.iteritems(): + ret_list.append((k, v)) + return ret_list + elif isinstance(token, str): + return token.split() + return None diff --git a/config/cloud.cfg b/config/cloud.cfg index a07cd3b0..f325ad1e 100644 --- a/config/cloud.cfg +++ b/config/cloud.cfg @@ -64,6 +64,10 @@ cloud_config_modules: # The modules that run in the 'final' stage cloud_final_modules: - rightscale_userdata + - vendor-scripts-per-once + - vendor-scripts-per-boot + - vendor-scripts-per-instance + - script-vendor - scripts-per-once - scripts-per-boot - scripts-per-instance diff --git a/doc/examples/cloud-config-vendor-data.txt b/doc/examples/cloud-config-vendor-data.txt new file mode 100644 index 00000000..7f90847b --- /dev/null +++ b/doc/examples/cloud-config-vendor-data.txt @@ -0,0 +1,16 @@ +#cloud-config +# +# This explains how to control vendordata via a cloud-config +# +# On select Datasources, vendors have a channel for the consumptions +# of all support user-data types via a special channel called +# vendordata. Users of the end system are given ultimate control. +# +vendor_data: + enabled: True + prefix: /usr/bin/ltrace + +# enabled: whether it is enabled or not +# prefix: the command to run before any vendor scripts. +# Note: this is a fairly weak method of containment. It should +# be used to profile a script, not to prevent its run diff --git a/doc/vendordata.txt b/doc/vendordata.txt new file mode 100644 index 00000000..63a6c999 --- /dev/null +++ b/doc/vendordata.txt @@ -0,0 +1,93 @@ +=== Overview === +Vendordata is data provided by the entity that launches an instance. +The cloud provider makes this data available to the instance via in one +way or another. + +Vendordata follows the same rules as user-data, with the following +caveauts: + 1. Users have ultimate control over vendordata + 2. By default it only runs on first boot + 3. Vendordata runs at the users pleasure. If the use of + vendordata is required for the instance to run, then + vendordata should not be used. + 4. Most vendor operations should be done either via script, + boot_hook or upstart job. + +Vendors utilizing the vendordata channel are strongly advised to +use the #cloud-config-jsonp method, otherwise they risk that a +user can accidently override choices. + +Further, we strongly advise vendors to not 'be evil'. By evil, we +mean any action that could compromise a system. Since users trust +you, please take care to make sure that any vendordata is safe, +atomic, indopenant and does not put your users at risk. + +cloud-init can read this input and act on it in different ways. + +=== Input Formats === +cloud-init will download and cache to filesystem any vendor-data that it +finds. However, certain types of vendor-data are handled specially. 
+ + * Gzip Compressed Content + content found to be gzip compressed will be uncompressed, and + these rules applied to the uncompressed data + + * Mime Multi Part archive + This list of rules is applied to each part of this multi-part file + Using a mime-multi part file, the user can specify more than one + type of data. For example, both a user data script and a + cloud-config type could be specified. + + * vendor-data Script + begins with: #! or Content-Type: text/x-shellscript + script will be executed at "rc.local-like" level during first boot. + rc.local-like means "very late in the boot sequence" + + * Include File + begins with #include or Content-Type: text/x-include-url + This content is a "include" file. The file contains a list of + urls, one per line. Each of the URLs will be read, and their content + will be passed through this same set of rules. Ie, the content + read from the URL can be gzipped, mime-multi-part, or plain text + +* Include File Once + begins with #include-once or Content-Type: text/x-include-once-url + This content is a "include" file. The file contains a list of + urls, one per line. Each of the URLs will be read, and their content + will be passed through this same set of rules. Ie, the content + read from the URL can be gzipped, mime-multi-part, or plain text + This file will just be downloaded only once per instance, and its + contents cached for subsequent boots. This allows you to pass in + one-time-use or expiring URLs. + + * Cloud Config Data + begins with #cloud-config or Content-Type: text/cloud-config + + This content is "cloud-config" data. See the examples for a + commented example of supported config formats. + + * Upstart Job + begins with #upstart-job or Content-Type: text/upstart-job + + Content is placed into a file in /etc/init, and will be consumed + by upstart as any other upstart job. + + * Cloud Boothook + begins with #cloud-boothook or Content-Type: text/cloud-boothook + + This content is "boothook" data. It is stored in a file under + /var/lib/cloud and then executed immediately. + + This is the earliest "hook" available. Note, that there is no + mechanism provided for running only once. The boothook must take + care of this itself. It is provided with the instance id in the + environment variable "INSTANCE_ID". This could be made use of to + provide a 'once-per-instance' + +=== Examples === +There are examples in the examples subdirectory. +Additionally, the 'tools' directory contains 'write-mime-multipart', +which can be used to easily generate mime-multi-part files from a list +of input files. That data can then be given to an instance. + +See 'write-mime-multipart --help' for usage. 
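(Illustrative addition, not part of the committed documentation:) the
mime-multi-part blob described above can also be assembled directly with the
Python standard library, much as this branch's own unit tests do. A minimal
sketch, with made-up payloads and output filename:

    from email.mime.base import MIMEBase
    from email.mime.multipart import MIMEMultipart

    config_part = MIMEBase("text", "cloud-config")
    config_part.set_payload("#cloud-config\n# vendor supplied settings go here\n")

    script_part = MIMEBase("text", "x-shellscript")
    script_part.set_payload("#!/bin/sh\necho 'vendor setup'\n")

    blob = MIMEMultipart()
    blob.attach(config_part)
    blob.attach(script_part)

    # blob.as_string() is the kind of payload a datasource would expose
    # as vendor-data
    open("vendor-data.txt", "w").write(blob.as_string())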
diff --git a/tests/unittests/test_data.py b/tests/unittests/test_data.py new file mode 100644 index 00000000..44395f06 --- /dev/null +++ b/tests/unittests/test_data.py @@ -0,0 +1,505 @@ +"""Tests for handling of userdata within cloud init.""" + +import StringIO + +import gzip +import logging +import os + +from email.mime.application import MIMEApplication +from email.mime.base import MIMEBase +from email.mime.multipart import MIMEMultipart + +from cloudinit import handlers +from cloudinit import helpers as c_helpers +from cloudinit import log +from cloudinit.settings import (PER_INSTANCE) +from cloudinit import sources +from cloudinit import stages +from cloudinit import util + +INSTANCE_ID = "i-testing" + +from tests.unittests import helpers + + +class FakeDataSource(sources.DataSource): + + def __init__(self, userdata=None, vendordata=None, + consume_vendor=False): + sources.DataSource.__init__(self, {}, None, None) + self.metadata = {'instance-id': INSTANCE_ID} + self.userdata_raw = userdata + self.vendordata_raw = vendordata + self._consume_vendor = consume_vendor + + def consume_vendordata(self): + return self._consume_vendor + + +# FIXME: these tests shouldn't be checking log output?? +# Weirddddd... +class TestConsumeUserData(helpers.FilesystemMockingTestCase): + + def setUp(self): + helpers.FilesystemMockingTestCase.setUp(self) + self._log = None + self._log_file = None + self._log_handler = None + + def tearDown(self): + helpers.FilesystemMockingTestCase.tearDown(self) + if self._log_handler and self._log: + self._log.removeHandler(self._log_handler) + + def _patchIn(self, root): + self.restore() + self.patchOS(root) + self.patchUtils(root) + + def capture_log(self, lvl=logging.DEBUG): + log_file = StringIO.StringIO() + self._log_handler = logging.StreamHandler(log_file) + self._log_handler.setLevel(lvl) + self._log = log.getLogger() + self._log.addHandler(self._log_handler) + return log_file + + def test_simple_jsonp(self): + blob = ''' +#cloud-config-jsonp +[ + { "op": "add", "path": "/baz", "value": "qux" }, + { "op": "add", "path": "/bar", "value": "qux2" } +] +''' + + ci = stages.Init() + ci.datasource = FakeDataSource(blob) + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + ci.fetch() + ci.consume_data() + cc_contents = util.load_file(ci.paths.get_ipath("cloud_config")) + cc = util.load_yaml(cc_contents) + self.assertEquals(2, len(cc)) + self.assertEquals('qux', cc['baz']) + self.assertEquals('qux2', cc['bar']) + + def test_simple_jsonp_vendor_and_user(self): + # test that user-data wins over vendor + user_blob = ''' +#cloud-config-jsonp +[ + { "op": "add", "path": "/baz", "value": "qux" }, + { "op": "add", "path": "/bar", "value": "qux2" }, + { "op": "add", "path": "/vendor_data", "value": {"enabled": "true"}} +] +''' + vendor_blob = ''' +#cloud-config-jsonp +[ + { "op": "add", "path": "/baz", "value": "quxA" }, + { "op": "add", "path": "/bar", "value": "quxB" }, + { "op": "add", "path": "/foo", "value": "quxC" } +] +''' + new_root = self.makeDir() + self._patchIn(new_root) + initer = stages.Init() + initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) + initer.read_cfg() + initer.initialize() + initer.fetch() + _iid = initer.instancify() + initer.update() + initer.cloudify().run('consume_data', + initer.consume_data, + args=[PER_INSTANCE], + freq=PER_INSTANCE) + mods = stages.Modules(initer) + (_which_ran, _failures) = mods.run_section('cloud_init_modules') + cfg = mods.cfg + self.assertIn('vendor_data', cfg) + 
self.assertEquals('qux', cfg['baz']) + self.assertEquals('qux2', cfg['bar']) + self.assertEquals('quxC', cfg['foo']) + + def test_simple_jsonp_no_vendor_consumed(self): + # make sure that vendor data is not consumed + user_blob = ''' +#cloud-config-jsonp +[ + { "op": "add", "path": "/baz", "value": "qux" }, + { "op": "add", "path": "/bar", "value": "qux2" } +] +''' + vendor_blob = ''' +#cloud-config-jsonp +[ + { "op": "add", "path": "/baz", "value": "quxA" }, + { "op": "add", "path": "/bar", "value": "quxB" }, + { "op": "add", "path": "/foo", "value": "quxC" } +] +''' + new_root = self.makeDir() + self._patchIn(new_root) + initer = stages.Init() + initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) + initer.read_cfg() + initer.initialize() + initer.fetch() + _iid = initer.instancify() + initer.update() + initer.cloudify().run('consume_data', + initer.consume_data, + args=[PER_INSTANCE], + freq=PER_INSTANCE) + mods = stages.Modules(initer) + (_which_ran, _failures) = mods.run_section('cloud_init_modules') + cfg = mods.cfg + self.assertEquals('qux', cfg['baz']) + self.assertEquals('qux2', cfg['bar']) + self.assertNotIn('foo', cfg) + + def test_mixed_cloud_config(self): + blob_cc = ''' +#cloud-config +a: b +c: d +''' + message_cc = MIMEBase("text", "cloud-config") + message_cc.set_payload(blob_cc) + + blob_jp = ''' +#cloud-config-jsonp +[ + { "op": "replace", "path": "/a", "value": "c" }, + { "op": "remove", "path": "/c" } +] +''' + + message_jp = MIMEBase('text', "cloud-config-jsonp") + message_jp.set_payload(blob_jp) + + message = MIMEMultipart() + message.attach(message_cc) + message.attach(message_jp) + + ci = stages.Init() + ci.datasource = FakeDataSource(str(message)) + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + ci.fetch() + ci.consume_data() + cc_contents = util.load_file(ci.paths.get_ipath("cloud_config")) + cc = util.load_yaml(cc_contents) + self.assertEquals(1, len(cc)) + self.assertEquals('c', cc['a']) + + def test_vendor_with_datasource_perm(self): + vendor_blob = ''' +#cloud-config +a: b +name: vendor +run: + - x + - y +''' + + new_root = self.makeDir() + self._patchIn(new_root) + initer = stages.Init() + initer.datasource = FakeDataSource('', vendordata=vendor_blob, + consume_vendor=True) + initer.read_cfg() + initer.initialize() + initer.fetch() + _iid = initer.instancify() + initer.update() + initer.cloudify().run('consume_data', + initer.consume_data, + args=[PER_INSTANCE], + freq=PER_INSTANCE) + mods = stages.Modules(initer) + (_which_ran, _failures) = mods.run_section('cloud_init_modules') + cfg = mods.cfg + self.assertEquals('b', cfg['a']) + self.assertEquals('vendor', cfg['name']) + self.assertIn('x', cfg['run']) + self.assertIn('y', cfg['run']) + + def test_vendor_user_yaml_cloud_config(self): + vendor_blob = ''' +#cloud-config +a: b +name: vendor +run: + - x + - y +''' + + user_blob = ''' +#cloud-config +a: c +vendor_data: + enabled: True + prefix: /bin/true +name: user +run: + - z +''' + new_root = self.makeDir() + self._patchIn(new_root) + initer = stages.Init() + initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) + initer.read_cfg() + initer.initialize() + initer.fetch() + _iid = initer.instancify() + initer.update() + initer.cloudify().run('consume_data', + initer.consume_data, + args=[PER_INSTANCE], + freq=PER_INSTANCE) + mods = stages.Modules(initer) + (_which_ran, _failures) = mods.run_section('cloud_init_modules') + cfg = mods.cfg + self.assertIn('vendor_data', cfg) + 
self.assertEquals('c', cfg['a']) + self.assertEquals('user', cfg['name']) + self.assertNotIn('x', cfg['run']) + self.assertNotIn('y', cfg['run']) + self.assertIn('z', cfg['run']) + + def test_vendordata_script(self): + vendor_blob = ''' +#!/bin/bash +echo "test" +''' + + user_blob = ''' +#cloud-config +vendor_data: + enabled: True + prefix: /bin/true +''' + new_root = self.makeDir() + self._patchIn(new_root) + initer = stages.Init() + initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) + initer.read_cfg() + initer.initialize() + initer.fetch() + _iid = initer.instancify() + initer.update() + initer.cloudify().run('consume_data', + initer.consume_data, + args=[PER_INSTANCE], + freq=PER_INSTANCE) + mods = stages.Modules(initer) + (_which_ran, _failures) = mods.run_section('cloud_init_modules') + cfg = mods.cfg + vendor_script = initer.paths.get_ipath_cur('vendor_scripts') + vendor_script_fns = "%s%s/part-001" % (new_root, vendor_script) + self.assertTrue(os.path.exists(vendor_script_fns)) + + + + def test_merging_cloud_config(self): + blob = ''' +#cloud-config +a: b +e: f +run: + - b + - c +''' + message1 = MIMEBase("text", "cloud-config") + message1.set_payload(blob) + + blob2 = ''' +#cloud-config +a: e +e: g +run: + - stuff + - morestuff +''' + message2 = MIMEBase("text", "cloud-config") + message2['X-Merge-Type'] = ('dict(recurse_array,' + 'recurse_str)+list(append)+str(append)') + message2.set_payload(blob2) + + blob3 = ''' +#cloud-config +e: + - 1 + - 2 + - 3 +p: 1 +''' + message3 = MIMEBase("text", "cloud-config") + message3.set_payload(blob3) + + messages = [message1, message2, message3] + + paths = c_helpers.Paths({}, ds=FakeDataSource('')) + cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths) + + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, + None) + for i, m in enumerate(messages): + headers = dict(m) + fn = "part-%s" % (i + 1) + payload = m.get_payload(decode=True) + cloud_cfg.handle_part(None, headers['Content-Type'], + fn, payload, None, headers) + cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None, + None) + contents = util.load_file(paths.get_ipath('cloud_config')) + contents = util.load_yaml(contents) + self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) + self.assertEquals(contents['a'], 'be') + self.assertEquals(contents['e'], [1, 2, 3]) + self.assertEquals(contents['p'], 1) + + def test_unhandled_type_warning(self): + """Raw text without magic is ignored but shows warning.""" + ci = stages.Init() + data = "arbitrary text\n" + ci.datasource = FakeDataSource(data) + + mock_write = self.mocker.replace("cloudinit.util.write_file", + passthrough=False) + mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) + self.mocker.replay() + + log_file = self.capture_log(logging.WARNING) + ci.fetch() + ci.consume_data() + self.assertIn( + "Unhandled non-multipart (text/x-not-multipart) userdata:", + log_file.getvalue()) + + def test_mime_gzip_compressed(self): + """Tests that individual message gzip encoding works.""" + + def gzip_part(text): + contents = StringIO.StringIO() + f = gzip.GzipFile(fileobj=contents, mode='w') + f.write(str(text)) + f.flush() + f.close() + return MIMEApplication(contents.getvalue(), 'gzip') + + base_content1 = ''' +#cloud-config +a: 2 +''' + + base_content2 = ''' +#cloud-config +b: 3 +c: 4 +''' + + message = MIMEMultipart('test') + message.attach(gzip_part(base_content1)) + 
message.attach(gzip_part(base_content2)) + ci = stages.Init() + ci.datasource = FakeDataSource(str(message)) + new_root = self.makeDir() + self.patchUtils(new_root) + self.patchOS(new_root) + ci.fetch() + ci.consume_data() + contents = util.load_file(ci.paths.get_ipath("cloud_config")) + contents = util.load_yaml(contents) + self.assertTrue(isinstance(contents, dict)) + self.assertEquals(3, len(contents)) + self.assertEquals(2, contents['a']) + self.assertEquals(3, contents['b']) + self.assertEquals(4, contents['c']) + + def test_mime_text_plain(self): + """Mime message of type text/plain is ignored but shows warning.""" + ci = stages.Init() + message = MIMEBase("text", "plain") + message.set_payload("Just text") + ci.datasource = FakeDataSource(message.as_string()) + + mock_write = self.mocker.replace("cloudinit.util.write_file", + passthrough=False) + mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) + self.mocker.replay() + + log_file = self.capture_log(logging.WARNING) + ci.fetch() + ci.consume_data() + self.assertIn( + "Unhandled unknown content-type (text/plain)", + log_file.getvalue()) + + def test_shellscript(self): + """Raw text starting #!/bin/sh is treated as script.""" + ci = stages.Init() + script = "#!/bin/sh\necho hello\n" + ci.datasource = FakeDataSource(script) + + outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + mock_write = self.mocker.replace("cloudinit.util.write_file", + passthrough=False) + mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) + mock_write(outpath, script, 0700) + self.mocker.replay() + + log_file = self.capture_log(logging.WARNING) + ci.fetch() + ci.consume_data() + self.assertEqual("", log_file.getvalue()) + + def test_mime_text_x_shellscript(self): + """Mime message of type text/x-shellscript is treated as script.""" + ci = stages.Init() + script = "#!/bin/sh\necho hello\n" + message = MIMEBase("text", "x-shellscript") + message.set_payload(script) + ci.datasource = FakeDataSource(message.as_string()) + + outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + mock_write = self.mocker.replace("cloudinit.util.write_file", + passthrough=False) + mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) + mock_write(outpath, script, 0700) + self.mocker.replay() + + log_file = self.capture_log(logging.WARNING) + ci.fetch() + ci.consume_data() + self.assertEqual("", log_file.getvalue()) + + def test_mime_text_plain_shell(self): + """Mime type text/plain starting #!/bin/sh is treated as script.""" + ci = stages.Init() + script = "#!/bin/sh\necho hello\n" + message = MIMEBase("text", "plain") + message.set_payload(script) + ci.datasource = FakeDataSource(message.as_string()) + + outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") + mock_write = self.mocker.replace("cloudinit.util.write_file", + passthrough=False) + mock_write(outpath, script, 0700) + mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) + self.mocker.replay() + + log_file = self.capture_log(logging.WARNING) + ci.fetch() + ci.consume_data() + self.assertEqual("", log_file.getvalue()) diff --git a/tests/unittests/test_runs/test_merge_run.py b/tests/unittests/test_runs/test_merge_run.py index d9c3a455..5ffe95a2 100644 --- a/tests/unittests/test_runs/test_merge_run.py +++ b/tests/unittests/test_runs/test_merge_run.py @@ -35,8 +35,8 @@ class TestMergeRun(helpers.FilesystemMockingTestCase): initer.datasource.userdata_raw = ud _iid = initer.instancify() initer.update() - initer.cloudify().run('consume_userdata', - 
initer.consume_userdata, + initer.cloudify().run('consume_data', + initer.consume_data, args=[PER_INSTANCE], freq=PER_INSTANCE) mirrors = initer.distro.get_option('package_mirrors') diff --git a/tests/unittests/test_runs/test_simple_run.py b/tests/unittests/test_runs/test_simple_run.py index 60ef812a..9a7178d1 100644 --- a/tests/unittests/test_runs/test_simple_run.py +++ b/tests/unittests/test_runs/test_simple_run.py @@ -66,8 +66,8 @@ class TestSimpleRun(helpers.FilesystemMockingTestCase): initer.update() self.assertTrue(os.path.islink("var/lib/cloud/instance")) - initer.cloudify().run('consume_userdata', - initer.consume_userdata, + initer.cloudify().run('consume_data', + initer.consume_data, args=[PER_INSTANCE], freq=PER_INSTANCE) diff --git a/tests/unittests/test_userdata.py b/tests/unittests/test_userdata.py deleted file mode 100644 index 5ffe8f0a..00000000 --- a/tests/unittests/test_userdata.py +++ /dev/null @@ -1,308 +0,0 @@ -"""Tests for handling of userdata within cloud init.""" - -import StringIO - -import gzip -import logging -import os - -from email.mime.application import MIMEApplication -from email.mime.base import MIMEBase -from email.mime.multipart import MIMEMultipart - -from cloudinit import handlers -from cloudinit import helpers as c_helpers -from cloudinit import log -from cloudinit import sources -from cloudinit import stages -from cloudinit import util - -INSTANCE_ID = "i-testing" - -from tests.unittests import helpers - - -class FakeDataSource(sources.DataSource): - - def __init__(self, userdata): - sources.DataSource.__init__(self, {}, None, None) - self.metadata = {'instance-id': INSTANCE_ID} - self.userdata_raw = userdata - - -# FIXME: these tests shouldn't be checking log output?? -# Weirddddd... -class TestConsumeUserData(helpers.FilesystemMockingTestCase): - - def setUp(self): - helpers.FilesystemMockingTestCase.setUp(self) - self._log = None - self._log_file = None - self._log_handler = None - - def tearDown(self): - helpers.FilesystemMockingTestCase.tearDown(self) - if self._log_handler and self._log: - self._log.removeHandler(self._log_handler) - - def capture_log(self, lvl=logging.DEBUG): - log_file = StringIO.StringIO() - self._log_handler = logging.StreamHandler(log_file) - self._log_handler.setLevel(lvl) - self._log = log.getLogger() - self._log.addHandler(self._log_handler) - return log_file - - def test_simple_jsonp(self): - blob = ''' -#cloud-config-jsonp -[ - { "op": "add", "path": "/baz", "value": "qux" }, - { "op": "add", "path": "/bar", "value": "qux2" } -] -''' - - ci = stages.Init() - ci.datasource = FakeDataSource(blob) - new_root = self.makeDir() - self.patchUtils(new_root) - self.patchOS(new_root) - ci.fetch() - ci.consume_userdata() - cc_contents = util.load_file(ci.paths.get_ipath("cloud_config")) - cc = util.load_yaml(cc_contents) - self.assertEquals(2, len(cc)) - self.assertEquals('qux', cc['baz']) - self.assertEquals('qux2', cc['bar']) - - def test_mixed_cloud_config(self): - blob_cc = ''' -#cloud-config -a: b -c: d -''' - message_cc = MIMEBase("text", "cloud-config") - message_cc.set_payload(blob_cc) - - blob_jp = ''' -#cloud-config-jsonp -[ - { "op": "replace", "path": "/a", "value": "c" }, - { "op": "remove", "path": "/c" } -] -''' - - message_jp = MIMEBase('text', "cloud-config-jsonp") - message_jp.set_payload(blob_jp) - - message = MIMEMultipart() - message.attach(message_cc) - message.attach(message_jp) - - ci = stages.Init() - ci.datasource = FakeDataSource(str(message)) - new_root = self.makeDir() - self.patchUtils(new_root) - 
self.patchOS(new_root) - ci.fetch() - ci.consume_userdata() - cc_contents = util.load_file(ci.paths.get_ipath("cloud_config")) - cc = util.load_yaml(cc_contents) - self.assertEquals(1, len(cc)) - self.assertEquals('c', cc['a']) - - def test_merging_cloud_config(self): - blob = ''' -#cloud-config -a: b -e: f -run: - - b - - c -''' - message1 = MIMEBase("text", "cloud-config") - message1.set_payload(blob) - - blob2 = ''' -#cloud-config -a: e -e: g -run: - - stuff - - morestuff -''' - message2 = MIMEBase("text", "cloud-config") - message2['X-Merge-Type'] = ('dict(recurse_array,' - 'recurse_str)+list(append)+str(append)') - message2.set_payload(blob2) - - blob3 = ''' -#cloud-config -e: - - 1 - - 2 - - 3 -p: 1 -''' - message3 = MIMEBase("text", "cloud-config") - message3.set_payload(blob3) - - messages = [message1, message2, message3] - - paths = c_helpers.Paths({}, ds=FakeDataSource('')) - cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths) - - new_root = self.makeDir() - self.patchUtils(new_root) - self.patchOS(new_root) - cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, - None) - for i, m in enumerate(messages): - headers = dict(m) - fn = "part-%s" % (i + 1) - payload = m.get_payload(decode=True) - cloud_cfg.handle_part(None, headers['Content-Type'], - fn, payload, None, headers) - cloud_cfg.handle_part(None, handlers.CONTENT_END, None, None, None, - None) - contents = util.load_file(paths.get_ipath('cloud_config')) - contents = util.load_yaml(contents) - self.assertEquals(contents['run'], ['b', 'c', 'stuff', 'morestuff']) - self.assertEquals(contents['a'], 'be') - self.assertEquals(contents['e'], [1, 2, 3]) - self.assertEquals(contents['p'], 1) - - def test_unhandled_type_warning(self): - """Raw text without magic is ignored but shows warning.""" - ci = stages.Init() - data = "arbitrary text\n" - ci.datasource = FakeDataSource(data) - - mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) - self.mocker.replay() - - log_file = self.capture_log(logging.WARNING) - ci.fetch() - ci.consume_userdata() - self.assertIn( - "Unhandled non-multipart (text/x-not-multipart) userdata:", - log_file.getvalue()) - - def test_mime_gzip_compressed(self): - """Tests that individual message gzip encoding works.""" - - def gzip_part(text): - contents = StringIO.StringIO() - f = gzip.GzipFile(fileobj=contents, mode='w') - f.write(str(text)) - f.flush() - f.close() - return MIMEApplication(contents.getvalue(), 'gzip') - - base_content1 = ''' -#cloud-config -a: 2 -''' - - base_content2 = ''' -#cloud-config -b: 3 -c: 4 -''' - - message = MIMEMultipart('test') - message.attach(gzip_part(base_content1)) - message.attach(gzip_part(base_content2)) - ci = stages.Init() - ci.datasource = FakeDataSource(str(message)) - new_root = self.makeDir() - self.patchUtils(new_root) - self.patchOS(new_root) - ci.fetch() - ci.consume_userdata() - contents = util.load_file(ci.paths.get_ipath("cloud_config")) - contents = util.load_yaml(contents) - self.assertTrue(isinstance(contents, dict)) - self.assertEquals(3, len(contents)) - self.assertEquals(2, contents['a']) - self.assertEquals(3, contents['b']) - self.assertEquals(4, contents['c']) - - def test_mime_text_plain(self): - """Mime message of type text/plain is ignored but shows warning.""" - ci = stages.Init() - message = MIMEBase("text", "plain") - message.set_payload("Just text") - ci.datasource = FakeDataSource(message.as_string()) - - mock_write = 
self.mocker.replace("cloudinit.util.write_file", - passthrough=False) - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) - self.mocker.replay() - - log_file = self.capture_log(logging.WARNING) - ci.fetch() - ci.consume_userdata() - self.assertIn( - "Unhandled unknown content-type (text/plain)", - log_file.getvalue()) - - def test_shellscript(self): - """Raw text starting #!/bin/sh is treated as script.""" - ci = stages.Init() - script = "#!/bin/sh\necho hello\n" - ci.datasource = FakeDataSource(script) - - outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") - mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) - mock_write(outpath, script, 0700) - self.mocker.replay() - - log_file = self.capture_log(logging.WARNING) - ci.fetch() - ci.consume_userdata() - self.assertEqual("", log_file.getvalue()) - - def test_mime_text_x_shellscript(self): - """Mime message of type text/x-shellscript is treated as script.""" - ci = stages.Init() - script = "#!/bin/sh\necho hello\n" - message = MIMEBase("text", "x-shellscript") - message.set_payload(script) - ci.datasource = FakeDataSource(message.as_string()) - - outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") - mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) - mock_write(outpath, script, 0700) - self.mocker.replay() - - log_file = self.capture_log(logging.WARNING) - ci.fetch() - ci.consume_userdata() - self.assertEqual("", log_file.getvalue()) - - def test_mime_text_plain_shell(self): - """Mime type text/plain starting #!/bin/sh is treated as script.""" - ci = stages.Init() - script = "#!/bin/sh\necho hello\n" - message = MIMEBase("text", "plain") - message.set_payload(script) - ci.datasource = FakeDataSource(message.as_string()) - - outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") - mock_write = self.mocker.replace("cloudinit.util.write_file", - passthrough=False) - mock_write(outpath, script, 0700) - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) - self.mocker.replay() - - log_file = self.capture_log(logging.WARNING) - ci.fetch() - ci.consume_userdata() - self.assertEqual("", log_file.getvalue()) -- cgit v1.2.3 From 22f4ce4476a292f32c26f3d965f63145a644a164 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 15 Jan 2014 15:12:48 -0500 Subject: header, comment cleanup --- cloudinit/config/cc_scripts_vendor.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_scripts_vendor.py b/cloudinit/config/cc_scripts_vendor.py index 5809a4ba..0168f668 100644 --- a/cloudinit/config/cc_scripts_vendor.py +++ b/cloudinit/config/cc_scripts_vendor.py @@ -1,11 +1,8 @@ # vi: ts=4 expandtab # -# Copyright (C) 2011-2014 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# Copyright (C) 2014 Canonical Ltd. # -# Author: Scott Moser # Author: Ben Howard -# Author: Juerg Haefliger # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as @@ -31,9 +28,8 @@ SCRIPT_SUBDIR = 'vendor' def handle(name, _cfg, cloud, log, _args): - # This is written to by the user data handlers - # Ie, any custom shell scripts that come down - # go here... 
+ # This is written to by the vendor data handlers + # any vendor data shell scripts get placed in runparts_path runparts_path = os.path.join(cloud.get_ipath_cur(), 'scripts', SCRIPT_SUBDIR) try: -- cgit v1.2.3 From 42f2cffadabfefb0469ade2f1f1c3ce5edabc9fa Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 15 Jan 2014 15:42:02 -0500 Subject: remove vendor-scripts-per-{boot,instance,once} I don't see a real need for these. The intent of the 'per-boot' or 'per-instance' or 'per-once' config modules is to handle running scripts that were already inserted into the instance. If the vendor is doing that, then there is value in vendor-data. Ie, they'd already modified the image, they might as well have just put the stuff in that they wanted. --- cloudinit/config/cc_vendor_scripts_per_boot.py | 43 ---------------------- cloudinit/config/cc_vendor_scripts_per_instance.py | 43 ---------------------- cloudinit/config/cc_vendor_scripts_per_once.py | 43 ---------------------- config/cloud.cfg | 3 -- 4 files changed, 132 deletions(-) delete mode 100644 cloudinit/config/cc_vendor_scripts_per_boot.py delete mode 100644 cloudinit/config/cc_vendor_scripts_per_instance.py delete mode 100644 cloudinit/config/cc_vendor_scripts_per_once.py (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_vendor_scripts_per_boot.py b/cloudinit/config/cc_vendor_scripts_per_boot.py deleted file mode 100644 index 80446e99..00000000 --- a/cloudinit/config/cc_vendor_scripts_per_boot.py +++ /dev/null @@ -1,43 +0,0 @@ -# vi: ts=4 expandtab -# -# Copyright (C) 2011-2014 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Scott Moser -# Author: Ben Howard -# Author: Juerg Haefliger -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import os - -from cloudinit import util - -from cloudinit.settings import PER_ALWAYS - -frequency = PER_ALWAYS - -SCRIPT_SUBDIR = 'per-boot' - - -def handle(name, cfg, cloud, log, _args): - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', - SCRIPT_SUBDIR) - vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', - 'prefix') - try: - util.runparts(runparts_path, exe_prefix=vendor_prefix) - except: - log.warn("Failed to run module %s (%s in %s)", - name, SCRIPT_SUBDIR, runparts_path) - raise diff --git a/cloudinit/config/cc_vendor_scripts_per_instance.py b/cloudinit/config/cc_vendor_scripts_per_instance.py deleted file mode 100644 index 2d27a0c4..00000000 --- a/cloudinit/config/cc_vendor_scripts_per_instance.py +++ /dev/null @@ -1,43 +0,0 @@ -# vi: ts=4 expandtab -# -# Copyright (C) 2011-2014 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Scott Moser -# Author: Ben Howard -# Author: Juerg Haefliger -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import os - -from cloudinit import util - -from cloudinit.settings import PER_INSTANCE - -frequency = PER_INSTANCE - -SCRIPT_SUBDIR = 'per-instance' - - -def handle(name, cfg, cloud, log, _args): - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', - SCRIPT_SUBDIR) - vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', - 'prefix') - try: - util.runparts(runparts_path, exe_prefix=vendor_prefix) - except: - log.warn("Failed to run module %s (%s in %s)", - name, SCRIPT_SUBDIR, runparts_path) - raise diff --git a/cloudinit/config/cc_vendor_scripts_per_once.py b/cloudinit/config/cc_vendor_scripts_per_once.py deleted file mode 100644 index ad3e13c8..00000000 --- a/cloudinit/config/cc_vendor_scripts_per_once.py +++ /dev/null @@ -1,43 +0,0 @@ -# vi: ts=4 expandtab -# -# Copyright (C) 2011-2014 Canonical Ltd. -# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Scott Moser -# Author: Ben Howard -# Author: Juerg Haefliger -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 3, as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import os - -from cloudinit import util - -from cloudinit.settings import PER_ONCE - -frequency = PER_ONCE - -SCRIPT_SUBDIR = 'per-once' - - -def handle(name, cfg, cloud, log, _args): - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', 'vendor', - SCRIPT_SUBDIR) - vendor_prefix = util.get_nested_option_as_list(cfg, 'vendor_data', - 'prefix') - try: - util.runparts(runparts_path, exe_prefix=vendor_prefix) - except: - log.warn("Failed to run module %s (%s in %s)", - name, SCRIPT_SUBDIR, runparts_path) - raise diff --git a/config/cloud.cfg b/config/cloud.cfg index 8ed3522d..b746e3db 100644 --- a/config/cloud.cfg +++ b/config/cloud.cfg @@ -64,9 +64,6 @@ cloud_config_modules: # The modules that run in the 'final' stage cloud_final_modules: - rightscale_userdata - - vendor-scripts-per-once - - vendor-scripts-per-boot - - vendor-scripts-per-instance - scripts-vendor - scripts-per-once - scripts-per-boot -- cgit v1.2.3 From 9e19b276fdb6cf9c1f5252f6f9bdcba076c5f09e Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 15 Jan 2014 17:13:24 -0500 Subject: replace get_nested_option_as_list with get_cfg_by_path, improve ruparts this makes runparts take exe_prefix and do string to list conversion inside. that means we don't have to do it in cc_scripts_vendor. Also, get_nested_option_as_list was essentially get_cfg_by_path anyway. 
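A minimal sketch of the calling convention this change enables (the config fragment and the vendor-script path below are illustrative, not taken from a real instance; the helper names come from the diff that follows):

    from cloudinit import util

    # e.g. a vendor-data related cloud-config fragment of the form:
    #   vendor_data:
    #     prefix: /bin/true
    cfg = {'vendor_data': {'prefix': '/bin/true'}}

    # get_cfg_by_path walks the nested keys and falls back to the
    # default ([] here) when the path is absent.
    prefix = util.get_cfg_by_path(cfg, ('vendor_data', 'prefix'), [])

    # runparts now normalizes exe_prefix itself:
    #   None -> [], a string -> [string], a list is used as-is,
    #   anything else raises TypeError.
    # Each executable found is then invoked as subp(prefix + [exe_path]).
    util.runparts('/var/lib/cloud/instance/scripts/vendor', exe_prefix=prefix)
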
--- cloudinit/config/cc_scripts_vendor.py | 7 +++++-- cloudinit/util.py | 38 ++++++++++------------------------- 2 files changed, 16 insertions(+), 29 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_scripts_vendor.py b/cloudinit/config/cc_scripts_vendor.py index 0168f668..0c9e504e 100644 --- a/cloudinit/config/cc_scripts_vendor.py +++ b/cloudinit/config/cc_scripts_vendor.py @@ -27,13 +27,16 @@ frequency = PER_INSTANCE SCRIPT_SUBDIR = 'vendor' -def handle(name, _cfg, cloud, log, _args): +def handle(name, cfg, cloud, log, _args): # This is written to by the vendor data handlers # any vendor data shell scripts get placed in runparts_path runparts_path = os.path.join(cloud.get_ipath_cur(), 'scripts', SCRIPT_SUBDIR) + + prefix = util.get_cfg_by_path(cfg, ('vendor_data', 'prefix'), []) + try: - util.runparts(runparts_path) + util.runparts(runparts_path, exe_prefix=prefix) except: log.warn("Failed to run module %s (%s in %s)", name, SCRIPT_SUBDIR, runparts_path) diff --git a/cloudinit/util.py b/cloudinit/util.py index 6b30af5e..3ce54f28 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -614,15 +614,22 @@ def runparts(dirp, skip_no_exist=True, exe_prefix=None): failed = [] attempted = [] + + if exe_prefix is None: + prefix = [] + elif isinstance(exe_prefix, str): + prefix = [str(exe_prefix)] + elif isinstance(exe_prefix, list): + prefix = exe_prefix + else: + raise TypeError("exe_prefix must be None, str, or list") + for exe_name in sorted(os.listdir(dirp)): exe_path = os.path.join(dirp, exe_name) if os.path.isfile(exe_path) and os.access(exe_path, os.X_OK): attempted.append(exe_path) try: - exe_cmd = exe_prefix - if isinstance(exe_prefix, list): - exe_cmd.extend(exe_path) - subp([exe_cmd], capture=False) + subp(prefix + [exe_path], capture=False) except ProcessExecutionError as e: logexc(LOG, "Failed running %s [%s]", exe_path, e.exit_code) failed.append(e) @@ -1852,26 +1859,3 @@ def expand_dotted_devname(dotted): return toks else: return (dotted, None) - - -def get_nested_option_as_list(dct, first, second): - """ - Return a nested option from a dict as a list - """ - if not isinstance(dct, dict): - raise TypeError("get_nested_option_as_list only works with dicts") - root = dct.get(first) - if not isinstance(root, dict): - return None - - token = root.get(second) - if isinstance(token, list): - return token - elif isinstance(token, dict): - ret_list = [] - for k, v in dct.iteritems(): - ret_list.append((k, v)) - return ret_list - elif isinstance(token, str): - return token.split() - return None -- cgit v1.2.3 From 98fd17c55b637f4e1d136c954567c1d9b23e6c20 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 17 Jan 2014 11:16:56 -0500 Subject: remove support for resizing via 'parted resizepart' This was previously broken anyway. It doesn't seem like there was an easy way to actually support it, so for now I'm removing it entirely. growpart works well enough. --- ChangeLog | 2 + cloudinit/config/cc_growpart.py | 28 +---------- doc/examples/cloud-config-growpart.txt | 4 +- .../test_handler/test_handler_growpart.py | 55 ++-------------------- 4 files changed, 9 insertions(+), 80 deletions(-) (limited to 'cloudinit/config') diff --git a/ChangeLog b/ChangeLog index 1c240c68..46a27df3 100644 --- a/ChangeLog +++ b/ChangeLog @@ -11,6 +11,8 @@ unicode). - config/cloud.cfg.d/05_logging.cfg: provide a default 'output' setting, to redirect cloud-init stderr and stdout /var/log/cloud-init-output.log. 
+ - drop support for resizing partitions with parted entirely (LP: #1212492). + This was broken as it was anyway. 0.7.4: - fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a partitioned block device with target filesystem on ephemeral0.1. diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py index 0dd92a46..6bddf847 100644 --- a/cloudinit/config/cc_growpart.py +++ b/cloudinit/config/cc_growpart.py @@ -80,30 +80,6 @@ class ResizeFailedException(Exception): pass -class ResizeParted(object): - def available(self): - myenv = os.environ.copy() - myenv['LANG'] = 'C' - - try: - (out, _err) = util.subp(["parted", "--help"], env=myenv) - if re.search(r"COMMAND.*resizepart\s+", out, re.DOTALL): - return True - - except util.ProcessExecutionError: - pass - return False - - def resize(self, diskdev, partnum, partdev): - before = get_size(partdev) - try: - util.subp(["parted", diskdev, "resizepart", partnum]) - except util.ProcessExecutionError as e: - raise ResizeFailedException(e) - - return (before, get_size(partdev)) - - class ResizeGrowPart(object): def available(self): myenv = os.environ.copy() @@ -279,6 +255,4 @@ def handle(_name, cfg, _cloud, log, _args): else: log.debug("'%s' %s: %s" % (entry, action, msg)) -# LP: 1212444 FIXME re-order and favor ResizeParted -#RESIZERS = (('growpart', ResizeGrowPart),) -RESIZERS = (('growpart', ResizeGrowPart), ('parted', ResizeParted)) +RESIZERS = (('growpart', ResizeGrowPart),) diff --git a/doc/examples/cloud-config-growpart.txt b/doc/examples/cloud-config-growpart.txt index a459573d..393d5164 100644 --- a/doc/examples/cloud-config-growpart.txt +++ b/doc/examples/cloud-config-growpart.txt @@ -5,12 +5,10 @@ # # mode: # values: -# * auto: use any option possible (growpart or parted) +# * auto: use any option possible (any available) # if none are available, do not warn, but debug. # * growpart: use growpart to grow partitions # if growpart is not available, this is an error. -# * parted: use parted (parted resizepart) to resize partitions -# if parted is not available, this is an error. # * off, false # # devices: diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py index c0497e08..996526d3 100644 --- a/tests/unittests/test_handler/test_handler_growpart.py +++ b/tests/unittests/test_handler/test_handler_growpart.py @@ -12,50 +12,9 @@ import re import unittest # growpart: -# mode: auto # off, on, auto, 'growpart', 'parted' +# mode: auto # off, on, auto, 'growpart' # devices: ['root'] -HELP_PARTED_NO_RESIZE = """ -Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...] -Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in -interactive mode. - -OPTIONs: - - -COMMANDs: - - quit exit program - rescue START END rescue a lost partition near START - and END - resize NUMBER START END resize partition NUMBER and its file - system - rm NUMBER delete partition NUMBER - -Report bugs to bug-parted@gnu.org -""" - -HELP_PARTED_RESIZE = """ -Usage: parted [OPTION]... [DEVICE [COMMAND [PARAMETERS]...]...] -Apply COMMANDs with PARAMETERS to DEVICE. If no COMMAND(s) are given, run in -interactive mode. 
- -OPTIONs: - - -COMMANDs: - - quit exit program - rescue START END rescue a lost partition near START - and END - resize NUMBER START END resize partition NUMBER and its file - system - resizepart NUMBER END resize partition NUMBER - rm NUMBER delete partition NUMBER - -Report bugs to bug-parted@gnu.org -""" - HELP_GROWPART_RESIZE = """ growpart disk partition rewrite partition table so that partition takes up all the space it can @@ -122,11 +81,8 @@ class TestConfig(MockerTestCase): # Order must be correct self.mocker.order() - @unittest.skip("until LP: #1212444 fixed") def test_no_resizers_auto_is_fine(self): subp = self.mocker.replace(util.subp, passthrough=False) - subp(['parted', '--help'], env={'LANG': 'C'}) - self.mocker.result((HELP_PARTED_NO_RESIZE, "")) subp(['growpart', '--help'], env={'LANG': 'C'}) self.mocker.result((HELP_GROWPART_NO_RESIZE, "")) self.mocker.replay() @@ -144,15 +100,14 @@ class TestConfig(MockerTestCase): self.assertRaises(ValueError, self.handle, self.name, config, self.cloud_init, self.log, self.args) - @unittest.skip("until LP: #1212444 fixed") - def test_mode_auto_prefers_parted(self): + def test_mode_auto_prefers_growpart(self): subp = self.mocker.replace(util.subp, passthrough=False) - subp(['parted', '--help'], env={'LANG': 'C'}) - self.mocker.result((HELP_PARTED_RESIZE, "")) + subp(['growpart', '--help'], env={'LANG': 'C'}) + self.mocker.result((HELP_GROWPART_RESIZE, "")) self.mocker.replay() ret = cc_growpart.resizer_factory(mode="auto") - self.assertTrue(isinstance(ret, cc_growpart.ResizeParted)) + self.assertTrue(isinstance(ret, cc_growpart.ResizeGrowPart)) def test_handle_with_no_growpart_entry(self): #if no 'growpart' entry in config, then mode=auto should be used -- cgit v1.2.3 From 84514cdff8ff025df052fe6301d2a7ed751d7d61 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 22 Jan 2014 16:54:07 -0500 Subject: cc_resizefs: figure out what /dev/root means via kernel cmdline If mount_info says that the root filesystem is on /dev/root and /dev/root does not exist, then we'll try to glean that information from the linux kernel cmdline. This situation occurs at least when you boot without an initramfs for the current ppc64el cloud images: qemu-system-ppc64 ... -kernel my.kernel -append 'root=/dev/sda' When doing that, /proc/1/mountinfo will say '/dev/root' for '/'. --- ChangeLog | 2 ++ cloudinit/config/cc_resizefs.py | 33 +++++++++++++++++++++++++++++++-- 2 files changed, 33 insertions(+), 2 deletions(-) (limited to 'cloudinit/config') diff --git a/ChangeLog b/ChangeLog index cb9586f0..32f2a8d0 100644 --- a/ChangeLog +++ b/ChangeLog @@ -17,6 +17,8 @@ - drop dependency on boto for crawling ec2 metadata service. - add 'Requires' on sudo (for OpenNebula datasource) in rpm specs, and 'Recommends' in the debian/control.in [Vlastimil Holer] + - if mount_info reports /dev/root is a device path for /, then convert + that to a device via help of kernel cmdline. 0.7.4: - fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a partitioned block device with target filesystem on ephemeral0.1. 
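As a quick illustration of the conversion described above (the helper is added in the cc_resizefs.py hunk that follows; the cmdline strings, label and uuid values here are made up):

    from cloudinit.config.cc_resizefs import rootdev_from_cmdline

    rootdev_from_cmdline('ro root=/dev/sda console=hvc0')
    # -> '/dev/sda'
    rootdev_from_cmdline('root=LABEL=cloudimg-rootfs ro')
    # -> '/dev/disk/by-label/cloudimg-rootfs'
    rootdev_from_cmdline('root=UUID=1234-abcd ro')
    # -> '/dev/disk/by-uuid/1234-abcd'
    rootdev_from_cmdline('root=vda1')
    # -> '/dev/vda1'      (bare device names get a /dev/ prefix)
    rootdev_from_cmdline('console=ttyS0')
    # -> None             (no root= token on the cmdline at all)
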
diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index 56040fdd..388ca66f 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -51,6 +51,25 @@ RESIZE_FS_PREFIXES_CMDS = [ NOBLOCK = "noblock" +def rootdev_from_cmdline(cmdline): + found = None + for tok in cmdline.split(): + if tok.startswith("root="): + found = tok[5:] + break + if found is None: + return None + + if found.startswith("/dev/"): + return found + if found.startswith("LABEL="): + return "/dev/disk/by-label/" + found[len("LABEL="):] + if found.startswith("UUID="): + return "/dev/disk/by-uuid/" + found[len("UUID="):] + + return "/dev/" + found + + def handle(name, cfg, _cloud, log, args): if len(args) != 0: resize_root = args[0] @@ -78,10 +97,20 @@ def handle(name, cfg, _cloud, log, args): info = "dev=%s mnt_point=%s path=%s" % (devpth, mount_point, resize_what) log.debug("resize_info: %s" % info) + container = util.is_container() + + if (devpth == "/dev/root" and not os.path.exists(devpth) and + not container): + devpth = rootdev_from_cmdline(util.get_cmdline()) + if devpth is None: + log.warn("Unable to find device '/dev/root'") + return + log.debug("Converted /dev/root to '%s' per kernel cmdline", devpth) + try: statret = os.stat(devpth) except OSError as exc: - if util.is_container() and exc.errno == errno.ENOENT: + if container and exc.errno == errno.ENOENT: log.debug("Device '%s' did not exist in container. " "cannot resize: %s" % (devpth, info)) elif exc.errno == errno.ENOENT: @@ -92,7 +121,7 @@ def handle(name, cfg, _cloud, log, args): return if not stat.S_ISBLK(statret.st_mode): - if util.is_container(): + if container: log.debug("device '%s' not a block device in container." " cannot resize: %s" % (devpth, info)) else: -- cgit v1.2.3 From c833a84f08019ba4413937f2f1b1f12a4ffe5632 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 23 Jan 2014 14:28:59 -0500 Subject: pep8 --- cloudinit/config/cc_debug.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index cfd31fa1..7219b0f8 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -14,10 +14,10 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from StringIO import StringIO -from cloudinit import util from cloudinit import type_utils +from cloudinit import util import copy +from StringIO import StringIO def _make_header(text): -- cgit v1.2.3