From a5727fe1477c9cc4288d1ac41f70bd1ab7d7928a Mon Sep 17 00:00:00 2001
From: Ben Howard
Date: Wed, 8 Jan 2014 17:16:24 -0700
Subject: Significant re-working of the userdata handling and introduction of
 vendordata.

Vendordata is a datasource-provided userdata-like blob that is parsed
similarly to userdata, except at the user's pleasure.

cloudinit/config/cc_scripts_vendor.py: added vendor script cloud config

cloudinit/config/cc_vendor_scripts_per_boot.py: added vendor per boot
    cloud config

cloudinit/config/cc_vendor_scripts_per_instance.py: added vendor per
    instance cloud config

cloudinit/config/cc_vendor_scripts_per_once.py: added per once vendor
    cloud config script

doc/examples/cloud-config-vendor-data.txt: documentation of vendor-data
    examples

doc/vendordata.txt: documentation of vendordata for vendors

(RENAMED) tests/unittests/test_userdata.py => tests/unittests/test_data.py:
    userdata test cases are now expanded to confirm superiority over
    vendor data.

bin/cloud-init: change instances of 'consume_userdata' to 'consume_data'

cloudinit/handlers/cloud_config.py: Added vendor script handling to
    default cloud-config modules

cloudinit/handlers/shell_script.py: Added ability to change the path key
    to support vendor provided 'vendor-scripts'. Defaults to 'script'.

cloudinit/helpers.py:
    - Changed ConfigMerger to include handling of vendordata.
    - Changed helpers to include paths for vendordata.

cloudinit/sources/__init__.py: Added functions for helping vendordata
    - get_vendordata_raw(): returns vendordata unprocessed
    - get_vendordata(): returns vendordata through userdata processor
    - has_vendordata(): indicator if vendordata is present
    - consume_vendordata(): datasource directive for indicating explicit
          user approval of vendordata consumption. Defaults to 'false'

cloudinit/stages.py: Re-jiggered for handling of vendordata
    - _initial_subdirs(): added vendor script definition
    - update(): added self._store_vendordata()
    - [ADDED] _store_vendordata(): store vendordata
    - _get_default_handlers(): modified to allow for filtering which
          handlers will run against vendordata
    - [ADDED] _do_handlers(): moved logic from consume_userdata to
          _do_handlers(). This allows _consume_vendordata() and
          _consume_userdata() to use the same code path.
    - [RENAMED] consume_userdata() to _consume_userdata()
    - [ADDED] _consume_vendordata() for handling vendordata
        - run after userdata to get user cloud-config
        - uses ConfigMerger to get the configuration from the instance
          perspective about whether or not to use vendordata
    - [ADDED] consume_data() to call _consume_{user,vendor}data

cloudinit/util.py:
    - [ADDED] get_nested_option_as_list() used by cc_vendor* for getting
          a nested value from a dict and returning it as a list
    - runparts(): added 'exe_prefix' for running exe with a prefix, used
          by cc_vendor*

config/cloud.cfg: Added vendor script execution as default

tests/unittests/test_runs/test_merge_run.py: changed consume_userdata()
    to consume_data()

tests/unittests/test_runs/test_simple_run.py: changed consume_userdata()
    to consume_data()
---
 cloudinit/sources/__init__.py | 28 ++++++++++++++++++++++++++--
 1 file changed, 26 insertions(+), 2 deletions(-)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7dc1fbde..a7c7993f 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -53,6 +53,8 @@ class DataSource(object):
         self.userdata = None
         self.metadata = None
         self.userdata_raw = None
+        self.vendordata = None
+        self.vendordata_raw = None
 
         # find the datasource config name.
         # remove 'DataSource' from classname on front, and remove 'Net' on end.
@@ -77,9 +79,28 @@ class DataSource(object):
         if self.userdata is None:
             self.userdata = self.ud_proc.process(self.get_userdata_raw())
         if apply_filter:
-            return self._filter_userdata(self.userdata)
+            return self._filter_xdata(self.userdata)
         return self.userdata
 
+    def get_vendordata(self, apply_filter=False):
+        if self.vendordata is None:
+            self.vendordata = self.ud_proc.process(self.get_vendordata_raw())
+        if apply_filter:
+            return self._filter_xdata(self.vendordata)
+        return self.vendordata
+
+    def has_vendordata(self):
+        if self.vendordata_raw is not None:
+            return True
+        return False
+
+    def consume_vendordata(self):
+        """
+        The datasource may allow for consumption of vendordata, but only
+        when the datasource has allowed it. The default is false.
+        """
+        return False
+
     @property
     def launch_index(self):
         if not self.metadata:
@@ -88,7 +109,7 @@ class DataSource(object):
             return self.metadata['launch-index']
         return None
 
-    def _filter_userdata(self, processed_ud):
+    def _filter_xdata(self, processed_ud):
         filters = [
             launch_index.Filter(util.safe_int(self.launch_index)),
         ]
@@ -104,6 +125,9 @@ class DataSource(object):
     def get_userdata_raw(self):
         return self.userdata_raw
 
+    def get_vendordata_raw(self):
+        return self.vendordata_raw
+
     # the data sources' config_obj is a cloud-config formated
     # object that came to it from ways other than cloud-config
     # because cloud-config content would be handled elsewhere
--
cgit v1.2.3
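
To make the new DataSource accessors concrete, here is a minimal,
self-contained sketch of the pattern this patch adds (these are
illustrative stand-ins, not the cloud-init classes themselves; the
EchoProcessor stub stands in for the real userdata processor):

    # Sketch of the vendordata accessor pattern introduced above.
    class EchoProcessor(object):
        def process(self, blob):
            # The real processor parses MIME / cloud-config parts;
            # here we simply return the blob unchanged.
            return blob


    class SketchDataSource(object):
        def __init__(self, ud_proc):
            self.ud_proc = ud_proc
            self.vendordata = None       # processed form, cached on first use
            self.vendordata_raw = None   # raw blob supplied by the datasource

        def get_vendordata_raw(self):
            return self.vendordata_raw

        def get_vendordata(self):
            # Process lazily and cache, mirroring get_userdata().
            if self.vendordata is None:
                self.vendordata = self.ud_proc.process(self.get_vendordata_raw())
            return self.vendordata

        def has_vendordata(self):
            return self.vendordata_raw is not None

        def consume_vendordata(self):
            # Datasources must opt in explicitly; the default is off.
            return False


    if __name__ == "__main__":
        ds = SketchDataSource(EchoProcessor())
        ds.vendordata_raw = "#cloud-config\npackages: [htop]\n"
        print(ds.has_vendordata(), ds.get_vendordata())
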
From 9874d0590dba4a67ff7268a6a1d22207088e1a13 Mon Sep 17 00:00:00 2001
From: Ben Howard
Date: Thu, 9 Jan 2014 08:31:52 -0700
Subject: Added vendordata to SmartOS

---
 cloudinit/sources/DataSourceSmartOS.py | 2 ++
 1 file changed, 2 insertions(+)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 551b20c4..ccfee931 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -47,6 +47,7 @@ SMARTOS_ATTRIB_MAP = {
     'iptables_disable': ('iptables_disable', True),
     'motd_sys_info': ('motd_sys_info', True),
     'availability_zone': ('datacenter_name', True),
+    'vendordata': ('sdc:operator-script', False),
 }
 
 DS_NAME = 'SmartOS'
@@ -154,6 +155,7 @@ class DataSourceSmartOS(sources.DataSource):
 
         self.metadata = util.mergemanydict([md, self.metadata])
         self.userdata_raw = ud
+        self.vendordata_raw = vendordata
         return True
 
     def device_name_to_device(self, name):
--
cgit v1.2.3
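
The SMARTOS_ATTRIB_MAP entry maps a local attribute name to a
(metadata key, strip-whitespace) tuple, so 'vendordata' is populated from
the 'sdc:operator-script' key without stripping. A hedged, self-contained
sketch of that lookup pattern follows; fetch_metadata() below is a
stand-in, not the real SmartOS serial-console client:

    # Sketch of driving metadata collection from an attribute map.
    ATTRIB_MAP = {
        # local name        (metadata key,          strip whitespace?)
        'availability_zone': ('datacenter_name',      True),
        'vendordata':        ('sdc:operator-script',  False),
    }


    def fetch_metadata(key):
        # Stand-in for querying the SmartOS metadata channel.
        fake_store = {'datacenter_name': ' us-east-1 \n',
                      'sdc:operator-script': '#!/bin/sh\necho vendor hook\n'}
        return fake_store.get(key)


    def collect(attrib_map):
        md = {}
        for name, (key, strip) in attrib_map.items():
            value = fetch_metadata(key)
            md[name] = value.strip() if (strip and value) else value
        return md


    if __name__ == "__main__":
        md = collect(ATTRIB_MAP)
        # The datasource then hands the raw blob over, roughly:
        #   self.vendordata_raw = md['vendordata']
        print(md['availability_zone'], repr(md['vendordata']))
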
From b9314a18a052e934643c93b81dc938b3e5b69307 Mon Sep 17 00:00:00 2001
From: Scott Moser
Date: Thu, 9 Jan 2014 13:12:40 -0500
Subject: Azure: minor changes for filename as strings and logging.

We were passing a unicode string to 'runcmd' in the path to the .crt
file. That is because the keyname was coming from the ovf file as
unicode. Ie:
  u'/var/lib/waagent/6BE7A7C3C8A8F4B123CCA5D0C2F1BE4CA7B63ED7.crt'

Then, logging was extending, not appending, errors.
---
 ChangeLog                            | 2 ++
 cloudinit/sources/DataSourceAzure.py | 4 ++--
 2 files changed, 4 insertions(+), 2 deletions(-)

(limited to 'cloudinit/sources')

diff --git a/ChangeLog b/ChangeLog
index 1286e7c1..8029f9af 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -7,6 +7,8 @@
    apt_get_wrapper (LP: #1236531).
  - convert paths provided in config-drive 'files' to string before writing
    (LP: #1260072).
+ - Azure: minor changes in logging output. ensure filenames are strings (not
+   unicode).
 0.7.4:
  - fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a
    partitioned block device with target filesystem on ephemeral0.1.
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index b18c57e7..97f151d6 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -154,7 +154,7 @@ class DataSourceAzureNet(sources.DataSource):
 
         fp_files = []
         for pk in self.cfg.get('_pubkeys', []):
-            bname = pk['fingerprint'] + ".crt"
+            bname = str(pk['fingerprint'] + ".crt")
             fp_files += [os.path.join(mycfg['data_dir'], bname)]
 
         missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
@@ -247,7 +247,7 @@ def pubkeys_from_crt_files(flist):
         try:
             pubkeys.append(crtfile_to_pubkey(fname))
         except util.ProcessExecutionError:
-            errors.extend(fname)
+            errors.append(fname)
 
     if errors:
         LOG.warn("failed to convert the crt files to pubkey: %s" % errors)
--
cgit v1.2.3
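
Both fixes are plain Python semantics and easy to demonstrate in
isolation (this snippet is illustrative only, independent of the Azure
datasource):

    # list.extend() iterates its argument, so extending with a filename
    # string adds one character per element; append() adds the whole string.
    errors = []
    errors.extend("bad.crt")
    print(errors)            # ['b', 'a', 'd', '.', 'c', 'r', 't']

    errors = []
    errors.append("bad.crt")
    print(errors)            # ['bad.crt']

    # Wrapping the fingerprint-derived name in str() hands the command
    # runner a plain byte string on Python 2 instead of the u'...' unicode
    # object parsed out of the ovf file (on Python 3 this is a no-op).
    fingerprint = u"6BE7A7C3C8A8F4B123CCA5D0C2F1BE4CA7B63ED7"
    bname = str(fingerprint + ".crt")
    print(type(bname).__name__, bname)
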
From 66aa9826b818c3478516104b38039fecbd717b6b Mon Sep 17 00:00:00 2001
From: Paul Querna
Date: Thu, 9 Jan 2014 21:14:51 +0000
Subject: Allow a Config Drive source on a partition, if the label matches.

---
 cloudinit/sources/DataSourceConfigDrive.py          | 6 ++++--
 tests/unittests/test_datasource/test_configdrive.py | 5 +++--
 2 files changed, 7 insertions(+), 4 deletions(-)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 4f437244..2a244496 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -284,8 +284,10 @@ def find_candidate_devs():
     # followed by fstype items, but with dupes removed
     combined = (by_label + [d for d in by_fstype if d not in by_label])
 
-    # We are looking for block device (sda, not sda1), ignore partitions
-    combined = [d for d in combined if not util.is_partition(d)]
+    # We are looking for a block device or partition with necessary label or
+    # an unpartitioned block device.
+    combined = [d for d in combined
+                if d in by_label or not util.is_partition(d)]
 
     return combined
 
diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py
index d5935294..3c1e8add 100644
--- a/tests/unittests/test_datasource/test_configdrive.py
+++ b/tests/unittests/test_datasource/test_configdrive.py
@@ -285,10 +285,11 @@ class TestConfigDriveDataSource(MockerTestCase):
             self.assertEqual(["/dev/vdb", "/dev/zdd"],
                              ds.find_candidate_devs())
-            # verify that partitions are not considered
+            # verify that partitions are considered, but only if they have a label.
             devs_with_answers = {"TYPE=vfat": ["/dev/sda1"],
                                  "TYPE=iso9660": [],
                                  "LABEL=config-2": ["/dev/vdb3"]}
-            self.assertEqual([], ds.find_candidate_devs())
+            self.assertEqual(["/dev/vdb3"],
+                             ds.find_candidate_devs())
 
         finally:
             util.find_devs_with = orig_find_devs_with
--
cgit v1.2.3
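
The selection rule is compact enough to show on its own. Here is a
self-contained sketch of the filtering above, with the blkid probing
replaced by hard-coded lists and a naive stand-in for util.is_partition():

    def is_partition(dev):
        # Crude heuristic for the sketch only: trailing digit => partition.
        return dev[-1].isdigit()


    def find_candidate_devs(by_label, by_fstype):
        # label matches first, then fstype matches, with duplicates dropped
        combined = by_label + [d for d in by_fstype if d not in by_label]
        # keep a device if it carries the config-drive label (even when it
        # is a partition), or if it is an unpartitioned block device
        return [d for d in combined if d in by_label or not is_partition(d)]


    if __name__ == "__main__":
        by_label = ["/dev/vdb3"]              # LABEL=config-2 on a partition
        by_fstype = ["/dev/sda1", "/dev/sdb"]
        print(find_candidate_devs(by_label, by_fstype))
        # -> ['/dev/vdb3', '/dev/sdb']; the unlabeled partition is dropped
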
---
 cloudinit/sources/__init__.py | 16 +---------------
 1 file changed, 1 insertion(+), 15 deletions(-)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index a7c7993f..7e11c1ca 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -82,25 +82,11 @@ class DataSource(object):
             return self._filter_xdata(self.userdata)
         return self.userdata
 
-    def get_vendordata(self, apply_filter=False):
+    def get_vendordata(self)
         if self.vendordata is None:
             self.vendordata = self.ud_proc.process(self.get_vendordata_raw())
-        if apply_filter:
-            return self._filter_xdata(self.vendordata)
         return self.vendordata
 
-    def has_vendordata(self):
-        if self.vendordata_raw is not None:
-            return True
-        return False
-
-    def consume_vendordata(self):
-        """
-        The datasource may allow for consumption of vendordata, but only
-        when the datasource has allowed it. The default is false.
-        """
-        return False
-
     @property
     def launch_index(self):
         if not self.metadata:
--
cgit v1.2.3

From b94c9790e055960fccf3b159d86db85ef37fb34f Mon Sep 17 00:00:00 2001
From: Ben Howard
Date: Thu, 16 Jan 2014 16:32:57 -0700
Subject: Fixed typos

---
 cloudinit/sources/DataSourceSmartOS.py | 2 +-
 cloudinit/sources/__init__.py          | 2 +-
 cloudinit/stages.py                    | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index ccfee931..6593ce6e 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -155,7 +155,7 @@ class DataSourceSmartOS(sources.DataSource):
 
         self.metadata = util.mergemanydict([md, self.metadata])
         self.userdata_raw = ud
-        self.vendordata_raw = vendordata
+        self.vendordata_raw = md['vendordata']
         return True
 
     def device_name_to_device(self, name):
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7e11c1ca..4b3bf62f 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -82,7 +82,7 @@ class DataSource(object):
             return self._filter_xdata(self.userdata)
         return self.userdata
 
-    def get_vendordata(self)
+    def get_vendordata(self):
         if self.vendordata is None:
             self.vendordata = self.ud_proc.process(self.get_vendordata_raw())
         return self.vendordata
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 19fbe706..5dced998 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -514,8 +514,8 @@ class Init(object):
             LOG.debug("vendordata consumption is disabled.")
             return
 
-        enabled = vdc.get('enabled')
-        no_handlers = vdc.get('disabled_handlers', None)
+        enabled = vdcfg.get('enabled')
+        no_handlers = vdcfg.get('disabled_handlers', None)
 
         LOG.debug("vendor data will be consumed. disabled_handlers=%s",
                   no_handlers)
--
cgit v1.2.3
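
After these two commits, whether vendordata runs is decided purely by
cloud-config, and "is there anything to do" is just a check on the raw
blob. A hedged sketch of that gate, loosely modeled on the stages.py
snippet above; the 'vendor_data' config key, the shape of the dict, and
the small is_true() helper are assumptions for illustration, not quoted
from the patches:

    def is_true(val):
        # tiny truthiness helper assumed for the sketch
        return str(val).lower() in ("1", "true", "yes", "on")


    def should_consume_vendordata(cfg, vendordata_raw):
        """Return (consume?, disabled_handlers) for this instance."""
        vdcfg = cfg.get('vendor_data', {})
        if not isinstance(vdcfg, dict):
            # e.g. "vendor_data: false" disables consumption outright
            vdcfg = {'enabled': vdcfg}
        if not is_true(vdcfg.get('enabled', False)):
            return False, []
        if not vendordata_raw:
            # nothing to operate on: the datasource supplied no vendordata
            return False, []
        return True, vdcfg.get('disabled_handlers') or []


    if __name__ == "__main__":
        cfg = {'vendor_data': {'enabled': True,
                               'disabled_handlers': ['shellscript']}}
        print(should_consume_vendordata(cfg, "#!/bin/sh\necho hi\n"))
        print(should_consume_vendordata({}, "#!/bin/sh\necho hi\n"))
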
From 92aa725a284c08be9234bd792227e5896c4b1d1c Mon Sep 17 00:00:00 2001
From: Scott Moser
Date: Thu, 16 Jan 2014 20:11:27 -0500
Subject: DataSourceOpenNebula: parse_shell_config skip 'SECONDS' var if seen

SECONDS is a special variable in bash; it gets set to the time the
shell has been alive. This would cause us to fail randomly (if the
process happened to take more than 1 second, then SECONDS would be
defined).
---
 cloudinit/sources/DataSourceOpenNebula.py          | 2 +-
 tests/unittests/test_datasource/test_opennebula.py | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

(limited to 'cloudinit/sources')

diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
index 07dc25ff..b0464cbb 100644
--- a/cloudinit/sources/DataSourceOpenNebula.py
+++ b/cloudinit/sources/DataSourceOpenNebula.py
@@ -323,7 +323,7 @@ def parse_shell_config(content, keylist=None, bash=None, asuser=None,
     (output, _error) = util.subp(cmd, data=bcmd)
 
     # exclude vars in bash that change on their own or that we used
-    excluded = ("RANDOM", "LINENO", "_", "__v")
+    excluded = ("RANDOM", "LINENO", "SECONDS", "_", "__v")
     preset = {}
     ret = {}
     target = None
diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py
index e1812a88..ce9ee9f4 100644
--- a/tests/unittests/test_datasource/test_opennebula.py
+++ b/tests/unittests/test_datasource/test_opennebula.py
@@ -258,6 +258,14 @@ iface eth0 inet static
 ''')
 
 
+class TestParseShellConfig(MockerTestCase):
+    def test_no_seconds(self):
+        cfg = '\n'.join(["foo=bar", "SECONDS=2", "xx=foo"])
+        # we could test 'sleep 2', but that would make the test run slower.
+        ret = ds.parse_shell_config(cfg);
+        self.assertEqual(ret, {"foo": "bar", "xx": "foo"})
+
+
 def populate_context_dir(path, variables):
     data = "# Context variables generated by OpenNebula\n"
     for (k, v) in variables.iteritems():
--
cgit v1.2.3
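
The fix works because parse_shell_config determines variables by letting
bash evaluate the context and comparing the variable set before and
after, then discarding variables bash mutates on its own. A simplified,
hedged sketch of that idea follows (not the cloud-init implementation;
it assumes bash is available and that the snippet itself prints nothing):

    import subprocess

    # variables bash changes on its own, now including SECONDS
    EXCLUDED = ("RANDOM", "LINENO", "SECONDS", "_", "__v")


    def parse_shell_config(content):
        marker = "___ctx___"
        script = "set\necho %s\n%s\nset\n" % (marker, content)
        out = subprocess.check_output(["bash", "-e"], input=script.encode())
        before, after = out.decode().split(marker + "\n", 1)

        def to_dict(blob):
            # naive 'set' output parsing; good enough for simple k=v values
            d = {}
            for line in blob.splitlines():
                if "=" in line:
                    key, _, val = line.partition("=")
                    d[key] = val
            return d

        pre, post = to_dict(before), to_dict(after)
        return {k: v for k, v in post.items()
                if k not in EXCLUDED and pre.get(k) != v}


    if __name__ == "__main__":
        print(parse_shell_config("foo=bar\nSECONDS=2\nxx=foo\n"))
        # prints {'foo': 'bar', 'xx': 'foo'} -- SECONDS is filtered out
        # (a stray bash bookkeeping variable may also appear; the real
        # implementation is stricter about what it keeps)
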