From 66aa9826b818c3478516104b38039fecbd717b6b Mon Sep 17 00:00:00 2001 From: Paul Querna Date: Thu, 9 Jan 2014 21:14:51 +0000 Subject: Allow a Config Drive source on a partition, if the label matches. --- tests/unittests/test_datasource/test_configdrive.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'tests/unittests') diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py index d5935294..3c1e8add 100644 --- a/tests/unittests/test_datasource/test_configdrive.py +++ b/tests/unittests/test_datasource/test_configdrive.py @@ -285,10 +285,11 @@ class TestConfigDriveDataSource(MockerTestCase): self.assertEqual(["/dev/vdb", "/dev/zdd"], ds.find_candidate_devs()) - # verify that partitions are not considered + # verify that partitions are considered, but only if they have a label. devs_with_answers = {"TYPE=vfat": ["/dev/sda1"], "TYPE=iso9660": [], "LABEL=config-2": ["/dev/vdb3"]} - self.assertEqual([], ds.find_candidate_devs()) + self.assertEqual(["/dev/vdb3"], + ds.find_candidate_devs()) finally: util.find_devs_with = orig_find_devs_with -- cgit v1.2.3 From ba84f51f0143a8ca1ca5113ae932505ce1bfe5e5 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 23 Jan 2014 14:41:09 -0800 Subject: Skip retry and continued fetch of userdata when NOT_FOUND When a 404 http code comes back from the fetching of ec2 data, instead of retrying immediatly stop the fetching process and in the userdata fetching function handle this case as a special case of no userdata being fetched (an empty string in this case). --- cloudinit/ec2_utils.py | 27 ++++++++++++++++++++++++--- cloudinit/url_helper.py | 17 +++++++++-------- cloudinit/util.py | 5 +++-- tests/unittests/test_datasource/test_maas.py | 3 ++- tests/unittests/test_ec2_util.py | 8 ++++++++ 5 files changed, 46 insertions(+), 14 deletions(-) (limited to 'tests/unittests') diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py index 92a22747..cd94ad4c 100644 --- a/cloudinit/ec2_utils.py +++ b/cloudinit/ec2_utils.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +import httplib from urlparse import (urlparse, urlunparse) import functools @@ -23,9 +24,11 @@ import json import urllib from cloudinit import log as logging +from cloudinit import url_helper from cloudinit import util LOG = logging.getLogger(__name__) +SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND]) def maybe_json_object(text): @@ -138,20 +141,38 @@ class MetadataMaterializer(object): return joined +def _skip_retry_on_codes(status_codes, request_args, cause): + """Returns if a request should retry based on a given set of codes that + case retrying to be stopped/skipped. + """ + if cause.code in status_codes: + return False + return True + + def get_instance_userdata(api_version='latest', metadata_address='http://169.254.169.254', ssl_details=None, timeout=5, retries=5): ud_url = combine_url(metadata_address, api_version) ud_url = combine_url(ud_url, 'user-data') + user_data = '' try: + # It is ok for userdata to not exist (thats why we are stopping if + # NOT_FOUND occurs) and just in that case returning an empty string. 
+ exception_cb = functools.partial(_skip_retry_on_codes, + SKIP_USERDATA_CODES) response = util.read_file_or_url(ud_url, ssl_details=ssl_details, timeout=timeout, - retries=retries) - return str(response) + retries=retries, + exception_cb=exception_cb) + user_data = str(response) + except url_helper.UrlError as e: + if e.code not in SKIP_USERDATA_CODES: + util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) except Exception: util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) - return '' + return user_data def get_instance_metadata(api_version='latest', diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 19a30409..42edf9cf 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -103,7 +103,7 @@ class UrlError(IOError): def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None, headers_cb=None, ssl_details=None, - check_status=True, allow_redirects=True): + check_status=True, allow_redirects=True, exception_cb=None): url = _cleanurl(url) req_args = { 'url': url, @@ -163,14 +163,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, # Handle retrying ourselves since the built-in support # doesn't handle sleeping between tries... for i in range(0, manual_tries): + req_args['headers'] = headers_cb(url) + filtered_req_args = {} + for (k, v) in req_args.items(): + if k == 'data': + continue + filtered_req_args[k] = v try: - req_args['headers'] = headers_cb(url) - filtered_req_args = {} - for (k, v) in req_args.items(): - if k == 'data': - continue - filtered_req_args[k] = v - LOG.debug("[%s/%s] open '%s' with %s configuration", i, manual_tries, url, filtered_req_args) @@ -196,6 +195,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, # ssl exceptions are not going to get fixed by waiting a # few seconds break + if exception_cb and not exception_cb(filtered_req_args, e): + break if i + 1 < manual_tries and sec_between > 0: LOG.debug("Please wait %s seconds while we wait to try again", sec_between) diff --git a/cloudinit/util.py b/cloudinit/util.py index 77f9ab36..e1263f47 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -691,7 +691,7 @@ def fetch_ssl_details(paths=None): def read_file_or_url(url, timeout=5, retries=10, headers=None, data=None, sec_between=1, ssl_details=None, - headers_cb=None): + headers_cb=None, exception_cb=None): url = url.lstrip() if url.startswith("/"): url = "file://%s" % url @@ -708,7 +708,8 @@ def read_file_or_url(url, timeout=5, retries=10, headers_cb=headers_cb, data=data, sec_between=sec_between, - ssl_details=ssl_details) + ssl_details=ssl_details, + exception_cb=exception_cb) def load_yaml(blob, default=None, allowed=(dict,)): diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py index 2007a6df..ebfb24da 100644 --- a/tests/unittests/test_datasource/test_maas.py +++ b/tests/unittests/test_datasource/test_maas.py @@ -119,7 +119,8 @@ class TestMAASDataSource(mocker.MockerTestCase): mock_request(url, headers=None, timeout=mocker.ANY, data=mocker.ANY, sec_between=mocker.ANY, ssl_details=mocker.ANY, retries=mocker.ANY, - headers_cb=my_headers_cb) + headers_cb=my_headers_cb, + exception_cb=mocker.ANY) resp = valid.get(key) self.mocker.result(util.StringResponse(resp)) self.mocker.replay() diff --git a/tests/unittests/test_ec2_util.py b/tests/unittests/test_ec2_util.py index dd588aca..957dc3f2 100644 --- a/tests/unittests/test_ec2_util.py +++ b/tests/unittests/test_ec2_util.py @@ -33,6 +33,14 @@ 
class TestEc2Util(helpers.TestCase): userdata = eu.get_instance_userdata(self.VERSION, retries=0) self.assertEquals('', userdata) + @hp.activate + def test_userdata_fetch_fail_server_not_found(self): + hp.register_uri(hp.GET, + 'http://169.254.169.254/%s/user-data' % (self.VERSION), + status=404) + userdata = eu.get_instance_userdata(self.VERSION) + self.assertEquals('', userdata) + @hp.activate def test_metadata_fetch_no_keys(self): base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION) -- cgit v1.2.3 From fb55c1079375454d2a2a2f82c6c1812759eeb1f1 Mon Sep 17 00:00:00 2001 From: Ben Howard Date: Fri, 24 Jan 2014 12:29:04 -0700 Subject: Fixes for SmartOS datasource (LP: #1272115): 1. fixed conflation of user-data and cloud-init user-data. Cloud-init user-data is now namespaced as 'cloud-init:user-data'. 2. user-scripts are now fetched from the meta-data service each boot and executed as in the scripts directory 3. datacenter name is now namespaced as sdc:datacenter 4. user-scripts should be shebanged if there is no file magic --- cloudinit/sources/DataSourceSmartOS.py | 45 +++++++- cloudinit/util.py | 72 ++++++++++++ doc/sources/smartos/README.rst | 92 ++++++++++++--- tests/unittests/test_datasource/test_smartos.py | 145 ++++++++++++++++++++++-- 4 files changed, 322 insertions(+), 32 deletions(-) (limited to 'tests/unittests') diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 6593ce6e..6bd4a5c7 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -25,7 +25,9 @@ # requests on the console. For example, to get the hostname, you # would send "GET hostname" on /dev/ttyS1. # - +# Certain behavior is defined by the DataDictionary +# http://us-east.manta.joyent.com/jmc/public/mdata/datadict.html +# Comments with "@datadictionary" are snippets of the definition import base64 from cloudinit import log as logging @@ -43,10 +45,11 @@ SMARTOS_ATTRIB_MAP = { 'local-hostname': ('hostname', True), 'public-keys': ('root_authorized_keys', True), 'user-script': ('user-script', False), - 'user-data': ('user-data', False), + 'legacy-user-data': ('user-data', False), + 'user-data': ('cloud-init:user-data', False), 'iptables_disable': ('iptables_disable', True), 'motd_sys_info': ('motd_sys_info', True), - 'availability_zone': ('datacenter_name', True), + 'availability_zone': ('sdc:datacenter_name', True), 'vendordata': ('sdc:operator-script', False), } @@ -71,7 +74,11 @@ BUILTIN_DS_CONFIG = { 'seed_timeout': 60, 'no_base64_decode': ['root_authorized_keys', 'motd_sys_info', - 'iptables_disable'], + 'iptables_disable', + 'user-data', + 'user-script', + 'sdc:datacenter_name', + ], 'base64_keys': [], 'base64_all': False, 'disk_aliases': {'ephemeral0': '/dev/vdb'}, @@ -88,6 +95,11 @@ BUILTIN_CLOUD_CONFIG = { 'device': 'ephemeral0'}], } +# @datadictionary: this is legacy path for placing files from metadata +# per the SmartOS location. 
It is not preferable, but is done for +# legacy reasons +LEGACY_USER_D = "/var/db" + class DataSourceSmartOS(sources.DataSource): def __init__(self, sys_cfg, distro, paths): @@ -107,6 +119,9 @@ class DataSourceSmartOS(sources.DataSource): self.smartos_no_base64 = self.ds_cfg.get('no_base64_decode') self.b64_keys = self.ds_cfg.get('base64_keys') self.b64_all = self.ds_cfg.get('base64_all') + self.script_base_d = os.path.join(self.paths.get_cpath("scripts")) + self.user_script_d = os.path.join(self.paths.get_cpath("scripts"), + 'per-boot') def __str__(self): root = sources.DataSource.__str__(self) @@ -144,14 +159,32 @@ class DataSourceSmartOS(sources.DataSource): smartos_noun, strip = attribute md[ci_noun] = self.query(smartos_noun, strip=strip) + # @datadictionary: This key has no defined format, but its value + # is written to the file /var/db/mdata-user-data on each boot prior + # to the phase that runs user-script. This file is not to be executed. + # This allows a configuration file of some kind to be injected into + # the machine to be consumed by the user-script when it runs. + u_script = md.get('user-script') + u_script_f = "%s/99_user_script" % self.user_script_d + u_script_l = "%s/user-script" % LEGACY_USER_D + util.write_content(u_script, u_script_f, link=u_script_l, + executable=True) + + # @datadictionary: This key may contain a program that is written + # to a file in the filesystem of the guest on each boot and then + # executed. It may be of any format that would be considered + # executable in the guest instance. + u_data = md.get('legacy-user-data') + u_data_f = "%s/mdata-user-data" % LEGACY_USER_D + util.write_content(u_data, u_data_f) + + # Handle the cloud-init regular meta if not md['local-hostname']: md['local-hostname'] = system_uuid ud = None if md['user-data']: ud = md['user-data'] - elif md['user-script']: - ud = md['user-script'] self.metadata = util.mergemanydict([md, self.metadata]) self.userdata_raw = ud diff --git a/cloudinit/util.py b/cloudinit/util.py index 77f9ab36..5f64cb69 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1904,3 +1904,75 @@ def expand_dotted_devname(dotted): return toks else: return (dotted, None) + + +def write_executable_content(script, script_f): + """ + This writes executable content and ensures that the shebang + exists. + """ + write_file(script_f, script, mode=0700) + try: + cmd = ["file", "--brief", "--mime-type", script_f] + (f_type, _err) = subp(cmd) + + LOG.debug("script %s mime type is %s" % (script_f, f_type)) + + # if the magic is text/plain, re-write with the shebang + if f_type.strip() == "text/plain": + with open(script_f, 'w') as f: + f.write("#!/bin/bash\n") + f.write(script) + LOG.debug("added shebang to file %s" % script_f) + + except ProcessExecutionError as e: + logexc(LOG, "Failed to identify script type for %s" % script_f, e) + return False + + except IOError as e: + logexc(LOG, "Failed to add shebang to file %s" % script_f, e) + return False + + return True + + +def write_content(content, content_f, link=None, + executable=False): + """ + Write the content to content_f. Under the following rules: + 1. Backup previous content_f + 2. Write the contente + 3. If no content, remove the file + 4. 
If there is a link, create it + + @param content: what to write + @param content_f: the file name + @param backup_d: the directory to save the backup at + @param link: if defined, location to create a symlink to + @param executable: is the file executable + """ + + if content: + if not executable: + write_file(content_f, content, mode=0400) + else: + w = write_executable_content(content, content_f) + if not w: + LOG.debug("failed to write file to %s" % content_f) + return False + + if not content and os.path.exists(content_f): + os.unlink(content_f) + + if link: + try: + if os.path.islink(link): + os.unlink(link) + if content and os.path.exists(content_f): + ensure_dir(os.path.dirname(link)) + os.symlink(content_f, link) + except IOError as e: + logexc(LOG, "failed establishing content link", e) + return False + + return True diff --git a/doc/sources/smartos/README.rst b/doc/sources/smartos/README.rst index 8b63e520..e63f311f 100644 --- a/doc/sources/smartos/README.rst +++ b/doc/sources/smartos/README.rst @@ -16,11 +16,35 @@ responds with the status and if "SUCCESS" returns until a single ".\n". New versions of the SmartOS tooling will include support for base64 encoded data. -Userdata --------- - -In SmartOS parlance, user-data is a actually meta-data. This userdata can be -provided as key-value pairs. +Meta-data channels +------------------ + +Cloud-init supports three modes of delivering user/meta-data via the flexible +channels of SmartOS. + +* user-data is written to /var/db/user-data + - per the spec, user-data is for consumption by the end-user, not provisioning + tools + - cloud-init entirely ignores this channel other than writting it to disk + - removal of the meta-data key means that /var/db/user-data gets removed + - a backup of previous meta-data is maintained as /var/db/user-data. + - is the epoch time when cloud-init ran + +* user-script is written to /var/lib/cloud/scripts/per-boot/99_user_data + - this is executed each boot + - a link is created to /var/db/user-script + - previous versions of the user-script is written to + /var/lib/cloud/scripts/per-boot.backup/99_user_script.. + - is the epoch time when cloud-init ran. + - when the 'user-script' meta-data key goes missing, the user-script is + removed from the file system, although a backup is maintained. + - if the script is not shebanged (i.e. starts with #!), then + or is not an executable, cloud-init will add a shebang of "#!/bin/bash" + +* cloud-init:user-data is treated like on other Clouds. + - this channel is used for delivering _all_ cloud-init instructions + - scripts delivered over this channel must be well formed (i.e. must have + a shebang) Cloud-init supports reading the traditional meta-data fields supported by the SmartOS tools. These are: @@ -32,19 +56,49 @@ SmartOS tools. These are: Note: At this time iptables_disable and enable_motd_sys_info are read but are not actioned. -user-script ------------ - -SmartOS traditionally supports sending over a user-script for execution at the -rc.local level. Cloud-init supports running user-scripts as if they were -cloud-init user-data. In this sense, anything with a shell interpreter -directive will run. - -user-data and user-script -------------------------- - -In the event that a user defines the meta-data key of "user-data" it will -always supersede any user-script data. This is for consistency. +disabling user-script +--------------------- + +Cloud-init uses the per-boot script functionality to handle the execution +of the user-script. 
If you want to prevent this use a cloud-config of: + +#cloud-config +cloud_final_modules: + - scripts-per-once + - scripts-per-instance + - scripts-user + - ssh-authkey-fingerprints + - keys-to-console + - phone-home + - final-message + - power-state-change + +Alternatively you can use the json patch method +#cloud-config-jsonp +[ + { "op": "replace", + "path": "/cloud_final_modules", + "value": ["scripts-per-once", + "scripts-per-instance", + "scripts-user", + "ssh-authkey-fingerprints", + "keys-to-console", + "phone-home", + "final-message", + "power-state-change"] + } +] + +The default cloud-config includes "script-per-boot". Cloud-init will still +ingest and write the user-data but will not execute it, when you disable +the per-boot script handling. + +Note: Unless you have an explicit use-case, it is recommended that you not + disable the per-boot script execution, especially if you are using + any of the life-cycle management features of SmartOS. + +The cloud-config needs to be delivered over the cloud-init:user-data channel +in order for cloud-init to ingest it. base64 ------ @@ -54,6 +108,8 @@ are provided by SmartOS: * root_authorized_keys * enable_motd_sys_info * iptables_disable + * user-data + * user-script This list can be changed through system config of variable 'no_base64_decode'. diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py index 956767d8..ae427bb5 100644 --- a/tests/unittests/test_datasource/test_smartos.py +++ b/tests/unittests/test_datasource/test_smartos.py @@ -27,6 +27,10 @@ from cloudinit import helpers from cloudinit.sources import DataSourceSmartOS from mocker import MockerTestCase +import os +import os.path +import re +import stat import uuid MOCK_RETURNS = { @@ -35,7 +39,11 @@ MOCK_RETURNS = { 'disable_iptables_flag': None, 'enable_motd_sys_info': None, 'test-var1': 'some data', - 'user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']), + 'cloud-init:user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']), + 'sdc:datacenter_name': 'somewhere2', + 'sdc:operator-script': '\n'.join(['bin/true', '']), + 'user-data': '\n'.join(['something', '']), + 'user-script': '\n'.join(['/bin/true', '']), } DMI_DATA_RETURN = (str(uuid.uuid4()), 'smartdc') @@ -101,6 +109,7 @@ class TestSmartOSDataSource(MockerTestCase): def setUp(self): # makeDir comes from MockerTestCase self.tmp = self.makeDir() + self.legacy_user_d = self.makeDir() # patch cloud_dir, so our 'seed_dir' is guaranteed empty self.paths = helpers.Paths({'cloud_dir': self.tmp}) @@ -138,6 +147,7 @@ class TestSmartOSDataSource(MockerTestCase): sys_cfg['datasource'] = sys_cfg.get('datasource', {}) sys_cfg['datasource']['SmartOS'] = ds_cfg + self.apply_patches([(mod, 'LEGACY_USER_D', self.legacy_user_d)]) self.apply_patches([(mod, 'get_serial', _get_serial)]) self.apply_patches([(mod, 'dmi_data', _dmi_data)]) dsrc = mod.DataSourceSmartOS(sys_cfg, distro=None, @@ -194,7 +204,7 @@ class TestSmartOSDataSource(MockerTestCase): # metadata provided base64_all of true my_returns = MOCK_RETURNS.copy() my_returns['base64_all'] = "true" - for k in ('hostname', 'user-data'): + for k in ('hostname', 'cloud-init:user-data'): my_returns[k] = base64.b64encode(my_returns[k]) dsrc = self._get_ds(mockdata=my_returns) @@ -202,7 +212,7 @@ class TestSmartOSDataSource(MockerTestCase): self.assertTrue(ret) self.assertEquals(MOCK_RETURNS['hostname'], dsrc.metadata['local-hostname']) - self.assertEquals(MOCK_RETURNS['user-data'], + 
self.assertEquals(MOCK_RETURNS['cloud-init:user-data'], dsrc.userdata_raw) self.assertEquals(MOCK_RETURNS['root_authorized_keys'], dsrc.metadata['public-keys']) @@ -213,9 +223,9 @@ class TestSmartOSDataSource(MockerTestCase): def test_b64_userdata(self): my_returns = MOCK_RETURNS.copy() - my_returns['b64-user-data'] = "true" + my_returns['b64-cloud-init:user-data'] = "true" my_returns['b64-hostname'] = "true" - for k in ('hostname', 'user-data'): + for k in ('hostname', 'cloud-init:user-data'): my_returns[k] = base64.b64encode(my_returns[k]) dsrc = self._get_ds(mockdata=my_returns) @@ -223,7 +233,8 @@ class TestSmartOSDataSource(MockerTestCase): self.assertTrue(ret) self.assertEquals(MOCK_RETURNS['hostname'], dsrc.metadata['local-hostname']) - self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw) + self.assertEquals(MOCK_RETURNS['cloud-init:user-data'], + dsrc.userdata_raw) self.assertEquals(MOCK_RETURNS['root_authorized_keys'], dsrc.metadata['public-keys']) @@ -238,13 +249,131 @@ class TestSmartOSDataSource(MockerTestCase): self.assertTrue(ret) self.assertEquals(MOCK_RETURNS['hostname'], dsrc.metadata['local-hostname']) - self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw) + self.assertEquals(MOCK_RETURNS['cloud-init:user-data'], + dsrc.userdata_raw) def test_userdata(self): dsrc = self._get_ds(mockdata=MOCK_RETURNS) ret = dsrc.get_data() self.assertTrue(ret) - self.assertEquals(MOCK_RETURNS['user-data'], dsrc.userdata_raw) + self.assertEquals(MOCK_RETURNS['user-data'], + dsrc.metadata['legacy-user-data']) + self.assertEquals(MOCK_RETURNS['cloud-init:user-data'], + dsrc.userdata_raw) + + def test_sdc_scripts(self): + dsrc = self._get_ds(mockdata=MOCK_RETURNS) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEquals(MOCK_RETURNS['user-script'], + dsrc.metadata['user-script']) + + legacy_script_f = "%s/user-script" % self.legacy_user_d + self.assertTrue(os.path.exists(legacy_script_f)) + self.assertTrue(os.path.islink(legacy_script_f)) + user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:] + self.assertEquals(user_script_perm, '700') + + def test_scripts_shebanged(self): + dsrc = self._get_ds(mockdata=MOCK_RETURNS) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEquals(MOCK_RETURNS['user-script'], + dsrc.metadata['user-script']) + + legacy_script_f = "%s/user-script" % self.legacy_user_d + self.assertTrue(os.path.exists(legacy_script_f)) + self.assertTrue(os.path.islink(legacy_script_f)) + shebang = None + with open(legacy_script_f, 'r') as f: + shebang = f.readlines()[0].strip() + self.assertEquals(shebang, "#!/bin/bash") + user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:] + self.assertEquals(user_script_perm, '700') + + def test_scripts_shebang_not_added(self): + """ + Test that the SmartOS requirement that plain text scripts + are executable. This test makes sure that plain texts scripts + with out file magic have it added appropriately by cloud-init. 
+ """ + + my_returns = MOCK_RETURNS.copy() + my_returns['user-script'] = '\n'.join(['#!/usr/bin/perl', + 'print("hi")', '']) + + dsrc = self._get_ds(mockdata=my_returns) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEquals(my_returns['user-script'], + dsrc.metadata['user-script']) + + legacy_script_f = "%s/user-script" % self.legacy_user_d + self.assertTrue(os.path.exists(legacy_script_f)) + self.assertTrue(os.path.islink(legacy_script_f)) + shebang = None + with open(legacy_script_f, 'r') as f: + shebang = f.readlines()[0].strip() + self.assertEquals(shebang, "#!/usr/bin/perl") + + def test_scripts_removed(self): + """ + Since SmartOS requires that the user script is fetched + each boot, we want to make sure that the information + is backed-up for user-review later. + + This tests the behavior of when a script is removed. It makes + sure that a) the previous script is backed-up; and 2) that + there is no script remaining. + """ + + script_d = os.path.join(self.tmp, "scripts", "per-boot") + os.makedirs(script_d) + + test_script_f = "%s/99_user_script" % script_d + with open(test_script_f, 'w') as f: + f.write("TEST DATA") + + my_returns = MOCK_RETURNS.copy() + del my_returns['user-script'] + + dsrc = self._get_ds(mockdata=my_returns) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertFalse(dsrc.metadata['user-script']) + self.assertFalse(os.path.exists(test_script_f)) + + def test_userdata_removed(self): + """ + User-data in the SmartOS world is supposed to be written to a file + each and every boot. This tests to make sure that in the event the + legacy user-data is removed, the existing user-data is backed-up and + there is no /var/db/user-data left. + """ + + user_data_f = "%s/mdata-user-data" % self.legacy_user_d + with open(user_data_f, 'w') as f: + f.write("PREVIOUS") + + my_returns = MOCK_RETURNS.copy() + del my_returns['user-data'] + + dsrc = self._get_ds(mockdata=my_returns) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertFalse(dsrc.metadata.get('legacy-user-data')) + + found_new = False + for root, _dirs, files in os.walk(self.legacy_user_d): + for name in files: + name_f = os.path.join(root, name) + permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:] + if re.match(r'.*\/mdata-user-data$', name_f): + found_new = True + print name_f + self.assertEquals(permissions, '400') + + self.assertFalse(found_new) def test_disable_iptables_flag(self): dsrc = self._get_ds(mockdata=MOCK_RETURNS) -- cgit v1.2.3 From 4919cd124e57e82ecfcdaa9bfcbc051c719708e6 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 24 Jan 2014 15:29:09 -0500 Subject: pylint and long line fixes. This fixes up many long lines to be < 80 chars and some other pylint issues. pylint 1.1 (in trusty) is now complaining about the lazy logging, so I'll clean that up when I touch things. 
--- cloudinit/config/cc_growpart.py | 3 +- cloudinit/distros/freebsd.py | 47 +++++++++++----------- cloudinit/netinfo.py | 30 ++++++-------- cloudinit/user_data.py | 3 +- cloudinit/util.py | 12 +++--- .../unittests/test_datasource/test_configdrive.py | 2 +- 6 files changed, 49 insertions(+), 48 deletions(-) (limited to 'tests/unittests') diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py index b81951ad..f52c41f0 100644 --- a/cloudinit/config/cc_growpart.py +++ b/cloudinit/config/cc_growpart.py @@ -223,7 +223,8 @@ def resize_devices(resizer, devices): "stat of '%s' failed: %s" % (blockdev, e),)) continue - if not stat.S_ISBLK(statret.st_mode) and not stat.S_ISCHR(statret.st_mode): + if (not stat.S_ISBLK(statret.st_mode) and + not stat.S_ISCHR(statret.st_mode)): info.append((devent, RESIZE.SKIPPED, "device '%s' not a block device" % blockdev,)) continue diff --git a/cloudinit/distros/freebsd.py b/cloudinit/distros/freebsd.py index d28860eb..4c0c6d29 100644 --- a/cloudinit/distros/freebsd.py +++ b/cloudinit/distros/freebsd.py @@ -16,15 +16,14 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +import re + from cloudinit import distros from cloudinit import helpers from cloudinit import log as logging -from cloudinit import netinfo from cloudinit import ssh_util from cloudinit import util -from cloudinit.settings import PER_INSTANCE - LOG = logging.getLogger(__name__) @@ -39,26 +38,27 @@ class Distro(distros.Distro): # Updates a key in /etc/rc.conf. def updatercconf(self, key, value): - LOG.debug("updatercconf: %s => %s" % (key, value)) + LOG.debug("updatercconf: %s => %s", key, value) conf = self.loadrcconf() configchanged = False for item in conf: if item == key and conf[item] != value: conf[item] = value - LOG.debug("[rc.conf]: Value %s for key %s needs to be changed" % (value, key)) + LOG.debug("[rc.conf]: Value %s for key %s needs to be changed", + value, key) configchanged = True if configchanged: LOG.debug("Writing new /etc/rc.conf file") - with open('/etc/rc.conf', 'w') as file: + with open('/etc/rc.conf', 'w') as fp: for keyval in conf.items(): - file.write("%s=%s\n" % keyval) + fp.write("%s=%s\n" % keyval) # Load the contents of /etc/rc.conf and store all keys in a dict. 
def loadrcconf(self): conf = {} - with open("/etc/rc.conf") as file: - for line in file: + with open("/etc/rc.conf") as fp: + for line in fp: tok = line.split('=') conf[tok[0]] = tok[1].rstrip() return conf @@ -75,7 +75,7 @@ class Distro(distros.Distro): sys_hostname = self._read_hostname() return ('rc.conf', sys_hostname) - def _read_hostname(self, default=None): + def _read_hostname(self, filename, default=None): hostname = None try: hostname = self.readrcconf('hostname') @@ -90,17 +90,17 @@ class Distro(distros.Distro): return fqdn return hostname - def _write_hostname(self, your_hostname, out_fn): - self.updatercconf('hostname', your_hostname) + def _write_hostname(self, hostname, filename): + self.updatercconf('hostname', hostname) def create_group(self, name, members): group_add_cmd = ['pw', '-n', name] if util.is_group(name): - LOG.warn("Skipping creation of existing group '%s'" % name) + LOG.warn("Skipping creation of existing group '%s'", name) else: try: util.subp(group_add_cmd) - LOG.info("Created new group %s" % name) + LOG.info("Created new group %s", name) except Exception: util.logexc("Failed to create group %s", name) @@ -111,11 +111,11 @@ class Distro(distros.Distro): "; user does not exist.", member, name) continue util.subp(['pw', 'usermod', '-n', name, '-G', member]) - LOG.info("Added user '%s' to group '%s'" % (member, name)) + LOG.info("Added user '%s' to group '%s'", member, name) def add_user(self, name, **kwargs): if util.is_user(name): - LOG.info("User %s already exists, skipping." % name) + LOG.info("User %s already exists, skipping.", name) return False adduser_cmd = ['pw', 'useradd', '-n', name] @@ -170,7 +170,7 @@ class Distro(distros.Distro): raise e # TODO: - def set_passwd(self, name, **kwargs): + def set_passwd(self, user, passwd, hashed=False): return False def lock_passwd(self, name): @@ -182,7 +182,7 @@ class Distro(distros.Distro): # TODO: def write_sudo_rules(self, name, rules, sudo_file=None): - LOG.debug("[write_sudo_rules] Name: %s" % name) + LOG.debug("[write_sudo_rules] Name: %s", name) def create_user(self, name, **kwargs): self.add_user(name, **kwargs) @@ -217,7 +217,8 @@ class Distro(distros.Distro): origconf = open(loginconf, 'r') for line in origconf: - newconf.write(re.sub('^default:', r'default:lang=%s:' % locale, line)) + newconf.write(re.sub(r'^default:', + r'default:lang=%s:' % locale, line)) newconf.close() origconf.close() # Make a backup of login.conf. @@ -233,14 +234,14 @@ class Distro(distros.Distro): util.logexc("Failed to apply locale %s", locale) copyfile(backupconf, loginconf) - def install_packages(): + def install_packages(self, pkglist): return - def package_command(): + def package_command(self, cmd, args=None, pkgs=None): return - def set_timezone(): + def set_timezone(self, tz): return - def update_package_sources(): + def update_package_sources(self): return diff --git a/cloudinit/netinfo.py b/cloudinit/netinfo.py index 63f720e4..ac3c011f 100644 --- a/cloudinit/netinfo.py +++ b/cloudinit/netinfo.py @@ -44,7 +44,7 @@ def netdev_info(empty=""): # If the output of ifconfig doesn't contain the required info in the # obvious place, use a regex filter to be sure. 
elif len(toks) > 1: - if re.search("flags=\d+ %r" % (link, source)) + LOG.debug("Creating symbolic link from %r => %r", link, source) os.symlink(source, link) @@ -1444,7 +1445,8 @@ def uptime(): size = ctypes.c_size_t() buf = ctypes.c_int() size.value = ctypes.sizeof(buf) - libc.sysctlbyname("kern.boottime", ctypes.byref(buf), ctypes.byref(size), None, 0) + libc.sysctlbyname("kern.boottime", ctypes.byref(buf), + ctypes.byref(size), None, 0) now = time.time() bootup = buf.value uptime_str = now - bootup @@ -1793,7 +1795,7 @@ def parse_mount(path): (mountoutput, _err) = subp("mount") mount_locs = mountoutput.splitlines() for line in mount_locs: - m = re.search('^(/dev/[\S]+) on (/.*) \((.+), .+, (.+)\)$', line) + m = re.search(r'^(/dev/[\S]+) on (/.*) \((.+), .+, (.+)\)$', line) devpth = m.group(1) mount_point = m.group(2) fs_type = m.group(3) diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py index 3c1e8add..1f4a0a0b 100644 --- a/tests/unittests/test_datasource/test_configdrive.py +++ b/tests/unittests/test_datasource/test_configdrive.py @@ -285,7 +285,7 @@ class TestConfigDriveDataSource(MockerTestCase): self.assertEqual(["/dev/vdb", "/dev/zdd"], ds.find_candidate_devs()) - # verify that partitions are considered, but only if they have a label. + # verify that partitions are considered, that have correct label. devs_with_answers = {"TYPE=vfat": ["/dev/sda1"], "TYPE=iso9660": [], "LABEL=config-2": ["/dev/vdb3"]} self.assertEqual(["/dev/vdb3"], -- cgit v1.2.3 From 0be4922d92e874b2e3300bdde65829cdb6569524 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 24 Jan 2014 22:31:28 -0500 Subject: read_file_or_url: raise UrlError with 404 on ENOENT This makes it easier to call read_file_or_url and handle file or url errors. Now read_file_or_url will raise a UrlError in either case on errors. --- cloudinit/url_helper.py | 28 ++++++++++++++++++++ cloudinit/util.py | 38 ++++++++-------------------- tests/unittests/test__init__.py | 6 ++--- tests/unittests/test_datasource/test_maas.py | 2 +- 4 files changed, 42 insertions(+), 32 deletions(-) (limited to 'tests/unittests') diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 227983f3..97ed75ad 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -20,6 +20,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +import httplib import time import requests @@ -32,6 +33,8 @@ from cloudinit import version LOG = logging.getLogger(__name__) +NOT_FOUND = httplib.NOT_FOUND + # Check if requests has ssl support (added in requests >= 0.8.8) SSL_ENABLED = False CONFIG_ENABLED = False # This was added in 0.7 (but taken out in >=1.0) @@ -58,6 +61,31 @@ def _cleanurl(url): return urlunparse(parsed_url) +# Made to have same accessors as UrlResponse so that the +# read_file_or_url can return this or that object and the +# 'user' of those objects will not need to know the difference. 
+class StringResponse(object): + def __init__(self, contents, code=200): + self.code = code + self.headers = {} + self.contents = contents + self.url = None + + def ok(self, *args, **kwargs): # pylint: disable=W0613 + if self.code != 200: + return False + return True + + def __str__(self): + return self.contents + + +class FileResponse(StringResponse): + def __init__(self, path, contents, code=200): + StringResponse.__init__(self, contents, code=code) + self.url = path + + class UrlResponse(object): def __init__(self, response): self._response = response diff --git a/cloudinit/util.py b/cloudinit/util.py index d350ba08..b3332acd 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -74,31 +74,6 @@ FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters) CONTAINER_TESTS = ['running-in-container', 'lxc-is-container'] -# Made to have same accessors as UrlResponse so that the -# read_file_or_url can return this or that object and the -# 'user' of those objects will not need to know the difference. -class StringResponse(object): - def __init__(self, contents, code=200): - self.code = code - self.headers = {} - self.contents = contents - self.url = None - - def ok(self, *args, **kwargs): # pylint: disable=W0613 - if self.code != 200: - return False - return True - - def __str__(self): - return self.contents - - -class FileResponse(StringResponse): - def __init__(self, path, contents, code=200): - StringResponse.__init__(self, contents, code=code) - self.url = path - - class ProcessExecutionError(IOError): MESSAGE_TMPL = ('%(description)s\n' @@ -651,8 +626,8 @@ def read_optional_seed(fill, base="", ext="", timeout=5): fill['user-data'] = ud fill['meta-data'] = md return True - except IOError as e: - if e.errno == errno.ENOENT: + except url_helper.UrlError as e: + if e.code == url_helper.NOT_FOUND: return False raise @@ -699,7 +674,14 @@ def read_file_or_url(url, timeout=5, retries=10, if data: LOG.warn("Unable to post data to file resource %s", url) file_path = url[len("file://"):] - return FileResponse(file_path, contents=load_file(file_path)) + try: + contents = load_file(file_path) + except IOError as e: + code = e.errno + if e.errno == errno.ENOENT: + code = url_helper.NOT_FOUND + raise url_helper.UrlError(cause=e, code=code, headers=None) + return url_helper.FileResponse(file_path, contents=contents) else: return url_helper.readurl(url, timeout=timeout, diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py index b4b20e51..8c41c1ca 100644 --- a/tests/unittests/test__init__.py +++ b/tests/unittests/test__init__.py @@ -196,7 +196,7 @@ class TestCmdlineUrl(MockerTestCase): mock_readurl = self.mocker.replace(url_helper.readurl, passthrough=False) mock_readurl(url, ARGS, KWARGS) - self.mocker.result(util.StringResponse(payload)) + self.mocker.result(url_helper.StringResponse(payload)) self.mocker.replay() self.assertEqual((key, url, None), @@ -212,7 +212,7 @@ class TestCmdlineUrl(MockerTestCase): mock_readurl = self.mocker.replace(url_helper.readurl, passthrough=False) mock_readurl(url, ARGS, KWARGS) - self.mocker.result(util.StringResponse(payload)) + self.mocker.result(url_helper.StringResponse(payload)) self.mocker.replay() self.assertEqual((key, url, payload), @@ -225,7 +225,7 @@ class TestCmdlineUrl(MockerTestCase): cmdline = "ro %s=%s bar=1" % (key, url) self.mocker.replace(url_helper.readurl, passthrough=False) - self.mocker.result(util.StringResponse("")) + self.mocker.result(url_helper.StringResponse("")) self.mocker.replay() self.assertEqual((None, None, 
None), diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py index ebfb24da..bd5d23fd 100644 --- a/tests/unittests/test_datasource/test_maas.py +++ b/tests/unittests/test_datasource/test_maas.py @@ -122,7 +122,7 @@ class TestMAASDataSource(mocker.MockerTestCase): headers_cb=my_headers_cb, exception_cb=mocker.ANY) resp = valid.get(key) - self.mocker.result(util.StringResponse(resp)) + self.mocker.result(url_helper.StringResponse(resp)) self.mocker.replay() (userdata, metadata) = DataSourceMAAS.read_maas_seed_url(my_seed, -- cgit v1.2.3 From f405a5283ede1e0af09eb8bf0dbe5046680641b5 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 27 Jan 2014 13:03:55 -0500 Subject: populate_dir: only makedir if it doesn't exist --- tests/unittests/helpers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'tests/unittests') diff --git a/tests/unittests/helpers.py b/tests/unittests/helpers.py index c0da0983..5b4f4208 100644 --- a/tests/unittests/helpers.py +++ b/tests/unittests/helpers.py @@ -187,7 +187,8 @@ class FilesystemMockingTestCase(ResourceUsingTestCase): def populate_dir(path, files): - os.makedirs(path) + if not os.path.exists(path): + os.makedirs(path) for (name, content) in files.iteritems(): with open(os.path.join(path, name), "w") as fp: fp.write(content) -- cgit v1.2.3 From 50c744fc4ebc0de5fc7fdfee6a874cb9cc62bba8 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 27 Jan 2014 13:05:11 -0500 Subject: add 'pathprefix2dict' utility for use by DataSourceNoCloud --- cloudinit/util.py | 25 +++++++++++++++++++++ tests/unittests/test_pathprefix2dict.py | 40 +++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 tests/unittests/test_pathprefix2dict.py (limited to 'tests/unittests') diff --git a/cloudinit/util.py b/cloudinit/util.py index b3332acd..f36e2733 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1889,3 +1889,28 @@ def expand_dotted_devname(dotted): return toks else: return (dotted, None) + + +def pathprefix2dict(base, required=None, optional=None, delim=os.path.sep): + # return a dictionary populated with keys in 'required' and 'optional' + # by reading files in prefix + delim + entry + if required is None: + required = [] + if optional is None: + optional = [] + + missing = [] + ret = {} + for f in required + optional: + try: + ret[f] = load_file(base + delim + f, quiet=False) + except IOError as e: + if e.errno != errno.ENOENT: + raise + if f in required: + missing.append(f) + + if len(missing): + raise ValueError("Missing required files: %s", ','.join(missing)) + + return ret diff --git a/tests/unittests/test_pathprefix2dict.py b/tests/unittests/test_pathprefix2dict.py new file mode 100644 index 00000000..c68c263c --- /dev/null +++ b/tests/unittests/test_pathprefix2dict.py @@ -0,0 +1,40 @@ +from cloudinit import util + +from mocker import MockerTestCase +from tests.unittests.helpers import populate_dir + + +class TestPathPrefix2Dict(MockerTestCase): + + def setUp(self): + self.tmp = self.makeDir() + + def test_required_only(self): + dirdata = {'f1': 'f1content', 'f2': 'f2content'} + populate_dir(self.tmp, dirdata) + + ret = util.pathprefix2dict(self.tmp, required=['f1', 'f2']) + self.assertEqual(dirdata, ret) + + def test_required_missing(self): + dirdata = {'f1': 'f1content'} + populate_dir(self.tmp, dirdata) + kwargs = {'required': ['f1', 'f2']} + self.assertRaises(ValueError, util.pathprefix2dict, self.tmp, **kwargs) + + def test_no_required_and_optional(self): + dirdata 
= {'f1': 'f1c', 'f2': 'f2c'} + populate_dir(self.tmp, dirdata) + + ret = util.pathprefix2dict(self.tmp, required=None, + optional=['f1', 'f2']) + self.assertEqual(dirdata, ret) + + def test_required_and_optional(self): + dirdata = {'f1': 'f1c', 'f2': 'f2c'} + populate_dir(self.tmp, dirdata) + + ret = util.pathprefix2dict(self.tmp, required=['f1'], optional=['f2']) + self.assertEqual(dirdata, ret) + +# vi: ts=4 expandtab -- cgit v1.2.3 From 1ed5098069f4fd5c798914b942cea7b87bd1afe3 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 29 Jan 2014 14:29:45 -0500 Subject: add 2 tests for vendordata --- tests/unittests/test_datasource/test_nocloud.py | 35 +++++++++++++++++++++++++ 1 file changed, 35 insertions(+) (limited to 'tests/unittests') diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py index 7328b240..af575a10 100644 --- a/tests/unittests/test_datasource/test_nocloud.py +++ b/tests/unittests/test_datasource/test_nocloud.py @@ -97,6 +97,41 @@ class TestNoCloudDataSource(MockerTestCase): self.assertEqual(dsrc.metadata.get('instance-id'), 'IID') self.assertTrue(ret) + def test_nocloud_seed_with_vendordata(self): + md = {'instance-id': 'IID', 'dsmode': 'local'} + ud = "USER_DATA_HERE" + vd = "THIS IS MY VENDOR_DATA" + + populate_dir(os.path.join(self.paths.seed_dir, "nocloud"), + {'user-data': ud, 'meta-data': yaml.safe_dump(md), + 'vendor-data': vd}) + + sys_cfg = { + 'datasource': {'NoCloud': {'fs_label': None}} + } + + ds = DataSourceNoCloud.DataSourceNoCloud + + dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths) + ret = dsrc.get_data() + self.assertEqual(dsrc.userdata_raw, ud) + self.assertEqual(dsrc.metadata, md) + self.assertEqual(dsrc.vendordata, vd) + self.assertTrue(ret) + + def test_nocloud_no_vendordata(self): + populate_dir(os.path.join(self.paths.seed_dir, "nocloud"), + {'user-data': "ud", 'meta-data': "instance-id: IID\n"}) + + sys_cfg = {'datasource': {'NoCloud': {'fs_label': None}}} + + ds = DataSourceNoCloud.DataSourceNoCloud + + dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths) + ret = dsrc.get_data() + self.assertEqual(dsrc.userdata_raw, "ud") + self.assertFalse(dsrc.vendordata) + self.assertTrue(ret) class TestParseCommandLineData(MockerTestCase): -- cgit v1.2.3
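
As context for the two vendordata tests in the final patch: DataSourceNoCloud reads 'meta-data', 'user-data' and, optionally, 'vendor-data' from a seed directory, and leaves its vendordata attribute unset when the third file is absent. A minimal sketch of populating such a seed directory follows, mirroring the populate_dir() fixtures above; the /tmp path and the file payloads are illustrative assumptions only, not part of any patch here:

    # Sketch: write the three seed files the NoCloud tests look for.
    # The directory and payloads are made up for illustration.
    import os

    seed_dir = "/tmp/nocloud-seed/nocloud"    # assumed throwaway location
    if not os.path.exists(seed_dir):          # same guard populate_dir() now uses
        os.makedirs(seed_dir)

    seed_files = {
        "meta-data": "instance-id: IID\ndsmode: local\n",
        "user-data": "#cloud-config\nruncmd: ['true']\n",
        "vendor-data": "THIS IS MY VENDOR_DATA\n",
    }
    for name, content in seed_files.items():
        with open(os.path.join(seed_dir, name), "w") as fp:
            fp.write(content)

Dropping 'vendor-data' from that mapping reproduces the condition asserted by test_nocloud_no_vendordata.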