From 21ea6154ab2aafbe51c7b23fd56e43bd1cc26b00 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Jul 2013 01:35:49 -0400 Subject: add some unit tests, fix things found by doing so --- tests/unittests/test_datasource/test_azure.py | 168 ++++++++++++++++++++++++++ 1 file changed, 168 insertions(+) create mode 100644 tests/unittests/test_datasource/test_azure.py (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py new file mode 100644 index 00000000..179fb50a --- /dev/null +++ b/tests/unittests/test_datasource/test_azure.py @@ -0,0 +1,168 @@ +from cloudinit import helpers +from cloudinit.sources import DataSourceAzure +from tests.unittests.helpers import populate_dir + +import base64 +from mocker import MockerTestCase +import os +import yaml + + +def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None): + if data is None: + data = {'HostName': 'FOOHOST'} + if pubkeys is None: + pubkeys = {} + + content = """ + + + 1.0 + + LinuxProvisioningConfiguration + """ + for key, val in data.items(): + content += "<%s>%s\n" % (key, val, key) + + if userdata: + content += "%s\n" % (base64.b64encode(userdata)) + + if pubkeys: + content += "\n" + for fp, path in pubkeys.items(): + content += " " + content += ("%s%s" % + (fp, path)) + content += " " + content += """ + + + 1.0 + + kms.core.windows.net + false + + + + """ + + return content + + +class TestAzureDataSource(MockerTestCase): + + def setUp(self): + # makeDir comes from MockerTestCase + self.tmp = self.makeDir() + + # patch cloud_dir, so our 'seed_dir' is guaranteed empty + self.paths = helpers.Paths({'cloud_dir': self.tmp}) + + self.unapply = [] + super(TestAzureDataSource, self).setUp() + + def tearDown(self): + apply_patches([i for i in reversed(self.unapply)]) + super(TestAzureDataSource, self).tearDown() + + def apply_patches(self, patches): + ret = apply_patches(patches) + self.unapply += ret + + def _get_ds(self, data): + + def dsdevs(): + return data.get('dsdevs', []) + + def invoker(cmd): + data['agent_invoked'] = cmd + + if data.get('ovfcontent') is not None: + populate_dir(os.path.join(self.paths.seed_dir, "azure"), + {'ovf-env.xml': data['ovfcontent']}) + + mod = DataSourceAzure + + if data.get('dsdevs'): + self.apply_patches([(mod, 'list_possible_azure_ds_devs', dsdevs)]) + + self.apply_patches([(mod, 'invoke_agent', invoker)]) + + dsrc = mod.DataSourceAzureNet( + data.get('sys_cfg', {}), distro=None, paths=self.paths) + + return dsrc + + def test_basic_seed_dir(self): + odata = {'HostName': "myhost", 'UserName': "myuser"} + data = {'ovfcontent': construct_valid_ovf_env(data=odata), + 'sys_cfg': {}} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEqual(dsrc.userdata_raw, "") + self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) + + def test_user_cfg_set_agent_command(self): + cfg = {'agent_command': "my_command"} + odata = {'HostName': "myhost", 'UserName': "myuser", + 'dscfg': yaml.dump(cfg)} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEqual(data['agent_invoked'], cfg['agent_command']) + + def test_sys_cfg_set_agent_command(self): + sys_cfg = {'datasource': {'Azure': {'agent_command': '_COMMAND'}}} + data = {'ovfcontent': construct_valid_ovf_env(data={}), + 'sys_cfg': sys_cfg} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + 
self.assertTrue(ret) + self.assertEqual(data['agent_invoked'], '_COMMAND') + + def test_userdata_found(self): + mydata = "FOOBAR" + odata = {'UserData': base64.b64encode(mydata)} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEqual(dsrc.userdata_raw, mydata) + + def test_no_datasource_expected(self): + #no source should be found if no seed_dir and no devs + data = {} + dsrc = self._get_ds({}) + ret = dsrc.get_data() + self.assertFalse(ret) + self.assertFalse('agent_invoked' in data) + + +class TestReadAzureOvf(MockerTestCase): + def test_invalid_xml_raises_non_azure_ds(self): + invalid_xml = "" + construct_valid_ovf_env(data={}) + self.assertRaises(DataSourceAzure.NonAzureDataSource, + DataSourceAzure.read_azure_ovf, invalid_xml) + + +def apply_patches(patches): + ret = [] + for (ref, name, replace) in patches: + if replace is None: + continue + orig = getattr(ref, name) + setattr(ref, name, replace) + ret.append((ref, name, orig)) + return ret -- cgit v1.2.3 From 6bea1cb867c13e05e3548c648d5f051d2c49f07b Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Jul 2013 14:41:55 -0400 Subject: better handling for user/password --- cloudinit/sources/DataSourceAzure.py | 21 ++++++++++++++++++++- tests/unittests/test_datasource/test_azure.py | 27 +++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 143b7e4a..5037c1a3 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -160,6 +160,8 @@ def read_azure_ovf(contents): md = {'azure_data': {}} cfg = {} ud = "" + password = None + username = None for child in lpcs.childNodes: if child.nodeType == dom.TEXT_NODE or not child.localName: @@ -176,19 +178,36 @@ def read_azure_ovf(contents): if name == "userdata": ud = base64.b64decode(''.join(value.split())) elif name == "username": - cfg['system_info'] = {'default_user': {'name': value}} + username = value + elif name == "userpassword": + password = value elif name == "hostname": md['local-hostname'] = value elif name == "dscfg": cfg['datasource'] = {DS_NAME: util.load_yaml(value, default={})} elif name == "ssh": cfg['_pubkeys'] = loadAzurePubkeys(child) + elif name == "disablesshpasswordauthentication": + cfg['ssh_pwauth'] = util.is_true(value) elif simple: if name in md_props: md[name] = value else: md['azure_data'][name] = value + defuser = {} + if username: + defuser['name'] = username + if password: + defuser['password'] = password + defuser['lock_passwd'] = False + + if defuser: + cfg['system_info'] = {'default_user': defuser} + + if 'ssh_pwauth' not in cfg and password: + cfg['ssh_pwauth'] = True + return (md, ud, cfg) diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 179fb50a..a2347f1b 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -131,6 +131,33 @@ class TestAzureDataSource(MockerTestCase): self.assertTrue(ret) self.assertEqual(data['agent_invoked'], '_COMMAND') + def test_username_used(self): + odata = {'HostName': "myhost", 'UserName': "myuser"} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + 
self.assertEqual(dsrc.cfg['system_info']['default_user']['name'], + "myuser") + + def test_password_given(self): + odata = {'HostName': "myhost", 'UserName': "myuser", + 'UserPassword': "mypass"} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertTrue('default_user' in dsrc.cfg['system_info']) + defuser = dsrc.cfg['system_info']['default_user'] + + # default user shoudl be updated for password and username + # and should not be locked. + self.assertEqual(defuser['name'], odata['UserName']) + self.assertEqual(defuser['password'], odata['UserPassword']) + self.assertFalse(defuser['lock_passwd']) + def test_userdata_found(self): mydata = "FOOBAR" odata = {'UserData': base64.b64encode(mydata)} -- cgit v1.2.3 From cf1b10900626dfa6194c77b6720291e7edbaf9f6 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Jul 2013 15:07:38 -0400 Subject: populate /var/lib/waagent with ovf-env.xml this will copy the ovf-env.xml file that was found to the configured directory (default /var/lib/waagent) --- cloudinit/sources/DataSourceAzure.py | 43 +++++++++++++++++++-------- tests/unittests/test_datasource/test_azure.py | 8 +++++ 2 files changed, 39 insertions(+), 12 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 5037c1a3..f1c7c771 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -30,7 +30,9 @@ LOG = logging.getLogger(__name__) DS_NAME = 'Azure' DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"} AGENT_START = ['service', 'walinuxagent', 'start'] -BUILTIN_DS_CONFIG = {'datasource': {DS_NAME: {'agent_command': AGENT_START}}} +BUILTIN_DS_CONFIG = {'datasource': {DS_NAME: { + 'agent_command': AGENT_START, + 'data_dir': "/var/lib/waagent"}}} class DataSourceAzureNet(sources.DataSource): @@ -64,7 +66,7 @@ class DataSourceAzureNet(sources.DataSource): LOG.warn("%s was not mountable" % cdev) continue - (md, self.userdata_raw, cfg) = ret + (md, self.userdata_raw, cfg, files) = ret self.seed = cdev self.metadata = util.mergemanydict([md, DEFAULT_METADATA]) self.cfg = cfg @@ -76,18 +78,24 @@ class DataSourceAzureNet(sources.DataSource): if not found: return False - path = ['datasource', DS_NAME, 'agent_command'] - cmd = None + fields = [('cmd', ['datasource', DS_NAME, 'agent_command']), + ('datadir', ['datasource', DS_NAME, 'data_dir'])] + mycfg = {} for cfg in (self.cfg, self.sys_cfg, BUILTIN_DS_CONFIG): - cmd = util.get_cfg_by_path(cfg, keyp=path) - if cmd is not None: - break + for name, path in fields: + if name in mycfg: + continue + value = util.get_cfg_by_path(cfg, keyp=path) + if value is not None: + mycfg[name] = value + + write_files(mycfg['datadir'], files) try: - invoke_agent(cmd) + invoke_agent(mycfg['cmd']) except util.ProcessExecutionError: # claim the datasource even if the command failed - util.logexc(LOG, "agent command '%s' failed.", cmd) + util.logexc(LOG, "agent command '%s' failed.", mycfg['cmd']) return True @@ -95,6 +103,16 @@ class DataSourceAzureNet(sources.DataSource): return self.cfg +def write_files(datadir, files): + if not datadir: + return + if not files: + files = {} + for (name, content) in files.items(): + util.write_file(filename=os.path.join(datadir, name), + content=content, mode=0600) + + def invoke_agent(cmd): # this is a function itself to simplify patching it for test if cmd: @@ -114,7 +132,7 @@ def 
find_child(node, filter_func): return ret -def load_azure_ovf_pubkeys(sshnode): +def load_azure_ovf_pubkeys(_sshnode): # in the future this would return a list of dicts like: # [{'fp': '6BE7A7C3C8A8F4B123CCA5D0C2F1BE4CA7B63ED7', # 'path': 'where/to/go'}] @@ -186,7 +204,7 @@ def read_azure_ovf(contents): elif name == "dscfg": cfg['datasource'] = {DS_NAME: util.load_yaml(value, default={})} elif name == "ssh": - cfg['_pubkeys'] = loadAzurePubkeys(child) + cfg['_pubkeys'] = load_azure_ovf_pubkeys(child) elif name == "disablesshpasswordauthentication": cfg['ssh_pwauth'] = util.is_true(value) elif simple: @@ -230,7 +248,8 @@ def load_azure_ds_dir(source_dir): with open(ovf_file, "r") as fp: contents = fp.read() - return read_azure_ovf(contents) + md, ud, cfg = read_azure_ovf(contents) + return (md, ud, cfg, {'ovf-env.xml': contents}) class BrokenAzureDataSource(Exception): diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index a2347f1b..68f4bcca 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -83,6 +83,12 @@ class TestAzureDataSource(MockerTestCase): def invoker(cmd): data['agent_invoked'] = cmd + def file_writer(datadir, files): + data['files'] = {} + data['datadir'] = datadir + for (fname, content) in files.items(): + data['files'][fname] = content + if data.get('ovfcontent') is not None: populate_dir(os.path.join(self.paths.seed_dir, "azure"), {'ovf-env.xml': data['ovfcontent']}) @@ -93,6 +99,7 @@ class TestAzureDataSource(MockerTestCase): self.apply_patches([(mod, 'list_possible_azure_ds_devs', dsdevs)]) self.apply_patches([(mod, 'invoke_agent', invoker)]) + self.apply_patches([(mod, 'write_files', file_writer)]) dsrc = mod.DataSourceAzureNet( data.get('sys_cfg', {}), distro=None, paths=self.paths) @@ -109,6 +116,7 @@ class TestAzureDataSource(MockerTestCase): self.assertTrue(ret) self.assertEqual(dsrc.userdata_raw, "") self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) + self.assertTrue('ovf-env.xml' in data['files']) def test_user_cfg_set_agent_command(self): cfg = {'agent_command': "my_command"} -- cgit v1.2.3 From 950762bb008d25f529c71aae4c0b04f6b0134abb Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Jul 2013 20:20:55 -0400 Subject: fill out load_azure_ovf_pubkeys now if there are pubkeys, the cfg['_pubkeys'] entry will have a list of dicts where each dict has 'fingerprint' and 'path' entries. The next thing to do is to block waiting for the .crt files to appear in /var/lib/waagent. --- cloudinit/sources/DataSourceAzure.py | 40 +++++++++++++++++++++++++-- tests/unittests/test_datasource/test_azure.py | 13 +++++++-- 2 files changed, 49 insertions(+), 4 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index d8e39392..43a963ad 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -146,7 +146,7 @@ def find_child(node, filter_func): return ret -def load_azure_ovf_pubkeys(_sshnode): +def load_azure_ovf_pubkeys(sshnode): # in the future this would return a list of dicts like: # [{'fp': '6BE7A7C3C8A8F4B123CCA5D0C2F1BE4CA7B63ED7', # 'path': 'where/to/go'}] @@ -155,7 +155,43 @@ def load_azure_ovf_pubkeys(_sshnode): # ABC/ABC # ... 
# - return [] + results = find_child(sshnode, lambda n: n.localName == "PublicKeys") + if len(results) == 0: + return [] + if len(results) > 1: + raise BrokenAzureDataSource("Multiple 'PublicKeys'(%s) in SSH node" % + len(results)) + + pubkeys_node = results[0] + pubkeys = find_child(pubkeys_node, lambda n: n.localName == "PublicKey") + + if len(pubkeys) == 0: + return [] + + found = [] + text_node = minidom.Document.TEXT_NODE + + for pk_node in pubkeys: + if not pk_node.hasChildNodes(): + continue + cur = {'fingerprint': "", 'path': ""} + for child in pk_node.childNodes: + if (child.nodeType == text_node or not child.localName): + continue + + name = child.localName.lower() + + if name not in cur.keys(): + continue + + if (len(child.childNodes) != 1 or + child.childNodes[0].nodeType != text_node): + continue + + cur[name] = child.childNodes[0].wholeText.strip() + found.append(cur) + + return found def read_azure_ovf(contents): diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 68f4bcca..be6fab70 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -34,11 +34,12 @@ def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None): if pubkeys: content += "\n" - for fp, path in pubkeys.items(): + for fp, path in pubkeys: content += " " content += ("%s%s" % (fp, path)) - content += " " + content += "\n" + content += "" content += """ @@ -191,6 +192,14 @@ class TestReadAzureOvf(MockerTestCase): self.assertRaises(DataSourceAzure.NonAzureDataSource, DataSourceAzure.read_azure_ovf, invalid_xml) + def test_load_with_pubkeys(self): + mypklist = [{'fingerprint': 'fp1', 'path': 'path1'}] + pubkeys = [(x['fingerprint'], x['path']) for x in mypklist] + content = construct_valid_ovf_env(pubkeys=pubkeys) + (md, ud, cfg) = DataSourceAzure.read_azure_ovf(content) + for mypk in mypklist: + self.assertIn(mypk, cfg['_pubkeys']) + def apply_patches(patches): ret = [] -- cgit v1.2.3 From ec22feeefe309187107e0fb5471136f1c8a646c9 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Jul 2013 20:36:28 -0400 Subject: build up the 'wait_for' list including fingerprint.crt files --- cloudinit/sources/DataSourceAzure.py | 6 ++++++ tests/unittests/test_datasource/test_azure.py | 13 +++++++++++++ 2 files changed, 19 insertions(+) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 43a963ad..ab570344 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -111,6 +111,12 @@ class DataSourceAzureNet(sources.DataSource): # claim the datasource even if the command failed util.logexc(LOG, "agent command '%s' failed.", mycfg['cmd']) + wait_for = [os.path.join(mycfg['datadir'], "SharedConfig.xml")] + + for pk in self.cfg.get('_pubkeys', []): + bname = pk['fingerprint'] + ".crt" + wait_for += [os.path.join(mycfg['datadir'], bname)] + return True def get_config_obj(self): diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index be6fab70..a7094ec6 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -185,6 +185,19 @@ class TestAzureDataSource(MockerTestCase): self.assertFalse(ret) self.assertFalse('agent_invoked' in data) + def test_cfg_has_pubkeys(self): + odata = {'HostName': "myhost", 'UserName': "myuser"} + mypklist = [{'fingerprint': 'fp1', 
'path': 'path1'}] + pubkeys = [(x['fingerprint'], x['path']) for x in mypklist] + data = {'ovfcontent': construct_valid_ovf_env(data=odata, + pubkeys=pubkeys)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + for mypk in mypklist: + self.assertIn(mypk, dsrc.cfg['_pubkeys']) + class TestReadAzureOvf(MockerTestCase): def test_invalid_xml_raises_non_azure_ds(self): -- cgit v1.2.3 From ce949d5b4c94caf9c1df6393abe86de2872e05ae Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Jul 2013 13:08:23 -0400 Subject: add waiting for files and reading of crt keys --- cloudinit/sources/DataSourceAzure.py | 46 ++++++++++++++++++++++++++- packages/debian/changelog.in | 2 +- tests/unittests/test_datasource/test_azure.py | 21 +++++++++--- 3 files changed, 62 insertions(+), 7 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index ab570344..200bede5 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -19,6 +19,7 @@ import base64 import os import os.path +import time from xml.dom import minidom from cloudinit import log as logging @@ -113,9 +114,18 @@ class DataSourceAzureNet(sources.DataSource): wait_for = [os.path.join(mycfg['datadir'], "SharedConfig.xml")] + fp_files = [] for pk in self.cfg.get('_pubkeys', []): bname = pk['fingerprint'] + ".crt" - wait_for += [os.path.join(mycfg['datadir'], bname)] + fp_files += [os.path.join(mycfg['datadir'], bname)] + + missing = wait_for_files(wait_for + fp_files) + if len(missing): + LOG.warn("Did not find files, but going on: %s" % missing) + + pubkeys = pubkeys_from_crt_files(fp_files) + + self.metadata['public-keys'] = pubkeys return True @@ -123,6 +133,40 @@ class DataSourceAzureNet(sources.DataSource): return self.cfg +def crtfile_to_pubkey(fname): + pipeline = ('openssl x509 -noout -pubkey < "$0" |' + 'ssh-keygen -i -m PKCS8 -f /dev/stdin') + (out, _err) = util.subp(['sh', '-c', pipeline, fname], capture=True) + return out.rstrip() + + +def pubkeys_from_crt_files(flist): + pubkeys = [] + errors = [] + for fname in flist: + try: + pubkeys.append(crtfile_to_pubkey(fname)) + except util.ProcessExecutionError: + errors.extend(fname) + + if errors: + LOG.warn("failed to convert the crt files to pubkey: %s" % errors) + + return pubkeys + + +def wait_for_files(flist, maxwait=60, naplen=.5): + need = set(flist) + waited = 0 + while waited < maxwait: + need -= set([f for f in need if os.path.exists(f)]) + if len(need) == 0: + return [] + time.sleep(naplen) + waited += naplen + return need + + def write_files(datadir, files): if not datadir: return diff --git a/packages/debian/changelog.in b/packages/debian/changelog.in index e3e94f54..4944230b 100644 --- a/packages/debian/changelog.in +++ b/packages/debian/changelog.in @@ -1,5 +1,5 @@ ## This is a cheetah template -cloud-init (${version}~bzr${revision}-1) UNRELEASED; urgency=low +cloud-init (${version}~bzr${revision}-1) raring; urgency=low * build diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index a7094ec6..74ed7197 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -81,15 +81,23 @@ class TestAzureDataSource(MockerTestCase): def dsdevs(): return data.get('dsdevs', []) - def invoker(cmd): + def _invoke_agent(cmd): data['agent_invoked'] = cmd - def file_writer(datadir, files): + def _write_files(datadir, files): 
data['files'] = {} data['datadir'] = datadir for (fname, content) in files.items(): data['files'][fname] = content + def _wait_for_files(flist, _maxwait=None, _naplen=None): + data['waited'] = flist + return [] + + def _pubkeys_from_crt_files(flist): + data['pubkey_files'] = flist + return ["pubkey_from: %s" % f for f in flist] + if data.get('ovfcontent') is not None: populate_dir(os.path.join(self.paths.seed_dir, "azure"), {'ovf-env.xml': data['ovfcontent']}) @@ -99,8 +107,11 @@ class TestAzureDataSource(MockerTestCase): if data.get('dsdevs'): self.apply_patches([(mod, 'list_possible_azure_ds_devs', dsdevs)]) - self.apply_patches([(mod, 'invoke_agent', invoker)]) - self.apply_patches([(mod, 'write_files', file_writer)]) + self.apply_patches([(mod, 'invoke_agent', _invoke_agent), + (mod, 'write_files', _write_files), + (mod, 'wait_for_files', _wait_for_files), + (mod, 'pubkeys_from_crt_files', + _pubkeys_from_crt_files)]) dsrc = mod.DataSourceAzureNet( data.get('sys_cfg', {}), distro=None, paths=self.paths) @@ -209,7 +220,7 @@ class TestReadAzureOvf(MockerTestCase): mypklist = [{'fingerprint': 'fp1', 'path': 'path1'}] pubkeys = [(x['fingerprint'], x['path']) for x in mypklist] content = construct_valid_ovf_env(pubkeys=pubkeys) - (md, ud, cfg) = DataSourceAzure.read_azure_ovf(content) + (_md, _ud, cfg) = DataSourceAzure.read_azure_ovf(content) for mypk in mypklist: self.assertIn(mypk, cfg['_pubkeys']) -- cgit v1.2.3 From 8f70bb7e7144f2225b4e9a589d16ae6d15992a3d Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 17 Jul 2013 13:36:32 -0400 Subject: Azure: make /var/lib/waagent with 0700 perms The walinux agent expects that the files it writes with 0644 (default umask) permissions are not globally readable. Since we were creating the directory for it, and using default umaks (0755), the files inside were readable to non-priviledged users. --- cloudinit/sources/DataSourceAzure.py | 7 +++++-- tests/unittests/test_datasource/test_azure.py | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index f1419296..c90d7b07 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -104,7 +104,9 @@ class DataSourceAzureNet(sources.DataSource): if value is not None: mycfg[name] = value - write_files(mycfg['datadir'], files) + # walinux agent writes files world readable, but expects + # the directory to be protected. 
+ write_files(mycfg['datadir'], files, dirmode=0700) try: invoke_agent(mycfg['cmd']) @@ -171,11 +173,12 @@ def wait_for_files(flist, maxwait=60, naplen=.5): return need -def write_files(datadir, files): +def write_files(datadir, files, dirmode=None): if not datadir: return if not files: files = {} + util.ensure_dir(datadir, dirmode) for (name, content) in files.items(): util.write_file(filename=os.path.join(datadir, name), content=content, mode=0600) diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 74ed7197..c79c25d8 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -84,9 +84,10 @@ class TestAzureDataSource(MockerTestCase): def _invoke_agent(cmd): data['agent_invoked'] = cmd - def _write_files(datadir, files): + def _write_files(datadir, files, dirmode): data['files'] = {} data['datadir'] = datadir + data['datadir_mode'] = dirmode for (fname, content) in files.items(): data['files'][fname] = content @@ -129,6 +130,7 @@ class TestAzureDataSource(MockerTestCase): self.assertEqual(dsrc.userdata_raw, "") self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) self.assertTrue('ovf-env.xml' in data['files']) + self.assertEqual(0700, data['datadir_mode']) def test_user_cfg_set_agent_command(self): cfg = {'agent_command': "my_command"} -- cgit v1.2.3 From a4310ee3db0b394dcebd4f6b49d3b25bba37fedf Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 23 Jul 2013 15:17:50 -0400 Subject: on azure datasource, grab use Deployment as the instance-id LP: #1204190 --- cloudinit/sources/DataSourceAzure.py | 44 ++++++++++++++++++++++++++- tests/unittests/test_datasource/test_azure.py | 23 +++++++++++++- 2 files changed, 65 insertions(+), 2 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index c90d7b07..0a5caebe 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -114,7 +114,8 @@ class DataSourceAzureNet(sources.DataSource): # claim the datasource even if the command failed util.logexc(LOG, "agent command '%s' failed.", mycfg['cmd']) - wait_for = [os.path.join(mycfg['datadir'], "SharedConfig.xml")] + shcfgxml = os.path.join(mycfg['datadir'], "SharedConfig.xml") + wait_for = [shcfgxml] fp_files = [] for pk in self.cfg.get('_pubkeys', []): @@ -129,6 +130,14 @@ class DataSourceAzureNet(sources.DataSource): LOG.debug("waited %.3f seconds for %d files to appear", time.time() - start, len(wait_for)) + if shcfgxml in missing: + LOG.warn("SharedConfig.xml missing, using static instance-id") + else: + try: + self.metadata['instance-id'] = iid_from_shared_config(shcfgxml) + except ValueError as e: + LOG.warn("failed to get instance id in %s: %s" % (shcfgxml, e)) + pubkeys = pubkeys_from_crt_files(fp_files) self.metadata['public-keys'] = pubkeys @@ -252,6 +261,20 @@ def load_azure_ovf_pubkeys(sshnode): return found +def single_node_at_path(node, pathlist): + curnode = node + for tok in pathlist: + results = find_child(curnode, lambda n: n.localName == tok) + if len(results) == 0: + raise ValueError("missing %s token in %s" % (tok, str(pathlist))) + if len(results) > 1: + raise ValueError("found %s nodes of type %s looking for %s" % + (len(results), tok, str(pathlist))) + curnode = results[0] + + return curnode + + def read_azure_ovf(contents): try: dom = minidom.parseString(contents) @@ -362,6 +385,25 @@ def 
load_azure_ds_dir(source_dir): return (md, ud, cfg, {'ovf-env.xml': contents}) +def iid_from_shared_config(path): + with open(path, "rb") as fp: + content = fp.read() + return iid_from_shared_config_content(content) + + +def iid_from_shared_config_content(content): + """ + find INSTANCE_ID in: + + + + + """ + dom = minidom.parseString(content) + depnode = single_node_at_path(dom, ["SharedConfig", "Deployment"]) + return depnode.attributes.get('name').value + + class BrokenAzureDataSource(Exception): pass diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index c79c25d8..2e8583f9 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -99,6 +99,10 @@ class TestAzureDataSource(MockerTestCase): data['pubkey_files'] = flist return ["pubkey_from: %s" % f for f in flist] + def _iid_from_shared_config(path): + data['iid_from_shared_cfg'] = path + return 'i-my-azure-id' + if data.get('ovfcontent') is not None: populate_dir(os.path.join(self.paths.seed_dir, "azure"), {'ovf-env.xml': data['ovfcontent']}) @@ -112,7 +116,9 @@ class TestAzureDataSource(MockerTestCase): (mod, 'write_files', _write_files), (mod, 'wait_for_files', _wait_for_files), (mod, 'pubkeys_from_crt_files', - _pubkeys_from_crt_files)]) + _pubkeys_from_crt_files), + (mod, 'iid_from_shared_config', + _iid_from_shared_config), ]) dsrc = mod.DataSourceAzureNet( data.get('sys_cfg', {}), distro=None, paths=self.paths) @@ -131,6 +137,7 @@ class TestAzureDataSource(MockerTestCase): self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) self.assertTrue('ovf-env.xml' in data['files']) self.assertEqual(0700, data['datadir_mode']) + self.assertEqual(dsrc.metadata['instance-id'], 'i-my-azure-id') def test_user_cfg_set_agent_command(self): cfg = {'agent_command': "my_command"} @@ -227,6 +234,20 @@ class TestReadAzureOvf(MockerTestCase): self.assertIn(mypk, cfg['_pubkeys']) +class TestReadAzureSharedConfig(MockerTestCase): + def test_valid_content(self): + xml = """ + + + + + + + """ + ret = DataSourceAzure.iid_from_shared_config_content(xml) + self.assertEqual("MY_INSTANCE_ID", ret) + + def apply_patches(patches): ret = [] for (ref, name, replace) in patches: -- cgit v1.2.3 From 3d10b8d080a874be022f9e25063ba77f0293c5e8 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 25 Jul 2013 14:37:10 -0400 Subject: azure: support bouncing interfaces to publish new hostname See the added doc/sources/azure/README.rst for why this is necessary. 
Essentially, we now are doing the following in the get_data() method of azure datasource to publish this NewHostname: hostname NewHostName ifdown eth0; ifup eth0 LP: #1202758 --- cloudinit/sources/DataSourceAzure.py | 114 +++++++++++++++++----- doc/examples/cloud-config-datasources.txt | 5 + doc/sources/azure/README.rst | 134 ++++++++++++++++++++++++++ tests/unittests/test_datasource/test_azure.py | 77 ++++++++++++++- 4 files changed, 300 insertions(+), 30 deletions(-) create mode 100644 doc/sources/azure/README.rst (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 0a5caebe..30b06fef 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -31,9 +31,20 @@ LOG = logging.getLogger(__name__) DS_NAME = 'Azure' DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"} AGENT_START = ['service', 'walinuxagent', 'start'] -BUILTIN_DS_CONFIG = {'datasource': {DS_NAME: { - 'agent_command': AGENT_START, - 'data_dir': "/var/lib/waagent"}}} +BOUNCE_COMMAND = ("i=$interface; x=0; ifdown $i || x=$?; " + "ifup $i || x=$?; exit $x") +BUILTIN_DS_CONFIG = { + 'agent_command': AGENT_START, + 'data_dir': "/var/lib/waagent", + 'set_hostname': True, + 'hostname_bounce': { + 'interface': 'eth0', + 'policy': True, + 'command': BOUNCE_COMMAND, + 'hostname_command': 'hostname', + } +} +DS_CFG_PATH = ['datasource', DS_NAME] class DataSourceAzureNet(sources.DataSource): @@ -42,19 +53,19 @@ class DataSourceAzureNet(sources.DataSource): self.seed_dir = os.path.join(paths.seed_dir, 'azure') self.cfg = {} self.seed = None + self.ds_cfg = util.mergemanydict([ + util.get_cfg_by_path(sys_cfg, DS_CFG_PATH), + BUILTIN_DS_CONFIG]) def __str__(self): root = sources.DataSource.__str__(self) return "%s [seed=%s]" % (root, self.seed) def get_data(self): - ddir_cfgpath = ['datasource', DS_NAME, 'data_dir'] # azure removes/ejects the cdrom containing the ovf-env.xml # file on reboot. So, in order to successfully reboot we # need to look in the datadir and consider that valid - ddir = util.get_cfg_by_path(self.sys_cfg, ddir_cfgpath) - if ddir is None: - ddir = util.get_cfg_by_path(BUILTIN_DS_CONFIG, ddir_cfgpath) + ddir = self.ds_cfg['data_dir'] candidates = [self.seed_dir] candidates.extend(list_possible_azure_ds_devs()) @@ -91,36 +102,40 @@ class DataSourceAzureNet(sources.DataSource): return False if found == ddir: - LOG.debug("using cached datasource in %s", ddir) - - fields = [('cmd', ['datasource', DS_NAME, 'agent_command']), - ('datadir', ddir_cfgpath)] - mycfg = {} - for cfg in (self.cfg, self.sys_cfg, BUILTIN_DS_CONFIG): - for name, path in fields: - if name in mycfg: - continue - value = util.get_cfg_by_path(cfg, keyp=path) - if value is not None: - mycfg[name] = value + LOG.debug("using files cached in %s", ddir) + + # now update ds_cfg to reflect contents pass in config + usercfg = util.get_cfg_by_path(self.cfg, DS_CFG_PATH, {}) + self.ds_cfg = util.mergemanydict([usercfg, self.ds_cfg]) + mycfg = self.ds_cfg # walinux agent writes files world readable, but expects # the directory to be protected. 
- write_files(mycfg['datadir'], files, dirmode=0700) + write_files(mycfg['data_dir'], files, dirmode=0700) + + # handle the hostname 'publishing' + try: + handle_set_hostname(mycfg.get('set_hostname'), + self.metadata.get('local-hostname'), + mycfg['hostname_bounce']) + except Exception as e: + LOG.warn("Failed publishing hostname: %s" % e) + util.logexc(LOG, "handling set_hostname failed") try: - invoke_agent(mycfg['cmd']) + invoke_agent(mycfg['agent_command']) except util.ProcessExecutionError: # claim the datasource even if the command failed - util.logexc(LOG, "agent command '%s' failed.", mycfg['cmd']) + util.logexc(LOG, "agent command '%s' failed.", + mycfg['agent_command']) - shcfgxml = os.path.join(mycfg['datadir'], "SharedConfig.xml") + shcfgxml = os.path.join(mycfg['data_dir'], "SharedConfig.xml") wait_for = [shcfgxml] fp_files = [] for pk in self.cfg.get('_pubkeys', []): bname = pk['fingerprint'] + ".crt" - fp_files += [os.path.join(mycfg['datadir'], bname)] + fp_files += [os.path.join(mycfg['data_dir'], bname)] start = time.time() missing = wait_for_files(wait_for + fp_files) @@ -148,6 +163,43 @@ class DataSourceAzureNet(sources.DataSource): return self.cfg +def handle_set_hostname(enabled, hostname, cfg): + if not util.is_true(enabled): + return + + if not hostname: + LOG.warn("set_hostname was true but no local-hostname") + return + + apply_hostname_bounce(hostname=hostname, policy=cfg['policy'], + interface=cfg['interface'], + command=cfg['command'], + hostname_command=cfg['hostname_command']) + + +def apply_hostname_bounce(hostname, policy, interface, command, + hostname_command="hostname"): + # set the hostname to 'hostname' if it is not already set to that. + # then, if policy is not off, bounce the interface using command + prev_hostname = util.subp(hostname_command, capture=True)[0].strip() + + util.subp([hostname_command, hostname]) + + if util.is_false(policy): + return + + if prev_hostname == hostname and policy != "force": + return + + env = os.environ.copy() + env['interface'] = interface + + if command == "builtin": + command = BOUNCE_COMMAND + + util.subp(command, shell=(not isinstance(command, list)), capture=True) + + def crtfile_to_pubkey(fname): pipeline = ('openssl x509 -noout -pubkey < "$0" |' 'ssh-keygen -i -m PKCS8 -f /dev/stdin') @@ -319,15 +371,21 @@ def read_azure_ovf(contents): name = child.localName.lower() simple = False + value = "" if (len(child.childNodes) == 1 and child.childNodes[0].nodeType == dom.TEXT_NODE): simple = True value = child.childNodes[0].wholeText + attrs = {k: v for k, v in child.attributes.items()} + # we accept either UserData or CustomData. If both are present # then behavior is undefined. 
if (name == "userdata" or name == "customdata"): - ud = base64.b64decode(''.join(value.split())) + if attrs.get('encoding') in (None, "base64"): + ud = base64.b64decode(''.join(value.split())) + else: + ud = value elif name == "username": username = value elif name == "userpassword": @@ -335,7 +393,11 @@ def read_azure_ovf(contents): elif name == "hostname": md['local-hostname'] = value elif name == "dscfg": - cfg['datasource'] = {DS_NAME: util.load_yaml(value, default={})} + if attrs.get('encoding') in (None, "base64"): + dscfg = base64.b64decode(''.join(value.split())) + else: + dscfg = value + cfg['datasource'] = {DS_NAME: util.load_yaml(dscfg, default={})} elif name == "ssh": cfg['_pubkeys'] = load_azure_ovf_pubkeys(child) elif name == "disablesshpasswordauthentication": diff --git a/doc/examples/cloud-config-datasources.txt b/doc/examples/cloud-config-datasources.txt index a19353fc..6544448e 100644 --- a/doc/examples/cloud-config-datasources.txt +++ b/doc/examples/cloud-config-datasources.txt @@ -45,6 +45,11 @@ datasource: Azure: agent_command: [service, walinuxagent, start] + set_hostname: True + hostname_bounce: + interface: eth0 + policy: on # [can be 'on', 'off' or 'force'] + } SmartOS: # Smart OS datasource works over a serial console interacting with diff --git a/doc/sources/azure/README.rst b/doc/sources/azure/README.rst new file mode 100644 index 00000000..8239d1fa --- /dev/null +++ b/doc/sources/azure/README.rst @@ -0,0 +1,134 @@ +================ +Azure Datasource +================ + +This datasource finds metadata and user-data from the Azure cloud platform. + +Azure Platform +-------------- +The azure cloud-platform provides initial data to an instance via an attached +CD formated in UDF. That CD contains a 'ovf-env.xml' file that provides some +information. Additional information is obtained via interaction with the +"endpoint". The ip address of the endpoint is advertised to the instance +inside of dhcp option 245. On ubuntu, that can be seen in +/var/lib/dhcp/dhclient.eth0.leases as a colon delimited hex value (example: +``option unknown-245 64:41:60:82;`` is 100.65.96.130) + +walinuxagent +------------ +In order to operate correctly, cloud-init needs walinuxagent to provide much +of the interaction with azure. In addition to "provisioning" code, walinux +does the following on the agent is a long running daemon that handles the +following things: +- generate a x509 certificate and send that to the endpoint + +waagent.conf config +~~~~~~~~~~~~~~~~~~~ +in order to use waagent.conf with cloud-init, the following settings are recommended. Other values can be changed or set to the defaults. + + :: + + # disabling provisioning turns off all 'Provisioning.*' function + Provisioning.Enabled=n + # this is currently not handled by cloud-init, so let walinuxagent do it. + ResourceDisk.Format=y + ResourceDisk.MountPoint=/mnt + + +Userdata +-------- +Userdata is provided to cloud-init inside the ovf-env.xml file. Cloud-init +expects that user-data will be provided as base64 encoded value inside the +text child of a element named ``UserData`` or ``CustomData`` which is a direct +child of the ``LinuxProvisioningConfigurationSet`` (a sibling to ``UserName``) +If both ``UserData`` and ``CustomData`` are provided behavior is undefined on +which will be selected. + +In the example below, user-data provided is 'this is my userdata', and the +datasource config provided is ``{"agent_command": ["start", "walinuxagent"]}``. 
+That agent command will take affect as if it were specified in system config. + +Example: + +.. code:: + + + 1.0 + + LinuxProvisioningConfiguration + myHost + myuser + + dGhpcyBpcyBteSB1c2VyZGF0YQ=== + eyJhZ2VudF9jb21tYW5kIjogWyJzdGFydCIsICJ3YWxpbnV4YWdlbnQiXX0= + true + + + + 6BE7A7C3C8A8F4B123CCA5D0C2F1BE4CA7B63ED7 + this-value-unused + + + + + + +Configuration +------------- +Configuration for the datasource can be read from the system config's or set +via the `dscfg` entry in the `LinuxProvisioningConfigurationSet`. Content in +dscfg node is expected to be base64 encoded yaml content, and it will be +merged into the 'datasource: Azure' entry. + +The '``hostname_bounce: command``' entry can be either the literal string +'builtin' or a command to execute. The command will be invoked after the +hostname is set, and will have the 'interface' in its environment. If +``set_hostname`` is not true, then ``hostname_bounce`` will be ignored. + +An example might be: + command: ["sh", "-c", "killall dhclient; dhclient $interface"] + +.. code:: + + datasource: + agent_command + Azure: + agent_command: [service, walinuxagent, start] + set_hostname: True + hostname_bounce: + # the name of the interface to bounce + interface: eth0 + # policy can be 'on', 'off' or 'force' + policy: on + # the method 'bounce' command. + command: "builtin" + hostname_command: "hostname" + } + +hostname +-------- +When the user launches an instance, they provide a hostname for that instance. +The hostname is provided to the instance in the ovf-env.xml file as +``HostName``. + +Whatever value the instance provides in its dhcp request will resolve in the +domain returned in the 'search' request. + +The interesting issue is that a generic image will already have a hostname +configured. The ubuntu cloud images have 'ubuntu' as the hostname of the +system, and the initial dhcp request on eth0 is not guaranteed to occur after +the datasource code has been run. So, on first boot, that initial value will +be sent in the dhcp request and *that* value will resolve. + +In order to make the ``HostName`` provided in the ovf-env.xml resolve, a +dhcp request must be made with the new value. Walinuxagent (in its current +version) handles this by polling the state of hostname and bouncing ('``ifdown +eth0; ifup eth0``' the network interface if it sees that a change has been +made. + +cloud-init handles this by setting the hostname in the DataSource's 'get_data' +method via '``hostname $HostName``', and then bouncing the interface. This +behavior can be configured or disabled in the datasource config. See +'Configuration' above. 
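The dscfg mechanism documented above carries base64-encoded YAML. Below is a minimal standalone sketch (not part of the patch, and assuming PyYAML, which these unit tests already import) of producing such a payload and decoding it the way read_azure_ovf does; the override values are only examples:

```python
import base64
import yaml  # PyYAML, already used by the tests in this series

# Illustrative datasource overrides; keys mirror the config documented above,
# values are examples only.
ds_overrides = {
    'agent_command': ['service', 'walinuxagent', 'start'],
    'hostname_bounce': {'policy': 'off'},
}

# dscfg content is YAML; with encoding="base64" it is base64-encoded as well.
encoded = base64.b64encode(yaml.dump(ds_overrides).encode('utf-8')).decode('ascii')

# Receiving side (mirroring read_azure_ovf): decode, load the YAML, and merge
# the result under the 'datasource: Azure' entry of the system config.
decoded = yaml.safe_load(base64.b64decode(encoded))
assert decoded == ds_overrides
```

The resulting string is the kind of value placed in the dscfg element with encoding="base64", which is what the test_user_cfg_set_agent_command test in the patch below constructs with base64.b64encode(yaml.dump(cfg)).
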
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 2e8583f9..c944cb13 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -26,8 +26,15 @@ def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None): xmlns:i="http://www.w3.org/2001/XMLSchema-instance"> LinuxProvisioningConfiguration """ - for key, val in data.items(): - content += "<%s>%s\n" % (key, val, key) + for key, dval in data.items(): + if isinstance(dval, dict): + val = dval.get('text') + attrs = ' ' + ' '.join(["%s='%s'" % (k, v) for k, v in dval.items() + if k != 'text']) + else: + val = dval + attrs = "" + content += "<%s%s>%s\n" % (key, attrs, val, key) if userdata: content += "%s\n" % (base64.b64encode(userdata)) @@ -103,6 +110,9 @@ class TestAzureDataSource(MockerTestCase): data['iid_from_shared_cfg'] = path return 'i-my-azure-id' + def _apply_hostname_bounce(**kwargs): + data['apply_hostname_bounce'] = kwargs + if data.get('ovfcontent') is not None: populate_dir(os.path.join(self.paths.seed_dir, "azure"), {'ovf-env.xml': data['ovfcontent']}) @@ -118,7 +128,9 @@ class TestAzureDataSource(MockerTestCase): (mod, 'pubkeys_from_crt_files', _pubkeys_from_crt_files), (mod, 'iid_from_shared_config', - _iid_from_shared_config), ]) + _iid_from_shared_config), + (mod, 'apply_hostname_bounce', + _apply_hostname_bounce), ]) dsrc = mod.DataSourceAzureNet( data.get('sys_cfg', {}), distro=None, paths=self.paths) @@ -139,11 +151,26 @@ class TestAzureDataSource(MockerTestCase): self.assertEqual(0700, data['datadir_mode']) self.assertEqual(dsrc.metadata['instance-id'], 'i-my-azure-id') + def test_user_cfg_set_agent_command_plain(self): + # set dscfg in via plaintext + cfg = {'agent_command': "my_command"} + odata = {'HostName': "myhost", 'UserName': "myuser", + 'dscfg': {'text': yaml.dump(cfg), 'encoding': 'plain'}} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + dsrc = self._get_ds(data) + ret = dsrc.get_data() + self.assertTrue(ret) + self.assertEqual(data['agent_invoked'], cfg['agent_command']) + def test_user_cfg_set_agent_command(self): + # set dscfg in via base64 encoded yaml cfg = {'agent_command': "my_command"} odata = {'HostName': "myhost", 'UserName': "myuser", - 'dscfg': yaml.dump(cfg)} + 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), + 'encoding': 'base64'}} data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + print data dsrc = self._get_ds(data) ret = dsrc.get_data() @@ -218,6 +245,48 @@ class TestAzureDataSource(MockerTestCase): for mypk in mypklist: self.assertIn(mypk, dsrc.cfg['_pubkeys']) + def test_disabled_bounce(self): + pass + + def test_apply_bounce_call_1(self): + # hostname needs to get through to apply_hostname_bounce + mydata = "FOOBAR" + odata = {'HostName': 'my-random-hostname'} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + + self._get_ds(data).get_data() + self.assertIn('hostname', data['apply_hostname_bounce']) + self.assertEqual(data['apply_hostname_bounce']['hostname'], + odata['HostName']) + + def test_apply_bounce_call_configurable(self): + # hostname_bounce should be configurable in datasource cfg + cfg = {'hostname_bounce': {'interface': 'eth1', 'policy': 'off', + 'command': 'my-bounce-command', + 'hostname_command': 'my-hostname-command'}} + odata = {'HostName': "xhost", + 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), + 'encoding': 'base64'}} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + 
self._get_ds(data).get_data() + + for k in cfg['hostname_bounce']: + self.assertIn(k, data['apply_hostname_bounce']) + + for k, v in cfg['hostname_bounce'].items(): + self.assertEqual(data['apply_hostname_bounce'][k], v) + + def test_set_hostname_disabled(self): + # config specifying set_hostname off should not bounce + cfg = {'set_hostname': False} + odata = {'HostName': "xhost", + 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), + 'encoding': 'base64'}} + data = {'ovfcontent': construct_valid_ovf_env(data=odata)} + self._get_ds(data).get_data() + + self.assertEqual(data.get('apply_hostname_bounce', "N/A"), "N/A") + class TestReadAzureOvf(MockerTestCase): def test_invalid_xml_raises_non_azure_ds(self): -- cgit v1.2.3 From 4d9668dac5fed8f713f3b4300fdb574f399c14ee Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 25 Jul 2013 15:27:44 -0400 Subject: minor azure cleanup --- cloudinit/sources/DataSourceAzure.py | 2 +- tests/unittests/test_datasource/test_azure.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 30b06fef..d4863429 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -54,7 +54,7 @@ class DataSourceAzureNet(sources.DataSource): self.cfg = {} self.seed = None self.ds_cfg = util.mergemanydict([ - util.get_cfg_by_path(sys_cfg, DS_CFG_PATH), + util.get_cfg_by_path(sys_cfg, DS_CFG_PATH, {}), BUILTIN_DS_CONFIG]) def __str__(self): diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index c944cb13..4cd3f213 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -170,7 +170,6 @@ class TestAzureDataSource(MockerTestCase): 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), 'encoding': 'base64'}} data = {'ovfcontent': construct_valid_ovf_env(data=odata)} - print data dsrc = self._get_ds(data) ret = dsrc.get_data() -- cgit v1.2.3 From 2e4d57b867b71831270655956d06a8e14793a8f3 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 6 Aug 2013 11:36:30 +0100 Subject: fix pep8 and pylint warnings --- cloudinit/distros/rhel.py | 2 -- cloudinit/distros/sles.py | 2 -- tests/unittests/test_datasource/test_azure.py | 1 - 3 files changed, 5 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index ece1a5ff..30195384 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -20,8 +20,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import os - from cloudinit import distros from cloudinit import helpers from cloudinit import log as logging diff --git a/cloudinit/distros/sles.py b/cloudinit/distros/sles.py index 92a1c307..f2ac4efc 100644 --- a/cloudinit/distros/sles.py +++ b/cloudinit/distros/sles.py @@ -18,8 +18,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import os - from cloudinit import distros from cloudinit.distros.parsers.hostname import HostnameConf diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 4cd3f213..06f8a5d2 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -249,7 +249,6 @@ class TestAzureDataSource(MockerTestCase): def test_apply_bounce_call_1(self): # hostname needs to get through to apply_hostname_bounce - mydata = "FOOBAR" odata = {'HostName': 'my-random-hostname'} data = {'ovfcontent': construct_valid_ovf_env(data=odata)} -- cgit v1.2.3 From e668da729a0f9cd5d93d909a9b44d74cf6925dd5 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 15 Aug 2013 13:16:01 -0400 Subject: do not set 'password', but set 'passwd' to crypt'd value 'password' was the wrong key. It should have been setting the default user's "plain_text_password". Instead of doing that, though, we're encrypting the value and putting it in 'passwd', which will then be passed on to useradd. The key value in doing this is that the plain text password will not be stored in obj.pkl. (admittedly it is still in plain text in the ovf-env.xml file). --- cloudinit/sources/DataSourceAzure.py | 7 ++++++- tests/unittests/test_datasource/test_azure.py | 11 ++++++++--- 2 files changed, 14 insertions(+), 4 deletions(-) (limited to 'tests/unittests/test_datasource/test_azure.py') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 1a74de21..7ec622bf 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -17,6 +17,7 @@ # along with this program. If not, see . import base64 +import crypt import os import os.path import time @@ -424,7 +425,7 @@ def read_azure_ovf(contents): if username: defuser['name'] = username if password: - defuser['password'] = password + defuser['passwd'] = encrypt_pass(password) defuser['lock_passwd'] = False if defuser: @@ -436,6 +437,10 @@ def read_azure_ovf(contents): return (md, ud, cfg) +def encrypt_pass(password, salt_id="$6$"): + return crypt.crypt(password, salt_id + util.rand_str(strlen=16)) + + def list_possible_azure_ds_devs(): # return a sorted list of devices that might have a azure datasource devlist = [] diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 06f8a5d2..1ca6a79d 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -2,6 +2,7 @@ from cloudinit import helpers from cloudinit.sources import DataSourceAzure from tests.unittests.helpers import populate_dir +import crypt import base64 from mocker import MockerTestCase import os @@ -207,11 +208,15 @@ class TestAzureDataSource(MockerTestCase): self.assertTrue('default_user' in dsrc.cfg['system_info']) defuser = dsrc.cfg['system_info']['default_user'] - # default user shoudl be updated for password and username - # and should not be locked. + # default user should be updated username and should not be locked. 
self.assertEqual(defuser['name'], odata['UserName']) - self.assertEqual(defuser['password'], odata['UserPassword']) self.assertFalse(defuser['lock_passwd']) + # passwd is crypt formated string $id$salt$encrypted + # encrypting plaintext with salt value of everything up to final '$' + # should equal that after the '$' + pos = defuser['passwd'].rfind("$") + 1 + self.assertEqual(defuser['passwd'], + crypt.crypt(odata['UserPassword'], defuser['passwd'][0:pos])) def test_userdata_found(self): mydata = "FOOBAR" -- cgit v1.2.3