-rw-r--r--  cloudinit/sources/DataSourceAzure.py           | 74
-rw-r--r--  tests/unittests/test_datasource/test_azure.py  |  5
2 files changed, 63 insertions(+), 16 deletions(-)
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 4053cfa6..3c7820a6 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -22,6 +22,7 @@ import crypt
import fnmatch
import os
import os.path
+import time
from xml.dom import minidom
from cloudinit import log as logging
@@ -35,11 +36,13 @@ LOG = logging.getLogger(__name__)
DS_NAME = 'Azure'
DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"}
+AGENT_START = ['service', 'walinuxagent', 'start']
BOUNCE_COMMAND = ['sh', '-xc',
"i=$interface; x=0; ifdown $i || x=$?; ifup $i || x=$?; exit $x"]
DATA_DIR_CLEAN_LIST = ['SharedConfig.xml']
BUILTIN_DS_CONFIG = {
+ 'agent_command': '__builtin__',
'data_dir': "/var/lib/waagent",
'set_hostname': True,
'hostname_bounce': {
@@ -110,6 +113,56 @@ class DataSourceAzureNet(sources.DataSource):
root = sources.DataSource.__str__(self)
return "%s [seed=%s]" % (root, self.seed)
+ def get_metadata_from_agent(self):
+ temp_hostname = self.metadata.get('local-hostname')
+ hostname_command = self.ds_cfg['hostname_bounce']['hostname_command']
+ with temporary_hostname(temp_hostname, self.ds_cfg,
+ hostname_command=hostname_command) \
+ as previous_hostname:
+ if (previous_hostname is not None
+ and util.is_true(self.ds_cfg.get('set_hostname'))):
+ cfg = self.ds_cfg['hostname_bounce']
+ try:
+ perform_hostname_bounce(hostname=temp_hostname,
+ cfg=cfg,
+ prev_hostname=previous_hostname)
+ except Exception as e:
+ LOG.warn("Failed publishing hostname: %s", e)
+ util.logexc(LOG, "handling set_hostname failed")
+
+ try:
+ invoke_agent(self.ds_cfg['agent_command'])
+ except util.ProcessExecutionError:
+ # claim the datasource even if the command failed
+ util.logexc(LOG, "agent command '%s' failed.",
+ self.ds_cfg['agent_command'])
+
+ ddir = self.ds_cfg['data_dir']
+ shcfgxml = os.path.join(ddir, "SharedConfig.xml")
+ wait_for = [shcfgxml]
+
+ fp_files = []
+ for pk in self.cfg.get('_pubkeys', []):
+ bname = str(pk['fingerprint'] + ".crt")
+ fp_files += [os.path.join(ddir, bname)]
+
+ missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
+ func=wait_for_files,
+ args=(wait_for + fp_files,))
+ if len(missing):
+ LOG.warn("Did not find files, but going on: %s", missing)
+
+ metadata = {}
+ if shcfgxml in missing:
+ LOG.warn("SharedConfig.xml missing, using static instance-id")
+ else:
+ try:
+ metadata['instance-id'] = iid_from_shared_config(shcfgxml)
+ except ValueError as e:
+ LOG.warn("failed to get instance id in %s: %s", shcfgxml, e)
+ metadata['public-keys'] = pubkeys_from_crt_files(fp_files)
+ return metadata
+
def get_data(self):
# azure removes/ejects the cdrom containing the ovf-env.xml
# file on reboot. So, in order to successfully reboot we
@@ -162,8 +215,6 @@ class DataSourceAzureNet(sources.DataSource):
# now update ds_cfg to reflect contents pass in config
user_ds_cfg = util.get_cfg_by_path(self.cfg, DS_CFG_PATH, {})
self.ds_cfg = util.mergemanydict([user_ds_cfg, self.ds_cfg])
- mycfg = self.ds_cfg
- ddir = mycfg['data_dir']
if found != ddir:
cached_ovfenv = util.load_file(
@@ -184,8 +235,12 @@ class DataSourceAzureNet(sources.DataSource):
# the directory to be protected.
write_files(ddir, files, dirmode=0o700)
+ if self.ds_cfg['agent_command'] == '__builtin__':
+ metadata_func = get_metadata_from_fabric
+ else:
+ metadata_func = self.get_metadata_from_agent
try:
- fabric_data = get_metadata_from_fabric()
+ fabric_data = metadata_func()
except Exception as exc:
LOG.info("Error communicating with Azure fabric; assume we aren't"
" on Azure.", exc_info=True)
@@ -567,19 +622,6 @@ def iid_from_shared_config(path):
return iid_from_shared_config_content(content)
-def iid_from_shared_config_content(content):
- """
- find INSTANCE_ID in:
- <?xml version="1.0" encoding="utf-8"?>
- <SharedConfig version="1.0.0.0" goalStateIncarnation="1">
- <Deployment name="INSTANCE_ID" guid="{...}" incarnation="0">
- <Service name="..." guid="{00000000-0000-0000-0000-000000000000}" />
- """
- dom = minidom.parseString(content)
- depnode = single_node_at_path(dom, ["SharedConfig", "Deployment"])
- return depnode.attributes.get('name').value
-
-
class BrokenAzureDataSource(Exception):
pass
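
For reference, the removed iid_from_shared_config_content() helper read the
instance-id out of waagent's SharedConfig.xml with minidom. The standalone
sketch below reproduces that behaviour; the XML literal is a made-up, minimal,
well-formed document based on the snippet in the removed docstring, not a real
SharedConfig.xml.

from xml.dom import minidom

# Minimal stand-in for /var/lib/waagent/SharedConfig.xml (assumed shape,
# derived from the removed docstring).
SHARED_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
<SharedConfig version="1.0.0.0" goalStateIncarnation="1">
  <Deployment name="INSTANCE_ID" guid="{00000000-0000-0000-0000-000000000000}"
              incarnation="0">
    <Service name="svc" guid="{00000000-0000-0000-0000-000000000000}" />
  </Deployment>
</SharedConfig>"""

def instance_id_from_shared_config(content):
    # Mirrors the removed helper: SharedConfig -> Deployment -> 'name' attribute.
    dom = minidom.parseString(content)
    deployment = dom.documentElement.getElementsByTagName('Deployment')[0]
    return deployment.attributes.get('name').value

print(instance_id_from_shared_config(SHARED_CONFIG))  # -> INSTANCE_ID
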
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 983be4cd..c72dc801 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -389,11 +389,13 @@ class TestAzureDataSource(TestCase):
def test_exception_fetching_fabric_data_doesnt_propagate(self):
ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ ds.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.side_effect = Exception
self.assertFalse(ds.get_data())
def test_fabric_data_included_in_metadata(self):
ds = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ ds.ds_cfg['agent_command'] = '__builtin__'
self.get_metadata_from_fabric.return_value = {'test': 'value'}
ret = ds.get_data()
self.assertTrue(ret)
@@ -419,6 +421,9 @@ class TestAzureBounce(TestCase):
self.patches.enter_context(
mock.patch.object(DataSourceAzure, 'find_ephemeral_part',
mock.MagicMock(return_value=None)))
+ self.patches.enter_context(
+ mock.patch.object(DataSourceAzure, 'get_metadata_from_fabric',
+ mock.MagicMock(return_value={})))
def setUp(self):
super(TestAzureBounce, self).setUp()
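
The new test setup stubs get_metadata_from_fabric with the same
ExitStack / mock.patch.object pattern already used for the other module
attributes, so TestAzureBounce never touches the real fabric code. A
self-contained sketch of that pattern is below; FakeAzureModule is a stand-in
for the real DataSourceAzure module, not cloud-init code.

import contextlib
from unittest import mock


class FakeAzureModule(object):
    # Stand-in for cloudinit.sources.DataSourceAzure in this sketch.
    @staticmethod
    def get_metadata_from_fabric():
        raise AssertionError("tests should never hit the real fabric call")


with contextlib.ExitStack() as patches:
    patches.enter_context(
        mock.patch.object(FakeAzureModule, 'get_metadata_from_fabric',
                          mock.MagicMock(return_value={})))
    # While the patch is active the fabric call is inert and returns {},
    # so bounce-oriented tests exercise only the hostname logic.
    assert FakeAzureModule.get_metadata_from_fabric() == {}
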