summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorScott Moser <smoser@ubuntu.com>2014-02-12 12:14:49 -0500
committerScott Moser <smoser@ubuntu.com>2014-02-12 12:14:49 -0500
commitdd1c392eeae8324e59d34d8d448e74cfda6d1712 (patch)
tree7f0236e34eb0f1770219d6c636c81eaf318f02e8
parent65da76341796a00b7bbdca514167b89f99d5a599 (diff)
parent4d80411700ca70401cda401ee9bca799b9b8c55d (diff)
downloadvyos-cloud-init-dd1c392eeae8324e59d34d8d448e74cfda6d1712.tar.gz
vyos-cloud-init-dd1c392eeae8324e59d34d8d448e74cfda6d1712.zip
merge from trunk
-rw-r--r--ChangeLog1
-rw-r--r--cloudinit/sources/DataSourceAzure.py37
-rw-r--r--cloudinit/sources/DataSourceEc2.py7
-rw-r--r--doc/examples/cloud-config-landscape.txt8
-rw-r--r--packages/redhat/cloud-init.spec.in5
-rw-r--r--packages/suse/cloud-init.spec.in2
-rw-r--r--tests/unittests/test_datasource/test_azure.py83
7 files changed, 112 insertions, 31 deletions
diff --git a/ChangeLog b/ChangeLog
index 6c8fe90a..fcc9e7cb 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -23,6 +23,7 @@
the correct filesystem label. [Paul Querna]
- initial freebsd support [Harm Weites]
- fix in is_ipv4 to accept IP addresses with a '0' in them.
+ - Azure: fix issue when stale data in /var/lib/waagent (LP: #1269626)
0.7.4:
- fix issue mounting 'ephemeral0' if ephemeral0 was an alias for a
partitioned block device with target filesystem on ephemeral0.1.
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 97f151d6..c7331da5 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -34,6 +34,7 @@ DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"}
AGENT_START = ['service', 'walinuxagent', 'start']
BOUNCE_COMMAND = ['sh', '-xc',
"i=$interface; x=0; ifdown $i || x=$?; ifup $i || x=$?; exit $x"]
+DATA_DIR_CLEAN_LIST = ['SharedConfig.xml']
BUILTIN_DS_CONFIG = {
'agent_command': AGENT_START,
@@ -101,7 +102,7 @@ class DataSourceAzureNet(sources.DataSource):
except BrokenAzureDataSource as exc:
raise exc
except util.MountFailedError:
- LOG.warn("%s was not mountable" % cdev)
+ LOG.warn("%s was not mountable", cdev)
continue
(md, self.userdata_raw, cfg, files) = ret
@@ -128,10 +129,26 @@ class DataSourceAzureNet(sources.DataSource):
user_ds_cfg = util.get_cfg_by_path(self.cfg, DS_CFG_PATH, {})
self.ds_cfg = util.mergemanydict([user_ds_cfg, self.ds_cfg])
mycfg = self.ds_cfg
+ ddir = mycfg['data_dir']
+
+ if found != ddir:
+ cached_ovfenv = util.load_file(
+ os.path.join(ddir, 'ovf-env.xml'), quiet=True)
+ if cached_ovfenv != files['ovf-env.xml']:
+ # source was not walinux-agent's datadir, so we have to clean
+ # up so 'wait_for_files' doesn't return early due to stale data
+ cleaned = []
+ for f in [os.path.join(ddir, f) for f in DATA_DIR_CLEAN_LIST]:
+ if os.path.exists(f):
+ util.del_file(f)
+ cleaned.append(f)
+ if cleaned:
+ LOG.info("removed stale file(s) in '%s': %s",
+ ddir, str(cleaned))
# walinux agent writes files world readable, but expects
# the directory to be protected.
- write_files(mycfg['data_dir'], files, dirmode=0700)
+ write_files(ddir, files, dirmode=0700)
# handle the hostname 'publishing'
try:
@@ -139,7 +156,7 @@ class DataSourceAzureNet(sources.DataSource):
self.metadata.get('local-hostname'),
mycfg['hostname_bounce'])
except Exception as e:
- LOG.warn("Failed publishing hostname: %s" % e)
+ LOG.warn("Failed publishing hostname: %s", e)
util.logexc(LOG, "handling set_hostname failed")
try:
@@ -149,13 +166,13 @@ class DataSourceAzureNet(sources.DataSource):
util.logexc(LOG, "agent command '%s' failed.",
mycfg['agent_command'])
- shcfgxml = os.path.join(mycfg['data_dir'], "SharedConfig.xml")
+ shcfgxml = os.path.join(ddir, "SharedConfig.xml")
wait_for = [shcfgxml]
fp_files = []
for pk in self.cfg.get('_pubkeys', []):
bname = str(pk['fingerprint'] + ".crt")
- fp_files += [os.path.join(mycfg['data_dir'], bname)]
+ fp_files += [os.path.join(ddir, bname)]
missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
func=wait_for_files,
@@ -169,7 +186,7 @@ class DataSourceAzureNet(sources.DataSource):
try:
self.metadata['instance-id'] = iid_from_shared_config(shcfgxml)
except ValueError as e:
- LOG.warn("failed to get instance id in %s: %s" % (shcfgxml, e))
+ LOG.warn("failed to get instance id in %s: %s", shcfgxml, e)
pubkeys = pubkeys_from_crt_files(fp_files)
@@ -250,7 +267,7 @@ def pubkeys_from_crt_files(flist):
errors.append(fname)
if errors:
- LOG.warn("failed to convert the crt files to pubkey: %s" % errors)
+ LOG.warn("failed to convert the crt files to pubkey: %s", errors)
return pubkeys
@@ -281,7 +298,7 @@ def write_files(datadir, files, dirmode=None):
def invoke_agent(cmd):
# this is a function itself to simplify patching it for test
if cmd:
- LOG.debug("invoking agent: %s" % cmd)
+ LOG.debug("invoking agent: %s", cmd)
util.subp(cmd, shell=(not isinstance(cmd, list)))
else:
LOG.debug("not invoking agent")
@@ -328,7 +345,7 @@ def load_azure_ovf_pubkeys(sshnode):
continue
cur = {'fingerprint': "", 'path': ""}
for child in pk_node.childNodes:
- if (child.nodeType == text_node or not child.localName):
+ if child.nodeType == text_node or not child.localName:
continue
name = child.localName.lower()
@@ -414,7 +431,7 @@ def read_azure_ovf(contents):
# we accept either UserData or CustomData. If both are present
# then behavior is undefined.
- if (name == "userdata" or name == "customdata"):
+ if name == "userdata" or name == "customdata":
if attrs.get('encoding') in (None, "base64"):
ud = base64.b64decode(''.join(value.split()))
else:
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index f010e640..1b20ecf3 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -92,12 +92,9 @@ class DataSourceEc2(sources.DataSource):
except Exception:
util.logexc(LOG, "Failed to get max wait. using %s", max_wait)
- if max_wait == 0:
- return False
-
timeout = 50
try:
- timeout = int(mcfg.get("timeout", timeout))
+ timeout = max(0, int(mcfg.get("timeout", timeout)))
except Exception:
util.logexc(LOG, "Failed to get timeout, using %s", timeout)
@@ -109,6 +106,8 @@ class DataSourceEc2(sources.DataSource):
mcfg = {}
(max_wait, timeout) = self._get_url_settings()
+ if max_wait <= 0:
+ return False
# Remove addresses from the list that wont resolve.
mdurls = mcfg.get("metadata_urls", DEF_MD_URLS)
diff --git a/doc/examples/cloud-config-landscape.txt b/doc/examples/cloud-config-landscape.txt
index 74e07b62..d7ff8ef8 100644
--- a/doc/examples/cloud-config-landscape.txt
+++ b/doc/examples/cloud-config-landscape.txt
@@ -16,7 +16,7 @@ landscape:
data_path: "/var/lib/landscape/client"
http_proxy: "http://my.proxy.com/foobar"
tags: "server,cloud"
- computer_title = footitle
- https_proxy = fooproxy
- registration_key = fookey
- account_name = fooaccount
+ computer_title: footitle
+ https_proxy: fooproxy
+ registration_key: fookey
+ account_name: fooaccount
diff --git a/packages/redhat/cloud-init.spec.in b/packages/redhat/cloud-init.spec.in
index 9614e2f1..0e9862d8 100644
--- a/packages/redhat/cloud-init.spec.in
+++ b/packages/redhat/cloud-init.spec.in
@@ -34,7 +34,7 @@ Requires: e2fsprogs
Requires: net-tools
Requires: procps
Requires: shadow-utils
-Requires: sudo
+Requires: sudo >= 1.7.2p2-3
# Install pypi 'dynamic' requirements
#for $r in $requires
@@ -169,7 +169,7 @@ fi
/usr/lib/%{name}/write-ssh-key-fingerprints
# Docs
-%doc TODO LICENSE ChangeLog Requires
+%doc TODO LICENSE ChangeLog requirements.txt
%doc %{_defaultdocdir}/cloud-init/*
# Configs
@@ -180,6 +180,7 @@ fi
%dir %{_sysconfdir}/cloud/templates
%config(noreplace) %{_sysconfdir}/cloud/templates/*
%config(noreplace) %{_sysconfdir}/rsyslog.d/21-cloudinit.conf
+%config(noreplace) %{_sysconfdir}/sudoers.d/cloud-init
# Python code is here...
%{python_sitelib}/*
diff --git a/packages/suse/cloud-init.spec.in b/packages/suse/cloud-init.spec.in
index c30a6fae..53e6ad13 100644
--- a/packages/suse/cloud-init.spec.in
+++ b/packages/suse/cloud-init.spec.in
@@ -107,7 +107,7 @@ rm -r %{buildroot}/%{python_sitelib}/tests
# Move documentation
mkdir -p %{buildroot}/%{_defaultdocdir}
mv %{buildroot}/usr/share/doc/cloud-init %{buildroot}/%{_defaultdocdir}
-for doc in TODO LICENSE ChangeLog Requires ; do
+for doc in TODO LICENSE ChangeLog requirements.txt; do
cp \${doc} %{buildroot}/%{_defaultdocdir}/cloud-init
done
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index aad84206..44c537f4 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -1,4 +1,5 @@
from cloudinit import helpers
+from cloudinit.util import load_file
from cloudinit.sources import DataSourceAzure
from tests.unittests.helpers import populate_dir
@@ -6,6 +7,7 @@ import base64
import crypt
from mocker import MockerTestCase
import os
+import stat
import yaml
@@ -72,6 +74,7 @@ class TestAzureDataSource(MockerTestCase):
# patch cloud_dir, so our 'seed_dir' is guaranteed empty
self.paths = helpers.Paths({'cloud_dir': self.tmp})
+ self.waagent_d = os.path.join(self.tmp, 'var', 'lib', 'waagent')
self.unapply = []
super(TestAzureDataSource, self).setUp()
@@ -92,13 +95,6 @@ class TestAzureDataSource(MockerTestCase):
def _invoke_agent(cmd):
data['agent_invoked'] = cmd
- def _write_files(datadir, files, dirmode):
- data['files'] = {}
- data['datadir'] = datadir
- data['datadir_mode'] = dirmode
- for (fname, content) in files.items():
- data['files'][fname] = content
-
def _wait_for_files(flist, _maxwait=None, _naplen=None):
data['waited'] = flist
return []
@@ -119,11 +115,11 @@ class TestAzureDataSource(MockerTestCase):
{'ovf-env.xml': data['ovfcontent']})
mod = DataSourceAzure
+ mod.BUILTIN_DS_CONFIG['data_dir'] = self.waagent_d
self.apply_patches([(mod, 'list_possible_azure_ds_devs', dsdevs)])
self.apply_patches([(mod, 'invoke_agent', _invoke_agent),
- (mod, 'write_files', _write_files),
(mod, 'wait_for_files', _wait_for_files),
(mod, 'pubkeys_from_crt_files',
_pubkeys_from_crt_files),
@@ -147,10 +143,18 @@ class TestAzureDataSource(MockerTestCase):
self.assertTrue(ret)
self.assertEqual(dsrc.userdata_raw, "")
self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName'])
- self.assertTrue('ovf-env.xml' in data['files'])
- self.assertEqual(0700, data['datadir_mode'])
+ self.assertTrue(os.path.isfile(
+ os.path.join(self.waagent_d, 'ovf-env.xml')))
self.assertEqual(dsrc.metadata['instance-id'], 'i-my-azure-id')
+ def test_waagent_d_has_0700_perms(self):
+ # we expect /var/lib/waagent to be created 0700
+ dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertTrue(os.path.isdir(self.waagent_d))
+ self.assertEqual(stat.S_IMODE(os.stat(self.waagent_d).st_mode), 0700)
+
def test_user_cfg_set_agent_command_plain(self):
# set dscfg in via plaintext
# we must have friendly-to-xml formatted plaintext in yaml_cfg
@@ -338,6 +342,65 @@ class TestAzureDataSource(MockerTestCase):
self.assertEqual(userdata, dsrc.userdata_raw)
+ def test_ovf_env_arrives_in_waagent_dir(self):
+ xml = construct_valid_ovf_env(data={}, userdata="FOODATA")
+ dsrc = self._get_ds({'ovfcontent': xml})
+ dsrc.get_data()
+
+ # 'data_dir' is '/var/lib/waagent' (walinux-agent's state dir)
+ # we expect that the ovf-env.xml file is copied there.
+ ovf_env_path = os.path.join(self.waagent_d, 'ovf-env.xml')
+ self.assertTrue(os.path.exists(ovf_env_path))
+ self.assertEqual(xml, load_file(ovf_env_path))
+
+ def test_existing_ovf_same(self):
+ # waagent/SharedConfig left alone if found ovf-env.xml same as cached
+ odata = {'UserData': base64.b64encode("SOMEUSERDATA")}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
+
+ populate_dir(self.waagent_d,
+ {'ovf-env.xml': data['ovfcontent'],
+ 'otherfile': 'otherfile-content',
+ 'SharedConfig.xml': 'mysharedconfig'})
+
+ dsrc = self._get_ds(data)
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertTrue(os.path.exists(
+ os.path.join(self.waagent_d, 'ovf-env.xml')))
+ self.assertTrue(os.path.exists(
+ os.path.join(self.waagent_d, 'otherfile')))
+ self.assertTrue(os.path.exists(
+ os.path.join(self.waagent_d, 'SharedConfig.xml')))
+
+ def test_existing_ovf_diff(self):
+ # waagent/SharedConfig must be removed if ovfenv is found elsewhere
+
+ # 'get_data' should remove SharedConfig.xml in /var/lib/waagent
+ # if ovf-env.xml differs.
+ cached_ovfenv = construct_valid_ovf_env(
+ {'userdata': base64.b64encode("FOO_USERDATA")})
+ new_ovfenv = construct_valid_ovf_env(
+ {'userdata': base64.b64encode("NEW_USERDATA")})
+
+ populate_dir(self.waagent_d,
+ {'ovf-env.xml': cached_ovfenv,
+ 'SharedConfig.xml': "mysharedconfigxml",
+ 'otherfile': 'otherfilecontent'})
+
+ dsrc = self._get_ds({'ovfcontent': new_ovfenv})
+ ret = dsrc.get_data()
+ self.assertTrue(ret)
+ self.assertEqual(dsrc.userdata_raw, "NEW_USERDATA")
+ self.assertTrue(os.path.exists(
+ os.path.join(self.waagent_d, 'otherfile')))
+ self.assertFalse(
+ os.path.exists(os.path.join(self.waagent_d, 'SharedConfig.xml')))
+ self.assertTrue(
+ os.path.exists(os.path.join(self.waagent_d, 'ovf-env.xml')))
+ self.assertEqual(new_ovfenv,
+ load_file(os.path.join(self.waagent_d, 'ovf-env.xml')))
+
class TestReadAzureOvf(MockerTestCase):
def test_invalid_xml_raises_non_azure_ds(self):