author     Scott Moser <smoser@ubuntu.com>    2014-09-22 14:00:39 -0400
committer  Scott Moser <smoser@ubuntu.com>    2014-09-22 14:00:39 -0400
commit     b9f0bcbc087a0c7c4a87f83ddf5713a4f849a488 (patch)
tree       186d80c155d32f75d8efa3c6ec1ad3562f11ba0a /cloudinit/sources
parent     26e6c265277cf5e29b8af311f2bb8759b0e811cd (diff)
parent     b76866ad72d433cc9008a137c464c7ed44401549 (diff)
merge from trunk
Diffstat (limited to 'cloudinit/sources')
-rw-r--r--  cloudinit/sources/DataSourceAzure.py        |   4
-rw-r--r--  cloudinit/sources/DataSourceCloudStack.py   |   3
-rw-r--r--  cloudinit/sources/DataSourceConfigDrive.py  |  14
-rw-r--r--  cloudinit/sources/DataSourceNoCloud.py      |   2
-rw-r--r--  cloudinit/sources/DataSourceOVF.py          |   4
-rw-r--r--  cloudinit/sources/DataSourceOpenNebula.py   |   2
-rw-r--r--  cloudinit/sources/DataSourceOpenStack.py    |  27
-rw-r--r--  cloudinit/sources/DataSourceSmartOS.py      |  32
-rw-r--r--  cloudinit/sources/__init__.py               |  10
-rw-r--r--  cloudinit/sources/helpers/openstack.py      | 157
10 files changed, 156 insertions(+), 99 deletions(-)
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index bd75e6d8..09bc196d 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -452,7 +452,7 @@ def load_azure_ovf_pubkeys(sshnode):
continue
if (len(child.childNodes) != 1 or
- child.childNodes[0].nodeType != text_node):
+ child.childNodes[0].nodeType != text_node):
continue
cur[name] = child.childNodes[0].wholeText.strip()
@@ -521,7 +521,7 @@ def read_azure_ovf(contents):
simple = False
value = ""
if (len(child.childNodes) == 1 and
- child.childNodes[0].nodeType == dom.TEXT_NODE):
+ child.childNodes[0].nodeType == dom.TEXT_NODE):
simple = True
value = child.childNodes[0].wholeText
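The hunks above (and the matching whitespace-only hunks in DataSourceNoCloud.py, DataSourceOVF.py and sources/__init__.py further down) look like no-ops in this view because leading spaces are collapsed; the underlying change only re-indents the continuation line of a multi-line condition. A minimal sketch of the style in question, using a hypothetical helper rather than the Azure code itself:

import xml.dom.minidom as dom

def is_single_text_node(child):
    """Return True when 'child' wraps exactly one text node."""
    # The continuation line below carries an extra indent level so it is not
    # mistaken for the block that follows the 'if' (the usual pep8-driven
    # reason for this kind of whitespace-only change).
    if (len(child.childNodes) != 1 or
            child.childNodes[0].nodeType != dom.Node.TEXT_NODE):
        return False
    return True

doc = dom.parseString("<key>abc</key>")
print(is_single_text_node(doc.documentElement))  # True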
diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py
index 08f661e4..1bbeca59 100644
--- a/cloudinit/sources/DataSourceCloudStack.py
+++ b/cloudinit/sources/DataSourceCloudStack.py
@@ -78,7 +78,8 @@ class DataSourceCloudStack(sources.DataSource):
(max_wait, timeout) = self._get_url_settings()
- urls = [self.metadata_address + "/latest/meta-data/instance-id"]
+ urls = [uhelp.combine_url(self.metadata_address,
+ 'latest/meta-data/instance-id')]
start_time = time.time()
url = uhelp.wait_for_url(urls=urls, max_wait=max_wait,
timeout=timeout, status_cb=LOG.warn)
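Building the URL with uhelp.combine_url instead of plain string concatenation keeps the result well-formed whether or not metadata_address carries a trailing slash. A rough standalone equivalent of that join (not cloud-init's actual implementation) behaves like this:

def combine_url_sketch(base, *add_ons):
    """Slash-safe URL joining, assuming plain path components."""
    url = base
    for piece in add_ons:
        url = url.rstrip('/') + '/' + piece.lstrip('/')
    return url

# Both spellings of a (hypothetical) metadata address yield the same URL.
print(combine_url_sketch('http://10.1.1.1', 'latest/meta-data/instance-id'))
print(combine_url_sketch('http://10.1.1.1/', 'latest/meta-data/instance-id'))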
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 82caf6eb..27658073 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -135,7 +135,15 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
self.userdata_raw = results.get('userdata')
self.version = results['version']
self.files.update(results.get('files', {}))
- self.vendordata_raw = results.get('vendordata')
+
+ vd = results.get('vendordata')
+ self.vendordata_pure = vd
+ try:
+ self.vendordata_raw = openstack.convert_vendordata_json(vd)
+ except ValueError as e:
+ LOG.warn("Invalid content in vendor-data: %s", e)
+ self.vendordata_raw = None
+
return True
@@ -170,10 +178,10 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None):
return "net"
-def read_config_drive(source_dir, version="2012-08-10"):
+def read_config_drive(source_dir):
reader = openstack.ConfigDriveReader(source_dir)
finders = [
- (reader.read_v2, [], {'version': version}),
+ (reader.read_v2, [], {}),
(reader.read_v1, [], {}),
]
excps = []
diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py
index a315aae0..c26a645c 100644
--- a/cloudinit/sources/DataSourceNoCloud.py
+++ b/cloudinit/sources/DataSourceNoCloud.py
@@ -181,7 +181,7 @@ class DataSourceNoCloud(sources.DataSource):
# and the source of the seed was self.dsmode
# ('local' for NoCloud, 'net' for NoCloudNet')
if ('network-interfaces' in mydata['meta-data'] and
- (self.dsmode in ("local", seeded_interfaces))):
+ (self.dsmode in ("local", seeded_interfaces))):
LOG.debug("Updating network interfaces from %s", self)
self.distro.apply_network(
mydata['meta-data']['network-interfaces'])
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index 77b43e17..2f53c1ba 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -107,7 +107,7 @@ class DataSourceOVF(sources.DataSource):
return True
def get_public_ssh_keys(self):
- if not 'public-keys' in self.metadata:
+ if 'public-keys' not in self.metadata:
return []
pks = self.metadata['public-keys']
if isinstance(pks, (list)):
@@ -205,7 +205,7 @@ def transport_iso9660(require_iso=True):
fullp = os.path.join("/dev/", dev)
if (fullp in mounts or
- not cdmatch.match(dev) or os.path.isdir(fullp)):
+ not cdmatch.match(dev) or os.path.isdir(fullp)):
continue
try:
diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
index 34557f8b..e2469f6e 100644
--- a/cloudinit/sources/DataSourceOpenNebula.py
+++ b/cloudinit/sources/DataSourceOpenNebula.py
@@ -28,7 +28,7 @@ import base64
import os
import pwd
import re
-import string # pylint: disable=W0402
+import string
from cloudinit import log as logging
from cloudinit import sources
diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py
index 0970d07b..469c2e2a 100644
--- a/cloudinit/sources/DataSourceOpenStack.py
+++ b/cloudinit/sources/DataSourceOpenStack.py
@@ -88,11 +88,9 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
md_urls = []
url2base = {}
for url in urls:
- for version in openstack.OS_VERSIONS + (openstack.OS_LATEST,):
- md_url = url_helper.combine_url(url, 'openstack',
- version, 'meta_data.json')
- md_urls.append(md_url)
- url2base[md_url] = url
+ md_url = url_helper.combine_url(url, 'openstack')
+ md_urls.append(md_url)
+ url2base[md_url] = url
(max_wait, timeout) = self._get_url_settings()
start_time = time.time()
@@ -119,8 +117,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
'Crawl of openstack metadata service',
read_metadata_service,
args=[self.metadata_address],
- kwargs={'ssl_details': self.ssl_details,
- 'version': openstack.OS_HAVANA})
+ kwargs={'ssl_details': self.ssl_details})
except openstack.NonReadable:
return False
except (openstack.BrokenMetadata, IOError):
@@ -143,20 +140,20 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
self.version = results['version']
self.files.update(results.get('files', {}))
- # if vendordata includes 'cloud-init', then read that explicitly
- # for cloud-init (for namespacing).
vd = results.get('vendordata')
- if isinstance(vd, dict) and 'cloud-init' in vd:
- self.vendordata_raw = vd['cloud-init']
- else:
- self.vendordata_raw = vd
+ self.vendordata_pure = vd
+ try:
+ self.vendordata_raw = openstack.convert_vendordata_json(vd)
+ except ValueError as e:
+ LOG.warn("Invalid content in vendor-data: %s", e)
+ self.vendordata_raw = None
return True
-def read_metadata_service(base_url, version=None, ssl_details=None):
+def read_metadata_service(base_url, ssl_details=None):
reader = openstack.MetadataReader(base_url, ssl_details=ssl_details)
- return reader.read_v2(version=version)
+ return reader.read_v2()
# Used to match classes to dependencies
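The per-version URL probing is dropped here because version negotiation moves into the reader: read_metadata_service() now only receives a base URL, and BaseReader._find_working_version() (added in helpers/openstack.py below) asks the service which versions it offers and picks the newest supported one, falling back to 'latest'. A small standalone sketch of that selection, with made-up version lists:

OS_LATEST = 'latest'
# Chronological order, oldest first, mirroring helpers/openstack.py.
OS_VERSIONS = ('2012-08-10', '2013-04-04', '2013-10-17')

def pick_version(versions_available):
    """Return the newest supported version the service offers, else 'latest'."""
    for candidate in reversed(OS_VERSIONS):  # newest first
        if candidate in versions_available:
            return candidate
    return OS_LATEST

print(pick_version(['2012-08-10', '2013-04-04']))  # -> '2013-04-04'
print(pick_version(['2015-10-15']))                # unknown -> 'latest'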
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 65ec0339..2733a2f6 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -41,7 +41,7 @@ import serial
LOG = logging.getLogger(__name__)
SMARTOS_ATTRIB_MAP = {
- #Cloud-init Key : (SmartOS Key, Strip line endings)
+ # Cloud-init Key : (SmartOS Key, Strip line endings)
'local-hostname': ('hostname', True),
'public-keys': ('root_authorized_keys', True),
'user-script': ('user-script', False),
@@ -96,21 +96,21 @@ BUILTIN_CLOUD_CONFIG = {
'device': 'ephemeral0'}],
}
-## builtin vendor-data is a boothook that writes a script into
-## /var/lib/cloud/scripts/per-boot. *That* script then handles
-## executing the 'operator-script' and 'user-script' files
-## that cloud-init writes into /var/lib/cloud/instance/data/
-## if they exist.
-##
-## This is all very indirect, but its done like this so that at
-## some point in the future, perhaps cloud-init wouldn't do it at
-## all, but rather the vendor actually provide vendor-data that accomplished
-## their desires. (That is the point of vendor-data).
-##
-## cloud-init does cheat a bit, and write the operator-script and user-script
-## itself. It could have the vendor-script do that, but it seems better
-## to not require the image to contain a tool (mdata-get) to read those
-## keys when we have a perfectly good one inside cloud-init.
+# builtin vendor-data is a boothook that writes a script into
+# /var/lib/cloud/scripts/per-boot. *That* script then handles
+# executing the 'operator-script' and 'user-script' files
+# that cloud-init writes into /var/lib/cloud/instance/data/
+# if they exist.
+#
+# This is all very indirect, but its done like this so that at
+# some point in the future, perhaps cloud-init wouldn't do it at
+# all, but rather the vendor actually provide vendor-data that accomplished
+# their desires. (That is the point of vendor-data).
+#
+# cloud-init does cheat a bit, and write the operator-script and user-script
+# itself. It could have the vendor-script do that, but it seems better
+# to not require the image to contain a tool (mdata-get) to read those
+# keys when we have a perfectly good one inside cloud-init.
BUILTIN_VENDOR_DATA = """\
#cloud-boothook
#!/bin/sh
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index fef4d460..7c7ef9ab 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -66,7 +66,7 @@ class DataSource(object):
name = name[0:-3]
self.ds_cfg = util.get_cfg_by_path(self.sys_cfg,
- ("datasource", name), {})
+ ("datasource", name), {})
if not ud_proc:
self.ud_proc = ud.UserDataProcessor(self.paths)
else:
@@ -166,7 +166,7 @@ class DataSource(object):
defhost = "localhost"
domain = defdomain
- if not self.metadata or not 'local-hostname' in self.metadata:
+ if not self.metadata or 'local-hostname' not in self.metadata:
# this is somewhat questionable really.
# the cloud datasource was asked for a hostname
# and didn't have one. raising error might be more appropriate
@@ -272,9 +272,9 @@ def list_sources(cfg_list, depends, pkg_list):
for ds_name in cfg_list:
if not ds_name.startswith(DS_PREFIX):
ds_name = '%s%s' % (DS_PREFIX, ds_name)
- m_locs = importer.find_module(ds_name,
- pkg_list,
- ['get_datasource_list'])
+ m_locs, _looked_locs = importer.find_module(ds_name,
+ pkg_list,
+ ['get_datasource_list'])
for m_loc in m_locs:
mod = importer.import_module(m_loc)
lister = getattr(mod, "get_datasource_list")
diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py
index 0fac0335..b7e19314 100644
--- a/cloudinit/sources/helpers/openstack.py
+++ b/cloudinit/sources/helpers/openstack.py
@@ -21,6 +21,7 @@
import abc
import base64
import copy
+import functools
import os
from cloudinit import ec2_utils
@@ -48,6 +49,7 @@ OS_LATEST = 'latest'
OS_FOLSOM = '2012-08-10'
OS_GRIZZLY = '2013-04-04'
OS_HAVANA = '2013-10-17'
+# keep this in chronological order. new supported versions go at the end.
OS_VERSIONS = (
OS_FOLSOM,
OS_GRIZZLY,
@@ -150,17 +152,40 @@ class BaseReader(object):
pass
@abc.abstractmethod
- def _path_exists(self, path):
+ def _path_read(self, path):
pass
@abc.abstractmethod
- def _path_read(self, path):
+ def _fetch_available_versions(self):
pass
@abc.abstractmethod
def _read_ec2_metadata(self):
pass
+ def _find_working_version(self):
+ try:
+ versions_available = self._fetch_available_versions()
+ except Exception as e:
+ LOG.debug("Unable to read openstack versions from %s due to: %s",
+ self.base_path, e)
+ versions_available = []
+
+ # openstack.OS_VERSIONS is stored in chronological order, so
+ # reverse it to check newest first.
+ supported = [v for v in reversed(list(OS_VERSIONS))]
+ selected_version = OS_LATEST
+
+ for potential_version in supported:
+ if potential_version not in versions_available:
+ continue
+ selected_version = potential_version
+ break
+
+ LOG.debug("Selected version '%s' from %s", selected_version,
+ versions_available)
+ return selected_version
+
def _read_content_path(self, item):
path = item.get('content_path', '').lstrip("/")
path_pieces = path.split("/")
@@ -170,24 +195,7 @@ class BaseReader(object):
path = self._path_join(self.base_path, "openstack", *path_pieces)
return self._path_read(path)
- def _find_working_version(self, version):
- search_versions = [version] + list(OS_VERSIONS)
- for potential_version in search_versions:
- if not potential_version:
- continue
- path = self._path_join(self.base_path, "openstack",
- potential_version)
- if self._path_exists(path):
- if potential_version != version:
- LOG.debug("Version '%s' not available, attempting to use"
- " version '%s' instead", version,
- potential_version)
- return potential_version
- LOG.debug("Version '%s' not available, attempting to use '%s'"
- " instead", version, OS_LATEST)
- return OS_LATEST
-
- def read_v2(self, version=None):
+ def read_v2(self):
"""Reads a version 2 formatted location.
Return a dict with metadata, userdata, ec2-metadata, dsmode,
@@ -196,6 +204,9 @@ class BaseReader(object):
If not a valid location, raise a NonReadable exception.
"""
+ load_json_anytype = functools.partial(
+ util.load_json, root_types=(dict, basestring, list))
+
def datafiles(version):
files = {}
files['metadata'] = (
@@ -214,29 +225,32 @@ class BaseReader(object):
files['vendordata'] = (
self._path_join("openstack", version, 'vendor_data.json'),
False,
- util.load_json,
+ load_json_anytype,
)
return files
- version = self._find_working_version(version)
results = {
'userdata': '',
'version': 2,
}
- data = datafiles(version)
+ data = datafiles(self._find_working_version())
for (name, (path, required, translator)) in data.iteritems():
path = self._path_join(self.base_path, path)
data = None
found = False
- if self._path_exists(path):
- try:
- data = self._path_read(path)
- except IOError:
- raise NonReadable("Failed to read: %s" % path)
- found = True
+ try:
+ data = self._path_read(path)
+ except IOError as e:
+ if not required:
+ LOG.debug("Failed reading optional path %s due"
+ " to: %s", path, e)
+ else:
+ LOG.debug("Failed reading mandatory path %s due"
+ " to: %s", path, e)
else:
- if required:
- raise NonReadable("Missing mandatory path: %s" % path)
+ found = True
+ if required and not found:
+ raise NonReadable("Missing mandatory path: %s" % path)
if found and translator:
try:
data = translator(data)
@@ -304,21 +318,27 @@ class BaseReader(object):
class ConfigDriveReader(BaseReader):
def __init__(self, base_path):
super(ConfigDriveReader, self).__init__(base_path)
+ self._versions = None
def _path_join(self, base, *add_ons):
components = [base] + list(add_ons)
return os.path.join(*components)
- def _path_exists(self, path):
- return os.path.exists(path)
-
def _path_read(self, path):
return util.load_file(path)
+ def _fetch_available_versions(self):
+ if self._versions is None:
+ path = self._path_join(self.base_path, 'openstack')
+ found = [d for d in os.listdir(path)
+ if os.path.isdir(os.path.join(path))]
+ self._versions = found
+ return self._versions
+
def _read_ec2_metadata(self):
path = self._path_join(self.base_path,
'ec2', 'latest', 'meta-data.json')
- if not self._path_exists(path):
+ if not os.path.exists(path):
return {}
else:
try:
@@ -338,7 +358,7 @@ class ConfigDriveReader(BaseReader):
found = {}
for name in FILES_V1.keys():
path = self._path_join(self.base_path, name)
- if self._path_exists(path):
+ if os.path.exists(path):
found[name] = path
if len(found) == 0:
raise NonReadable("%s: no files found" % (self.base_path))
@@ -400,17 +420,26 @@ class MetadataReader(BaseReader):
self.ssl_details = ssl_details
self.timeout = float(timeout)
self.retries = int(retries)
+ self._versions = None
+
+ def _fetch_available_versions(self):
+ # <baseurl>/openstack/ returns a newline separated list of versions
+ if self._versions is not None:
+ return self._versions
+ found = []
+ version_path = self._path_join(self.base_path, "openstack")
+ content = self._path_read(version_path)
+ for line in content.splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ found.append(line)
+ self._versions = found
+ return self._versions
def _path_read(self, path):
- response = url_helper.readurl(path,
- retries=self.retries,
- ssl_details=self.ssl_details,
- timeout=self.timeout)
- return response.contents
- def _path_exists(self, path):
-
- def should_retry_cb(request, cause):
+ def should_retry_cb(_request_args, cause):
try:
code = int(cause.code)
if code >= 400:
@@ -420,15 +449,12 @@ class MetadataReader(BaseReader):
pass
return True
- try:
- response = url_helper.readurl(path,
- retries=self.retries,
- ssl_details=self.ssl_details,
- timeout=self.timeout,
- exception_cb=should_retry_cb)
- return response.ok()
- except IOError:
- return False
+ response = url_helper.readurl(path,
+ retries=self.retries,
+ ssl_details=self.ssl_details,
+ timeout=self.timeout,
+ exception_cb=should_retry_cb)
+ return response.contents
def _path_join(self, base, *add_ons):
return url_helper.combine_url(base, *add_ons)
@@ -437,3 +463,28 @@ class MetadataReader(BaseReader):
return ec2_utils.get_instance_metadata(ssl_details=self.ssl_details,
timeout=self.timeout,
retries=self.retries)
+
+
+def convert_vendordata_json(data, recurse=True):
+ """ data: a loaded json *object* (strings, arrays, dicts).
+ return something suitable for cloudinit vendordata_raw.
+
+ if data is:
+ None: return None
+ string: return string
+ list: return data
+ the list is then processed in UserDataProcessor
+ dict: return convert_vendordata_json(data.get('cloud-init'))
+ """
+ if not data:
+ return None
+ if isinstance(data, (str, unicode, basestring)):
+ return data
+ if isinstance(data, list):
+ return copy.deepcopy(data)
+ if isinstance(data, dict):
+ if recurse is True:
+ return convert_vendordata_json(data.get('cloud-init'),
+ recurse=False)
+ raise ValueError("vendordata['cloud-init'] cannot be dict")
+ raise ValueError("Unknown data type for vendordata: %s" % type(data))
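A short usage sketch of the conversion above, assuming a Python 2 environment where the module is importable from this tree:

from cloudinit.sources.helpers.openstack import convert_vendordata_json

print(convert_vendordata_json(None))                      # None
print(convert_vendordata_json("#cloud-config\n{}"))       # string passes through
print(convert_vendordata_json(["part-one", "part-two"]))  # list is deep-copied
# Dicts are namespaced: only the 'cloud-init' key is handed to cloud-init.
print(convert_vendordata_json({'cloud-init': '#!/bin/sh\necho hi',
                               'other-vendor-key': 'ignored'}))
try:
    convert_vendordata_json({'cloud-init': {'nested': 'dicts rejected'}})
except ValueError as e:
    print(e)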