author    | Chad Smith <chad.smith@canonical.com> | 2018-05-23 16:08:43 -0600
committer | Chad Smith <chad.smith@canonical.com> | 2018-05-23 16:08:43 -0600
commit    | cd1de5f47ab6b82f2c6fd61a5f6681f33b3e5705 (patch)
tree      | fc18fa3c21a5f9755cb893cce6b5004d5b2465a1 /cloudinit/sources/__init__.py
parent    | 12799d96f85e210c8e1216a3b06d8a98468fedd7 (diff)
openstack: Allow discovery in init-local using dhclient in a sandbox.
Network has not yet been configured in the init-local stage, so the
OpenStack datasource will use a DHCP client to temporarily obtain an IPv4
address and query the metadata service at http://169.254.169.254 for
network_data.json. If present, the datasource will return network_config
version 1 configuration based on that network_data.json content.
Previously, the OpenStack datasource only set up DHCP on the fallback
interface, so this is a change in behavior: cloud-init now reacts to the
full network configuration provided by OpenStack.
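As a rough sketch of that crawl (not the patch's actual code), the local
discovery amounts to: bring up an ephemeral DHCP lease, then fetch
network_data.json from the metadata service. Only the 169.254.169.254
address comes from the commit message; the metadata path and function name
below are illustrative assumptions.

# Illustrative sketch only; the metadata path and helper name are assumptions.
import json
import urllib.request

METADATA_IP = 'http://169.254.169.254'  # address from the commit message
NETWORK_DATA_PATH = '/openstack/latest/network_data.json'  # assumed path


def crawl_network_data(timeout=10):
    """Read network_data.json; assumes an ephemeral DHCP lease is already up."""
    with urllib.request.urlopen(METADATA_IP + NETWORK_DATA_PATH,
                                timeout=timeout) as resp:
        return json.loads(resp.read().decode('utf-8'))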
Also significant for OpenStack is the separation of a _crawl_data
operation from get_data(). _crawl_data walks the available metadata
services and returns a dict of discovered content; get_data() consumes
that crawled data, caches it on the datasource and reacts to it.
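A minimal sketch of that split (the class name and dict keys here are
illustrative; only the crawl-then-cache pattern comes from the commit
message):

class ExampleDataSource(object):
    """Illustrative only: shows the crawl/consume separation, not real code."""

    def _crawl_data(self):
        # Walk the available metadata services and return what was found.
        return {'metadata': {}, 'networkdata': {}}  # placeholder content

    def _get_data(self):
        crawled_data = self._crawl_data()
        if not crawled_data:
            return False
        # Cache the crawled content on the datasource and react to it.
        self.metadata = crawled_data.get('metadata')
        self.network_json = crawled_data.get('networkdata')
        return True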
/run/cloud-init/instance-data.json now publishes a network_json or
ec2_metadata key if that data is present on any datasource.
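For example, a script running later in boot could read those keys back (a
sketch; the 'ds' parent key is taken from the patch below, and the two keys
are only present when the datasource discovered that content):

import json

with open('/run/cloud-init/instance-data.json') as stream:
    instance_data = json.load(stream)

# Only present when the datasource discovered that content.
network_json = instance_data['ds'].get('network_json')
ec2_metadata = instance_data['ds'].get('ec2_metadata')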
The main reasons for separating crawl from get_data:
* Enable performance metrics of cloud-init's metadata crawls on each boot.
* Enable cloud-init modules and scripts to query and consume metadata
content which may have been updated/changed after cloud-init's initial
cache during instance boot (think hotplug).
Also generalize common logic into the base DataSource class/module:
* Move a common UNSET variable up into the base datasource module; fix
EC2, ConfigDrive, OpenStack and SmartOS to use that global.
* Drop get_url_settings from Ec2, CloudStack and OpenStack and generalize
DataSource.get_url_params(). Allow subclasses to override url_max_wait,
url_timeout and url_retries params (see the usage sketch below).
* Rename get_network_metadata bool to perform_dhcp_setup as it designates
whether EphemeralDHCPv4 setup is required before crawling metadata.
LP: #1749717
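Usage sketch for the generalized URL parameters: the class attributes and
the URLParams field names appear in the patch below, but the subclass
itself is hypothetical and the override values are arbitrary examples.

from cloudinit.sources import DataSource


class DataSourceExample(DataSource):
    # Hypothetical subclass overriding the new class-level defaults.
    url_max_wait = 120  # total seconds to wait for a metadata service
    url_timeout = 10    # per-request timeout
    url_retries = 5     # retries per URL

    def _get_data(self):
        url_params = self.get_url_params()
        # A real datasource would pass max_wait_seconds, timeout_seconds and
        # num_retries to its URL-waiting/reading helpers while crawling.
        return url_params is not None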
Diffstat (limited to 'cloudinit/sources/__init__.py')
-rw-r--r-- | cloudinit/sources/__init__.py | 76
1 file changed, 76 insertions, 0 deletions
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index df0b374a..90d74575 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -9,6 +9,7 @@
 # This file is part of cloud-init. See LICENSE file for license information.
 
 import abc
+from collections import namedtuple
 import copy
 import json
 import os
@@ -17,6 +18,7 @@ import six
 from cloudinit.atomic_helper import write_json
 from cloudinit import importer
 from cloudinit import log as logging
+from cloudinit import net
 from cloudinit import type_utils
 from cloudinit import user_data as ud
 from cloudinit import util
@@ -41,6 +43,8 @@ INSTANCE_JSON_FILE = 'instance-data.json'
 # Key which can be provide a cloud's official product name to cloud-init
 METADATA_CLOUD_NAME_KEY = 'cloud-name'
 
+UNSET = "_unset"
+
 LOG = logging.getLogger(__name__)
 
 
@@ -48,6 +52,11 @@ class DataSourceNotFoundException(Exception):
     pass
 
 
+class InvalidMetaDataException(Exception):
+    """Raised when metadata is broken, unavailable or disabled."""
+    pass
+
+
 def process_base64_metadata(metadata, key_path=''):
     """Strip ci-b64 prefix and return metadata with base64-encoded-keys set."""
     md_copy = copy.deepcopy(metadata)
@@ -68,6 +77,10 @@ def process_base64_metadata(metadata, key_path=''):
     return md_copy
 
 
+URLParams = namedtuple(
+    'URLParms', ['max_wait_seconds', 'timeout_seconds', 'num_retries'])
+
+
 @six.add_metaclass(abc.ABCMeta)
 class DataSource(object):
 
@@ -81,6 +94,14 @@ class DataSource(object):
     # Cached cloud_name as determined by _get_cloud_name
     _cloud_name = None
 
+    # Track the discovered fallback nic for use in configuration generation.
+    _fallback_interface = None
+
+    # read_url_params
+    url_max_wait = -1   # max_wait < 0 means do not wait
+    url_timeout = 10    # timeout for each metadata url read attempt
+    url_retries = 5     # number of times to retry url upon 404
+
     def __init__(self, sys_cfg, distro, paths, ud_proc=None):
         self.sys_cfg = sys_cfg
         self.distro = distro
@@ -128,6 +149,14 @@ class DataSource(object):
             'meta-data': self.metadata,
             'user-data': self.get_userdata_raw(),
             'vendor-data': self.get_vendordata_raw()}}
+        if hasattr(self, 'network_json'):
+            network_json = getattr(self, 'network_json')
+            if network_json != UNSET:
+                instance_data['ds']['network_json'] = network_json
+        if hasattr(self, 'ec2_metadata'):
+            ec2_metadata = getattr(self, 'ec2_metadata')
+            if ec2_metadata != UNSET:
+                instance_data['ds']['ec2_metadata'] = ec2_metadata
         instance_data.update(
             self._get_standardized_metadata())
         try:
@@ -149,6 +178,42 @@ class DataSource(object):
             'Subclasses of DataSource must implement _get_data which'
             ' sets self.metadata, vendordata_raw and userdata_raw.')
 
+    def get_url_params(self):
+        """Return the Datasource's prefered url_read parameters.
+
+        Subclasses may override url_max_wait, url_timeout, url_retries.
+
+        @return: A URLParams object with max_wait_seconds, timeout_seconds,
+            num_retries.
+        """
+        max_wait = self.url_max_wait
+        try:
+            max_wait = int(self.ds_cfg.get("max_wait", self.url_max_wait))
+        except ValueError:
+            util.logexc(
+                LOG, "Config max_wait '%s' is not an int, using default '%s'",
+                self.ds_cfg.get("max_wait"), max_wait)
+
+        timeout = self.url_timeout
+        try:
+            timeout = max(
+                0, int(self.ds_cfg.get("timeout", self.url_timeout)))
+        except ValueError:
+            timeout = self.url_timeout
+            util.logexc(
+                LOG, "Config timeout '%s' is not an int, using default '%s'",
+                self.ds_cfg.get('timeout'), timeout)
+
+        retries = self.url_retries
+        try:
+            retries = int(self.ds_cfg.get("retries", self.url_retries))
+        except Exception:
+            util.logexc(
+                LOG, "Config retries '%s' is not an int, using default '%s'",
+                self.ds_cfg.get('retries'), retries)
+
+        return URLParams(max_wait, timeout, retries)
+
     def get_userdata(self, apply_filter=False):
         if self.userdata is None:
             self.userdata = self.ud_proc.process(self.get_userdata_raw())
@@ -162,6 +227,17 @@ class DataSource(object):
         return self.vendordata
 
     @property
+    def fallback_interface(self):
+        """Determine the network interface used during local network config."""
+        if self._fallback_interface is None:
+            self._fallback_interface = net.find_fallback_nic()
+            if self._fallback_interface is None:
+                LOG.warning(
+                    "Did not find a fallback interface on %s.",
+                    self.cloud_name)
+        return self._fallback_interface
+
+    @property
     def cloud_name(self):
         """Return lowercase cloud name as determined by the datasource.