Diffstat (limited to 'cloudinit/sources')
-rw-r--r--  cloudinit/sources/DataSourceCloudStack.py    31
-rw-r--r--  cloudinit/sources/DataSourceConfigDrive.py     4
-rw-r--r--  cloudinit/sources/DataSourceEc2.py            48
-rw-r--r--  cloudinit/sources/DataSourceOpenStack.py     161
-rw-r--r--  cloudinit/sources/DataSourceSmartOS.py         9
-rw-r--r--  cloudinit/sources/__init__.py                 76
-rw-r--r--  cloudinit/sources/tests/test_init.py          89
7 files changed, 299 insertions, 119 deletions
diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py
index 0df545fc..d4b758f2 100644
--- a/cloudinit/sources/DataSourceCloudStack.py
+++ b/cloudinit/sources/DataSourceCloudStack.py
@@ -68,6 +68,10 @@ class DataSourceCloudStack(sources.DataSource):
 
     dsname = 'CloudStack'
 
+    # Setup read_url parameters per get_url_params.
+    url_max_wait = 120
+    url_timeout = 50
+
     def __init__(self, sys_cfg, distro, paths):
         sources.DataSource.__init__(self, sys_cfg, distro, paths)
         self.seed_dir = os.path.join(paths.seed_dir, 'cs')
@@ -80,33 +84,18 @@ class DataSourceCloudStack(sources.DataSource):
         self.metadata_address = "http://%s/" % (self.vr_addr,)
         self.cfg = {}
 
-    def _get_url_settings(self):
-        mcfg = self.ds_cfg
-        max_wait = 120
-        try:
-            max_wait = int(mcfg.get("max_wait", max_wait))
-        except Exception:
-            util.logexc(LOG, "Failed to get max wait. using %s", max_wait)
+    def wait_for_metadata_service(self):
+        url_params = self.get_url_params()
 
-        if max_wait == 0:
+        if url_params.max_wait_seconds <= 0:
             return False
 
-        timeout = 50
-        try:
-            timeout = int(mcfg.get("timeout", timeout))
-        except Exception:
-            util.logexc(LOG, "Failed to get timeout, using %s", timeout)
-
-        return (max_wait, timeout)
-
-    def wait_for_metadata_service(self):
-        (max_wait, timeout) = self._get_url_settings()
-
         urls = [uhelp.combine_url(self.metadata_address,
                                   'latest/meta-data/instance-id')]
         start_time = time.time()
-        url = uhelp.wait_for_url(urls=urls, max_wait=max_wait,
-                                 timeout=timeout, status_cb=LOG.warn)
+        url = uhelp.wait_for_url(
+            urls=urls, max_wait=url_params.max_wait_seconds,
+            timeout=url_params.timeout_seconds, status_cb=LOG.warn)
 
         if url:
             LOG.debug("Using metadata source: '%s'", url)
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 121cf215..4cb28977 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -43,7 +43,7 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
         self.version = None
         self.ec2_metadata = None
         self._network_config = None
-        self.network_json = None
+        self.network_json = sources.UNSET
         self.network_eni = None
         self.known_macs = None
         self.files = {}
@@ -149,7 +149,7 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
     @property
     def network_config(self):
         if self._network_config is None:
-            if self.network_json is not None:
+            if self.network_json not in (None, sources.UNSET):
                 LOG.debug("network config provided via network_json")
                 self._network_config = openstack.convert_net_json(
                     self.network_json, known_macs=self.known_macs)
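The ConfigDrive hunk above swaps the None default for the shared sources.UNSET sentinel so that "network_json was never crawled" can be told apart from "crawled, but no network data present". A minimal sketch of the resulting membership test (illustrative only, not part of the patch; UNSET mirrors the string sentinel defined later in this diff):

    UNSET = "_unset"

    def pick_network_config(network_json, convert):
        # Only real network JSON gets converted; both None and the sentinel
        # mean "nothing usable yet", so callers fall back to other sources.
        if network_json in (None, UNSET):
            return None
        return convert(network_json)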
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 21e9ef84..968ab3f7 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -27,8 +27,6 @@ SKIP_METADATA_URL_CODES = frozenset([uhelp.NOT_FOUND])
 STRICT_ID_PATH = ("datasource", "Ec2", "strict_id")
 STRICT_ID_DEFAULT = "warn"
 
-_unset = "_unset"
-
 
 class Platforms(object):
     # TODO Rename and move to cloudinit.cloud.CloudNames
@@ -59,15 +57,16 @@ class DataSourceEc2(sources.DataSource):
     # for extended metadata content. IPv6 support comes in 2016-09-02
     extended_metadata_versions = ['2016-09-02']
 
+    # Setup read_url parameters per get_url_params.
+    url_max_wait = 120
+    url_timeout = 50
+
     _cloud_platform = None
 
-    _network_config = _unset  # Used for caching calculated network config v1
+    _network_config = sources.UNSET  # Used to cache calculated network cfg v1
 
     # Whether we want to get network configuration from the metadata service.
-    get_network_metadata = False
-
-    # Track the discovered fallback nic for use in configuration generation.
-    _fallback_interface = None
+    perform_dhcp_setup = False
 
     def __init__(self, sys_cfg, distro, paths):
         super(DataSourceEc2, self).__init__(sys_cfg, distro, paths)
@@ -98,7 +97,7 @@ class DataSourceEc2(sources.DataSource):
         elif self.cloud_platform == Platforms.NO_EC2_METADATA:
             return False
 
-        if self.get_network_metadata:  # Setup networking in init-local stage.
+        if self.perform_dhcp_setup:  # Setup networking in init-local stage.
             if util.is_FreeBSD():
                 LOG.debug("FreeBSD doesn't support running dhclient with -sf")
                 return False
@@ -158,27 +157,11 @@ class DataSourceEc2(sources.DataSource):
         else:
             return self.metadata['instance-id']
 
-    def _get_url_settings(self):
-        mcfg = self.ds_cfg
-        max_wait = 120
-        try:
-            max_wait = int(mcfg.get("max_wait", max_wait))
-        except Exception:
-            util.logexc(LOG, "Failed to get max wait. using %s", max_wait)
-
-        timeout = 50
-        try:
-            timeout = max(0, int(mcfg.get("timeout", timeout)))
-        except Exception:
-            util.logexc(LOG, "Failed to get timeout, using %s", timeout)
-
-        return (max_wait, timeout)
-
     def wait_for_metadata_service(self):
         mcfg = self.ds_cfg
 
-        (max_wait, timeout) = self._get_url_settings()
-        if max_wait <= 0:
+        url_params = self.get_url_params()
+        if url_params.max_wait_seconds <= 0:
             return False
 
         # Remove addresses from the list that wont resolve.
@@ -205,7 +188,8 @@ class DataSourceEc2(sources.DataSource):
 
         start_time = time.time()
         url = uhelp.wait_for_url(
-            urls=urls, max_wait=max_wait, timeout=timeout, status_cb=LOG.warn)
+            urls=urls, max_wait=url_params.max_wait_seconds,
+            timeout=url_params.timeout_seconds, status_cb=LOG.warn)
 
         if url:
             self.metadata_address = url2base[url]
@@ -310,11 +294,11 @@ class DataSourceEc2(sources.DataSource):
     @property
     def network_config(self):
         """Return a network config dict for rendering ENI or netplan files."""
-        if self._network_config != _unset:
+        if self._network_config != sources.UNSET:
            return self._network_config
 
         if self.metadata is None:
-            # this would happen if get_data hadn't been called. leave as _unset
+            # this would happen if get_data hadn't been called. leave as UNSET
             LOG.warning(
                 "Unexpected call to network_config when metadata is None.")
             return None
@@ -353,9 +337,7 @@ class DataSourceEc2(sources.DataSource):
                 self._fallback_interface = _legacy_fbnic
                 self.fallback_nic = None
             else:
-                self._fallback_interface = net.find_fallback_nic()
-                if self._fallback_interface is None:
-                    LOG.warning("Did not find a fallback interface on EC2.")
+                return super(DataSourceEc2, self).fallback_interface
         return self._fallback_interface
 
     def _crawl_metadata(self):
@@ -390,7 +372,7 @@ class DataSourceEc2Local(DataSourceEc2):
     metadata service. If the metadata service provides network configuration
     then render the network configuration for that instance based on metadata.
     """
-    get_network_metadata = True  # Get metadata network config if present
+    perform_dhcp_setup = True  # Use dhcp before querying metadata
 
     def get_data(self):
         supported_platforms = (Platforms.AWS,)
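DataSourceEc2 (like CloudStack above) now expresses its timing policy as the class attributes url_max_wait and url_timeout and leaves the parsing to the shared DataSource.get_url_params() added later in this diff. A minimal sketch of that pattern using a hypothetical subclass (DataSourceHypothetical is illustrative and not part of cloud-init):

    from cloudinit.sources import DataSource

    class DataSourceHypothetical(DataSource):
        dsname = 'Hypothetical'
        url_max_wait = 120   # overall budget for wait_for_url()
        url_timeout = 50     # per-request timeout

        def wait_for_metadata_service(self):
            # Any ds_cfg overrides are folded in by the base class.
            url_params = self.get_url_params()
            if url_params.max_wait_seconds <= 0:
                return False  # a non-positive max_wait means "do not wait"
            # ...continue with url_helper.wait_for_url() as the real sources do
            return True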
""" - get_network_metadata = True # Get metadata network config if present + perform_dhcp_setup = True # Use dhcp before querying metadata def get_data(self): supported_platforms = (Platforms.AWS,) diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index fb166ae1..1a12a3f1 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -7,6 +7,7 @@ import time from cloudinit import log as logging +from cloudinit.net.dhcp import EphemeralDHCPv4, NoDHCPLeaseError from cloudinit import sources from cloudinit import url_helper from cloudinit import util @@ -27,46 +28,25 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): dsname = "OpenStack" + _network_config = sources.UNSET # Used to cache calculated network cfg v1 + + # Whether we want to get network configuration from the metadata service. + perform_dhcp_setup = False + def __init__(self, sys_cfg, distro, paths): super(DataSourceOpenStack, self).__init__(sys_cfg, distro, paths) self.metadata_address = None self.ssl_details = util.fetch_ssl_details(self.paths) self.version = None self.files = {} - self.ec2_metadata = None + self.ec2_metadata = sources.UNSET + self.network_json = sources.UNSET def __str__(self): root = sources.DataSource.__str__(self) mstr = "%s [%s,ver=%s]" % (root, self.dsmode, self.version) return mstr - def _get_url_settings(self): - # TODO(harlowja): this is shared with ec2 datasource, we should just - # move it to a shared location instead... - # Note: the defaults here are different though. - - # max_wait < 0 indicates do not wait - max_wait = -1 - timeout = 10 - retries = 5 - - try: - max_wait = int(self.ds_cfg.get("max_wait", max_wait)) - except Exception: - util.logexc(LOG, "Failed to get max wait. using %s", max_wait) - - try: - timeout = max(0, int(self.ds_cfg.get("timeout", timeout))) - except Exception: - util.logexc(LOG, "Failed to get timeout, using %s", timeout) - - try: - retries = int(self.ds_cfg.get("retries", retries)) - except Exception: - util.logexc(LOG, "Failed to get retries. 
using %s", retries) - - return (max_wait, timeout, retries) - def wait_for_metadata_service(self): urls = self.ds_cfg.get("metadata_urls", [DEF_MD_URL]) filtered = [x for x in urls if util.is_resolvable_url(x)] @@ -86,10 +66,11 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): md_urls.append(md_url) url2base[md_url] = url - (max_wait, timeout, _retries) = self._get_url_settings() + url_params = self.get_url_params() start_time = time.time() - avail_url = url_helper.wait_for_url(urls=md_urls, max_wait=max_wait, - timeout=timeout) + avail_url = url_helper.wait_for_url( + urls=md_urls, max_wait=url_params.max_wait_seconds, + timeout=url_params.timeout_seconds) if avail_url: LOG.debug("Using metadata source: '%s'", url2base[avail_url]) else: @@ -99,38 +80,64 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): self.metadata_address = url2base.get(avail_url) return bool(avail_url) - def _get_data(self): - try: - if not self.wait_for_metadata_service(): - return False - except IOError: - return False + def check_instance_id(self, sys_cfg): + # quickly (local check only) if self.instance_id is still valid + return sources.instance_id_matches_system_uuid(self.get_instance_id()) - (_max_wait, timeout, retries) = self._get_url_settings() + @property + def network_config(self): + """Return a network config dict for rendering ENI or netplan files.""" + if self._network_config != sources.UNSET: + return self._network_config + + # RELEASE_BLOCKER: SRU to Xenial and Artful SRU should not provide + # network_config by default unless configured in /etc/cloud/cloud.cfg*. + # Patch Xenial and Artful before release to default to False. + if util.is_false(self.ds_cfg.get('apply_network_config', True)): + self._network_config = None + return self._network_config + if self.network_json == sources.UNSET: + # this would happen if get_data hadn't been called. leave as UNSET + LOG.warning( + 'Unexpected call to network_config when network_json is None.') + return None + + LOG.debug('network config provided via network_json') + self._network_config = openstack.convert_net_json( + self.network_json, known_macs=None) + return self._network_config - try: - results = util.log_time(LOG.debug, - 'Crawl of openstack metadata service', - read_metadata_service, - args=[self.metadata_address], - kwargs={'ssl_details': self.ssl_details, - 'retries': retries, - 'timeout': timeout}) - except openstack.NonReadable: - return False - except (openstack.BrokenMetadata, IOError): - util.logexc(LOG, "Broken metadata address %s", - self.metadata_address) - return False + def _get_data(self): + """Crawl metadata, parse and persist that data for this instance. + + @return: True when metadata discovered indicates OpenStack datasource. + False when unable to contact metadata service or when metadata + format is invalid or disabled. + """ + if self.perform_dhcp_setup: # Setup networking in init-local stage. 
@@ -145,9 +152,50 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
 
         return True
 
-    def check_instance_id(self, sys_cfg):
-        # quickly (local check only) if self.instance_id is still valid
-        return sources.instance_id_matches_system_uuid(self.get_instance_id())
+    def _crawl_metadata(self):
+        """Crawl metadata service when available.
+
+        @returns: Dictionary with all metadata discovered for this datasource.
+        @raise: InvalidMetaDataException on unreadable or broken
+            metadata.
+        """
+        try:
+            if not self.wait_for_metadata_service():
+                raise sources.InvalidMetaDataException(
+                    'No active metadata service found')
+        except IOError as e:
+            raise sources.InvalidMetaDataException(
+                'IOError contacting metadata service: {error}'.format(
+                    error=str(e)))
+
+        url_params = self.get_url_params()
+
+        try:
+            result = util.log_time(
+                LOG.debug, 'Crawl of openstack metadata service',
+                read_metadata_service, args=[self.metadata_address],
+                kwargs={'ssl_details': self.ssl_details,
+                        'retries': url_params.num_retries,
+                        'timeout': url_params.timeout_seconds})
+        except openstack.NonReadable as e:
+            raise sources.InvalidMetaDataException(str(e))
+        except (openstack.BrokenMetadata, IOError):
+            msg = 'Broken metadata address {addr}'.format(
+                addr=self.metadata_address)
+            raise sources.InvalidMetaDataException(msg)
+        return result
+
+
+class DataSourceOpenStackLocal(DataSourceOpenStack):
+    """Run in init-local using a dhcp discovery prior to metadata crawl.
+
+    In init-local, no network is available. This subclass sets up minimal
+    networking with dhclient on a viable nic so that it can talk to the
+    metadata service. If the metadata service provides network configuration
+    then render the network configuration for that instance based on metadata.
+    """
+
+    perform_dhcp_setup = True  # Get metadata network config if present
 
 
 def read_metadata_service(base_url, ssl_details=None,
@@ -159,6 +207,7 @@ def read_metadata_service(base_url, ssl_details=None,
 
 # Used to match classes to dependencies
 datasources = [
+    (DataSourceOpenStackLocal, (sources.DEP_FILESYSTEM,)),
     (DataSourceOpenStack, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
 ]
 
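DataSourceOpenStackLocal reuses the same init-local trick as DataSourceEc2Local: bring up a throwaway DHCP lease just long enough to crawl the metadata service. A condensed sketch of that flow (the helper function name is illustrative; the imports are the ones added at the top of this file):

    from cloudinit.net.dhcp import EphemeralDHCPv4, NoDHCPLeaseError

    def crawl_in_init_local(ds):
        """ds is a DataSourceOpenStackLocal instance; returns crawl results."""
        try:
            # Lease an address on the fallback nic, crawl, then tear it down.
            with EphemeralDHCPv4(ds.fallback_interface):
                return ds._crawl_metadata()
        except NoDHCPLeaseError:
            return None  # no link yet; the init-network datasource can retry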
+ """ + + perform_dhcp_setup = True # Get metadata network config if present def read_metadata_service(base_url, ssl_details=None, @@ -159,6 +207,7 @@ def read_metadata_service(base_url, ssl_details=None, # Used to match classes to dependencies datasources = [ + (DataSourceOpenStackLocal, (sources.DEP_FILESYSTEM,)), (DataSourceOpenStack, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), ] diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index fcb46b14..c91e4d59 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -165,9 +165,8 @@ class DataSourceSmartOS(sources.DataSource): dsname = "Joyent" - _unset = "_unset" - smartos_type = _unset - md_client = _unset + smartos_type = sources.UNSET + md_client = sources.UNSET def __init__(self, sys_cfg, distro, paths): sources.DataSource.__init__(self, sys_cfg, distro, paths) @@ -189,12 +188,12 @@ class DataSourceSmartOS(sources.DataSource): return "%s [client=%s]" % (root, self.md_client) def _init(self): - if self.smartos_type == self._unset: + if self.smartos_type == sources.UNSET: self.smartos_type = get_smartos_environ() if self.smartos_type is None: self.md_client = None - if self.md_client == self._unset: + if self.md_client == sources.UNSET: self.md_client = jmc_client_factory( smartos_type=self.smartos_type, metadata_sockfile=self.ds_cfg['metadata_sockfile'], diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index df0b374a..90d74575 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -9,6 +9,7 @@ # This file is part of cloud-init. See LICENSE file for license information. import abc +from collections import namedtuple import copy import json import os @@ -17,6 +18,7 @@ import six from cloudinit.atomic_helper import write_json from cloudinit import importer from cloudinit import log as logging +from cloudinit import net from cloudinit import type_utils from cloudinit import user_data as ud from cloudinit import util @@ -41,6 +43,8 @@ INSTANCE_JSON_FILE = 'instance-data.json' # Key which can be provide a cloud's official product name to cloud-init METADATA_CLOUD_NAME_KEY = 'cloud-name' +UNSET = "_unset" + LOG = logging.getLogger(__name__) @@ -48,6 +52,11 @@ class DataSourceNotFoundException(Exception): pass +class InvalidMetaDataException(Exception): + """Raised when metadata is broken, unavailable or disabled.""" + pass + + def process_base64_metadata(metadata, key_path=''): """Strip ci-b64 prefix and return metadata with base64-encoded-keys set.""" md_copy = copy.deepcopy(metadata) @@ -68,6 +77,10 @@ def process_base64_metadata(metadata, key_path=''): return md_copy +URLParams = namedtuple( + 'URLParms', ['max_wait_seconds', 'timeout_seconds', 'num_retries']) + + @six.add_metaclass(abc.ABCMeta) class DataSource(object): @@ -81,6 +94,14 @@ class DataSource(object): # Cached cloud_name as determined by _get_cloud_name _cloud_name = None + # Track the discovered fallback nic for use in configuration generation. 
+ _fallback_interface = None + + # read_url_params + url_max_wait = -1 # max_wait < 0 means do not wait + url_timeout = 10 # timeout for each metadata url read attempt + url_retries = 5 # number of times to retry url upon 404 + def __init__(self, sys_cfg, distro, paths, ud_proc=None): self.sys_cfg = sys_cfg self.distro = distro @@ -128,6 +149,14 @@ class DataSource(object): 'meta-data': self.metadata, 'user-data': self.get_userdata_raw(), 'vendor-data': self.get_vendordata_raw()}} + if hasattr(self, 'network_json'): + network_json = getattr(self, 'network_json') + if network_json != UNSET: + instance_data['ds']['network_json'] = network_json + if hasattr(self, 'ec2_metadata'): + ec2_metadata = getattr(self, 'ec2_metadata') + if ec2_metadata != UNSET: + instance_data['ds']['ec2_metadata'] = ec2_metadata instance_data.update( self._get_standardized_metadata()) try: @@ -149,6 +178,42 @@ class DataSource(object): 'Subclasses of DataSource must implement _get_data which' ' sets self.metadata, vendordata_raw and userdata_raw.') + def get_url_params(self): + """Return the Datasource's prefered url_read parameters. + + Subclasses may override url_max_wait, url_timeout, url_retries. + + @return: A URLParams object with max_wait_seconds, timeout_seconds, + num_retries. + """ + max_wait = self.url_max_wait + try: + max_wait = int(self.ds_cfg.get("max_wait", self.url_max_wait)) + except ValueError: + util.logexc( + LOG, "Config max_wait '%s' is not an int, using default '%s'", + self.ds_cfg.get("max_wait"), max_wait) + + timeout = self.url_timeout + try: + timeout = max( + 0, int(self.ds_cfg.get("timeout", self.url_timeout))) + except ValueError: + timeout = self.url_timeout + util.logexc( + LOG, "Config timeout '%s' is not an int, using default '%s'", + self.ds_cfg.get('timeout'), timeout) + + retries = self.url_retries + try: + retries = int(self.ds_cfg.get("retries", self.url_retries)) + except Exception: + util.logexc( + LOG, "Config retries '%s' is not an int, using default '%s'", + self.ds_cfg.get('retries'), retries) + + return URLParams(max_wait, timeout, retries) + def get_userdata(self, apply_filter=False): if self.userdata is None: self.userdata = self.ud_proc.process(self.get_userdata_raw()) @@ -162,6 +227,17 @@ class DataSource(object): return self.vendordata @property + def fallback_interface(self): + """Determine the network interface used during local network config.""" + if self._fallback_interface is None: + self._fallback_interface = net.find_fallback_nic() + if self._fallback_interface is None: + LOG.warning( + "Did not find a fallback interface on %s.", + self.cloud_name) + return self._fallback_interface + + @property def cloud_name(self): """Return lowercase cloud name as determined by the datasource. 
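Because URLParams is a namedtuple, call sites can treat it as a record or unpack it exactly like the old (max_wait, timeout, retries) tuples, and the tests below compare it directly against a plain tuple. A small standalone illustration of that behavior (values are arbitrary):

    from collections import namedtuple

    URLParams = namedtuple(
        'URLParms', ['max_wait_seconds', 'timeout_seconds', 'num_retries'])

    params = URLParams(120, 50, 5)
    assert params.timeout_seconds == 50        # attribute access
    assert params == (120, 50, 5)              # equal to a plain tuple
    max_wait, timeout, retries = params        # unpacks like the old tuples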
diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py
index 452e9219..d5bc98a4 100644
--- a/cloudinit/sources/tests/test_init.py
+++ b/cloudinit/sources/tests/test_init.py
@@ -17,6 +17,7 @@ from cloudinit import util
 
 class DataSourceTestSubclassNet(DataSource):
     dsname = 'MyTestSubclass'
+    url_max_wait = 55
 
     def __init__(self, sys_cfg, distro, paths, custom_userdata=None):
         super(DataSourceTestSubclassNet, self).__init__(
@@ -70,8 +71,7 @@ class TestDataSource(CiTestCase):
         """Init uses DataSource.dsname for sourcing ds_cfg."""
         sys_cfg = {'datasource': {'MyTestSubclass': {'key2': False}}}
         distro = 'distrotest'  # generally should be a Distro object
-        paths = Paths({})
-        datasource = DataSourceTestSubclassNet(sys_cfg, distro, paths)
+        datasource = DataSourceTestSubclassNet(sys_cfg, distro, self.paths)
         self.assertEqual({'key2': False}, datasource.ds_cfg)
 
     def test_str_is_classname(self):
@@ -81,6 +81,91 @@ class TestDataSource(CiTestCase):
             'DataSourceTestSubclassNet',
             str(DataSourceTestSubclassNet('', '', self.paths)))
 
+    def test_datasource_get_url_params_defaults(self):
+        """get_url_params default url config settings for the datasource."""
+        params = self.datasource.get_url_params()
+        self.assertEqual(params.max_wait_seconds, self.datasource.url_max_wait)
+        self.assertEqual(params.timeout_seconds, self.datasource.url_timeout)
+        self.assertEqual(params.num_retries, self.datasource.url_retries)
+
+    def test_datasource_get_url_params_subclassed(self):
+        """Subclasses can override get_url_params defaults."""
+        sys_cfg = {'datasource': {'MyTestSubclass': {'key2': False}}}
+        distro = 'distrotest'  # generally should be a Distro object
+        datasource = DataSourceTestSubclassNet(sys_cfg, distro, self.paths)
+        expected = (datasource.url_max_wait, datasource.url_timeout,
+                    datasource.url_retries)
+        url_params = datasource.get_url_params()
+        self.assertNotEqual(self.datasource.get_url_params(), url_params)
+        self.assertEqual(expected, url_params)
+
+    def test_datasource_get_url_params_ds_config_override(self):
+        """Datasource configuration options can override url param defaults."""
+        sys_cfg = {
+            'datasource': {
+                'MyTestSubclass': {
+                    'max_wait': '1', 'timeout': '2', 'retries': '3'}}}
+        datasource = DataSourceTestSubclassNet(
+            sys_cfg, self.distro, self.paths)
+        expected = (1, 2, 3)
+        url_params = datasource.get_url_params()
+        self.assertNotEqual(
+            (datasource.url_max_wait, datasource.url_timeout,
+             datasource.url_retries),
+            url_params)
+        self.assertEqual(expected, url_params)
+
+    def test_datasource_get_url_params_is_zero_or_greater(self):
+        """get_url_params ignores timeouts with a value below 0."""
+        # Set an override that is below 0 which gets ignored.
+        sys_cfg = {'datasource': {'_undef': {'timeout': '-1'}}}
+        datasource = DataSource(sys_cfg, self.distro, self.paths)
+        (_max_wait, timeout, _retries) = datasource.get_url_params()
+        self.assertEqual(0, timeout)
+
+    def test_datasource_get_url_uses_defaults_on_errors(self):
+        """On invalid system config values for url_params defaults are used."""
+        # All invalid values should be logged
+        sys_cfg = {'datasource': {
+            '_undef': {
+                'max_wait': 'nope', 'timeout': 'bug', 'retries': 'nonint'}}}
+        datasource = DataSource(sys_cfg, self.distro, self.paths)
+        url_params = datasource.get_url_params()
+        expected = (datasource.url_max_wait, datasource.url_timeout,
+                    datasource.url_retries)
+        self.assertEqual(expected, url_params)
+        logs = self.logs.getvalue()
+        expected_logs = [
+            "Config max_wait 'nope' is not an int, using default '-1'",
+            "Config timeout 'bug' is not an int, using default '10'",
+            "Config retries 'nonint' is not an int, using default '5'",
+        ]
+        for log in expected_logs:
+            self.assertIn(log, logs)
+
+    @mock.patch('cloudinit.sources.net.find_fallback_nic')
+    def test_fallback_interface_is_discovered(self, m_get_fallback_nic):
+        """The fallback_interface is discovered via find_fallback_nic."""
+        m_get_fallback_nic.return_value = 'nic9'
+        self.assertEqual('nic9', self.datasource.fallback_interface)
+
+    @mock.patch('cloudinit.sources.net.find_fallback_nic')
+    def test_fallback_interface_logs_undiscovered(self, m_get_fallback_nic):
+        """Log a warning when fallback_interface can not discover the nic."""
+        self.datasource._cloud_name = 'MySupahCloud'
+        m_get_fallback_nic.return_value = None  # Couldn't discover nic
+        self.assertIsNone(self.datasource.fallback_interface)
+        self.assertEqual(
+            'WARNING: Did not find a fallback interface on MySupahCloud.\n',
+            self.logs.getvalue())
+
+    @mock.patch('cloudinit.sources.net.find_fallback_nic')
+    def test_wb_fallback_interface_is_cached(self, m_get_fallback_nic):
+        """The fallback_interface is cached and won't be rediscovered."""
+        self.datasource._fallback_interface = 'nic10'
+        self.assertEqual('nic10', self.datasource.fallback_interface)
+        m_get_fallback_nic.assert_not_called()
+
     def test__get_data_unimplemented(self):
         """Raise an error when _get_data is not implemented."""
         with self.assertRaises(NotImplementedError) as context_manager:
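A follow-up test could pin the per-datasource defaults this commit introduces, in the same spirit as the tests above. A hypothetical sketch (not part of the commit; values come from the class attributes added earlier in this diff):

    from cloudinit.sources import DataSourceCloudStack, DataSourceEc2

    def test_url_param_class_defaults():
        # CloudStack and Ec2 both declare a 120s budget and 50s per-request timeout.
        assert DataSourceCloudStack.DataSourceCloudStack.url_max_wait == 120
        assert DataSourceCloudStack.DataSourceCloudStack.url_timeout == 50
        assert DataSourceEc2.DataSourceEc2.url_max_wait == 120
        assert DataSourceEc2.DataSourceEc2.url_timeout == 50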