author    | Chad Smith <chad.smith@canonical.com> | 2018-07-01 16:46:23 -0600
committer | Chad Smith <chad.smith@canonical.com> | 2018-07-01 16:46:23 -0600
commit    | be9ecc12823607b4709b64408aee137bfdfc7d01 (patch)
tree      | 808ef44d9de159bf3307f95e1f8e2d6d600c7e91 /cloudinit/sources
parent    | 5858136215753cffc2080c85429a3c7a3754741c (diff)
update_metadata: a datasource can support network re-config every boot
Very basic event type definitions are now in place to distinguish regular
'boot' events from 'new instance (first boot)' events. Event types are
handed to a datasource.update_metadata method, which can determine whether
to refresh its metadata and re-render configuration based on that source
event.

A datasource can 'subscribe' to an event by setting the update_events
attribute on the datasource class, which describes which config scope is
updated by a list of matching events. By default, datasources have the
following update_events: {'network': [EventType.BOOT_NEW_INSTANCE]}.
This setting means the datasource will rewrite network configuration only
on the first boot of a new instance, i.e. when the instance id changes.
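For illustration, a minimal sketch (not part of this commit) of a
hypothetical datasource subclass that opts in to re-rendering network
configuration on every boot, using the attributes this change introduces:

    from cloudinit.event import EventType
    from cloudinit.sources import DataSource


    class DataSourceExample(DataSource):
        """Hypothetical datasource regenerating network config each boot."""

        # Subscribe the 'network' config scope to regular boot events in
        # addition to the default new-instance (first boot) event.
        update_events = {
            'network': [EventType.BOOT, EventType.BOOT_NEW_INSTANCE]}

        def _get_data(self):
            # A real datasource would crawl its metadata service here and
            # set self.metadata, self.userdata_raw, self.vendordata_raw.
            self.metadata = {'instance-id': 'i-example'}
            return True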
New methods are now present on the datasource:
- clear_cached_attrs: Resets cached datasource attributes to the values
  listed in datasource.cached_attr_defaults. This is performed before
  processing freshly crawled metadata to avoid keeping old/invalid
  cached data around.
- update_metadata: accepts source_event_types to determine whether the
  metadata should be crawled again and processed (see the sketch after
  this list).
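A minimal sketch of the intended call flow (the caller shown here is an
assumption for illustration; within this commit the only caller is
find_source, which passes EventType.BOOT_NEW_INSTANCE):

    from cloudinit.event import EventType


    def refresh_for_boot(datasource):
        """Ask a DataSource to refresh itself for a regular boot event."""
        # update_metadata only acts if the datasource subscribes to BOOT in
        # its update_events; otherwise it returns False and nothing changes.
        if datasource.update_metadata([EventType.BOOT]):
            # Cached attributes (including the per-scope _network_config)
            # were reset and get_data() re-crawled metadata, so network
            # config can be re-rendered from fresh data.
            return True
        return False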
Diffstat (limited to 'cloudinit/sources')
-rw-r--r-- | cloudinit/sources/__init__.py        | 78
-rw-r--r-- | cloudinit/sources/tests/test_init.py | 83
2 files changed, 159 insertions(+), 2 deletions(-)
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 90d74575..f424316a 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -19,6 +19,7 @@ from cloudinit.atomic_helper import write_json
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import net
+from cloudinit.event import EventType
 from cloudinit import type_utils
 from cloudinit import user_data as ud
 from cloudinit import util
@@ -102,6 +103,25 @@ class DataSource(object):
     url_timeout = 10    # timeout for each metadata url read attempt
     url_retries = 5     # number of times to retry url upon 404
 
+    # The datasource defines a list of supported EventTypes during which
+    # the datasource can react to changes in metadata and regenerate
+    # network configuration on metadata changes.
+    # A datasource which supports writing network config on each system boot
+    # would set update_events = {'network': [EventType.BOOT]}
+
+    # Default: generate network config on new instance id (first boot).
+    update_events = {'network': [EventType.BOOT_NEW_INSTANCE]}
+
+    # N-tuple listing default values for any metadata-related class
+    # attributes cached on an instance by a process_data runs. These attribute
+    # values are reset via clear_cached_attrs during any update_metadata call.
+    cached_attr_defaults = (
+        ('ec2_metadata', UNSET), ('network_json', UNSET),
+        ('metadata', {}), ('userdata', None), ('userdata_raw', None),
+        ('vendordata', None), ('vendordata_raw', None))
+
+    _dirty_cache = False
+
     def __init__(self, sys_cfg, distro, paths, ud_proc=None):
         self.sys_cfg = sys_cfg
         self.distro = distro
@@ -134,11 +154,31 @@ class DataSource(object):
             'region': self.region,
             'availability-zone': self.availability_zone}}
 
+    def clear_cached_attrs(self, attr_defaults=()):
+        """Reset any cached metadata attributes to datasource defaults.
+
+        @param attr_defaults: Optional tuple of (attr, value) pairs to
+            set instead of cached_attr_defaults.
+        """
+        if not self._dirty_cache:
+            return
+        if attr_defaults:
+            attr_values = attr_defaults
+        else:
+            attr_values = self.cached_attr_defaults
+
+        for attribute, value in attr_values:
+            if hasattr(self, attribute):
+                setattr(self, attribute, value)
+        if not attr_defaults:
+            self._dirty_cache = False
+
     def get_data(self):
         """Datasources implement _get_data to setup metadata and userdata_raw.
 
         Minimally, the datasource should return a boolean True on success.
         """
+        self._dirty_cache = True
         return_value = self._get_data()
         json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
         if not return_value:
@@ -174,6 +214,7 @@ class DataSource(object):
         return return_value
 
     def _get_data(self):
+        """Walk metadata sources, process crawled data and save attributes."""
         raise NotImplementedError(
             'Subclasses of DataSource must implement _get_data which'
             ' sets self.metadata, vendordata_raw and userdata_raw.')
@@ -416,6 +457,41 @@ class DataSource(object):
     def get_package_mirror_info(self):
         return self.distro.get_package_mirror_info(data_source=self)
 
+    def update_metadata(self, source_event_types):
+        """Refresh cached metadata if the datasource supports this event.
+
+        The datasource has a list of update_events which
+        trigger refreshing all cached metadata as well as refreshing the
+        network configuration.
+
+        @param source_event_types: List of EventTypes which may trigger a
+            metadata update.
+
+        @return True if the datasource did successfully update cached metadata
+            due to source_event_type.
+        """
+        supported_events = {}
+        for event in source_event_types:
+            for update_scope, update_events in self.update_events.items():
+                if event in update_events:
+                    if not supported_events.get(update_scope):
+                        supported_events[update_scope] = []
+                    supported_events[update_scope].append(event)
+        for scope, matched_events in supported_events.items():
+            LOG.debug(
+                "Update datasource metadata and %s config due to events: %s",
+                scope, ', '.join(matched_events))
+            # Each datasource has a cached config property which needs clearing
+            # Once cleared that config property will be regenerated from
+            # current metadata.
+            self.clear_cached_attrs((('_%s_config' % scope, UNSET),))
+        if supported_events:
+            self.clear_cached_attrs()
+            result = self.get_data()
+            if result:
+                return True
+        return False
+
     def check_instance_id(self, sys_cfg):
         # quickly (local check only) if self.instance_id is still
         return False
@@ -520,7 +596,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter):
             with myrep:
                 LOG.debug("Seeing if we can get any data from %s", cls)
                 s = cls(sys_cfg, distro, paths)
-                if s.get_data():
+                if s.update_metadata([EventType.BOOT_NEW_INSTANCE]):
                     myrep.message = "found %s data from %s" % (mode, name)
                     return (s, type_utils.obj_name(cls))
         except Exception:
diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py
index d5bc98a4..dcd221be 100644
--- a/cloudinit/sources/tests/test_init.py
+++ b/cloudinit/sources/tests/test_init.py
@@ -5,10 +5,11 @@ import os
 import six
 import stat
 
+from cloudinit.event import EventType
 from cloudinit.helpers import Paths
 from cloudinit import importer
 from cloudinit.sources import (
-    INSTANCE_JSON_FILE, DataSource)
+    INSTANCE_JSON_FILE, DataSource, UNSET)
 from cloudinit.tests.helpers import CiTestCase, skipIf, mock
 from cloudinit.user_data import UserDataProcessor
 from cloudinit import util
@@ -381,3 +382,83 @@ class TestDataSource(CiTestCase):
             get_args(grandchild.get_hostname),  # pylint: disable=W1505
             '%s does not implement DataSource.get_hostname params'
             % grandchild)
+
+    def test_clear_cached_attrs_resets_cached_attr_class_attributes(self):
+        """Class attributes listed in cached_attr_defaults are reset."""
+        count = 0
+        # Setup values for all cached class attributes
+        for attr, value in self.datasource.cached_attr_defaults:
+            setattr(self.datasource, attr, count)
+            count += 1
+        self.datasource._dirty_cache = True
+        self.datasource.clear_cached_attrs()
+        for attr, value in self.datasource.cached_attr_defaults:
+            self.assertEqual(value, getattr(self.datasource, attr))
+
+    def test_clear_cached_attrs_noops_on_clean_cache(self):
+        """Class attributes listed in cached_attr_defaults are reset."""
+        count = 0
+        # Setup values for all cached class attributes
+        for attr, _ in self.datasource.cached_attr_defaults:
+            setattr(self.datasource, attr, count)
+            count += 1
+        self.datasource._dirty_cache = False  # Fake clean cache
+        self.datasource.clear_cached_attrs()
+        count = 0
+        for attr, _ in self.datasource.cached_attr_defaults:
+            self.assertEqual(count, getattr(self.datasource, attr))
+            count += 1
+
+    def test_clear_cached_attrs_skips_non_attr_class_attributes(self):
+        """Skip any cached_attr_defaults which aren't class attributes."""
+        self.datasource._dirty_cache = True
+        self.datasource.clear_cached_attrs()
+        for attr in ('ec2_metadata', 'network_json'):
+            self.assertFalse(hasattr(self.datasource, attr))
+
+    def test_clear_cached_attrs_of_custom_attrs(self):
+        """Custom attr_values can be passed to clear_cached_attrs."""
+        self.datasource._dirty_cache = True
+        cached_attr_name = self.datasource.cached_attr_defaults[0][0]
+        setattr(self.datasource, cached_attr_name, 'himom')
+        self.datasource.myattr = 'orig'
+        self.datasource.clear_cached_attrs(
+            attr_defaults=(('myattr', 'updated'),))
+        self.assertEqual('himom', getattr(self.datasource, cached_attr_name))
+        self.assertEqual('updated', self.datasource.myattr)
+
+    def test_update_metadata_only_acts_on_supported_update_events(self):
+        """update_metadata won't get_data on unsupported update events."""
+        self.assertEqual(
+            {'network': [EventType.BOOT_NEW_INSTANCE]},
+            self.datasource.update_events)
+
+        def fake_get_data():
+            raise Exception('get_data should not be called')
+
+        self.datasource.get_data = fake_get_data
+        self.assertFalse(
+            self.datasource.update_metadata(
+                source_event_types=[EventType.BOOT]))
+
+    def test_update_metadata_returns_true_on_supported_update_event(self):
+        """update_metadata returns get_data response on supported events."""
+
+        def fake_get_data():
+            return True
+
+        self.datasource.get_data = fake_get_data
+        self.datasource._network_config = 'something'
+        self.datasource._dirty_cache = True
+        self.assertTrue(
+            self.datasource.update_metadata(
+                source_event_types=[
+                    EventType.BOOT, EventType.BOOT_NEW_INSTANCE]))
+        self.assertEqual(UNSET, self.datasource._network_config)
+        self.assertIn(
+            "DEBUG: Update datasource metadata and network config due to"
+            " events: New instance first boot",
+            self.logs.getvalue())
+
+
+# vi: ts=4 expandtab
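As a usage note on the tests above: passing an explicit attr_defaults tuple
to clear_cached_attrs resets only the named attributes, which is how
update_metadata clears each matched scope's cached config (e.g.
_network_config) before resetting the remaining cached attributes and
re-crawling. A hedged sketch, assuming ds is an instance of a DataSource
subclass:

    from cloudinit.sources import UNSET


    def reset_network_config_cache(ds):
        """Clear only the cached network config on a DataSource instance."""
        # Other cached attributes (metadata, userdata_raw, ...) are left
        # untouched because an explicit attr_defaults tuple bypasses
        # cached_attr_defaults.
        ds.clear_cached_attrs(attr_defaults=(('_network_config', UNSET),))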