summaryrefslogtreecommitdiff
path: root/cloudinit/sources
diff options
context:
space:
mode:
Diffstat (limited to 'cloudinit/sources')
-rwxr-xr-xcloudinit/sources/DataSourceAzure.py15
-rw-r--r--cloudinit/sources/DataSourceEc2.py10
-rw-r--r--cloudinit/sources/DataSourceRbxCloud.py9
-rw-r--r--cloudinit/sources/DataSourceScaleway.py10
-rw-r--r--cloudinit/sources/DataSourceSmartOS.py8
-rw-r--r--cloudinit/sources/__init__.py41
-rw-r--r--cloudinit/sources/tests/test_init.py29
7 files changed, 84 insertions, 38 deletions
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 2f3390c3..dcdf9f8f 100755
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -22,7 +22,7 @@ import requests
from cloudinit import dmi
from cloudinit import log as logging
from cloudinit import net
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
from cloudinit.net import device_driver
from cloudinit.net.dhcp import EphemeralDHCPv4
from cloudinit import sources
@@ -338,6 +338,13 @@ def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
class DataSourceAzure(sources.DataSource):
dsname = 'Azure'
+ # Regenerate network config on new-instance boot and on every boot
+ default_update_events = {EventScope.NETWORK: {
+ EventType.BOOT_NEW_INSTANCE,
+ EventType.BOOT,
+ EventType.BOOT_LEGACY
+ }}
+
_negotiated = False
_metadata_imds = sources.UNSET
_ci_pkl_version = 1
@@ -352,8 +359,6 @@ class DataSourceAzure(sources.DataSource):
BUILTIN_DS_CONFIG])
self.dhclient_lease_file = self.ds_cfg.get('dhclient_lease_file')
self._network_config = None
- # Regenerate network config new_instance boot and every boot
- self.update_events['network'].add(EventType.BOOT)
self._ephemeral_dhcp_ctx = None
self.failed_desired_api_version = False
self.iso_dev = None
@@ -2309,8 +2314,8 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
LOG.info(
'Removing Ubuntu extended network scripts because'
' cloud-init updates Azure network configuration on the'
- ' following event: %s.',
- EventType.BOOT)
+ ' following events: %s.',
+ [EventType.BOOT.value, EventType.BOOT_LEGACY.value])
logged = True
if os.path.isdir(path):
util.del_dir(path)
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index a2105dc7..8a7f7c60 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -8,6 +8,7 @@
#
# This file is part of cloud-init. See LICENSE file for license information.
+import copy
import os
import time
@@ -20,7 +21,7 @@ from cloudinit import sources
from cloudinit import url_helper as uhelp
from cloudinit import util
from cloudinit import warnings
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
LOG = logging.getLogger(__name__)
@@ -426,7 +427,12 @@ class DataSourceEc2(sources.DataSource):
# Non-VPC (aka Classic) Ec2 instances need to rewrite the
# network config file every boot due to MAC address change.
if self.is_classic_instance():
- self.update_events['network'].add(EventType.BOOT)
+ self.default_update_events = copy.deepcopy(
+ self.default_update_events)
+ self.default_update_events[EventScope.NETWORK].add(
+ EventType.BOOT)
+ self.default_update_events[EventScope.NETWORK].add(
+ EventType.BOOT_LEGACY)
else:
LOG.warning("Metadata 'network' key not valid: %s.", net_md)
self._network_config = result
diff --git a/cloudinit/sources/DataSourceRbxCloud.py b/cloudinit/sources/DataSourceRbxCloud.py
index 0b8994bf..bb69e998 100644
--- a/cloudinit/sources/DataSourceRbxCloud.py
+++ b/cloudinit/sources/DataSourceRbxCloud.py
@@ -17,7 +17,7 @@ from cloudinit import log as logging
from cloudinit import sources
from cloudinit import subp
from cloudinit import util
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
LOG = logging.getLogger(__name__)
ETC_HOSTS = '/etc/hosts'
@@ -206,10 +206,11 @@ def read_user_data_callback(mount_dir):
class DataSourceRbxCloud(sources.DataSource):
dsname = "RbxCloud"
- update_events = {'network': [
+ default_update_events = {EventScope.NETWORK: {
EventType.BOOT_NEW_INSTANCE,
- EventType.BOOT
- ]}
+ EventType.BOOT,
+ EventType.BOOT_LEGACY
+ }}
def __init__(self, sys_cfg, distro, paths):
sources.DataSource.__init__(self, sys_cfg, distro, paths)
diff --git a/cloudinit/sources/DataSourceScaleway.py b/cloudinit/sources/DataSourceScaleway.py
index 41be7665..7b8974a2 100644
--- a/cloudinit/sources/DataSourceScaleway.py
+++ b/cloudinit/sources/DataSourceScaleway.py
@@ -31,8 +31,8 @@ from cloudinit import sources
from cloudinit import url_helper
from cloudinit import util
from cloudinit import net
+from cloudinit.event import EventScope, EventType
from cloudinit.net.dhcp import EphemeralDHCPv4, NoDHCPLeaseError
-from cloudinit.event import EventType
LOG = logging.getLogger(__name__)
@@ -172,7 +172,13 @@ def query_data_api(api_type, api_address, retries, timeout):
class DataSourceScaleway(sources.DataSource):
dsname = "Scaleway"
- update_events = {'network': [EventType.BOOT_NEW_INSTANCE, EventType.BOOT]}
+ default_update_events = {
+ EventScope.NETWORK: {
+ EventType.BOOT_NEW_INSTANCE,
+ EventType.BOOT,
+ EventType.BOOT_LEGACY
+ }
+ }
def __init__(self, sys_cfg, distro, paths):
super(DataSourceScaleway, self).__init__(sys_cfg, distro, paths)
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index fd292baa..9b16bf8d 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -36,7 +36,7 @@ from cloudinit import serial
from cloudinit import sources
from cloudinit import subp
from cloudinit import util
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
LOG = logging.getLogger(__name__)
@@ -170,6 +170,11 @@ class DataSourceSmartOS(sources.DataSource):
smartos_type = sources.UNSET
md_client = sources.UNSET
+ default_update_events = {EventScope.NETWORK: {
+ EventType.BOOT_NEW_INSTANCE,
+ EventType.BOOT,
+ EventType.BOOT_LEGACY
+ }}
def __init__(self, sys_cfg, distro, paths):
sources.DataSource.__init__(self, sys_cfg, distro, paths)
@@ -181,7 +186,6 @@ class DataSourceSmartOS(sources.DataSource):
self.metadata = {}
self.network_data = None
self._network_config = None
- self.update_events['network'].add(EventType.BOOT)
self.script_base_d = os.path.join(self.paths.get_cpath("scripts"))
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7d74f8d9..a07c4b4f 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -13,6 +13,7 @@ import copy
import json
import os
from collections import namedtuple
+from typing import Dict, List
from cloudinit import dmi
from cloudinit import importer
@@ -22,7 +23,7 @@ from cloudinit import type_utils
from cloudinit import user_data as ud
from cloudinit import util
from cloudinit.atomic_helper import write_json
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
from cloudinit.filters import launch_index
from cloudinit.persistence import CloudInitPickleMixin
from cloudinit.reporting import events
@@ -175,12 +176,23 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
# The datasource defines a set of supported EventTypes during which
# the datasource can react to changes in metadata and regenerate
- # network configuration on metadata changes.
- # A datasource which supports writing network config on each system boot
- # would call update_events['network'].add(EventType.BOOT).
+ # network configuration on metadata changes. These are defined in
+ # `supported_network_events`.
+ # The datasource also defines a set of default EventTypes that the
+ # datasource can react to. These are the event types that will be used
+ # if not overridden by the user.
+ # A datasource that needs to write network config on each system boot
+ # would call default_update_events[EventScope.NETWORK].add(EventType.BOOT).
# Default: generate network config on new instance id (first boot).
- update_events = {'network': set([EventType.BOOT_NEW_INSTANCE])}
+ supported_update_events = {EventScope.NETWORK: {
+ EventType.BOOT_NEW_INSTANCE,
+ EventType.BOOT,
+ EventType.BOOT_LEGACY,
+ }}
+ default_update_events = {EventScope.NETWORK: {
+ EventType.BOOT_NEW_INSTANCE,
+ }}
# N-tuple listing default values for any metadata-related class
# attributes cached on an instance by a process_data runs. These attribute
@@ -648,10 +660,12 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
def get_package_mirror_info(self):
return self.distro.get_package_mirror_info(data_source=self)
- def update_metadata(self, source_event_types):
+ def update_metadata_if_supported(
+ self, source_event_types: List[EventType]
+ ) -> bool:
"""Refresh cached metadata if the datasource supports this event.
- The datasource has a list of update_events which
+ The datasource has a list of supported_update_events which
trigger refreshing all cached metadata as well as refreshing the
network configuration.
@@ -661,9 +675,9 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
@return True if the datasource did successfully update cached metadata
due to source_event_type.
"""
- supported_events = {}
+ supported_events = {} # type: Dict[EventScope, set]
for event in source_event_types:
- for update_scope, update_events in self.update_events.items():
+ for update_scope, update_events in self.supported_update_events.items(): # noqa: E501
if event in update_events:
if not supported_events.get(update_scope):
supported_events[update_scope] = set()
@@ -671,7 +685,8 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
for scope, matched_events in supported_events.items():
LOG.debug(
"Update datasource metadata and %s config due to events: %s",
- scope, ', '.join(matched_events))
+ scope.value,
+ ', '.join([event.value for event in matched_events]))
# Each datasource has a cached config property which needs clearing
# Once cleared that config property will be regenerated from
# current metadata.
@@ -682,7 +697,7 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
if result:
return True
LOG.debug("Datasource %s not updated for events: %s", self,
- ', '.join(source_event_types))
+ ', '.join([event.value for event in source_event_types]))
return False
def check_instance_id(self, sys_cfg):
@@ -789,7 +804,9 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter):
with myrep:
LOG.debug("Seeing if we can get any data from %s", cls)
s = cls(sys_cfg, distro, paths)
- if s.update_metadata([EventType.BOOT_NEW_INSTANCE]):
+ if s.update_metadata_if_supported(
+ [EventType.BOOT_NEW_INSTANCE]
+ ):
myrep.message = "found %s data from %s" % (mode, name)
return (s, type_utils.obj_name(cls))
except Exception:
diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py
index 1420a988..a2b052a6 100644
--- a/cloudinit/sources/tests/test_init.py
+++ b/cloudinit/sources/tests/test_init.py
@@ -5,7 +5,7 @@ import inspect
import os
import stat
-from cloudinit.event import EventType
+from cloudinit.event import EventScope, EventType
from cloudinit.helpers import Paths
from cloudinit import importer
from cloudinit.sources import (
@@ -618,24 +618,29 @@ class TestDataSource(CiTestCase):
self.assertEqual('himom', getattr(self.datasource, cached_attr_name))
self.assertEqual('updated', self.datasource.myattr)
+ @mock.patch.dict(DataSource.default_update_events, {
+ EventScope.NETWORK: {EventType.BOOT_NEW_INSTANCE}})
+ @mock.patch.dict(DataSource.supported_update_events, {
+ EventScope.NETWORK: {EventType.BOOT_NEW_INSTANCE}})
def test_update_metadata_only_acts_on_supported_update_events(self):
- """update_metadata won't get_data on unsupported update events."""
- self.datasource.update_events['network'].discard(EventType.BOOT)
+ """update_metadata_if_supported won't get_data on unsupported events."""
self.assertEqual(
- {'network': set([EventType.BOOT_NEW_INSTANCE])},
- self.datasource.update_events)
+ {EventScope.NETWORK: set([EventType.BOOT_NEW_INSTANCE])},
+ self.datasource.default_update_events
+ )
def fake_get_data():
raise Exception('get_data should not be called')
self.datasource.get_data = fake_get_data
self.assertFalse(
- self.datasource.update_metadata(
+ self.datasource.update_metadata_if_supported(
source_event_types=[EventType.BOOT]))
+ @mock.patch.dict(DataSource.supported_update_events, {
+ EventScope.NETWORK: {EventType.BOOT_NEW_INSTANCE}})
def test_update_metadata_returns_true_on_supported_update_event(self):
- """update_metadata returns get_data response on supported events."""
-
+ """update_metadata_if_supported returns get_data on supported events."""
def fake_get_data():
return True
@@ -643,14 +648,16 @@ class TestDataSource(CiTestCase):
self.datasource._network_config = 'something'
self.datasource._dirty_cache = True
self.assertTrue(
- self.datasource.update_metadata(
+ self.datasource.update_metadata_if_supported(
source_event_types=[
EventType.BOOT, EventType.BOOT_NEW_INSTANCE]))
self.assertEqual(UNSET, self.datasource._network_config)
+
self.assertIn(
"DEBUG: Update datasource metadata and network config due to"
- " events: New instance first boot",
- self.logs.getvalue())
+ " events: boot-new-instance",
+ self.logs.getvalue()
+ )
class TestRedactSensitiveData(CiTestCase):