author     Chris Patterson <cpatterson@microsoft.com>  2022-02-15 12:19:19 -0500
committer  GitHub <noreply@github.com>                 2022-02-15 11:19:19 -0600
commit     101a62f2389a933676e9d0d20d9f59303b1f1833
tree       c6cbc4b2c94c5cc8bf2f471c14982fa22db2250d
parent     32fcbb580d6eacb06c901bc291e0fa118bb9b646
sources/azure: report ready in local phase (#1265)
Pre-provisioned (PPS) instances report ready early in the local phase
and again in the non-local phase, during setup(). Non-PPS instances
only report ready during the non-local phase.
Update the process to report ready during the local phase in all
cases. Only attempt to do so if networking is up, to avoid stalling
boot: by that point we have already waited at least 20 minutes for
DHCP if we are provisioning, or 5 minutes for DHCP on a normal boot
requesting updated network configuration.
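In effect, the tail of crawl_metadata() now reports ready from the
local phase whenever ephemeral networking is available. A simplified
sketch, condensed from the DataSourceAzure.py diff below (logging and
comments trimmed):

    if self._negotiated is False and self._is_ephemeral_networking_up():
        # Report ready; fetch public keys from Wireserver only if needed.
        pubkey_info = self._determine_wireserver_pubkey_info(
            cfg=cfg, imds_md=imds_md
        )
        try:
            ssh_keys = self._report_ready(pubkey_info=pubkey_info)
        except Exception:
            # Best effort: keep booting rather than stall on a failed report.
            pass
        else:
            if ssh_keys:
                crawled_data["metadata"]["public-keys"] = ssh_keys
            self._cleanup_markers()
            self._negotiated = True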
- Extend _report_ready() with pubkey_info and raise exception
on error to consolidate reporting done in _negotiate() and
_report_ready().
- Remove setup(), moving relevant logic into crawl_metadata().
- Move remaining _negotiate() logic into _cleanup_markers() and
_determine_wireserver_pubkey_info(), as sketched below.
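Roughly, the resulting helper surface is (signatures taken from the
diff below; bodies abridged):

    @azure_ds_telemetry_reporter
    def _report_ready(
        self, *, pubkey_info: Optional[List[str]] = None
    ) -> Optional[List[str]]:
        """Report ready to the fabric; raises on error, returns SSH keys."""

    def _determine_wireserver_pubkey_info(
        self, *, cfg: dict, imds_md: dict
    ) -> Optional[List[str]]:
        """Fingerprints to request from Wireserver when IMDS has no keys."""

    def _cleanup_markers(self):
        """Remove reported-ready/reprovision marker files."""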
These changes effectively fix two issues that were present:
(1) _negotiated is incorrectly set to True when failing to report
ready. _negotiate() squashed the exception and the return value was
not checked. This was probably masked by the forced removal of
obj.pkl on Ubuntu instances, but fixing it matters once we start
persisting obj.pkl, to prevent unnecessary re-negotiation.
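For reference, the removed flow looked roughly like this (abridged
from the setup()/_negotiate() code deleted in the diff below), which
is why a failed report still left _negotiated set:

    # Old, removed flow (abridged):
    def setup(self, is_new_instance):
        if self._negotiated is False:
            ssh_keys = self._negotiate()   # returned False on error...
            if ssh_keys:
                self.metadata["public-keys"] = ssh_keys
            self._negotiated = True        # ...but this ran unconditionally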
(2) provisioning media is not ejected for non-PPS
_negotiate() did not pass the iso_dev parameter when reporting ready. The
host will ensure this operation takes place, but it is preferable to
eject /dev/sr0 from within the guest when we're done with it.
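With _report_ready() now always passing the ISO device, the ready
report doubles as the eject request, and the device is cleared
afterwards so we never ask twice (excerpted from the diff below):

    data = get_metadata_from_fabric(
        fallback_lease_file=None,
        dhcp_opts=self._wireserver_endpoint,
        iso_dev=self._iso_dev,  # e.g. /dev/sr0, ejected from within the guest
        pubkey_info=pubkey_info,
    )
    # Reporting ready ejected OVF media, no need to do so again.
    self._iso_dev = None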
Lastly, this removes any need for lease-file parsing, as the
wireserver address is already tracked for ephemeral DHCP. A follow-up
PR will remove the now-unused logic.
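The endpoint comes from DHCP option 245, captured when ephemeral
networking is brought up. A hedged sketch based on the pre-existing
_setup_ephemeral_networking() helper (not part of this diff); the raw
option value is later handed to get_metadata_from_fabric() as
dhcp_opts:

    # Sketch only; the surrounding helper is pre-existing code, not this diff.
    lease = self._ephemeral_dhcp_ctx.obtain_lease()
    if "unknown-245" in lease:
        # DHCP option 245 carries the Azure Wireserver address.
        self._wireserver_endpoint = lease["unknown-245"]
    # Later, _report_ready() passes dhcp_opts=self._wireserver_endpoint,
    # so no dhclient lease file needs to be parsed.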
Signed-off-by: Chris Patterson <cpatterson@microsoft.com>
-rwxr-xr-x  cloudinit/sources/DataSourceAzure.py   | 114
-rw-r--r--  pyproject.toml                         |   2
-rw-r--r--  tests/unittests/sources/test_azure.py  | 533
3 files changed, 577 insertions, 72 deletions
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index f8e1dd02..359dfbde 100755
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -317,17 +317,16 @@ class DataSourceAzure(sources.DataSource):
             [util.get_cfg_by_path(sys_cfg, DS_CFG_PATH, {}), BUILTIN_DS_CONFIG]
         )
         self.dhclient_lease_file = self.ds_cfg.get("dhclient_lease_file")
+        self._iso_dev = None
         self._network_config = None
         self._ephemeral_dhcp_ctx = None
         self._wireserver_endpoint = DEFAULT_WIRESERVER_ENDPOINT
-        self.iso_dev = None
 
     def _unpickle(self, ci_pkl_version: int) -> None:
         super()._unpickle(ci_pkl_version)
 
         self._ephemeral_dhcp_ctx = None
-        if not hasattr(self, "iso_dev"):
-            self.iso_dev = None
+        self._iso_dev = None
         self._wireserver_endpoint = DEFAULT_WIRESERVER_ENDPOINT
 
     def __str__(self):
@@ -441,7 +440,6 @@ class DataSourceAzure(sources.DataSource):
         cfg = {}
         files = {}
 
-        iso_dev = None
         if os.path.isfile(REPROVISION_MARKER_FILE):
             metadata_source = "IMDS"
             report_diagnostic_event(
@@ -462,7 +460,7 @@ class DataSourceAzure(sources.DataSource):
                             src, load_azure_ds_dir
                         )
                     # save the device for ejection later
-                    iso_dev = src
+                    self._iso_dev = src
                 else:
                     md, userdata_raw, cfg, files = load_azure_ds_dir(src)
                 ovf_is_accessible = True
@@ -497,7 +495,7 @@ class DataSourceAzure(sources.DataSource):
         # not have UDF support. In either case, require IMDS metadata.
         # If we require IMDS metadata, try harder to obtain networking, waiting
         # for at least 20 minutes. Otherwise only wait 5 minutes.
-        requires_imds_metadata = bool(iso_dev) or not ovf_is_accessible
+        requires_imds_metadata = bool(self._iso_dev) or not ovf_is_accessible
         timeout_minutes = 20 if requires_imds_metadata else 5
         try:
             self._setup_ephemeral_networking(timeout_minutes=timeout_minutes)
@@ -514,8 +512,6 @@ class DataSourceAzure(sources.DataSource):
             report_diagnostic_event(msg)
             raise sources.InvalidMetaDataException(msg)
 
-        self.iso_dev = iso_dev
-
         # Refresh PPS type using metadata.
         pps_type = self._determine_pps_type(cfg, imds_md)
         if pps_type != PPSType.NONE:
@@ -612,9 +608,23 @@ class DataSourceAzure(sources.DataSource):
             crawled_data["metadata"]["random_seed"] = seed
         crawled_data["metadata"]["instance-id"] = self._iid()
 
-        if pps_type != PPSType.NONE:
-            LOG.info("Reporting ready to Azure after getting ReprovisionData")
-            self._report_ready()
+        if self._negotiated is False and self._is_ephemeral_networking_up():
+            # Report ready and fetch public-keys from Wireserver, if required.
+            pubkey_info = self._determine_wireserver_pubkey_info(
+                cfg=cfg, imds_md=imds_md
+            )
+            try:
+                ssh_keys = self._report_ready(pubkey_info=pubkey_info)
+            except Exception:
+                # Failed to report ready, but continue with best effort.
+                pass
+            else:
+                LOG.debug("negotiating returned %s", ssh_keys)
+                if ssh_keys:
+                    crawled_data["metadata"]["public-keys"] = ssh_keys
+
+                self._cleanup_markers()
+                self._negotiated = True
 
         return crawled_data
 
@@ -844,24 +854,6 @@ class DataSourceAzure(sources.DataSource):
         return iid
 
     @azure_ds_telemetry_reporter
-    def setup(self, is_new_instance):
-        if self._negotiated is False:
-            LOG.debug(
-                "negotiating for %s (new_instance=%s)",
-                self.get_instance_id(),
-                is_new_instance,
-            )
-            ssh_keys = self._negotiate()
-            LOG.debug("negotiating returned %s", ssh_keys)
-            if ssh_keys:
-                self.metadata["public-keys"] = ssh_keys
-            self._negotiated = True
-        else:
-            LOG.debug(
-                "negotiating already done for %s", self.get_instance_id()
-            )
-
-    @azure_ds_telemetry_reporter
     def _wait_for_nic_detach(self, nl_sock):
         """Use the netlink socket provided to wait for nic detach event.
         NOTE: The function doesn't close the socket. The caller owns closing
@@ -983,11 +975,12 @@ class DataSourceAzure(sources.DataSource):
 
         :raises sources.InvalidMetaDataException: On error reporting ready.
         """
-        report_ready_succeeded = self._report_ready()
-        if not report_ready_succeeded:
+        try:
+            self._report_ready()
+        except Exception as error:
             msg = "Failed reporting ready while in the preprovisioning pool."
             report_diagnostic_event(msg, logger_func=LOG.error)
-            raise sources.InvalidMetaDataException(msg)
+            raise sources.InvalidMetaDataException(msg) from error
 
         self._create_report_ready_marker()
 
@@ -1400,25 +1393,36 @@ class DataSourceAzure(sources.DataSource):
 
         return False
 
-    def _report_ready(self) -> bool:
+    @azure_ds_telemetry_reporter
+    def _report_ready(
+        self, *, pubkey_info: Optional[List[str]] = None
+    ) -> Optional[List[str]]:
         """Tells the fabric provisioning has completed.
 
-        @return: The success status of sending the ready signal.
+        :param pubkey_info: Fingerprints of keys to request from Wireserver.
+
+        :raises Exception: if failed to report.
+
+        :returns: List of SSH keys, if requested.
         """
         try:
-            get_metadata_from_fabric(
+            data = get_metadata_from_fabric(
                 fallback_lease_file=None,
                 dhcp_opts=self._wireserver_endpoint,
-                iso_dev=self.iso_dev,
+                iso_dev=self._iso_dev,
+                pubkey_info=pubkey_info,
             )
-            return True
         except Exception as e:
             report_diagnostic_event(
                 "Error communicating with Azure fabric; You may experience "
                 "connectivity issues: %s" % e,
                 logger_func=LOG.warning,
             )
-            return False
+            raise
+
+        # Reporting ready ejected OVF media, no need to do so again.
+        self._iso_dev = None
+        return data
 
     def _ppstype_from_imds(self, imds_md: dict) -> Optional[str]:
         try:
@@ -1464,6 +1468,7 @@ class DataSourceAzure(sources.DataSource):
             "{pid}: {time}\n".format(pid=os.getpid(), time=time()),
         )
 
+    @azure_ds_telemetry_reporter
     def _reprovision(self):
         """Initiate the reprovisioning workflow.
 
@@ -1479,40 +1484,29 @@ class DataSourceAzure(sources.DataSource):
         return (md, ud, cfg, {"ovf-env.xml": contents})
 
     @azure_ds_telemetry_reporter
-    def _negotiate(self):
-        """Negotiate with fabric and return data from it.
+    def _determine_wireserver_pubkey_info(
+        self, *, cfg: dict, imds_md: dict
+    ) -> Optional[List[str]]:
+        """Determine the fingerprints we need to retrieve from Wireserver.
 
-        On success, returns a dictionary including 'public_keys'.
-        On failure, returns False.
+        :return: List of keys to request from Wireserver, if any, else None.
""" - pubkey_info = None + pubkey_info: Optional[List[str]] = None try: - self._get_public_keys_from_imds(self.metadata["imds"]) + self._get_public_keys_from_imds(imds_md) except (KeyError, ValueError): - pubkey_info = self.cfg.get("_pubkeys", None) + pubkey_info = cfg.get("_pubkeys", None) log_msg = "Retrieved {} fingerprints from OVF".format( len(pubkey_info) if pubkey_info is not None else 0 ) report_diagnostic_event(log_msg, logger_func=LOG.debug) + return pubkey_info - LOG.debug("negotiating with fabric") - try: - ssh_keys = get_metadata_from_fabric( - fallback_lease_file=self.dhclient_lease_file, - pubkey_info=pubkey_info, - ) - except Exception as e: - report_diagnostic_event( - "Error communicating with Azure fabric; You may experience " - "connectivity issues: %s" % e, - logger_func=LOG.warning, - ) - return False - + def _cleanup_markers(self): + """Cleanup any marker files.""" util.del_file(REPORTED_READY_MARKER_FILE) util.del_file(REPROVISION_MARKER_FILE) util.del_file(REPROVISION_NIC_DETACHED_MARKER_FILE) - return ssh_keys @azure_ds_telemetry_reporter def activate(self, cfg, is_new_instance): diff --git a/pyproject.toml b/pyproject.toml index 52093fac..324d6f35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,6 @@ exclude=[ '^cloudinit/net/netplan\.py$', '^cloudinit/net/sysconfig\.py$', '^cloudinit/serial\.py$', - '^cloudinit/sources/DataSourceAzure\.py$', '^cloudinit/sources/DataSourceAliYun\.py$', '^cloudinit/sources/DataSourceLXD\.py$', '^cloudinit/sources/DataSourceOracle\.py$', @@ -42,7 +41,6 @@ exclude=[ '^cloudinit/sources/DataSourceSmartOS\.py$', '^cloudinit/sources/DataSourceVMware\.py$', '^cloudinit/sources/__init__\.py$', - '^cloudinit/sources/helpers/azure\.py$', '^cloudinit/sources/helpers/vmware/imc/config_file\.py$', '^cloudinit/stages\.py$', '^cloudinit/templater\.py$', diff --git a/tests/unittests/sources/test_azure.py b/tests/unittests/sources/test_azure.py index ecedc54d..5f956a63 100644 --- a/tests/unittests/sources/test_azure.py +++ b/tests/unittests/sources/test_azure.py @@ -41,24 +41,162 @@ MOCKPATH = "cloudinit.sources.DataSourceAzure." 
@pytest.fixture -def azure_ds(request, paths): +def azure_ds(paths): """Provide DataSourceAzure instance with mocks for minimal test case.""" with mock.patch(MOCKPATH + "_is_platform_viable", return_value=True): yield dsaz.DataSourceAzure(sys_cfg={}, distro=mock.Mock(), paths=paths) @pytest.fixture +def mock_azure_helper_readurl(): + with mock.patch( + "cloudinit.sources.helpers.azure.url_helper.readurl", autospec=True + ) as m: + yield m + + +@pytest.fixture +def mock_azure_get_metadata_from_fabric(): + with mock.patch( + MOCKPATH + "get_metadata_from_fabric", + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_azure_report_failure_to_fabric(): + with mock.patch( + MOCKPATH + "report_failure_to_fabric", + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_dmi_read_dmi_data(): + def fake_read(key: str) -> str: + if key == "system-uuid": + return "fake-system-uuid" + raise RuntimeError() + + with mock.patch( + MOCKPATH + "dmi.read_dmi_data", + side_effect=fake_read, + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_net_dhcp_maybe_perform_dhcp_discovery(): + with mock.patch( + "cloudinit.net.dhcp.maybe_perform_dhcp_discovery", + return_value=[ + { + "unknown-245": "aa:bb:cc:dd", + "interface": "ethBoot0", + "fixed-address": "192.168.2.9", + "routers": "192.168.2.1", + "subnet-mask": "255.255.255.0", + } + ], + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_net_dhcp_EphemeralIPv4Network(): + with mock.patch( + "cloudinit.net.dhcp.EphemeralIPv4Network", + autospec=True, + ) as m: + yield m + + +@pytest.fixture def mock_get_interfaces(): - """Mock for net.get_interfaces().""" with mock.patch(MOCKPATH + "net.get_interfaces", return_value=[]) as m: yield m @pytest.fixture def mock_get_interface_mac(): - """Mock for net.get_interface_mac().""" with mock.patch( - MOCKPATH + "net.get_interface_mac", return_value="001122334455" + MOCKPATH + "net.get_interface_mac", + return_value="001122334455", + ) as m: + yield m + + +@pytest.fixture +def mock_netlink(): + with mock.patch( + MOCKPATH + "netlink", + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_os_path_isfile(): + with mock.patch(MOCKPATH + "os.path.isfile", autospec=True) as m: + yield m + + +@pytest.fixture +def mock_readurl(): + with mock.patch(MOCKPATH + "readurl", autospec=True) as m: + yield m + + +@pytest.fixture +def mock_subp_subp(): + with mock.patch(MOCKPATH + "subp.subp", side_effect=[]) as m: + yield m + + +@pytest.fixture +def mock_util_ensure_dir(): + with mock.patch( + MOCKPATH + "util.ensure_dir", + autospec=True, + ) as m: + yield m + + +@pytest.fixture +def mock_util_find_devs_with(): + with mock.patch(MOCKPATH + "util.find_devs_with", autospec=True) as m: + yield m + + +@pytest.fixture +def mock_util_load_file(): + with mock.patch( + MOCKPATH + "util.load_file", + autospec=True, + return_value=b"", + ) as m: + yield m + + +@pytest.fixture +def mock_util_mount_cb(): + with mock.patch( + MOCKPATH + "util.mount_cb", + autospec=True, + return_value=({}, "", {}, {}), + ) as m: + yield m + + +@pytest.fixture +def mock_util_write_file(): + with mock.patch( + MOCKPATH + "util.write_file", + autospec=True, ) as m: yield m @@ -1259,7 +1397,10 @@ scbus-1 on xpt0 bus 0 dsrc.crawl_metadata() - assert m_report_ready.mock_calls == [mock.call(), mock.call()] + assert m_report_ready.mock_calls == [ + mock.call(), + mock.call(pubkey_info=None), + ] def test_waagent_d_has_0700_perms(self): # we expect /var/lib/waagent to be created 0700 @@ -1637,12 +1778,23 @@ 
scbus-1 on xpt0 bus 0 def test_dsaz_report_ready_returns_true_when_report_succeeds(self): dsrc = self._get_ds({"ovfcontent": construct_valid_ovf_env()}) - self.assertTrue(dsrc._report_ready()) + assert dsrc._report_ready() == [] - def test_dsaz_report_ready_returns_false_and_does_not_propagate_exc(self): + @mock.patch(MOCKPATH + "report_diagnostic_event") + def test_dsaz_report_ready_failure_reports_telemetry(self, m_report_diag): dsrc = self._get_ds({"ovfcontent": construct_valid_ovf_env()}) - self.m_get_metadata_from_fabric.side_effect = Exception - self.assertFalse(dsrc._report_ready()) + self.m_get_metadata_from_fabric.side_effect = Exception("foo") + + with pytest.raises(Exception): + dsrc._report_ready() + + assert m_report_diag.mock_calls == [ + mock.call( + "Error communicating with Azure fabric; " + "You may experience connectivity issues: foo", + logger_func=dsaz.LOG.warning, + ) + ] def test_dsaz_report_failure_returns_true_when_report_succeeds(self): dsrc = self._get_ds({"ovfcontent": construct_valid_ovf_env()}) @@ -3316,7 +3468,7 @@ class TestPreprovisioningPollIMDS(CiTestCase): } ] m_media_switch.return_value = None - m_report_ready.return_value = False + m_report_ready.side_effect = [Exception("fail")] dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) self.assertFalse(os.path.exists(report_file)) with mock.patch(MOCKPATH + "REPORTED_READY_MARKER_FILE", report_file): @@ -3568,6 +3720,367 @@ class TestRandomSeed(CiTestCase): self.assertEqual(deserialized["seed"], result) +class TestProvisioning: + @pytest.fixture(autouse=True) + def provisioning_setup( + self, + azure_ds, + mock_azure_get_metadata_from_fabric, + mock_azure_report_failure_to_fabric, + mock_net_dhcp_maybe_perform_dhcp_discovery, + mock_net_dhcp_EphemeralIPv4Network, + mock_dmi_read_dmi_data, + mock_get_interfaces, + mock_get_interface_mac, + mock_netlink, + mock_os_path_isfile, + mock_readurl, + mock_subp_subp, + mock_util_ensure_dir, + mock_util_find_devs_with, + mock_util_load_file, + mock_util_mount_cb, + mock_util_write_file, + ): + self.azure_ds = azure_ds + self.mock_azure_get_metadata_from_fabric = ( + mock_azure_get_metadata_from_fabric + ) + self.mock_azure_report_failure_to_fabric = ( + mock_azure_report_failure_to_fabric + ) + self.mock_net_dhcp_maybe_perform_dhcp_discovery = ( + mock_net_dhcp_maybe_perform_dhcp_discovery + ) + self.mock_net_dhcp_EphemeralIPv4Network = ( + mock_net_dhcp_EphemeralIPv4Network + ) + self.mock_dmi_read_dmi_data = mock_dmi_read_dmi_data + self.mock_get_interfaces = mock_get_interfaces + self.mock_get_interface_mac = mock_get_interface_mac + self.mock_netlink = mock_netlink + self.mock_os_path_isfile = mock_os_path_isfile + self.mock_readurl = mock_readurl + self.mock_subp_subp = mock_subp_subp + self.mock_util_ensure_dir = mock_util_ensure_dir + self.mock_util_find_devs_with = mock_util_find_devs_with + self.mock_util_load_file = mock_util_load_file + self.mock_util_mount_cb = mock_util_mount_cb + self.mock_util_write_file = mock_util_write_file + + self.imds_md = { + "extended": {"compute": {"ppsType": "None"}}, + "network": { + "interface": [ + { + "ipv4": { + "ipAddress": [ + { + "privateIpAddress": "10.0.0.22", + "publicIpAddress": "", + } + ], + "subnet": [ + {"address": "10.0.0.0", "prefix": "24"} + ], + }, + "ipv6": {"ipAddress": []}, + "macAddress": "011122334455", + }, + ] + }, + } + + def test_no_pps(self): + self.mock_readurl.side_effect = [ + mock.MagicMock(contents=json.dumps(self.imds_md).encode()), + ] + 
self.mock_azure_get_metadata_from_fabric.return_value = [] + self.mock_os_path_isfile.side_effect = [False, False, False] + + self.azure_ds._get_data() + + assert self.mock_os_path_isfile.mock_calls == [ + mock.call("/var/lib/cloud/data/poll_imds"), + mock.call( + os.path.join( + self.azure_ds.paths.cloud_dir, "seed/azure/ovf-env.xml" + ) + ), + mock.call("/var/lib/cloud/data/poll_imds"), + ] + + assert self.mock_readurl.mock_calls == [ + mock.call( + "http://169.254.169.254/metadata/instance?" + "api-version=2021-08-01&extended=true", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=dsaz.retry_on_url_exc, + infinite=False, + ), + ] + + # Verify DHCP is setup once. + assert self.mock_net_dhcp_maybe_perform_dhcp_discovery.mock_calls == [ + mock.call(None, dsaz.dhcp_log_cb) + ] + assert self.azure_ds._wireserver_endpoint == "aa:bb:cc:dd" + assert self.azure_ds._is_ephemeral_networking_up() is False + + # Verify DMI usage. + assert self.mock_dmi_read_dmi_data.mock_calls == [ + mock.call("system-uuid") + ] + assert self.azure_ds.metadata["instance-id"] == "fake-system-uuid" + + # Verify IMDS metadata. + assert self.azure_ds.metadata["imds"] == self.imds_md + + # Verify reporting ready once. + assert self.mock_azure_get_metadata_from_fabric.mock_calls == [ + mock.call( + fallback_lease_file=None, + dhcp_opts="aa:bb:cc:dd", + iso_dev="/dev/sr0", + pubkey_info=None, + ) + ] + + # Verify netlink. + assert self.mock_netlink.mock_calls == [] + + def test_running_pps(self): + self.imds_md["extended"]["compute"]["ppsType"] = "Running" + ovf_data = {"HostName": "myhost", "UserName": "myuser"} + + nl_sock = mock.MagicMock() + self.mock_netlink.create_bound_netlink_socket.return_value = nl_sock + self.mock_readurl.side_effect = [ + mock.MagicMock(contents=json.dumps(self.imds_md).encode()), + mock.MagicMock( + contents=construct_valid_ovf_env(data=ovf_data).encode() + ), + mock.MagicMock(contents=json.dumps(self.imds_md).encode()), + ] + self.mock_azure_get_metadata_from_fabric.return_value = [] + self.mock_os_path_isfile.side_effect = [False, False, False, False] + + self.azure_ds._get_data() + + assert self.mock_os_path_isfile.mock_calls == [ + mock.call("/var/lib/cloud/data/poll_imds"), + mock.call( + os.path.join( + self.azure_ds.paths.cloud_dir, "seed/azure/ovf-env.xml" + ) + ), + mock.call("/var/lib/cloud/data/poll_imds"), + mock.call("/var/lib/cloud/data/reported_ready"), + ] + + assert self.mock_readurl.mock_calls == [ + mock.call( + "http://169.254.169.254/metadata/instance?" + "api-version=2021-08-01&extended=true", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=dsaz.retry_on_url_exc, + infinite=False, + ), + mock.call( + "http://169.254.169.254/metadata/reprovisiondata?" + "api-version=2019-06-01", + timeout=2, + headers={"Metadata": "true"}, + exception_cb=mock.ANY, + infinite=True, + log_req_resp=False, + ), + mock.call( + "http://169.254.169.254/metadata/instance?" + "api-version=2021-08-01&extended=true", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=dsaz.retry_on_url_exc, + infinite=False, + ), + ] + + # Verify DHCP is setup twice. + assert self.mock_net_dhcp_maybe_perform_dhcp_discovery.mock_calls == [ + mock.call(None, dsaz.dhcp_log_cb), + mock.call(None, dsaz.dhcp_log_cb), + ] + assert self.azure_ds._wireserver_endpoint == "aa:bb:cc:dd" + assert self.azure_ds._is_ephemeral_networking_up() is False + + # Verify DMI usage. 
+ assert self.mock_dmi_read_dmi_data.mock_calls == [ + mock.call("system-uuid") + ] + assert self.azure_ds.metadata["instance-id"] == "fake-system-uuid" + + # Verify IMDS metadata. + assert self.azure_ds.metadata["imds"] == self.imds_md + + # Verify reporting ready twice. + assert self.mock_azure_get_metadata_from_fabric.mock_calls == [ + mock.call( + fallback_lease_file=None, + dhcp_opts="aa:bb:cc:dd", + iso_dev="/dev/sr0", + pubkey_info=None, + ), + mock.call( + fallback_lease_file=None, + dhcp_opts="aa:bb:cc:dd", + iso_dev=None, + pubkey_info=None, + ), + ] + + # Verify netlink operations for Running PPS. + assert self.mock_netlink.mock_calls == [ + mock.call.create_bound_netlink_socket(), + mock.call.wait_for_media_disconnect_connect(mock.ANY, "ethBoot0"), + mock.call.create_bound_netlink_socket().__bool__(), + mock.call.create_bound_netlink_socket().close(), + ] + + def test_savable_pps(self): + self.imds_md["extended"]["compute"]["ppsType"] = "Savable" + ovf_data = {"HostName": "myhost", "UserName": "myuser"} + + nl_sock = mock.MagicMock() + self.mock_netlink.create_bound_netlink_socket.return_value = nl_sock + self.mock_netlink.wait_for_nic_detach_event.return_value = "eth9" + self.mock_netlink.wait_for_nic_attach_event.return_value = ( + "ethAttached1" + ) + self.mock_readurl.side_effect = [ + mock.MagicMock(contents=json.dumps(self.imds_md).encode()), + mock.MagicMock( + contents=json.dumps(self.imds_md["network"]).encode() + ), + mock.MagicMock( + contents=construct_valid_ovf_env(data=ovf_data).encode() + ), + mock.MagicMock(contents=json.dumps(self.imds_md).encode()), + ] + self.mock_azure_get_metadata_from_fabric.return_value = [] + self.mock_os_path_isfile.side_effect = [ + False, # /var/lib/cloud/data/poll_imds + False, # seed/azure/ovf-env.xml + False, # /var/lib/cloud/data/poll_imds + False, # /var/lib/cloud/data/reported_ready + False, # /var/lib/cloud/data/reported_ready + False, # /var/lib/cloud/data/nic_detached + True, # /var/lib/cloud/data/reported_ready + ] + self.azure_ds._fallback_interface = False + + self.azure_ds._get_data() + + assert self.mock_os_path_isfile.mock_calls == [ + mock.call("/var/lib/cloud/data/poll_imds"), + mock.call( + os.path.join( + self.azure_ds.paths.cloud_dir, "seed/azure/ovf-env.xml" + ) + ), + mock.call("/var/lib/cloud/data/poll_imds"), + mock.call("/var/lib/cloud/data/reported_ready"), + mock.call("/var/lib/cloud/data/reported_ready"), + mock.call("/var/lib/cloud/data/nic_detached"), + mock.call("/var/lib/cloud/data/reported_ready"), + ] + + assert self.mock_readurl.mock_calls == [ + mock.call( + "http://169.254.169.254/metadata/instance?" + "api-version=2021-08-01&extended=true", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=dsaz.retry_on_url_exc, + infinite=False, + ), + mock.call( + "http://169.254.169.254/metadata/instance/network?" + "api-version=2019-06-01", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=mock.ANY, + infinite=True, + ), + mock.call( + "http://169.254.169.254/metadata/reprovisiondata?" + "api-version=2019-06-01", + timeout=2, + headers={"Metadata": "true"}, + exception_cb=mock.ANY, + infinite=True, + log_req_resp=False, + ), + mock.call( + "http://169.254.169.254/metadata/instance?" + "api-version=2021-08-01&extended=true", + timeout=2, + headers={"Metadata": "true"}, + retries=0, + exception_cb=dsaz.retry_on_url_exc, + infinite=False, + ), + ] + + # Verify DHCP is setup twice. 
+ assert self.mock_net_dhcp_maybe_perform_dhcp_discovery.mock_calls == [ + mock.call(None, dsaz.dhcp_log_cb), + mock.call("ethAttached1", dsaz.dhcp_log_cb), + ] + assert self.azure_ds._wireserver_endpoint == "aa:bb:cc:dd" + assert self.azure_ds._is_ephemeral_networking_up() is False + + # Verify DMI usage. + assert self.mock_dmi_read_dmi_data.mock_calls == [ + mock.call("system-uuid") + ] + assert self.azure_ds.metadata["instance-id"] == "fake-system-uuid" + + # Verify IMDS metadata. + assert self.azure_ds.metadata["imds"] == self.imds_md + + # Verify reporting ready twice. + assert self.mock_azure_get_metadata_from_fabric.mock_calls == [ + mock.call( + fallback_lease_file=None, + dhcp_opts="aa:bb:cc:dd", + iso_dev="/dev/sr0", + pubkey_info=None, + ), + mock.call( + fallback_lease_file=None, + dhcp_opts="aa:bb:cc:dd", + iso_dev=None, + pubkey_info=None, + ), + ] + + # Verify netlink operations for Savable PPS. + assert self.mock_netlink.mock_calls == [ + mock.call.create_bound_netlink_socket(), + mock.call.wait_for_nic_detach_event(nl_sock), + mock.call.wait_for_nic_attach_event(nl_sock, ["ethAttached1"]), + mock.call.create_bound_netlink_socket().__bool__(), + mock.call.create_bound_netlink_socket().close(), + ] + + class TestValidateIMDSMetadata: @pytest.mark.parametrize( "mac,expected", |