author     James Falcon <therealfalcon@gmail.com>    2021-09-15 10:44:26 -0500
committer  GitHub <noreply@github.com>               2021-09-15 10:44:26 -0500
commit     023f97d4e64c267b8bd809510b3fc75fcb9da688
tree       b5863133dbd55b94b921a2cbfdaabbafe9a65942
parent     26a92b0d883492beefacee80a7e7a2ab2a2c648f
Integration test upgrades for the 21.3-1 SRU (#1001)
* Update test_combined.py to allow either valid LXD subplatform.
* Split jinja-templated tests into a separate module, as they can be more fragile.
* Move the checks for warnings and tracebacks into a dedicated utility function. This allows us to work around persistent and expected tracebacks/warnings on particular clouds.
* Update test_upgrade.py to allow either valid Azure datasource; /var/lib/waagent or a mounted device are both valid.
* Add specificity to test_ntp_servers.py. Clouds will often specify their own NTP servers in the NTP configuration files, so make the tests manually specify their own.
* Account for additional keys on the system in test_ssh_keysfiles.py.
* Update tests to account for an invalid cache. test_user_events.py and test_version_change.py both have tests that assume a valid datasource cache when rebooting. In test_user_events.py, subsequent boots should block applying network config on boot if the boot event is denied; however, if the cache is invalid, it is valid to apply networking config on that boot. In test_version_change.py, "no cache found" won't trigger the expected debug log. Additionally, the pickle used for that test on an older release triggered an unexpected issue that took a different error path.
* Ignore Bionic in the hotplug tests (LP: #1942247). On Bionic, we traceback when attempting to detect the hotplugged device in the updated metadata, because Bionic is specifically configured not to provide network metadata. See LP: #1942247 for more details.
* Fix the date used in test_final_message. In test_final_message, we ensure variable substitution works as expected. For $timestamp, we compared against the current date. It's possible for the host date to be massively different from the client date, so obtain the date on the client rather than on the host.
* Remove the module-success assertion from the lp1813396 test. The module may fail for unrelated reasons (in this case, apt-get update is failing), but the test should still pass.
* Skip testing events if network is disabled.
* Ensure we install the expected version of cloud-init. As part of test setup, we can install cloud-init from various sources, including PROPOSED, PPAs, etc. We were never checking that this install completes successfully, and on OCI it wasn't completing successfully because of apt locking issues. The code has been updated to retry, and then fail loudly if we can't complete the install.
* Remove the ubuntu-azure-fips metapackage, which mandates a FIPS-flavour kernel, in test_lp1835584.py.
* Update test_user_events.py to account for Azure behavior, since Azure has a separate service that clears the pickled metadata every boot.
* Change failure to warning in test_upgrade.py if the initial boot errors. If there's already a pre-existing cause for warnings or tracebacks, that shouldn't cause the new version to fail.
* Add a retry to test_random_passwords_emitted_to_serial_console. It's possible we haven't retrieved the entire log when the call returns, so retry a few times if the expected output isn't present yet.
-rw-r--r--  tests/integration_tests/bugs/test_gh632.py                 6
-rw-r--r--  tests/integration_tests/bugs/test_gh868.py                 4
-rw-r--r--  tests/integration_tests/bugs/test_lp1813396.py             1
-rw-r--r--  tests/integration_tests/bugs/test_lp1835584.py             3
-rw-r--r--  tests/integration_tests/bugs/test_lp1886531.py             4
-rw-r--r--  tests/integration_tests/bugs/test_lp1898997.py             4
-rw-r--r--  tests/integration_tests/instances.py                      40
-rw-r--r--  tests/integration_tests/modules/test_combined.py          46
-rw-r--r--  tests/integration_tests/modules/test_disk_setup.py        10
-rw-r--r--  tests/integration_tests/modules/test_hotplug.py            4
-rw-r--r--  tests/integration_tests/modules/test_jinja_templating.py  30
-rw-r--r--  tests/integration_tests/modules/test_lxd_bridge.py         4
-rw-r--r--  tests/integration_tests/modules/test_ntp_servers.py        8
-rw-r--r--  tests/integration_tests/modules/test_set_password.py      12
-rw-r--r--  tests/integration_tests/modules/test_ssh_keysfile.py      10
-rw-r--r--  tests/integration_tests/modules/test_user_events.py       23
-rw-r--r--  tests/integration_tests/modules/test_version_change.py    25
-rw-r--r--  tests/integration_tests/test_upgrade.py                    25
-rw-r--r--  tests/integration_tests/util.py                            34
19 files changed, 212 insertions, 81 deletions
diff --git a/tests/integration_tests/bugs/test_gh632.py b/tests/integration_tests/bugs/test_gh632.py
index 3c1f9347..f3702a2e 100644
--- a/tests/integration_tests/bugs/test_gh632.py
+++ b/tests/integration_tests/bugs/test_gh632.py
@@ -3,16 +3,15 @@
Verify that if cloud-init is using DataSourceRbxCloud, there is
no traceback if the metadata disk cannot be found.
"""
-
import pytest
from tests.integration_tests.instances import IntegrationInstance
+from tests.integration_tests.util import verify_clean_log
# With some datasource hacking, we can run this on a NoCloud instance
@pytest.mark.lxd_container
@pytest.mark.lxd_vm
-@pytest.mark.sru_2020_11
def test_datasource_rbx_no_stacktrace(client: IntegrationInstance):
client.write_to_file(
'/etc/cloud/cloud.cfg.d/90_dpkg.cfg',
@@ -26,8 +25,7 @@ def test_datasource_rbx_no_stacktrace(client: IntegrationInstance):
client.restart()
log = client.read_from_file('/var/log/cloud-init.log')
- assert 'WARNING' not in log
- assert 'Traceback' not in log
+ verify_clean_log(log)
assert 'Failed to load metadata and userdata' not in log
assert ("Getting data from <class 'cloudinit.sources.DataSourceRbxCloud."
"DataSourceRbxCloud'> failed") not in log
diff --git a/tests/integration_tests/bugs/test_gh868.py b/tests/integration_tests/bugs/test_gh868.py
index 838efca6..73c03451 100644
--- a/tests/integration_tests/bugs/test_gh868.py
+++ b/tests/integration_tests/bugs/test_gh868.py
@@ -1,6 +1,8 @@
"""Ensure no Traceback when 'chef_license' is set"""
import pytest
+
from tests.integration_tests.instances import IntegrationInstance
+from tests.integration_tests.util import verify_clean_log
USERDATA = """\
@@ -17,4 +19,4 @@ chef:
@pytest.mark.user_data(USERDATA)
def test_chef_license(client: IntegrationInstance):
log = client.read_from_file('/var/log/cloud-init.log')
- assert 'Traceback' not in log
+ verify_clean_log(log)
diff --git a/tests/integration_tests/bugs/test_lp1813396.py b/tests/integration_tests/bugs/test_lp1813396.py
index 68b96b1d..27d41c2b 100644
--- a/tests/integration_tests/bugs/test_lp1813396.py
+++ b/tests/integration_tests/bugs/test_lp1813396.py
@@ -29,6 +29,5 @@ def test_gpg_no_tty(client: IntegrationInstance):
"'--keyserver=keyserver.ubuntu.com', '--recv-keys', 'E4D304DF'] "
"with allowed return codes [0] (shell=False, capture=True)",
"Imported key 'E4D304DF' from keyserver 'keyserver.ubuntu.com'",
- "finish: modules-config/config-apt-configure: SUCCESS",
]
verify_ordered_items_in_text(to_verify, log)
diff --git a/tests/integration_tests/bugs/test_lp1835584.py b/tests/integration_tests/bugs/test_lp1835584.py
index 660d2a2a..732f2179 100644
--- a/tests/integration_tests/bugs/test_lp1835584.py
+++ b/tests/integration_tests/bugs/test_lp1835584.py
@@ -59,6 +59,9 @@ def _check_iid_insensitive_across_kernel_upgrade(
result = instance.execute("apt-get install linux-azure --assume-yes")
if not result.ok:
pytest.fail("Unable to install linux-azure kernel: {}".format(result))
+ # Remove ubuntu-azure-fips metapkg which mandates FIPS-flavour kernel
+ result = instance.execute("ua disable fips --assume-yes")
+ assert result.ok, "Unable to disable fips: {}".format(result)
instance.restart()
new_kernel = instance.execute("uname -r").strip()
assert orig_kernel != new_kernel
diff --git a/tests/integration_tests/bugs/test_lp1886531.py b/tests/integration_tests/bugs/test_lp1886531.py
index 058ea8bb..6dd61222 100644
--- a/tests/integration_tests/bugs/test_lp1886531.py
+++ b/tests/integration_tests/bugs/test_lp1886531.py
@@ -11,6 +11,8 @@ https://bugs.launchpad.net/ubuntu/+source/cloud-init/+bug/1886531
"""
import pytest
+from tests.integration_tests.util import verify_clean_log
+
USER_DATA = """\
#cloud-config
@@ -24,4 +26,4 @@ class TestLp1886531:
@pytest.mark.user_data(USER_DATA)
def test_lp1886531(self, client):
log_content = client.read_from_file("/var/log/cloud-init.log")
- assert "WARNING" not in log_content
+ verify_clean_log(log_content)
diff --git a/tests/integration_tests/bugs/test_lp1898997.py b/tests/integration_tests/bugs/test_lp1898997.py
index bde93d06..909bc690 100644
--- a/tests/integration_tests/bugs/test_lp1898997.py
+++ b/tests/integration_tests/bugs/test_lp1898997.py
@@ -10,7 +10,9 @@ network configuration, and confirms that the bridge can be used to ping the
default gateway.
"""
import pytest
+
from tests.integration_tests import random_mac_address
+from tests.integration_tests.util import verify_clean_log
MAC_ADDRESS = random_mac_address()
@@ -59,7 +61,7 @@ class TestInterfaceListingWithOpenvSwitch:
cloudinit_output = client.read_from_file("/var/log/cloud-init.log")
# Confirm that the network configuration was applied successfully
- assert "WARN" not in cloudinit_output
+ verify_clean_log(cloudinit_output)
# Confirm that the applied network config created the OVS bridge
assert "ovs-br" in client.execute("ip addr")
diff --git a/tests/integration_tests/instances.py b/tests/integration_tests/instances.py
index 63e0e630..8f66bf43 100644
--- a/tests/integration_tests/instances.py
+++ b/tests/integration_tests/instances.py
@@ -9,6 +9,7 @@ from pycloudlib.instance import BaseInstance
from pycloudlib.result import Result
from tests.integration_tests import integration_settings
+from tests.integration_tests.util import retry
try:
from typing import TYPE_CHECKING
@@ -142,26 +143,31 @@ class IntegrationInstance:
snapshot_id = self.snapshot()
self.cloud.snapshot_id = snapshot_id
+ # assert with retry because we can compete with apt already running in the
+ # background and get: E: Could not get lock /var/lib/apt/lists/lock - open
+ # (11: Resource temporarily unavailable)
+
+ @retry(tries=30, delay=1)
def install_proposed_image(self):
log.info('Installing proposed image')
- remote_script = (
+ assert self.execute(
'echo deb "http://archive.ubuntu.com/ubuntu '
- '$(lsb_release -sc)-proposed main" | '
- 'tee /etc/apt/sources.list.d/proposed.list\n'
- 'apt-get update -q\n'
- 'apt-get install -qy cloud-init'
- )
- self.execute(remote_script)
+ '$(lsb_release -sc)-proposed main" >> '
+ '/etc/apt/sources.list.d/proposed.list'
+ ).ok
+ assert self.execute('apt-get update -q').ok
+ assert self.execute('apt-get install -qy cloud-init').ok
+ @retry(tries=30, delay=1)
def install_ppa(self):
log.info('Installing PPA')
- remote_script = (
- 'add-apt-repository {repo} -y && '
- 'apt-get update -q && '
- 'apt-get install -qy cloud-init'
- ).format(repo=self.settings.CLOUD_INIT_SOURCE)
- self.execute(remote_script)
+ assert self.execute('add-apt-repository {} -y'.format(
+ self.settings.CLOUD_INIT_SOURCE)
+ ).ok
+ assert self.execute('apt-get update -q').ok
+ assert self.execute('apt-get install -qy cloud-init').ok
+ @retry(tries=30, delay=1)
def install_deb(self):
log.info('Installing deb package')
deb_path = integration_settings.CLOUD_INIT_SOURCE
@@ -170,13 +176,13 @@ class IntegrationInstance:
self.push_file(
local_path=integration_settings.CLOUD_INIT_SOURCE,
remote_path=remote_path)
- remote_script = 'dpkg -i {path}'.format(path=remote_path)
- self.execute(remote_script)
+ assert self.execute('dpkg -i {path}'.format(path=remote_path)).ok
+ @retry(tries=30, delay=1)
def upgrade_cloud_init(self):
log.info('Upgrading cloud-init to latest version in archive')
- self.execute("apt-get update -q")
- self.execute("apt-get install -qy cloud-init")
+ assert self.execute("apt-get update -q").ok
+ assert self.execute("apt-get install -qy cloud-init").ok
def __enter__(self):
return self
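
[Editor's note] The @retry decorator applied to the install methods above is imported from tests.integration_tests.util, but its implementation is not part of this diff. A minimal sketch of a decorator compatible with the @retry(tries=30, delay=1) call sites, assuming it simply re-runs the wrapped function until its assertions pass:

import functools
import time


def retry(*, tries: int = 30, delay: int = 1):
    """Hypothetical sketch: re-run the wrapped function until it stops
    raising, sleeping `delay` seconds between attempts; re-raise the
    last error once `tries` attempts are exhausted."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            last_error = None
            for _ in range(tries):
                try:
                    return func(*args, **kwargs)
                except Exception as e:  # e.g. AssertionError from apt lock contention
                    last_error = e
                    time.sleep(delay)
            raise last_error
        return wrapper
    return decorator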
diff --git a/tests/integration_tests/modules/test_combined.py b/tests/integration_tests/modules/test_combined.py
index 27f3c074..9cd1648a 100644
--- a/tests/integration_tests/modules/test_combined.py
+++ b/tests/integration_tests/modules/test_combined.py
@@ -8,14 +8,15 @@ here.
import json
import pytest
import re
-from datetime import date
from tests.integration_tests.clouds import ImageSpecification
from tests.integration_tests.instances import IntegrationInstance
-from tests.integration_tests.util import verify_ordered_items_in_text
+from tests.integration_tests.util import (
+ verify_clean_log,
+ verify_ordered_items_in_text,
+)
USER_DATA = """\
-## template: jinja
#cloud-config
apt:
primary:
@@ -33,8 +34,7 @@ locale_configfile: /etc/default/locale
ntp:
servers: ['ntp.ubuntu.com']
runcmd:
- - echo {{ds.meta_data.local_hostname}} > /var/tmp/runcmd_output
- - echo {{merged_cfg.def_log_file}} >> /var/tmp/runcmd_output
+ - echo 'hello world' > /var/tmp/runcmd_output
"""
@@ -44,11 +44,17 @@ class TestCombined:
def test_final_message(self, class_client: IntegrationInstance):
"""Test that final_message module works as expected.
- Also tests LP 1511485: final_message is silent
+ Also tests LP 1511485: final_message is silent.
+
+ It's possible that if this test is run within a minute or so of
+ midnight that we'll see a failure because the day in the logs
+ is different from the day specified in the test definition.
"""
client = class_client
log = client.read_from_file('/var/log/cloud-init.log')
- today = date.today().strftime('%a, %d %b %Y')
+ # Get date on host rather than locally as our host could be in a
+ # wildly different timezone (or more likely recording UTC)
+ today = client.execute('date "+%a, %d %b %Y"')
expected = (
'This is my final message!\n'
r'\d+\.\d+.*\n'
@@ -96,21 +102,10 @@ class TestCombined:
'en_US.UTF-8'
], locale_gen)
- def test_runcmd_with_variable_substitution(
- self, class_client: IntegrationInstance
- ):
- """Test runcmd, while including jinja substitution.
-
- Ensure we can also substitue variables from instance-data-sensitive
- LP: #1931392
- """
+ def test_runcmd(self, class_client: IntegrationInstance):
+ """Test runcmd works as expected"""
client = class_client
- expected = [
- client.execute('hostname').stdout.strip(),
- '/var/log/cloud-init.log',
- ]
- output = client.read_from_file('/var/tmp/runcmd_output')
- verify_ordered_items_in_text(expected, output)
+ assert 'hello world' == client.read_from_file('/var/tmp/runcmd_output')
def test_no_problems(self, class_client: IntegrationInstance):
"""Test no errors, warnings, or tracebacks"""
@@ -124,8 +119,7 @@ class TestCombined:
assert result_json['errors'] == []
log = client.read_from_file('/var/log/cloud-init.log')
- assert 'WARN' not in log
- assert 'Traceback' not in log
+ verify_clean_log(log)
def _check_common_metadata(self, data):
assert data['base64_encoded_keys'] == []
@@ -171,8 +165,10 @@ class TestCombined:
v1_data = data['v1']
assert v1_data['cloud_name'] == 'unknown'
assert v1_data['platform'] == 'lxd'
- assert v1_data['subplatform'] == (
- 'seed-dir (/var/lib/cloud/seed/nocloud-net)')
+ assert any([
+ '/var/lib/cloud/seed/nocloud-net' in v1_data['subplatform'],
+ '/dev/sr0' in v1_data['subplatform']
+ ])
assert v1_data['availability_zone'] is None
assert v1_data['instance_id'] == client.instance.name
assert v1_data['local_hostname'] == client.instance.name
diff --git a/tests/integration_tests/modules/test_disk_setup.py b/tests/integration_tests/modules/test_disk_setup.py
index 1fc96c52..9c9edc46 100644
--- a/tests/integration_tests/modules/test_disk_setup.py
+++ b/tests/integration_tests/modules/test_disk_setup.py
@@ -6,6 +6,7 @@ from pycloudlib.lxd.instance import LXDInstance
from cloudinit.subp import subp
from tests.integration_tests.instances import IntegrationInstance
+from tests.integration_tests.util import verify_clean_log
DISK_PATH = '/tmp/test_disk_setup_{}'.format(uuid4())
@@ -59,8 +60,7 @@ class TestDeviceAliases:
) in log
assert 'changed my_alias.1 => /dev/sdb1' in log
assert 'changed my_alias.2 => /dev/sdb2' in log
- assert 'WARN' not in log
- assert 'Traceback' not in log
+ verify_clean_log(log)
lsblk = json.loads(client.execute('lsblk --json'))
sdb = [x for x in lsblk['blockdevices'] if x['name'] == 'sdb'][0]
@@ -120,8 +120,7 @@ class TestPartProbeAvailability:
"""
def _verify_first_disk_setup(self, client, log):
- assert 'Traceback' not in log
- assert 'WARN' not in log
+ verify_clean_log(log)
lsblk = json.loads(client.execute('lsblk --json'))
sdb = [x for x in lsblk['blockdevices'] if x['name'] == 'sdb'][0]
assert len(sdb['children']) == 2
@@ -167,8 +166,7 @@ class TestPartProbeAvailability:
client.restart()
# Assert new setup works as expected
- assert 'Traceback' not in log
- assert 'WARN' not in log
+ verify_clean_log(log)
lsblk = json.loads(client.execute('lsblk --json'))
sdb = [x for x in lsblk['blockdevices'] if x['name'] == 'sdb'][0]
diff --git a/tests/integration_tests/modules/test_hotplug.py b/tests/integration_tests/modules/test_hotplug.py
index a42d1c8c..88cd8c16 100644
--- a/tests/integration_tests/modules/test_hotplug.py
+++ b/tests/integration_tests/modules/test_hotplug.py
@@ -40,6 +40,10 @@ def _get_ip_addr(client):
@pytest.mark.openstack
+# On Bionic, we traceback when attempting to detect the hotplugged
+# device in the updated metadata. This is because Bionic is specifically
+# configured not to provide network metadata.
+@pytest.mark.not_bionic
@pytest.mark.user_data(USER_DATA)
def test_hotplug_add_remove(client: IntegrationInstance):
ips_before = _get_ip_addr(client)
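
[Editor's note] The not_bionic marker used above is a custom pytest marker. A minimal, hypothetical sketch of how such release-based skipping could be wired up in the integration tests' conftest.py (the real wiring may differ):

import pytest


def pytest_runtest_setup(item):
    # Hypothetical: in the real suite the release name would be derived
    # from the image under test, not hard-coded.
    current_release = 'bionic'
    if item.get_closest_marker('not_{}'.format(current_release)):
        pytest.skip('Test does not apply to {}'.format(current_release))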
diff --git a/tests/integration_tests/modules/test_jinja_templating.py b/tests/integration_tests/modules/test_jinja_templating.py
new file mode 100644
index 00000000..35b8ee2d
--- /dev/null
+++ b/tests/integration_tests/modules/test_jinja_templating.py
@@ -0,0 +1,30 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import pytest
+
+from tests.integration_tests.instances import IntegrationInstance
+from tests.integration_tests.util import verify_ordered_items_in_text
+
+
+USER_DATA = """\
+## template: jinja
+#cloud-config
+runcmd:
+ - echo {{v1.local_hostname}} > /var/tmp/runcmd_output
+ - echo {{merged_cfg._doc}} >> /var/tmp/runcmd_output
+"""
+
+
+@pytest.mark.user_data(USER_DATA)
+def test_runcmd_with_variable_substitution(client: IntegrationInstance):
+ """Test jinja substitution.
+
+ Ensure we can also substitute variables from instance-data-sensitive
+ LP: #1931392
+ """
+ expected = [
+ client.execute('hostname').stdout.strip(),
+ ('Merged cloud-init system config from /etc/cloud/cloud.cfg and '
+ '/etc/cloud/cloud.cfg.d/')
+ ]
+ output = client.read_from_file('/var/tmp/runcmd_output')
+ verify_ordered_items_in_text(expected, output)
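
[Editor's note] For context, the "## template: jinja" header tells cloud-init to render the user-data against the collected instance data before applying it. A rough stand-alone approximation of that substitution, assuming the jinja2 package and the /run/cloud-init/instance-data-sensitive.json layout (cloud-init's real renderer also handles undefined variables and file permissions):

import json

from jinja2 import Template

with open('/run/cloud-init/instance-data-sensitive.json') as f:
    instance_data = json.load(f)

line = 'echo {{v1.local_hostname}} > /var/tmp/runcmd_output'
print(Template(line).render(**instance_data))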
diff --git a/tests/integration_tests/modules/test_lxd_bridge.py b/tests/integration_tests/modules/test_lxd_bridge.py
index cbf11179..65dce3c7 100644
--- a/tests/integration_tests/modules/test_lxd_bridge.py
+++ b/tests/integration_tests/modules/test_lxd_bridge.py
@@ -6,6 +6,8 @@
import pytest
import yaml
+from tests.integration_tests.util import verify_clean_log
+
USER_DATA = """\
#cloud-config
@@ -38,7 +40,7 @@ class TestLxdBridge:
def test_bridge(self, class_client):
"""Check that the given bridge is configured"""
cloud_init_log = class_client.read_from_file("/var/log/cloud-init.log")
- assert "WARN" not in cloud_init_log
+ verify_clean_log(cloud_init_log)
# The bridge should exist
assert class_client.execute("ip addr show lxdbr0")
diff --git a/tests/integration_tests/modules/test_ntp_servers.py b/tests/integration_tests/modules/test_ntp_servers.py
index 7a799139..59241faa 100644
--- a/tests/integration_tests/modules/test_ntp_servers.py
+++ b/tests/integration_tests/modules/test_ntp_servers.py
@@ -78,6 +78,8 @@ CHRONY_DATA = """\
ntp:
enabled: true
ntp_client: chrony
+ servers:
+ - 172.16.15.14
"""
@@ -89,7 +91,7 @@ def test_chrony(client: IntegrationInstance):
else:
chrony_conf = '/etc/chrony/chrony.conf'
contents = client.read_from_file(chrony_conf)
- assert '.pool.ntp.org' in contents
+ assert 'server 172.16.15.14' in contents
TIMESYNCD_DATA = """\
@@ -97,6 +99,8 @@ TIMESYNCD_DATA = """\
ntp:
enabled: true
ntp_client: systemd-timesyncd
+ servers:
+ - 172.16.15.14
"""
@@ -106,7 +110,7 @@ def test_timesyncd(client: IntegrationInstance):
contents = client.read_from_file(
'/etc/systemd/timesyncd.conf.d/cloud-init.conf'
)
- assert '.pool.ntp.org' in contents
+ assert 'NTP=172.16.15.14' in contents
EMPTY_NTP = """\
diff --git a/tests/integration_tests/modules/test_set_password.py b/tests/integration_tests/modules/test_set_password.py
index d7cf91a5..ac9db19d 100644
--- a/tests/integration_tests/modules/test_set_password.py
+++ b/tests/integration_tests/modules/test_set_password.py
@@ -13,6 +13,8 @@ import crypt
import pytest
import yaml
+from tests.integration_tests.util import retry
+
COMMON_USER_DATA = """\
#cloud-config
@@ -129,6 +131,7 @@ class Mixin:
assert "dick:" not in cloud_init_output
assert "harry:" not in cloud_init_output
+ @retry(tries=30, delay=1)
def test_random_passwords_emitted_to_serial_console(self, class_client):
"""We should emit passwords to the serial console. (LP: #1918303)"""
try:
@@ -137,6 +140,15 @@ class Mixin:
# Assume that an exception here means that we can't use the console
# log
pytest.skip("NotImplementedError when requesting console log")
+ return
+ if console_log.lower() == 'no console output':
+ # This test retries because we might not have the full console log
+ # on the first fetch. However, if we have no console output
+ # at all, we don't want to keep retrying as that would trigger
+ # another 5 minute wait on the pycloudlib side, which could
+ # leave us waiting for a couple hours
+ pytest.fail('no console output')
+ return
assert "dick:" in console_log
assert "harry:" in console_log
diff --git a/tests/integration_tests/modules/test_ssh_keysfile.py b/tests/integration_tests/modules/test_ssh_keysfile.py
index 5c720578..b39454e6 100644
--- a/tests/integration_tests/modules/test_ssh_keysfile.py
+++ b/tests/integration_tests/modules/test_ssh_keysfile.py
@@ -38,9 +38,15 @@ def common_verify(client, expected_keys):
# Ensure key is in the key file
contents = client.read_from_file(filename)
if user in ['ubuntu', 'root']:
- # Our personal public key gets added by pycloudlib
lines = contents.split('\n')
- assert len(lines) == 2
+ if user == 'root':
+ # Our personal public key gets added by pycloudlib in
+ # addition to the default `ssh_authorized_keys`
+ assert len(lines) == 2
+ else:
+ # Clouds will insert the keys we've added to our accounts
+ # or for our launches
+ assert len(lines) >= 2
assert keys.public_key.strip() in contents
else:
assert contents.strip() == keys.public_key.strip()
diff --git a/tests/integration_tests/modules/test_user_events.py b/tests/integration_tests/modules/test_user_events.py
index a45cad72..ee8f05ae 100644
--- a/tests/integration_tests/modules/test_user_events.py
+++ b/tests/integration_tests/modules/test_user_events.py
@@ -31,9 +31,12 @@ def _add_dummy_bridge_to_netplan(client: IntegrationInstance):
@pytest.mark.gce
@pytest.mark.oci
@pytest.mark.openstack
+@pytest.mark.azure
@pytest.mark.not_xenial
def test_boot_event_disabled_by_default(client: IntegrationInstance):
log = client.read_from_file('/var/log/cloud-init.log')
+ if 'network config is disabled' in log:
+ pytest.skip("network config disabled. Test doesn't apply")
assert 'Applying network configuration' in log
assert 'dummy0' not in client.execute('ls /sys/class/net')
@@ -43,6 +46,12 @@ def test_boot_event_disabled_by_default(client: IntegrationInstance):
client.restart()
log2 = client.read_from_file('/var/log/cloud-init.log')
+ if 'cache invalid in datasource' in log2:
+ # Invalid cache will get cleared, meaning we'll create a new
+ # "instance" and apply networking config, so events aren't
+ # really relevant here
+ pytest.skip("Test only valid for existing instances")
+
# We attempt to apply network config twice on every boot.
# Ensure neither time works.
assert 2 == len(
@@ -62,25 +71,27 @@ def test_boot_event_disabled_by_default(client: IntegrationInstance):
def _test_network_config_applied_on_reboot(client: IntegrationInstance):
log = client.read_from_file('/var/log/cloud-init.log')
+ if 'network config is disabled' in log:
+ pytest.skip("network config disabled. Test doesn't apply")
assert 'Applying network configuration' in log
assert 'dummy0' not in client.execute('ls /sys/class/net')
_add_dummy_bridge_to_netplan(client)
client.execute('rm /var/log/cloud-init.log')
client.restart()
+
log = client.read_from_file('/var/log/cloud-init.log')
+ if 'cache invalid in datasource' in log:
+ # Invalid cache will get cleared, meaning we'll create a new
+ # "instance" and apply networking config, so events aren't
+ # really relevant here
+ pytest.skip("Test only valid for existing instances")
assert 'Event Allowed: scope=network EventType=boot' in log
assert 'Applying network configuration' in log
assert 'dummy0' not in client.execute('ls /sys/class/net')
-@pytest.mark.azure
-@pytest.mark.not_xenial
-def test_boot_event_enabled_by_default(client: IntegrationInstance):
- _test_network_config_applied_on_reboot(client)
-
-
USER_DATA = """\
#cloud-config
updates:
diff --git a/tests/integration_tests/modules/test_version_change.py b/tests/integration_tests/modules/test_version_change.py
index 4e9ab63f..ffea794a 100644
--- a/tests/integration_tests/modules/test_version_change.py
+++ b/tests/integration_tests/modules/test_version_change.py
@@ -1,7 +1,7 @@
from pathlib import Path
from tests.integration_tests.instances import IntegrationInstance
-from tests.integration_tests.util import ASSETS_DIR
+from tests.integration_tests.util import ASSETS_DIR, verify_clean_log
PICKLE_PATH = Path('/var/lib/cloud/instance/obj.pkl')
@@ -10,8 +10,7 @@ TEST_PICKLE = ASSETS_DIR / 'test_version_change.pkl'
def _assert_no_pickle_problems(log):
assert 'Failed loading pickled blob' not in log
- assert 'Traceback' not in log
- assert 'WARN' not in log
+ verify_clean_log(log)
def test_reboot_without_version_change(client: IntegrationInstance):
@@ -30,7 +29,13 @@ def test_reboot_without_version_change(client: IntegrationInstance):
client.push_file(TEST_PICKLE, PICKLE_PATH)
client.restart()
log = client.read_from_file('/var/log/cloud-init.log')
- assert 'Failed loading pickled blob from {}'.format(PICKLE_PATH) in log
+
+ # no cache found is an "expected" upgrade error, and
+ # "Failed" means we're unable to load the pickle
+ assert any([
+ 'Failed loading pickled blob from {}'.format(PICKLE_PATH) in log,
+ 'no cache found' in log
+ ])
def test_cache_purged_on_version_change(client: IntegrationInstance):
@@ -48,9 +53,13 @@ def test_log_message_on_missing_version_file(client: IntegrationInstance):
# Start by pushing a pickle so we can see the log message
client.push_file(TEST_PICKLE, PICKLE_PATH)
client.execute("rm /var/lib/cloud/data/python-version")
+ client.execute("rm /var/log/cloud-init.log")
client.restart()
log = client.read_from_file('/var/log/cloud-init.log')
- assert (
- 'Writing python-version file. '
- 'Cache compatibility status is currently unknown.'
- ) in log
+ if 'no cache found' not in log:
+ # We don't expect the python version file to exist if we have no
+ # pre-existing cache
+ assert (
+ 'Writing python-version file. '
+ 'Cache compatibility status is currently unknown.'
+ ) in log
diff --git a/tests/integration_tests/test_upgrade.py b/tests/integration_tests/test_upgrade.py
index 376fcc96..e90a5f9d 100644
--- a/tests/integration_tests/test_upgrade.py
+++ b/tests/integration_tests/test_upgrade.py
@@ -5,6 +5,7 @@ import pytest
from tests.integration_tests.clouds import ImageSpecification, IntegrationCloud
from tests.integration_tests.conftest import get_validated_source
+from tests.integration_tests.util import verify_clean_log
LOG = logging.getLogger('integration_testing.test_upgrade')
@@ -73,11 +74,15 @@ def test_clean_boot_of_upgraded_package(session_cloud: IntegrationCloud):
pre_cloud_blame = instance.execute('cloud-init analyze blame')
# Ensure no issues pre-upgrade
+ log = instance.read_from_file('/var/log/cloud-init.log')
assert not json.loads(pre_result)['v1']['errors']
- log = instance.read_from_file('/var/log/cloud-init.log')
- assert 'Traceback' not in log
- assert 'WARN' not in log
+ try:
+ verify_clean_log(log)
+ except AssertionError:
+ LOG.warning(
+ 'There were errors/warnings/tracebacks pre-upgrade. '
+ 'Any failures may be due to pre-upgrade problem')
# Upgrade and reboot
instance.install_new_cloud_init(source, take_snapshot=False)
@@ -105,13 +110,21 @@ def test_clean_boot_of_upgraded_package(session_cloud: IntegrationCloud):
assert not json.loads(pre_result)['v1']['errors']
log = instance.read_from_file('/var/log/cloud-init.log')
- assert 'Traceback' not in log
- assert 'WARN' not in log
+ verify_clean_log(log)
# Ensure important things stayed the same
assert pre_hostname == post_hostname
assert pre_cloud_id == post_cloud_id
- assert pre_result == post_result
+ try:
+ assert pre_result == post_result
+ except AssertionError:
+ if instance.settings.PLATFORM == 'azure':
+ pre_json = json.loads(pre_result)
+ post_json = json.loads(post_result)
+ assert pre_json['v1']['datasource'].startswith(
+ 'DataSourceAzure')
+ assert post_json['v1']['datasource'].startswith(
+ 'DataSourceAzure')
assert pre_network == post_network
# Calculate and log all the boot numbers
diff --git a/tests/integration_tests/util.py b/tests/integration_tests/util.py
index 80430eab..407096cd 100644
--- a/tests/integration_tests/util.py
+++ b/tests/integration_tests/util.py
@@ -28,6 +28,40 @@ def verify_ordered_items_in_text(to_verify: list, text: str):
assert index > -1, "Expected item not found: '{}'".format(item)
+def verify_clean_log(log):
+ """Assert no unexpected tracebacks or warnings in logs"""
+ warning_count = log.count('WARN')
+ expected_warnings = 0
+ traceback_count = log.count('Traceback')
+ expected_tracebacks = 0
+
+ warning_texts = [
+ # Consistently on all Azure launches:
+ # azure.py[WARNING]: No lease found; using default endpoint
+ 'No lease found; using default endpoint'
+ ]
+ traceback_texts = []
+ if 'oracle' in log:
+ # LP: #1842752
+ lease_exists_text = 'Stderr: RTNETLINK answers: File exists'
+ warning_texts.append(lease_exists_text)
+ traceback_texts.append(lease_exists_text)
+ # LP: #1833446
+ fetch_error_text = (
+ 'UrlError: 404 Client Error: Not Found for url: '
+ 'http://169.254.169.254/latest/meta-data/')
+ warning_texts.append(fetch_error_text)
+ traceback_texts.append(fetch_error_text)
+
+ for warning_text in warning_texts:
+ expected_warnings += log.count(warning_text)
+ for traceback_text in traceback_texts:
+ expected_tracebacks += log.count(traceback_text)
+
+ assert warning_count == expected_warnings
+ assert traceback_count == expected_tracebacks
+
+
@contextmanager
def emit_dots_on_travis():
"""emit a dot every 60 seconds if running on Travis.