author    James Falcon <therealfalcon@gmail.com>   2021-07-15 17:52:21 -0500
committer GitHub <noreply@github.com>              2021-07-15 16:52:21 -0600
commit    eacb0353803263934aa2ac827c37e461c87cb107 (patch)
tree      2fade7310e7da0cbda78402d7838ae129866cbbf /cloudinit
parent    881be6e780b258e98d1ecba4777ba3e171d5760d (diff)
Fix MIME policy failure on python version upgrade (#934)
Python 3.6 added a new `policy` attribute to `MIMEMultipart`. MIMEMultipart may be part of the cached object pickle of a datasource, so upgrading from an older Python to 3.6+ leaves the datasource invalid after pickle load. This commit uses the upgrade framework to attempt to access the MIME message and fail early (thus discarding the cache) if it cannot. Commit 78e89b03 should fix this issue more generally.
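For context, a minimal sketch of the failure mode (not part of this commit): a MIMEMultipart pickled under Python < 3.6 has no 'policy' entry in its instance dict, so flattening the unpickled object with str() on 3.6+ raises AttributeError. The del below simulates an object produced by an older Python.

# Illustrative sketch only: simulate a MIMEMultipart that was pickled by
# Python < 3.6 (and therefore never stored a 'policy' attribute).
import pickle
from email.mime.multipart import MIMEMultipart

blob = pickle.dumps(MIMEMultipart())
restored = pickle.loads(blob)
del restored.policy        # pre-3.6 pickles carry no 'policy' in __dict__

try:
    str(restored)          # flattening the message reads self.policy
except AttributeError as e:
    # This is the error the new check below turns into
    # DatasourceUnpickleUserDataError so the stale cache is discarded.
    print("cache would be discarded:", e)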
Diffstat (limited to 'cloudinit')
-rw-r--r--  cloudinit/sources/__init__.py  | 18
-rw-r--r--  cloudinit/stages.py            |  2
2 files changed, 20 insertions(+), 0 deletions(-)
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index a07c4b4f..9d25b0ee 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -75,6 +75,10 @@ NetworkConfigSource = namedtuple('NetworkConfigSource',
                                  _NETCFG_SOURCE_NAMES)(*_NETCFG_SOURCE_NAMES)


+class DatasourceUnpickleUserDataError(Exception):
+    """Raised when userdata is unable to be unpickled due to python upgrades"""
+
+
 class DataSourceNotFoundException(Exception):
     pass
@@ -239,6 +243,20 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
             self.vendordata2 = None
         if not hasattr(self, 'vendordata2_raw'):
             self.vendordata2_raw = None
+        if hasattr(self, 'userdata') and self.userdata is not None:
+            # If userdata stores MIME data, on < python3.6 it will be
+            # missing the 'policy' attribute that exists on >=python3.6.
+            # Calling str() on the userdata will attempt to access this
+            # policy attribute. This will raise an exception, causing
+            # the pickle load to fail, so cloud-init will discard the cache
+            try:
+                str(self.userdata)
+            except AttributeError as e:
+                LOG.debug(
+                    "Unable to unpickle datasource: %s."
+                    " Ignoring current cache.", e
+                )
+                raise DatasourceUnpickleUserDataError() from e

     def __str__(self):
         return type_utils.obj_name(self)
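The "upgrade framework" mentioned in the commit message is the pickle-upgrade hook provided by CloudInitPickleMixin (visible in the hunk header above). As a rough sketch of the general pattern only, not cloud-init's actual classes or signatures: the mixin's __setstate__ restores the instance dict and then calls an _unpickle() hook, so per-class fixups such as the str(self.userdata) probe run at load time and can raise to reject the cached object.

# Simplified stand-in for the pattern; class names and the hook signature
# here are illustrative, not cloud-init's real API.
import pickle


class PickleUpgradeMixin:
    def __setstate__(self, state):
        self.__dict__.update(state)
        self._unpickle()   # subclass hook; may raise to invalidate the cache

    def _unpickle(self):
        pass


class FakeDataSource(PickleUpgradeMixin):
    def __init__(self, userdata):
        self.userdata = userdata

    def _unpickle(self):
        # Probe the cached userdata; an AttributeError would mean the pickle
        # predates the running Python and should be thrown away.
        str(self.userdata)


cached = pickle.dumps(FakeDataSource("hello"))
restored = pickle.loads(cached)    # __setstate__ runs the _unpickle() hook
print(restored.userdata)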
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 3688be2e..06e0d9b1 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -1070,6 +1070,8 @@ def _pkl_load(fname):
         return None
     try:
         return pickle.loads(pickle_contents)
+    except sources.DatasourceUnpickleUserDataError:
+        return None
     except Exception:
         util.logexc(LOG, "Failed loading pickled blob from %s", fname)
         return None