summaryrefslogtreecommitdiff
path: root/cloudinit/stages.py
diff options
context:
space:
mode:
Diffstat (limited to 'cloudinit/stages.py')
-rw-r--r--cloudinit/stages.py89
1 file changed, 51 insertions(+), 38 deletions(-)
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index d508897b..64da3b5b 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -140,7 +140,7 @@ class Init(object):
]
return initial_dirs
- def purge_cache(self, rm_instance_lnk=True):
+ def purge_cache(self, rm_instance_lnk=False):
rm_list = []
rm_list.append(self.paths.boot_finished)
if rm_instance_lnk:
@@ -193,40 +193,12 @@ class Init(object):
# We try to restore from a current link and static path
# by using the instance link, if purge_cache was called
# the file wont exist.
- pickled_fn = self.paths.get_ipath_cur('obj_pkl')
- pickle_contents = None
- try:
- pickle_contents = util.load_file(pickled_fn, decode=False)
- except Exception as e:
- if os.path.isfile(pickled_fn):
- LOG.warn("failed loading pickle in %s: %s" % (pickled_fn, e))
- pass
-
- # This is expected so just return nothing
- # successfully loaded...
- if not pickle_contents:
- return None
- try:
- return pickle.loads(pickle_contents)
- except Exception:
- util.logexc(LOG, "Failed loading pickled blob from %s", pickled_fn)
- return None
+ return _pkl_load(self.paths.get_ipath_cur('obj_pkl'))
def _write_to_cache(self):
if self.datasource is NULL_DATA_SOURCE:
return False
- pickled_fn = self.paths.get_ipath_cur("obj_pkl")
- try:
- pk_contents = pickle.dumps(self.datasource)
- except Exception:
- util.logexc(LOG, "Failed pickling datasource %s", self.datasource)
- return False
- try:
- util.write_file(pickled_fn, pk_contents, omode="wb", mode=0o400)
- except Exception:
- util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)
- return False
- return True
+ return _pkl_store(self.datasource, self.paths.get_ipath_cur("obj_pkl"))
def _get_datasources(self):
# Any config provided???
@@ -238,21 +210,29 @@ class Init(object):
cfg_list = self.cfg.get('datasource_list') or []
return (cfg_list, pkg_list)
- def _get_data_source(self):
+ def _get_data_source(self, existing):
if self.datasource is not NULL_DATA_SOURCE:
return self.datasource
with events.ReportEventStack(
name="check-cache",
- description="attempting to read from cache",
+ description="attempting to read from cache [%s]" % existing,
parent=self.reporter) as myrep:
ds = self._restore_from_cache()
- if ds:
- LOG.debug("Restored from cache, datasource: %s", ds)
- myrep.description = "restored from cache"
+ if ds and existing == "trust":
+ myrep.description = "restored from cache: %s" % ds
+ elif ds and existing == "check":
+ if hasattr(ds, 'check_instance_id') and ds.check_instance_id():
+ myrep.description = "restored from checked cache: %s" % ds
+ else:
+ myrep.description = "cache invalid in datasource: %s" % ds
+ ds = None
else:
myrep.description = "no cache found"
+ LOG.debug(myrep.description)
+
if not ds:
+ util.del_file(self.paths.instance_link)
(cfg_list, pkg_list) = self._get_datasources()
# Deep copy so that user-data handlers can not modify
# (which will affect user-data handlers down the line...)
@@ -332,8 +312,8 @@ class Init(object):
self._reset()
return iid
- def fetch(self):
- return self._get_data_source()
+ def fetch(self, existing="check"):
+ return self._get_data_source(existing=existing)
def instancify(self):
return self._reflect_cur_instance()
@@ -799,3 +779,36 @@ def fetch_base_config():
base_cfgs.append(default_cfg)
return util.mergemanydict(base_cfgs)
+
+
+def _pkl_store(obj, fname):
+ try:
+ pk_contents = pickle.dumps(obj)
+ except Exception:
+ util.logexc(LOG, "Failed pickling datasource %s", obj)
+ return False
+ try:
+ util.write_file(fname, pk_contents, omode="wb", mode=0o400)
+ except Exception:
+ util.logexc(LOG, "Failed pickling datasource to %s", fname)
+ return False
+ return True
+
+
+def _pkl_load(fname):
+ pickle_contents = None
+ try:
+ pickle_contents = util.load_file(fname, decode=False)
+ except Exception as e:
+ if os.path.isfile(fname):
+ LOG.warn("failed loading pickle in %s: %s" % (fname, e))
+ pass
+
+ # This is allowed so just return nothing successfully loaded...
+ if not pickle_contents:
+ return None
+ try:
+ return pickle.loads(pickle_contents)
+ except Exception:
+ util.logexc(LOG, "Failed loading pickled blob from %s", fname)
+ return None