author    zsdc <taras@vyos.io>    2020-09-15 17:05:20 +0300
committer zsdc <taras@vyos.io>    2020-09-15 17:05:20 +0300
commit    7cd260b313267dc7123cb99a75d4555e24909cca (patch)
tree      f57f3db085a724df237ffa64b589c6bb6dd3b28f /cloudinit/sources/__init__.py
parent    1a790ee102fd405e5c3a20a17a69ba0c118ed874 (diff)
parent    948bd9c1fcd08346cf8ec0551d7f6c2b234e896b (diff)
T2117: Cloud-init updated to 20.3
Merged with the 20.3 tag from the upstream Cloud-init repository
Diffstat (limited to 'cloudinit/sources/__init__.py')
-rw-r--r--  cloudinit/sources/__init__.py | 55
1 file changed, 34 insertions(+), 21 deletions(-)
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index dd93cfd8..c4d60fff 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -78,7 +78,6 @@ class DataSourceNotFoundException(Exception):
 class InvalidMetaDataException(Exception):
     """Raised when metadata is broken, unavailable or disabled."""
-    pass


 def process_instance_metadata(metadata, key_path='', sensitive_keys=()):
@@ -89,26 +88,26 @@ def process_instance_metadata(metadata, key_path='', sensitive_keys=()):
     @return Dict copy of processed metadata.
     """
     md_copy = copy.deepcopy(metadata)
-    md_copy['base64_encoded_keys'] = []
-    md_copy['sensitive_keys'] = []
+    base64_encoded_keys = []
+    sens_keys = []
     for key, val in metadata.items():
         if key_path:
             sub_key_path = key_path + '/' + key
         else:
             sub_key_path = key
         if key in sensitive_keys or sub_key_path in sensitive_keys:
-            md_copy['sensitive_keys'].append(sub_key_path)
+            sens_keys.append(sub_key_path)
         if isinstance(val, str) and val.startswith('ci-b64:'):
-            md_copy['base64_encoded_keys'].append(sub_key_path)
+            base64_encoded_keys.append(sub_key_path)
             md_copy[key] = val.replace('ci-b64:', '')
         if isinstance(val, dict):
             return_val = process_instance_metadata(
                 val, sub_key_path, sensitive_keys)
-            md_copy['base64_encoded_keys'].extend(
-                return_val.pop('base64_encoded_keys'))
-            md_copy['sensitive_keys'].extend(
-                return_val.pop('sensitive_keys'))
+            base64_encoded_keys.extend(return_val.pop('base64_encoded_keys'))
+            sens_keys.extend(return_val.pop('sensitive_keys'))
             md_copy[key] = return_val
+    md_copy['base64_encoded_keys'] = sorted(base64_encoded_keys)
+    md_copy['sensitive_keys'] = sorted(sens_keys)
     return md_copy
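
Note on this hunk: the helper now collects the bookkeeping keys in local lists and attaches them, sorted, only once per call, which makes the key ordering in instance-data.json deterministic. A minimal sketch of the resulting behaviour (illustrative input values only; the 'ci-b64:' prefix and key names come from the hunk above):

    # Illustrative sketch, not part of the diff.
    from cloudinit.sources import process_instance_metadata

    metadata = {
        'passwd': 'ci-b64:c2VjcmV0',                       # marked as base64-encoded
        'ds': {'security-credentials': {'token': 'abc'}},  # nested sensitive key
    }
    processed = process_instance_metadata(
        metadata, sensitive_keys=('security-credentials',))
    # processed['base64_encoded_keys'] -> ['passwd']
    # processed['sensitive_keys']      -> ['ds/security-credentials']
    # processed['passwd']              -> 'c2VjcmV0'  (prefix stripped, still encoded)
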
@@ -193,7 +192,7 @@ class DataSource(metaclass=abc.ABCMeta):
     # N-tuple of keypaths or keynames redact from instance-data.json for
     # non-root users
-    sensitive_metadata_keys = ('security-credentials',)
+    sensitive_metadata_keys = ('merged_cfg', 'security-credentials',)

     def __init__(self, sys_cfg, distro, paths, ud_proc=None):
         self.sys_cfg = sys_cfg
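
Note on this hunk: adding 'merged_cfg' to sensitive_metadata_keys means the merged system config (attached to instance-data by a later hunk in this diff) is catalogued as sensitive and therefore redacted from the world-readable instance-data.json. A small sketch of the matching, reusing the helper from the previous hunk (illustrative values only):

    # Illustrative sketch, not part of the diff.
    from cloudinit.sources import process_instance_metadata

    instance_data = {'merged_cfg': {'datasource_list': ['Ec2']}, 'v1': {'region': 'r1'}}
    processed = process_instance_metadata(
        instance_data, sensitive_keys=('merged_cfg', 'security-credentials'))
    # processed['sensitive_keys'] -> ['merged_cfg'], so redact_sensitive_keys()
    # can blank that subtree out of the world-readable instance-data.json.
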
@@ -218,14 +217,15 @@ class DataSource(metaclass=abc.ABCMeta):
     def __str__(self):
         return type_utils.obj_name(self)

-    def _get_standardized_metadata(self):
+    def _get_standardized_metadata(self, instance_data):
         """Return a dictionary of standardized metadata keys."""
         local_hostname = self.get_hostname()
         instance_id = self.get_instance_id()
         availability_zone = self.availability_zone
         # In the event of upgrade from existing cloudinit, pickled datasource
         # will not contain these new class attributes. So we need to recrawl
-        # metadata to discover that content.
+        # metadata to discover that content
+        sysinfo = instance_data["sys_info"]
         return {
             'v1': {
                 '_beta_keys': ['subplatform'],
@@ -233,14 +233,22 @@ class DataSource(metaclass=abc.ABCMeta):
                 'availability_zone': availability_zone,
                 'cloud-name': self.cloud_name,
                 'cloud_name': self.cloud_name,
+                'distro': sysinfo["dist"][0],
+                'distro_version': sysinfo["dist"][1],
+                'distro_release': sysinfo["dist"][2],
                 'platform': self.platform_type,
                 'public_ssh_keys': self.get_public_ssh_keys(),
+                'python_version': sysinfo["python"],
                 'instance-id': instance_id,
                 'instance_id': instance_id,
+                'kernel_release': sysinfo["uname"][2],
                 'local-hostname': local_hostname,
                 'local_hostname': local_hostname,
+                'machine': sysinfo["uname"][4],
                 'region': self.region,
-                'subplatform': self.subplatform}}
+                'subplatform': self.subplatform,
+                'system_platform': sysinfo["platform"],
+                'variant': sysinfo["variant"]}}

     def clear_cached_attrs(self, attr_defaults=()):
         """Reset any cached metadata attributes to datasource defaults.
@@ -299,9 +307,15 @@ class DataSource(metaclass=abc.ABCMeta):
                 ec2_metadata = getattr(self, 'ec2_metadata')
                 if ec2_metadata != UNSET:
                     instance_data['ds']['ec2_metadata'] = ec2_metadata
-        instance_data.update(
-            self._get_standardized_metadata())
         instance_data['ds']['_doc'] = EXPERIMENTAL_TEXT
+        # Add merged cloud.cfg and sys info for jinja templates and cli query
+        instance_data['merged_cfg'] = copy.deepcopy(self.sys_cfg)
+        instance_data['merged_cfg']['_doc'] = (
+            'Merged cloud-init system config from /etc/cloud/cloud.cfg and'
+            ' /etc/cloud/cloud.cfg.d/')
+        instance_data['sys_info'] = util.system_info()
+        instance_data.update(
+            self._get_standardized_metadata(instance_data))
         try:
             # Process content base64encoding unserializable values
             content = util.json_dumps(instance_data)
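
Note on this hunk: the ordering matters, since 'merged_cfg' and 'sys_info' must be attached before _get_standardized_metadata() runs, because the standardized 'v1' keys now read instance_data['sys_info']. A standalone sketch of that pattern with a hypothetical helper name (build_instance_data is not a cloud-init function):

    # Illustrative sketch, not part of the diff.
    import copy

    def build_instance_data(ds_metadata, sys_cfg, system_info):
        data = {'ds': copy.deepcopy(ds_metadata)}
        data['merged_cfg'] = copy.deepcopy(sys_cfg)      # exposed to jinja templates / cli query
        data['merged_cfg']['_doc'] = 'Merged cloud-init system config (see hunk above)'
        data['sys_info'] = system_info                   # must exist before the v1 keys are built
        data['v1'] = {'distro': system_info['dist'][0],  # standardized keys read sys_info
                      'kernel_release': system_info['uname'][2]}
        return data
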
@@ -315,12 +329,12 @@ class DataSource(metaclass=abc.ABCMeta):
         except UnicodeDecodeError as e:
             LOG.warning('Error persisting instance-data.json: %s', str(e))
             return False
-        json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
-        write_json(json_file, processed_data)  # World readable
         json_sensitive_file = os.path.join(self.paths.run_dir,
                                            INSTANCE_JSON_SENSITIVE_FILE)
-        write_json(json_sensitive_file,
-                   redact_sensitive_keys(processed_data), mode=0o600)
+        write_json(json_sensitive_file, processed_data, mode=0o600)
+        json_file = os.path.join(self.paths.run_dir, INSTANCE_JSON_FILE)
+        # World readable
+        write_json(json_file, redact_sensitive_keys(processed_data))
         return True

     def _get_data(self):
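
Note on this hunk: it swaps which copy gets redacted. The root-only instance-data-sensitive.json now receives the full data, and the world-readable instance-data.json receives the redacted copy; previously the redaction was applied to the root-only file while the world-readable file carried everything. A minimal sketch of the new split, assuming redact_sensitive_keys() replaces the values catalogued under 'sensitive_keys' with a placeholder (the write_json below is a simplified stand-in for cloud-init's helper):

    # Illustrative sketch, not part of the diff.
    import json
    import os

    def write_json(path, data, mode=0o644):
        # Simplified stand-in: cloud-init's helper also handles atomic writes.
        with open(path, 'w') as f:
            json.dump(data, f, indent=1, sort_keys=True)
        os.chmod(path, mode)

    def persist(run_dir, processed_data, redact_sensitive_keys):
        # Unredacted copy, readable by root only.
        write_json(os.path.join(run_dir, 'instance-data-sensitive.json'),
                   processed_data, mode=0o600)
        # World-readable copy with sensitive values redacted.
        write_json(os.path.join(run_dir, 'instance-data.json'),
                   redact_sensitive_keys(processed_data))
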
@@ -496,7 +510,6 @@ class DataSource(metaclass=abc.ABCMeta):
             (e.g. 'ssh-rsa') and key_value is the key itself
             (e.g. 'AAAAB3NzaC1y...').
         """
-        pass

     def _remap_device(self, short_name):
         # LP: #611137
@@ -587,7 +600,7 @@ class DataSource(metaclass=abc.ABCMeta):
             # if there is an ipv4 address in 'local-hostname', then
             # make up a hostname (LP: #475354) in format ip-xx.xx.xx.xx
             lhost = self.metadata['local-hostname']
-            if util.is_ipv4(lhost):
+            if net.is_ipv4_address(lhost):
                 toks = []
                 if resolve_ip:
                     toks = util.gethostbyaddr(lhost)
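
Note on this hunk: only the helper changes; net.is_ipv4_address() reports whether 'local-hostname' is a dotted-quad IPv4 address and therefore needs the ip-xx.xx.xx.xx style name synthesized. A small usage sketch, assuming only the call shape visible above:

    # Illustrative sketch, not part of the diff.
    from cloudinit import net

    for lhost in ('10.0.0.5', 'myhost.example'):
        if net.is_ipv4_address(lhost):
            print(lhost, '-> synthesize an ip-xx.xx.xx.xx style hostname')
        else:
            print(lhost, '-> usable as a hostname directly')
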