summary refs log tree commit diff
path: root/cloudinit/reporting/handlers.py
diff options
context:
space:
mode:
Diffstat (limited to 'cloudinit/reporting/handlers.py')
-rwxr-xr-x  cloudinit/reporting/handlers.py  39
1 file changed, 24 insertions(+), 15 deletions(-)
diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py
index 946df7e0..0a8c7af3 100755
--- a/cloudinit/reporting/handlers.py
+++ b/cloudinit/reporting/handlers.py
@@ -35,7 +35,6 @@ class ReportingHandler(metaclass=abc.ABCMeta):
def flush(self):
"""Ensure ReportingHandler has published all events"""
- pass
class LogHandler(ReportingHandler):
@@ -114,6 +113,8 @@ class HyperVKvpReportingHandler(ReportingHandler):
https://technet.microsoft.com/en-us/library/dn798287.aspx#Linux%20guests
"""
HV_KVP_EXCHANGE_MAX_VALUE_SIZE = 2048
+ # The maximum value size expected in Azure
+ HV_KVP_AZURE_MAX_VALUE_SIZE = 1024
HV_KVP_EXCHANGE_MAX_KEY_SIZE = 512
HV_KVP_RECORD_SIZE = (HV_KVP_EXCHANGE_MAX_KEY_SIZE +
HV_KVP_EXCHANGE_MAX_VALUE_SIZE)
@@ -139,7 +140,8 @@ class HyperVKvpReportingHandler(ReportingHandler):
self.event_key_prefix = u"{0}|{1}".format(self.EVENT_PREFIX,
self.incarnation_no)
self.publish_thread = threading.Thread(
- target=self._publish_event_routine)
+ target=self._publish_event_routine
+ )
self.publish_thread.daemon = True
self.publish_thread.start()
@@ -195,17 +197,23 @@ class HyperVKvpReportingHandler(ReportingHandler):
def _event_key(self, event):
"""
the event key format is:
- CLOUD_INIT|<incarnation number>|<event_type>|<event_name>|<time>
+ CLOUD_INIT|<incarnation number>|<event_type>|<event_name>|<uuid>
+ [|subevent_index]
"""
return u"{0}|{1}|{2}|{3}".format(self.event_key_prefix,
event.event_type, event.name,
uuid.uuid4())
def _encode_kvp_item(self, key, value):
- data = (struct.pack("%ds%ds" % (
+ data = struct.pack(
+ "%ds%ds"
+ % (
self.HV_KVP_EXCHANGE_MAX_KEY_SIZE,
- self.HV_KVP_EXCHANGE_MAX_VALUE_SIZE),
- key.encode('utf-8'), value.encode('utf-8')))
+ self.HV_KVP_EXCHANGE_MAX_VALUE_SIZE,
+ ),
+ key.encode("utf-8"),
+ value.encode("utf-8"),
+ )
return data
def _decode_kvp_item(self, record_data):
@@ -219,7 +227,7 @@ class HyperVKvpReportingHandler(ReportingHandler):
v = (
record_data[
self.HV_KVP_EXCHANGE_MAX_KEY_SIZE:self.HV_KVP_RECORD_SIZE
- ].decode('utf-8').strip('\x00'))
+ ].decode('utf-8').strip('\x00'))
return {'key': k, 'value': v}
@@ -244,13 +252,14 @@ class HyperVKvpReportingHandler(ReportingHandler):
data_without_desc = json.dumps(meta_data,
separators=self.JSON_SEPARATORS)
room_for_desc = (
- self.HV_KVP_EXCHANGE_MAX_VALUE_SIZE -
+ self.HV_KVP_AZURE_MAX_VALUE_SIZE -
len(data_without_desc) - 8)
value = data_without_desc.replace(
message_place_holder,
'"{key}":"{desc}"'.format(
key=self.MSG_KEY, desc=des_in_json[:room_for_desc]))
- result_array.append(self._encode_kvp_item(key, value))
+ subkey = "{}|{}".format(key, i)
+ result_array.append(self._encode_kvp_item(subkey, value))
i += 1
des_in_json = des_in_json[room_for_desc:]
if len(des_in_json) == 0:
@@ -265,11 +274,11 @@ class HyperVKvpReportingHandler(ReportingHandler):
"""
key = self._event_key(event)
meta_data = {
- "name": event.name,
- "type": event.event_type,
- "ts": (datetime.utcfromtimestamp(event.timestamp)
- .isoformat() + 'Z'),
- }
+ "name": event.name,
+ "type": event.event_type,
+ "ts": (datetime.utcfromtimestamp(event.timestamp)
+ .isoformat() + 'Z'),
+ }
if hasattr(event, self.RESULT_KEY):
meta_data[self.RESULT_KEY] = event.result
meta_data[self.MSG_KEY] = event.description
@@ -277,7 +286,7 @@ class HyperVKvpReportingHandler(ReportingHandler):
# if it reaches the maximum length of kvp value,
# break it down to slices.
# this should be very corner case.
- if len(value) > self.HV_KVP_EXCHANGE_MAX_VALUE_SIZE:
+ if len(value) > self.HV_KVP_AZURE_MAX_VALUE_SIZE:
return self._break_down(key, meta_data, event.description)
else:
data = self._encode_kvp_item(key, value)