# vi: ts=4 expandtab
#
# Copyright (C) 2012 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# Copyright (C) 2012 Yahoo! Inc.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
# Author: Joshua Harlow <harlowja@yahoo-inc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import jsonpatch

from cloudinit import handlers
from cloudinit import log as logging
from cloudinit import mergers
from cloudinit import util

from cloudinit.settings import (PER_ALWAYS)

LOG = logging.getLogger(__name__)

MERGE_HEADER = 'Merge-Type'

# Previously, each cloud config part was appended to one larger yaml file
# and that combined file was then loaded as a single yaml document. We need
# to mimic that behavior here, so the default merge strategy replaces the
# keys of prior merges.
#
# For example:
# #file 1
# a: 3
# #file 2
# a: 22
# #combined file (comments not included)
# a: 3
# a: 22
#
# This gets loaded into yaml with final result {'a': 22}
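#
# A rough sketch of what that default amounts to (illustrative only; this
# mirrors how _merge_part() below constructs and applies its mergers):
#
#   merger = mergers.construct(
#       mergers.string_extract_mergers('dict(replace)+list()+str()'))
#   merger.merge({'a': 3}, {'a': 22})  # -> {'a': 22}; later keys replace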
DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()')
CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"

# The file header -> content types this module will handle.
CC_TYPES = {
    JSONP_PREFIX: handlers.type_from_starts_with(JSONP_PREFIX),
    CLOUD_PREFIX: handlers.type_from_starts_with(CLOUD_PREFIX),
}
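
# Note (assumption, not verified here): handlers.type_from_starts_with()
# turns these '#...' prefixes into the content types used by the MIME part
# handling layer, conventionally 'text/cloud-config' and
# 'text/cloud-config-jsonp'.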


class CloudConfigPartHandler(handlers.Handler):
    def __init__(self, paths, **_kwargs):
        handlers.Handler.__init__(self, PER_ALWAYS, version=3)
        self.cloud_buf = None
        self.cloud_fn = paths.get_ipath("cloud_config")
        self.file_names = []

    def list_types(self):
        return list(CC_TYPES.values())

    def _write_cloud_config(self):
        if not self.cloud_fn:
            return
        # Capture which files we merged from...
        file_lines = []
        if self.file_names:
            file_lines.append("# from %s files" % (len(self.file_names)))
            for fn in self.file_names:
                if not fn:
                    fn = '?'
                file_lines.append("# %s" % (fn))
            file_lines.append("")
        if self.cloud_buf is not None:
            # Something was actually gathered....
            lines = [
                CLOUD_PREFIX,
                '',
            ]
            lines.extend(file_lines)
            lines.append(util.yaml_dumps(self.cloud_buf))
        else:
            lines = []
        util.write_file(self.cloud_fn, "\n".join(lines), 0o600)
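
    # Illustrative sketch (part filenames are hypothetical): after merging
    # two parts, the file written above would look roughly like:
    #
    #   #cloud-config
    #
    #   # from 2 files
    #   # part-001
    #   # part-002
    #
    #   a: 22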

    def _extract_mergers(self, payload, headers):
        merge_header_headers = ''
        for h in [MERGE_HEADER, 'X-%s' % (MERGE_HEADER)]:
            tmp_h = headers.get(h, '')
            if tmp_h:
                merge_header_headers = tmp_h
                break
        # Select the merge type from the content, then the merge type from
        # the headers, and fall back to our own default set if neither
        # exists (or both are empty).
        payload_yaml = util.load_yaml(payload)
        mergers_yaml = mergers.dict_extract_mergers(payload_yaml)
        mergers_header = mergers.string_extract_mergers(merge_header_headers)
        all_mergers = []
        all_mergers.extend(mergers_yaml)
        all_mergers.extend(mergers_header)
        if not all_mergers:
            all_mergers = DEF_MERGERS
        return (payload_yaml, all_mergers)
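
    # Illustrative sketch: a part can request its own merging strategy via a
    # header, e.g.
    #
    #   Merge-Type: dict(replace)+list(append)+str()
    #
    # or (assumption, see mergers.dict_extract_mergers) via a key such as
    # 'merge_how' inside the payload itself; mergers found in the content are
    # listed ahead of those from the headers, and DEF_MERGERS is the fallback
    # when neither supplies any.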

    def _merge_patch(self, payload):
        payload = payload.lstrip()
        if payload.lower().startswith(JSONP_PREFIX):
            # JSON doesn't handle comments in this manner, so if the payload
            # started with this 'type' marker, strip it off before attempting
            # to load the rest as json (which the jsonpatch library will
            # attempt to do).
            payload = payload[len(JSONP_PREFIX):]
        patch = jsonpatch.JsonPatch.from_string(payload)
        LOG.debug("Merging by applying json patch %s", patch)
        self.cloud_buf = patch.apply(self.cloud_buf, in_place=False)
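
    # Illustrative sketch: a '#cloud-config-jsonp' part carries an RFC 6902
    # JSON patch that is applied to the config gathered so far, e.g.
    #
    #   #cloud-config-jsonp
    #   [{"op": "add", "path": "/foo", "value": "bar"},
    #    {"op": "replace", "path": "/a", "value": 22}]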

    def _merge_part(self, payload, headers):
        (payload_yaml, my_mergers) = self._extract_mergers(payload, headers)
        LOG.debug("Merging by applying %s", my_mergers)
        merger = mergers.construct(my_mergers)
        self.cloud_buf = merger.merge(self.cloud_buf, payload_yaml)

    def _reset(self):
        self.file_names = []
        self.cloud_buf = None

    def handle_part(self, _data, ctype, filename,  # pylint: disable=W0221
                    payload, _frequency, headers):  # pylint: disable=W0613
        if ctype == handlers.CONTENT_START:
            self._reset()
            return
        if ctype == handlers.CONTENT_END:
            self._write_cloud_config()
            self._reset()
            return
        try:
            # First time through, merge with an empty dict...
            if self.cloud_buf is None or not self.file_names:
                self.cloud_buf = {}
            if ctype == CC_TYPES[JSONP_PREFIX]:
                self._merge_patch(payload)
            else:
                self._merge_part(payload, headers)
            # Ensure the filename is ok to store
            for i in ("\n", "\r", "\t"):
                filename = filename.replace(i, " ")
            self.file_names.append(filename.strip())
        except Exception:
            util.logexc(LOG, "Failed at merging in cloud config part from %s",
                        filename)
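
# Rough usage sketch (hedged: in practice the handler framework in
# cloudinit.handlers drives these calls, and the part values shown here are
# hypothetical):
#
#   h = CloudConfigPartHandler(paths)
#   h.handle_part(None, handlers.CONTENT_START, None, None, None, None)
#   for (ctype, fname, blob, hdrs) in parts:
#       h.handle_part(None, ctype, fname, blob, None, hdrs)
#   h.handle_part(None, handlers.CONTENT_END, None, None, None, None)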