# vi: ts=4 expandtab
#
# Copyright (C) 2012 Yahoo! Inc.
#
# Author: Joshua Harlow <harlowja@yahoo-inc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from urlparse import (urlparse, urlunparse)

import functools
import json
import urllib

from cloudinit import log as logging
from cloudinit import util

LOG = logging.getLogger(__name__)


def maybe_json_object(text):
    if not text:
        return False
    text = text.strip()
    if text.startswith("{") and text.endswith("}"):
        return True
    return False
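

# Join a base url and an additional path piece, ensuring a single "/"
# separator and url-quoting the added piece. Illustrative example (values
# not taken from the original source):
#   combine_url('http://169.254.169.254/latest', 'meta-data')
#     -> 'http://169.254.169.254/latest/meta-data'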
def combine_url(base, add_on):
    base_parsed = list(urlparse(base))
    path = base_parsed[2]
    if path and not path.endswith("/"):
        path += "/"
    path += urllib.quote(str(add_on), safe="/:")
    base_parsed[2] = path
    return urlunparse(base_parsed)


# See: http://bit.ly/TyoUQs
#
# Since the boto metadata reader uses the old urllib which does not
# support ssl, we need to go ahead and create our own reader which
# works the same as the boto one (for now).
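#
# The metadata "directory" listing that this parses looks roughly like the
# following (illustrative sample, not taken from the original source):
#   ami-id
#   block-device-mapping/
#   public-keys/
# Entries ending in "/" are sub-listings that get fetched and materialized
# recursively; under public-keys/ the entries look like "0=key-name" and the
# key material itself is read from "<index>/openssh-key".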
class MetadataMaterializer(object):
    def __init__(self, blob, base_url, caller):
        self._blob = blob
        self._md = None
        self._base_url = base_url
        self._caller = caller

    def _parse(self, blob):
        leaves = {}
        children = []
        if not blob:
            return (leaves, children)

        def has_children(item):
            if item.endswith("/"):
                return True
        else:
            return False

        def get_name(item):
            if item.endswith("/"):
                return item.rstrip("/")
            return item

        for field in blob.splitlines():
            field = field.strip()
            field_name = get_name(field)
            if not field or not field_name:
                continue
            if has_children(field):
                if field_name not in children:
                    children.append(field_name)
            else:
                contents = field.split("=", 1)
                resource = field_name
                if len(contents) > 1:
                    # What a PITA...
                    (ident, sub_contents) = contents
                    ident = util.safe_int(ident)
                    if ident is not None:
                        resource = "%s/openssh-key" % (ident)
                        field_name = sub_contents
                leaves[field_name] = resource
        return (leaves, children)

    def materialize(self):
        if self._md is not None:
            return self._md
        self._md = self._materialize(self._blob, self._base_url)
        return self._md

    def _decode_leaf_blob(self, field, blob):
        if not blob:
            return blob
        if maybe_json_object(blob):
            try:
                # Assume it's json, unless it fails parsing...
                return json.loads(blob)
            except (ValueError, TypeError) as e:
                LOG.warn("Field %s looked like json, but it was not: %s",
                         field, e)
        if blob.find("\n") != -1:
            return blob.splitlines()
        return blob

    def _materialize(self, blob, base_url):
        (leaves, children) = self._parse(blob)
        child_contents = {}
        for c in children:
            child_url = combine_url(base_url, c)
            if not child_url.endswith("/"):
                child_url += "/"
            child_blob = str(self._caller(child_url))
            child_contents[c] = self._materialize(child_blob, child_url)
        leaf_contents = {}
        for (field, resource) in leaves.items():
            leaf_url = combine_url(base_url, resource)
            leaf_blob = str(self._caller(leaf_url))
            leaf_contents[field] = self._decode_leaf_blob(field, leaf_blob)
        joined = {}
        joined.update(child_contents)
        for field in leaf_contents.keys():
            if field in joined:
                LOG.warn("Duplicate key found in results from %s", base_url)
            else:
                joined[field] = leaf_contents[field]
        return joined
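

# Fetch the raw user-data blob from <metadata_address>/<api_version>/user-data
# and return it as a string ('' if the fetch fails).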
def get_instance_userdata(api_version='latest',
                          metadata_address='http://169.254.169.254',
                          ssl_details=None, timeout=5, retries=5):
    ud_url = combine_url(metadata_address, api_version)
    ud_url = combine_url(ud_url, 'user-data')
    try:
        response = util.read_file_or_url(ud_url,
                                         ssl_details=ssl_details,
                                         timeout=timeout,
                                         retries=retries)
        return str(response)
    except Exception:
        util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
        return ''
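

# Fetch <metadata_address>/<api_version>/meta-data and recursively materialize
# it into a nested dict. Illustrative shape of the result (values made up):
#   {'ami-id': 'ami-XXXX',
#    'public-keys': {'key-name': '<contents of <index>/openssh-key>'}, ...}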
def get_instance_metadata(api_version='latest',
                          metadata_address='http://169.254.169.254',
                          ssl_details=None, timeout=5, retries=5):
    md_url = combine_url(metadata_address, api_version)
    md_url = combine_url(md_url, 'meta-data')
    caller = functools.partial(util.read_file_or_url,
                               ssl_details=ssl_details, timeout=timeout,
                               retries=retries)
    try:
        response = caller(md_url)
        materializer = MetadataMaterializer(str(response), md_url, caller)
        md = materializer.materialize()
        if not isinstance(md, (dict)):
            md = {}
        return md
    except Exception:
        util.logexc(LOG, "Failed fetching metadata from url %s", md_url)
        return {}