summary | refs | log | tree | commit | diff
path: root/cloudinit
diff options
context:
space:
mode:
Diffstat (limited to 'cloudinit')
-rw-r--r--  cloudinit/config/cc_phone_home.py        |   2
-rw-r--r--  cloudinit/ec2_utils.py                   | 210
-rw-r--r--  cloudinit/sources/DataSourceCloudStack.py |   9
-rw-r--r--  cloudinit/sources/DataSourceEc2.py       |   9
-rw-r--r--  cloudinit/url_helper.py                  |  16
5 files changed, 131 insertions, 115 deletions
diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py
index ae1349eb..a268d66f 100644
--- a/cloudinit/config/cc_phone_home.py
+++ b/cloudinit/config/cc_phone_home.py
@@ -112,7 +112,7 @@ def handle(name, cfg, cloud, log, args):
}
url = templater.render_string(url, url_params)
try:
- uhelp.readurl(url, data=real_submit_keys, retries=tries, sec_between=3)
+ uhelp.readurl(url, data=real_submit_keys, retries=tries)
except:
util.logexc(log, ("Failed to post phone home data to"
" %s in %s tries"), url, tries)
diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py
index ef7fac7d..b9d7a2f7 100644
--- a/cloudinit/ec2_utils.py
+++ b/cloudinit/ec2_utils.py
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from urlparse import (urlparse, urlunparse)
+
import json
import urllib
@@ -26,110 +28,132 @@ from cloudinit import util
LOG = logging.getLogger(__name__)
-# For now take this and fix it...
-class LazyLoadMetadata(dict):
- def __init__(self, url, fetch_timeout, num_retries, ssl_details):
- self._url = url
- self._num_retries = num_retries
- self._ssl_details = ssl_details
- self._fetch_timeout = fetch_timeout
- self._leaves = {}
- self._dicts = []
- response = uh.readurl(url, timeout=fetch_timeout,
- retries=num_retries, ssl_details=ssl_details)
- data = str(response)
- if data:
- fields = data.split('\n')
- for field in fields:
- if field.endswith('/'):
- key = field[0:-1]
- self._dicts.append(key)
- else:
- p = field.find('=')
- if p > 0:
- key = field[p + 1:]
- resource = field[0:p] + '/openssh-key'
- else:
- key = resource = field
- self._leaves[key] = resource
- self[key] = None
-
- def _materialize(self):
- for key in self:
- self[key]
-
- def __getitem__(self, key):
- if key not in self:
- # Allow dict to throw the KeyError
- return super(LazyLoadMetadata, self).__getitem__(key)
-
- # Already loaded
- val = super(LazyLoadMetadata, self).__getitem__(key)
- if val is not None:
- return val
-
- if key in self._leaves:
- resource = self._leaves[key]
- new_url = self._url + urllib.quote(resource, safe="/:")
- response = uh.readurl(new_url, retries=self._num_retries,
- timeout=self._fetch_timeout,
- ssl_details=self._ssl_details)
- val = str(response)
- if val and val[0] == '{':
- val = json.loads(val)
+def combine_url(base, add_on):
+ base_parsed = list(urlparse(base))
+ path = base_parsed[2]
+ if path and not path.endswith("/"):
+ path += "/"
+ path += urllib.quote(str(add_on), safe="/:")
+ base_parsed[2] = path
+ return urlunparse(base_parsed)
+
+
+# See: http://bit.ly/TyoUQs
+class MetadataMaterializer(object):
+ def __init__(self, blob, base_url, **fetch_settings):
+ self._blob = blob
+ self._md = None
+ self._base_url = base_url
+ self._fetch_settings = fetch_settings
+
+ def _parse(self, blob):
+ leaves = {}
+ children = []
+ if not blob:
+ return (leaves, children)
+
+ def has_children(item):
+ if item.endswith("/"):
+ return True
+ else:
+ return False
+
+ def get_name(item):
+ if item.endswith("/"):
+ return item.rstrip("/")
+ return item
+
+ for field in blob.splitlines():
+ field = field.strip()
+ field_name = get_name(field)
+ if not field or not field_name:
+ continue
+ if has_children(field):
+ if field_name not in children:
+ children.append(field_name)
+ else:
+ contents = field.split("=", 1)
+ resource = field_name
+ if len(contents) > 1:
+ # What a PITA...
+ (ident, sub_contents) = contents
+ checked_ident = util.safe_int(ident)
+ if checked_ident is not None:
+ resource = "%s/openssh-key" % (checked_ident)
+ field_name = sub_contents
+ leaves[field_name] = resource
+ return (leaves, children)
+
+ def materialize(self):
+ if self._md is not None:
+ return self._md
+ self._md = self._materialize(self._blob, self._base_url)
+ return self._md
+
+ def _fetch_url(self, url, **opts):
+ response = uh.readurl(url, **opts)
+ return str(response)
+
+ def _decode_leaf_blob(self, blob):
+ if not blob:
+ return blob
+ stripped_blob = blob.strip()
+ if stripped_blob.startswith("{") and stripped_blob.endswith("}"):
+ # Assume and try with json
+ try:
+ return json.loads(blob)
+ except (ValueError, TypeError):
+ pass
+ if blob.find("\n") != -1:
+ return blob.splitlines()
+ return blob
+
+ def _materialize(self, blob, base_url):
+ (leaves, children) = self._parse(blob)
+ child_contents = {}
+ for c in children:
+ child_url = combine_url(base_url, c)
+ if not child_url.endswith("/"):
+ child_url += "/"
+ child_blob = self._fetch_url(child_url, **self._fetch_settings)
+ child_contents[c] = self._materialize(child_blob, child_url)
+ leaf_contents = {}
+ for (field, resource) in leaves.items():
+ leaf_url = combine_url(base_url, resource)
+ leaf_blob = self._fetch_url(leaf_url, **self._fetch_settings)
+ leaf_contents[field] = self._decode_leaf_blob(leaf_blob)
+ joined = {}
+ joined.update(child_contents)
+ for field in leaf_contents.keys():
+ if field in joined:
+ LOG.warn("Duplicate key found in results from %s", base_url)
else:
- p = val.find('\n')
- if p > 0:
- val = val.split('\n')
- self[key] = val
- elif key in self._dicts:
- new_url = self._url + key + '/'
- self[key] = LazyLoadMetadata(new_url,
- num_retries=self._num_retries,
- fetch_timeout=self._fetch_timeout,
- ssl_details=self._ssl_details)
-
- return super(LazyLoadMetadata, self).__getitem__(key)
-
- def get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
- def values(self):
- self._materialize()
- return super(LazyLoadMetadata, self).values()
-
- def items(self):
- self._materialize()
- return super(LazyLoadMetadata, self).items()
-
- def __str__(self):
- self._materialize()
- return super(LazyLoadMetadata, self).__str__()
-
- def __repr__(self):
- self._materialize()
- return super(LazyLoadMetadata, self).__repr__()
+ joined[field] = leaf_contents[field]
+ return joined
def get_instance_userdata(url, version='latest', ssl_details=None):
- ud_url = '%s/%s/user-data' % (url, version)
+ ud_url = combine_url(url, version)
+ ud_url = combine_url(ud_url, 'user-data')
try:
response = uh.readurl(ud_url, timeout=5,
retries=10, ssl_details=ssl_details)
return str(response)
- except Exception as e:
- util.logexc(LOG, "Failed fetching url %s", ud_url)
+ except Exception:
+ util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
return None
def get_instance_metadata(url, version='latest', ssl_details=None):
- md_url = '%s/%s/meta-data' % (url, version)
+ md_url = combine_url(url, version)
+ md_url = combine_url(md_url, 'meta-data')
try:
- return LazyLoadMetadata(md_url, timeout=5,
- retries=10, ssl_details=ssl_details)
- except Exception as e:
- util.logexc(LOG, "Failed fetching url %s", md_url)
+ response = uh.readurl(md_url, timeout=5,
+ retries=10, ssl_details=ssl_details)
+ materializer = MetadataMaterializer(str(response), md_url,
+ timeout=5, retries=10,
+ ssl_details=ssl_details)
+ return materializer.materialize()
+ except Exception:
+ util.logexc(LOG, "Failed fetching metadata from url %s", md_url)
return None
diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py
index f7ffa7cb..2654df53 100644
--- a/cloudinit/sources/DataSourceCloudStack.py
+++ b/cloudinit/sources/DataSourceCloudStack.py
@@ -26,8 +26,7 @@ from struct import pack
import os
import time
-import boto.utils as boto_utils
-
+from cloudinit import ec2_utils
from cloudinit import log as logging
from cloudinit import sources
from cloudinit import url_helper as uhelp
@@ -116,10 +115,8 @@ class DataSourceCloudStack(sources.DataSource):
if not self.wait_for_metadata_service():
return False
start_time = time.time()
- self.userdata_raw = boto_utils.get_instance_userdata(self.api_ver,
- None, self.metadata_address)
- self.metadata = boto_utils.get_instance_metadata(self.api_ver,
- self.metadata_address)
+ self.userdata_raw = ec2_utils.get_instance_userdata(self.metadata_address, self.api_ver)
+ self.metadata = ec2_utils.get_instance_metadata(self.metadata_address, self.api_ver)
LOG.debug("Crawl of metadata service took %s seconds",
int(time.time() - start_time))
return True
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 3686fa10..204963e7 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -23,8 +23,7 @@
import os
import time
-import boto.utils as boto_utils
-
+from cloudinit import ec2_utils
from cloudinit import log as logging
from cloudinit import sources
from cloudinit import url_helper as uhelp
@@ -65,10 +64,8 @@ class DataSourceEc2(sources.DataSource):
if not self.wait_for_metadata_service():
return False
start_time = time.time()
- self.userdata_raw = boto_utils.get_instance_userdata(self.api_ver,
- None, self.metadata_address)
- self.metadata = boto_utils.get_instance_metadata(self.api_ver,
- self.metadata_address)
+ self.userdata_raw = ec2_utils.get_instance_userdata(self.metadata_address, self.api_ver)
+ self.metadata = ec2_utils.get_instance_metadata(self.metadata_address, self.api_ver)
LOG.debug("Crawl of metadata service took %s seconds",
int(time.time() - start_time))
return True
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index e3f63021..2c9d5eef 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -22,11 +22,10 @@
from contextlib import closing
-import errno
-import socket
import time
import urllib
+from urllib3 import exceptions
from urllib3 import connectionpool
from urllib3 import util
@@ -91,7 +90,10 @@ def readurl(url, data=None, timeout=None, retries=0,
'url': p_url.request_uri,
}
if data is not None:
- open_args['body'] = urllib.urlencode(data)
+ if isinstance(data, (str, basestring)):
+ open_args['body'] = data
+ else:
+ open_args['body'] = urllib.urlencode(data)
open_args['method'] = 'POST'
if not headers:
headers = {
@@ -112,7 +114,7 @@ def wait_for_url(urls, max_wait=None, timeout=None,
max_wait: roughly the maximum time to wait before giving up
The max time is *actually* len(urls)*timeout as each url will
be tried once and given the timeout provided.
- timeout: the timeout provided to urllib2.urlopen
+ timeout: the timeout provided to urlopen
status_cb: call method with string message when a url is not available
headers_cb: call method with single argument of url to get headers
for request.
@@ -174,12 +176,8 @@ def wait_for_url(urls, max_wait=None, timeout=None,
e = ValueError(reason)
else:
return url
- except urllib2.HTTPError as e:
+ except exceptions.HTTPError as e:
reason = "http error [%s]" % e.code
- except urllib2.URLError as e:
- reason = "url error [%s]" % e.reason
- except socket.timeout as e:
- reason = "socket timeout [%s]" % e
except Exception as e:
reason = "unexpected error [%s]" % e