From 7c9bbbc9b49425e3ba8e0517908477c58ea51d4b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 19 Oct 2012 14:06:21 -0700 Subject: Remove the need for boto just for fetching the userdata and metadata. Add in this crawling functionality to the ec2_utils module that will fully crawl (not lazily) the ec2 metadata and parse it in the same manner as boto. 1. Make the ec2 datasource + cloudstack now call into these. 2. Fix phone_home due to urllib3 change (TBD) --- cloudinit/config/cc_phone_home.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index ae1349eb..a268d66f 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -112,7 +112,7 @@ def handle(name, cfg, cloud, log, args): } url = templater.render_string(url, url_params) try: - uhelp.readurl(url, data=real_submit_keys, retries=tries, sec_between=3) + uhelp.readurl(url, data=real_submit_keys, retries=tries) except: util.logexc(log, ("Failed to post phone home data to" " %s in %s tries"), url, tries) -- cgit v1.2.3 From 6a6a2f7c337e18d84d7031ba60d5adf6a93256fc Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Mon, 22 Oct 2012 13:24:25 -0700 Subject: More cleanups for using the requests module. 1. Handle our own retries (so that we can sleep in between) 2. Cleanup the url so that partially invalid (no scheme) urls will continue working. 3. Allow redirects option now a param. --- cloudinit/config/cc_phone_home.py | 2 +- cloudinit/url_helper.py | 103 ++++++++++++++++++++------------------ 2 files changed, 54 insertions(+), 51 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index a268d66f..ae1349eb 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -112,7 +112,7 @@ def handle(name, cfg, cloud, log, args): } url = templater.render_string(url, url_params) try: - uhelp.readurl(url, data=real_submit_keys, retries=tries) + uhelp.readurl(url, data=real_submit_keys, retries=tries, sec_between=3) except: util.logexc(log, ("Failed to post phone home data to" " %s in %s tries"), url, tries) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 93df2510..e8330e24 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -25,7 +25,7 @@ import time import requests from requests import exceptions -from urlparse import urlparse +from urlparse import (urlparse, urlunparse) from cloudinit import log as logging from cloudinit import version @@ -48,42 +48,20 @@ except: pass -class UrlResponse(object): - def __init__(self, status_code, contents=None, headers=None): - self._status_code = status_code - self._contents = contents - self._headers = headers +def _cleanurl(url): + parsed_url = list(urlparse(url, scheme='http')) + if not parsed_url[1] and parsed_url[2]: + # Swap these since this seems to be a common + # occurrence when given urls like 'www.google.com' + parsed_url[1] = parsed_url[2] + parsed_url[2] = '' + return urlunparse(parsed_url) - @property - def code(self): - return self._status_code - @property - def contents(self): - return self._contents - - @property - def headers(self): - return self._headers - - def __str__(self): - if not self.contents: - return '' - else: - return str(self.contents) - - def ok(self, redirects_ok=False): - upper = 300 - if redirects_ok: - upper = 400 - if self.code >= 200 and self.code < upper: - return True - else: - 
return False - - -def readurl(url, data=None, timeout=None, retries=0, - headers=None, ssl_details=None, check_status=True): +def readurl(url, data=None, timeout=None, retries=0, sec_between=1, + headers=None, ssl_details=None, check_status=True, + allow_redirects=False): + url = _cleanurl(url) req_args = { 'url': url, } @@ -98,7 +76,8 @@ def readurl(url, data=None, timeout=None, retries=0, if 'cert_file' in ssl_details and 'key_file' in ssl_details: req_args['cert'] = [ssl_details['cert_file'], ssl_details['key_file']] - req_args['allow_redirects'] = False + + req_args['allow_redirects'] = allow_redirects req_args['method'] = 'GET' if timeout is not None: req_args['timeout'] = max(float(timeout), 0) @@ -107,16 +86,19 @@ def readurl(url, data=None, timeout=None, retries=0, # It doesn't seem like config # was added in older library versions, thus we # need to manually do the retries if it wasn't - manual_tries = 1 if CONFIG_ENABLED: - req_config = {} - req_config['store_cookies'] = False - if retries: - req_config['max_retries'] = max(int(retries), 0) + req_config = { + 'store_cookies': False, + } + # Don't use the retry support built-in + # since it doesn't allow for 'sleep_times' + # in between tries.... + # if retries: + # req_config['max_retries'] = max(int(retries), 0) req_args['config'] = req_config - else: - if retries: - manual_tries = max(int(retries) + 1, 1) + manual_tries = 1 + if retries: + manual_tries = max(int(retries) + 1, 1) if not headers: headers = { 'User-Agent': 'Cloud-Init/%s' % (version.version_string()), @@ -126,17 +108,38 @@ def readurl(url, data=None, timeout=None, retries=0, if data: # Do this after the log (it might be large) req_args['data'] = data - last_excp = [] - for _i in range(0, manual_tries): + if sec_between is None: + sec_between = -1 + excps = [] + # Handle retrying ourselves since the built-in support + # doesn't handle sleeping between tries... + for i in range(0, manual_tries): try: r = requests.request(**req_args) if check_status: r.raise_for_status() + contents = r.content + status = r.status_code + headers = r.headers + LOG.debug("Read from %s (%s, %sb) after %s attempts", url, + status, len(contents), (i + 1)) + # Doesn't seem like we can make it use a different + # subclass for responses, so add our own backward-compat + # attrs + if not hasattr(r, 'code'): + setattr(r, 'code', status) + if not hasattr(r, 'contents'): + setattr(r, 'contents', contents) + return r except exceptions.RequestException as e: - last_excp = [e] - if last_excp: - raise last_excp[-1] - return UrlResponse(r.status_code, r.content, r.headers) + excps.append(e) + if i + 1 < manual_tries and sec_between > 0: + LOG.debug("Please wait %s seconds while we wait to try again", + sec_between) + time.sleep(sec_between) + if excps: + raise excps[-1] + return None # Should throw before this... def wait_for_url(urls, max_wait=None, timeout=None, -- cgit v1.2.3 From 6d40d5b2e3da9577d4a2686444d47125e62817fe Mon Sep 17 00:00:00 2001 From: harlowja Date: Tue, 19 Feb 2013 22:51:49 -0800 Subject: Continue working on integrating requests. 
--- cloudinit/config/cc_phone_home.py | 4 +-- cloudinit/ec2_utils.py | 24 +++++-------- cloudinit/sources/DataSourceEc2.py | 2 +- cloudinit/sources/DataSourceMAAS.py | 16 +++++---- cloudinit/url_helper.py | 50 ++++++++++++++++++++++---- cloudinit/user_data.py | 7 ++-- cloudinit/util.py | 71 ++++++++++++++++++++++++++++++++----- 7 files changed, 129 insertions(+), 45 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index ae1349eb..90834080 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -19,7 +19,6 @@ # along with this program. If not, see . from cloudinit import templater -from cloudinit import url_helper as uhelp from cloudinit import util from cloudinit.settings import PER_INSTANCE @@ -112,7 +111,8 @@ def handle(name, cfg, cloud, log, args): } url = templater.render_string(url, url_params) try: - uhelp.readurl(url, data=real_submit_keys, retries=tries, sec_between=3) + util.read_file_or_url(url, data=real_submit_keys, + retries=tries, sec_between=3) except: util.logexc(log, ("Failed to post phone home data to" " %s in %s tries"), url, tries) diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py index b9d7a2f7..c422eea9 100644 --- a/cloudinit/ec2_utils.py +++ b/cloudinit/ec2_utils.py @@ -22,7 +22,6 @@ import json import urllib from cloudinit import log as logging -from cloudinit import url_helper as uh from cloudinit import util LOG = logging.getLogger(__name__) @@ -40,11 +39,10 @@ def combine_url(base, add_on): # See: http://bit.ly/TyoUQs class MetadataMaterializer(object): - def __init__(self, blob, base_url, **fetch_settings): + def __init__(self, blob, base_url): self._blob = blob self._md = None self._base_url = base_url - self._fetch_settings = fetch_settings def _parse(self, blob): leaves = {} @@ -90,8 +88,8 @@ class MetadataMaterializer(object): self._md = self._materialize(self._blob, self._base_url) return self._md - def _fetch_url(self, url, **opts): - response = uh.readurl(url, **opts) + def _fetch_url(self, url): + response = util.read_file_or_url(url) return str(response) def _decode_leaf_blob(self, blob): @@ -115,12 +113,12 @@ class MetadataMaterializer(object): child_url = combine_url(base_url, c) if not child_url.endswith("/"): child_url += "/" - child_blob = self._fetch_url(child_url, **self._fetch_settings) + child_blob = self._fetch_url(child_url) child_contents[c] = self._materialize(child_blob, child_url) leaf_contents = {} for (field, resource) in leaves.items(): leaf_url = combine_url(base_url, resource) - leaf_blob = self._fetch_url(leaf_url, **self._fetch_settings) + leaf_blob = self._fetch_url(leaf_url) leaf_contents[field] = self._decode_leaf_blob(leaf_blob) joined = {} joined.update(child_contents) @@ -136,23 +134,19 @@ def get_instance_userdata(url, version='latest', ssl_details=None): ud_url = combine_url(url, version) ud_url = combine_url(ud_url, 'user-data') try: - response = uh.readurl(ud_url, timeout=5, - retries=10, ssl_details=ssl_details) + response = util.read_file_or_url(ud_url) return str(response) except Exception: util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) return None -def get_instance_metadata(url, version='latest', ssl_details=None): +def get_instance_metadata(url, version='latest'): md_url = combine_url(url, version) md_url = combine_url(md_url, 'meta-data') try: - response = uh.readurl(md_url, timeout=5, - retries=10, ssl_details=ssl_details) - materializer = MetadataMaterializer(str(response), 
md_url, - timeout=5, retries=10, - ssl_details=ssl_details) + response = util.read_file_or_url(md_url) + materializer = MetadataMaterializer(str(response), md_url) return materializer.materialize() except Exception: util.logexc(LOG, "Failed fetching metadata from url %s", md_url) diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 204963e7..47f677d4 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -137,7 +137,7 @@ class DataSourceEc2(sources.DataSource): start_time = time.time() url = uhelp.wait_for_url(urls=urls, max_wait=max_wait, - timeout=timeout, status_cb=LOG.warn) + timeout=timeout, status_cb=LOG.warn) if url: LOG.debug("Using metadata source: '%s'", url2base[url]) diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index e187aec9..2de31305 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -25,9 +25,11 @@ import os import time import urllib2 +import requests + from cloudinit import log as logging from cloudinit import sources -from cloudinit import url_helper as uhelp +from cloudinit import url_helper from cloudinit import util LOG = logging.getLogger(__name__) @@ -191,8 +193,8 @@ def read_maas_seed_url(seed_url, header_cb=None, timeout=None, version=MD_VERSION): """ Read the maas datasource at seed_url. - header_cb is a method that should return a headers dictionary that will - be given to urllib2.Request() + - header_cb is a method that should return a headers dictionary for + a given url Expected format of seed_url is are the following files: * //meta-data/instance-id @@ -220,13 +222,13 @@ def read_maas_seed_url(seed_url, header_cb=None, timeout=None, else: headers = {} try: - resp = uhelp.readurl(url, headers=headers, timeout=timeout) - if resp.ok(): + resp = util.read_file_or_url(url, headers=headers, timeout=timeout) + if resp.ok: md[name] = str(resp) else: LOG.warn(("Fetching from %s resulted in" - " an invalid http code %s"), url, resp.code) - except urllib2.HTTPError as e: + " an invalid http code %s"), url, resp.status_code) + except url_helper.UrlError as e: if e.code != 404: raise return check_seed_contents(md, seed_url) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index e8330e24..0839e63b 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -58,6 +58,44 @@ def _cleanurl(url): return urlunparse(parsed_url) +class UrlResponse(object): + def __init__(self, response): + self._response = response + + @property + def contents(self): + return self._response.content + + @property + def url(self): + return self._response.url + + @property + def ok(self): + return self._response.ok + + @property + def headers(self): + return self._response.headers + + @property + def code(self): + return self._response.status_code + + def __str__(self): + return self.contents + + +class UrlError(IOError): + def __init__(self, cause): + IOError.__init__(self, str(cause)) + self.cause = cause + if isinstance(cause, exceptions.HTTPError) and cause.response: + self.code = cause.response.status_code + else: + self.code = None + + def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None, ssl_details=None, check_status=True, allow_redirects=False): @@ -76,6 +114,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, if 'cert_file' in ssl_details and 'key_file' in ssl_details: req_args['cert'] = [ssl_details['cert_file'], ssl_details['key_file']] + elif 'cert_file' in 
ssl_details: + req_args['cert'] = str(ssl_details['cert_file']) req_args['allow_redirects'] = allow_redirects req_args['method'] = 'GET' @@ -126,13 +166,9 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, # Doesn't seem like we can make it use a different # subclass for responses, so add our own backward-compat # attrs - if not hasattr(r, 'code'): - setattr(r, 'code', status) - if not hasattr(r, 'contents'): - setattr(r, 'contents', contents) - return r + return UrlResponse(r) except exceptions.RequestException as e: - excps.append(e) + excps.append(UrlError(e)) if i + 1 < manual_tries and sec_between > 0: LOG.debug("Please wait %s seconds while we wait to try again", sec_between) @@ -213,7 +249,7 @@ def wait_for_url(urls, max_wait=None, timeout=None, e = ValueError(reason) else: return url - except exceptions.RequestException as e: + except UrlError as e: reason = "request error [%s]" % e except Exception as e: reason = "unexpected error [%s]" % e diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index 803ffc3a..4a640f1e 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -29,7 +29,6 @@ from email.mime.text import MIMEText from cloudinit import handlers from cloudinit import log as logging -from cloudinit import url_helper from cloudinit import util LOG = logging.getLogger(__name__) @@ -173,10 +172,10 @@ class UserDataProcessor(object): if include_once_on and os.path.isfile(include_once_fn): content = util.load_file(include_once_fn) else: - resp = url_helper.readurl(include_url) - if include_once_on and resp.ok(): + resp = util.read_file_or_url(include_url) + if include_once_on and resp.ok: util.write_file(include_once_fn, str(resp), mode=0600) - if resp.ok(): + if resp.ok: content = str(resp) else: LOG.warn(("Fetching from %s resulted in" diff --git a/cloudinit/util.py b/cloudinit/util.py index f5a7ac12..da2cdeda 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -51,7 +51,7 @@ import yaml from cloudinit import importer from cloudinit import log as logging from cloudinit import safeyaml -from cloudinit import url_helper as uhelp +from cloudinit import url_helper from cloudinit.settings import (CFG_BUILTIN) @@ -69,6 +69,18 @@ FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters) CONTAINER_TESTS = ['running-in-container', 'lxc-is-container'] +class FileResponse(object): + def __init__(self, path, contents): + self.code = 200 + self.headers = {} + self.contents = contents + self.ok = True + self.url = path + + def __str__(self): + return self.contents + + class ProcessExecutionError(IOError): MESSAGE_TMPL = ('%(description)s\n' @@ -628,12 +640,53 @@ def read_optional_seed(fill, base="", ext="", timeout=5): raise -def read_file_or_url(url, timeout=5, retries=10, file_retries=0): +def fetch_ssl_details(paths=None): + ssl_details = {} + # Lookup in these locations for ssl key/cert files + ssl_cert_paths = [ + '/var/lib/cloud/data/ssl', + '/var/lib/cloud/instance/data/ssl', + ] + if paths: + ssl_cert_paths.extend([ + os.path.join(paths.get_ipath_cur('data'), 'ssl'), + os.path.join(paths.get_cpath('data'), 'ssl'), + ]) + ssl_cert_paths = uniq_merge(ssl_cert_paths) + ssl_cert_paths = [d for d in ssl_cert_paths if d and os.path.isdir(d)] + cert_file = None + for d in ssl_cert_paths: + if os.path.isfile(os.path.join(d, 'cert.pem')): + cert_file = os.path.join(d, 'cert.pem') + break + key_file = None + for d in ssl_cert_paths: + if os.path.isfile(os.path.join(d, 'key.pem')): + key_file = os.path.join(d, 'key.pem') + break + if cert_file 
and key_file: + ssl_details['cert_file'] = cert_file + ssl_details['key_file'] = key_file + elif cert_file: + ssl_details['cert_file'] = cert_file + return ssl_details + + +def read_file_or_url(url, timeout=5, retries=10, + headers=None, data=None, sec_between=1, paths=None): if url.startswith("/"): url = "file://%s" % url - if url.startswith("file://"): - retries = file_retries - return uhelp.readurl(url, timeout=timeout, retries=retries) + if url.lower().startswith("file://"): + file_path = url[len("file://"):] + return FileResponse(file_path, contents=load_file(file_path)) + else: + return url_helper.readurl(url, + timeout=timeout, + retries=retries, + headers=headers, + data=data, + sec_between=sec_between, + ssl_details=fetch_ssl_details(paths)) def load_yaml(blob, default=None, allowed=(dict,)): @@ -675,13 +728,13 @@ def read_seeded(base="", ext="", timeout=5, retries=10, file_retries=0): md_resp = read_file_or_url(md_url, timeout, retries, file_retries) md = None - if md_resp.ok(): + if md_resp.ok: md_str = str(md_resp) md = load_yaml(md_str, default={}) ud_resp = read_file_or_url(ud_url, timeout, retries, file_retries) ud = None - if ud_resp.ok(): + if ud_resp.ok: ud_str = str(ud_resp) ud = ud_str @@ -850,8 +903,8 @@ def get_cmdline_url(names=('cloud-config-url', 'url'), if not url: return (None, None, None) - resp = uhelp.readurl(url) - if resp.contents.startswith(starts) and resp.ok(): + resp = read_file_or_url(url) + if resp.contents.startswith(starts) and resp.ok: return (key, url, str(resp)) return (key, url, None) -- cgit v1.2.3 From 9dfb60d3144860334ab1ad1d72920d962139461f Mon Sep 17 00:00:00 2001 From: harlowja Date: Thu, 21 Feb 2013 22:39:30 -0800 Subject: More work on requests integration. --- cloudinit/config/cc_phone_home.py | 3 ++- cloudinit/ec2_utils.py | 17 +++++++++++------ cloudinit/sources/DataSourceEc2.py | 6 ++++-- cloudinit/sources/DataSourceMAAS.py | 15 +++++++++------ cloudinit/util.py | 4 ++-- 5 files changed, 28 insertions(+), 17 deletions(-) (limited to 'cloudinit/config') diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index 90834080..5a4332ef 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -112,7 +112,8 @@ def handle(name, cfg, cloud, log, args): url = templater.render_string(url, url_params) try: util.read_file_or_url(url, data=real_submit_keys, - retries=tries, sec_between=3) + retries=tries, sec_between=3, + ssl_details=util.fetch_ssl_details(cloud.paths)) except: util.logexc(log, ("Failed to post phone home data to" " %s in %s tries"), url, tries) diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py index c422eea9..026ee178 100644 --- a/cloudinit/ec2_utils.py +++ b/cloudinit/ec2_utils.py @@ -38,11 +38,16 @@ def combine_url(base, add_on): # See: http://bit.ly/TyoUQs +# +# Since boto metadata reader uses the old urllib which does not +# support ssl, we need to ahead and create our own reader which +# works the same as the boto one (for now). 
class MetadataMaterializer(object): - def __init__(self, blob, base_url): + def __init__(self, blob, base_url, ssl_details): self._blob = blob self._md = None self._base_url = base_url + self._ssl_details = ssl_details def _parse(self, blob): leaves = {} @@ -89,7 +94,7 @@ class MetadataMaterializer(object): return self._md def _fetch_url(self, url): - response = util.read_file_or_url(url) + response = util.read_file_or_url(url, ssl_details=self._ssl_details) return str(response) def _decode_leaf_blob(self, blob): @@ -134,19 +139,19 @@ def get_instance_userdata(url, version='latest', ssl_details=None): ud_url = combine_url(url, version) ud_url = combine_url(ud_url, 'user-data') try: - response = util.read_file_or_url(ud_url) + response = util.read_file_or_url(ud_url, ssl_details=ssl_details) return str(response) except Exception: util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) return None -def get_instance_metadata(url, version='latest'): +def get_instance_metadata(url, version='latest', ssl_details=None): md_url = combine_url(url, version) md_url = combine_url(md_url, 'meta-data') try: - response = util.read_file_or_url(md_url) - materializer = MetadataMaterializer(str(response), md_url) + response = util.read_file_or_url(md_url, ssl_details=ssl_details) + materializer = MetadataMaterializer(str(response), md_url, ssl_details) return materializer.materialize() except Exception: util.logexc(LOG, "Failed fetching metadata from url %s", md_url) diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 47f677d4..eee4e6bc 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -64,8 +64,10 @@ class DataSourceEc2(sources.DataSource): if not self.wait_for_metadata_service(): return False start_time = time.time() - self.userdata_raw = ec2_utils.get_instance_userdata(self.metadata_address, self.api_ver) - self.metadata = ec2_utils.get_instance_metadata(self.metadata_address, self.api_ver) + self.userdata_raw = ec2_utils.get_instance_userdata(self.metadata_address, self.api_ver, + ssl_details=util.fetch_ssl_details(self.paths)) + self.metadata = ec2_utils.get_instance_metadata(self.metadata_address, self.api_ver, + ssl_details=util.fetch_ssl_details(self.paths)) LOG.debug("Crawl of metadata service took %s seconds", int(time.time() - start_time)) return True diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index 2de31305..dc048943 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -81,7 +81,8 @@ class DataSourceMAAS(sources.DataSource): self.base_url = url (userdata, metadata) = read_maas_seed_url(self.base_url, - self.md_headers) + self.md_headers, + paths=self.paths) self.userdata_raw = userdata self.metadata = metadata return True @@ -141,7 +142,7 @@ class DataSourceMAAS(sources.DataSource): LOG.debug("Using metadata source: '%s'", url) else: LOG.critical("Giving up on md from %s after %i seconds", - urls, int(time.time() - starttime)) + urls, int(time.time() - starttime)) return bool(url) @@ -190,7 +191,7 @@ def read_maas_seed_dir(seed_d): def read_maas_seed_url(seed_url, header_cb=None, timeout=None, - version=MD_VERSION): + version=MD_VERSION, paths=None): """ Read the maas datasource at seed_url. 
- header_cb is a method that should return a headers dictionary for @@ -222,12 +223,13 @@ def read_maas_seed_url(seed_url, header_cb=None, timeout=None, else: headers = {} try: - resp = util.read_file_or_url(url, headers=headers, timeout=timeout) + resp = util.read_file_or_url(url, headers=headers, timeout=timeout, + ssl_details=util.fetch_ssl_details(paths)) if resp.ok: md[name] = str(resp) else: LOG.warn(("Fetching from %s resulted in" - " an invalid http code %s"), url, resp.status_code) + " an invalid http code %s"), url, resp.code) except url_helper.UrlError as e: if e.code != 404: raise @@ -372,7 +374,8 @@ if __name__ == "__main__": if args.subcmd == "check-seed": if args.url.startswith("http"): (userdata, metadata) = read_maas_seed_url(args.url, - header_cb=my_headers, version=args.apiver) + header_cb=my_headers, + version=args.apiver) else: (userdata, metadata) = read_maas_seed_url(args.url) print "=== userdata ===" diff --git a/cloudinit/util.py b/cloudinit/util.py index da2cdeda..307ed613 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -673,7 +673,7 @@ def fetch_ssl_details(paths=None): def read_file_or_url(url, timeout=5, retries=10, - headers=None, data=None, sec_between=1, paths=None): + headers=None, data=None, sec_between=1, ssl_details=None): if url.startswith("/"): url = "file://%s" % url if url.lower().startswith("file://"): @@ -686,7 +686,7 @@ def read_file_or_url(url, timeout=5, retries=10, headers=headers, data=data, sec_between=sec_between, - ssl_details=fetch_ssl_details(paths)) + ssl_details=ssl_details) def load_yaml(blob, default=None, allowed=(dict,)): -- cgit v1.2.3
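Taken together, these four patches move cloud-init's URL fetching onto the requests library while keeping retries, redirects, and SSL material under its own control. The core of that is the hand-rolled retry loop in readurl(): requests' built-in retry config cannot sleep between attempts, so the loop does it explicitly and wraps failures in UrlError. The sketch below is a minimal, self-contained version of that pattern, not the exact cloud-init code; the helper name read_url_with_retries is illustrative (the real function is url_helper.readurl), and SSL handling is left out.

import time

import requests
from requests import exceptions


class UrlError(IOError):
    """Wrap a requests exception, exposing the HTTP status code when known."""

    def __init__(self, cause):
        IOError.__init__(self, str(cause))
        self.cause = cause
        self.code = None
        if isinstance(cause, exceptions.HTTPError) and cause.response is not None:
            self.code = cause.response.status_code


def read_url_with_retries(url, data=None, timeout=None, retries=0,
                          sec_between=1, headers=None,
                          allow_redirects=False, check_status=True):
    """Fetch url, handling retries ourselves so we can sleep between tries."""
    req_args = {
        'url': url,
        # POST when a body is given; the patches above still send GET either way.
        'method': 'POST' if data else 'GET',
        'allow_redirects': allow_redirects,
    }
    if data is not None:
        req_args['data'] = data
    if timeout is not None:
        req_args['timeout'] = max(float(timeout), 0)
    if headers:
        req_args['headers'] = headers

    attempts = max(int(retries) + 1, 1)
    failures = []
    for i in range(attempts):
        try:
            resp = requests.request(**req_args)
            if check_status:
                resp.raise_for_status()  # turn 4xx/5xx into HTTPError
            return resp
        except exceptions.RequestException as e:
            failures.append(UrlError(e))
            # Only sleep if another attempt is coming.
            if i + 1 < attempts and sec_between > 0:
                time.sleep(sec_between)
    raise failures[-1]

With that shape, the phone-home handler's call amounts to read_url_with_retries(url, data=real_submit_keys, retries=tries, sec_between=3), which is the behaviour the first and second patches are after: a bounded number of attempts with a fixed pause between them.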
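The ec2_utils changes replace boto's lazy metadata reader with a crawler that walks the whole meta-data tree up front, which is what MetadataMaterializer does. The following is a simplified sketch of that crawl built on the retry helper above; the function name crawl_metadata and the flat dict-of-dicts result are illustrative, and the real class additionally decodes JSON leaves and handles keyed listing entries such as "public-keys/0=mykey", which this version skips.

def crawl_metadata(base_url, retries=10, sec_between=1):
    """Recursively fetch an EC2-style meta-data tree into a nested dict.

    A directory listing comes back one entry per line; names ending in '/'
    are sub-trees, anything else is a leaf whose value is fetched as-is.
    """
    if not base_url.endswith('/'):
        base_url += '/'
    listing = read_url_with_retries(base_url, retries=retries,
                                    sec_between=sec_between).text
    tree = {}
    for entry in listing.splitlines():
        entry = entry.strip()
        if not entry:
            continue
        child_url = base_url + entry
        if entry.endswith('/'):
            # Sub-directory: recurse, keyed by the name without the slash.
            tree[entry[:-1]] = crawl_metadata(child_url, retries, sec_between)
        else:
            tree[entry] = read_url_with_retries(child_url, retries=retries,
                                                sec_between=sec_between).text
    return tree

Calling crawl_metadata('http://169.254.169.254/latest/meta-data') against the conventional EC2 endpoint yields a nested dict comparable to what boto's get_instance_metadata() returned, which is the result get_instance_metadata() in ec2_utils.py now builds for the EC2 and CloudStack datasources.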
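Finally, the last patch threads client-side SSL material into every metadata fetch via util.fetch_ssl_details(), which is just a first-match search over a couple of well-known instance-data directories. A condensed sketch follows; the directory names are copied from the patch, while the extra_dirs parameter stands in for the Paths-derived locations the real function adds, and the returned dict matches what readurl() maps onto requests' cert argument.

import os


def fetch_ssl_details(extra_dirs=()):
    """Return {'cert_file': ..., 'key_file': ...} for the first cert.pem/key.pem found."""
    search_dirs = [
        '/var/lib/cloud/data/ssl',
        '/var/lib/cloud/instance/data/ssl',
    ]
    search_dirs.extend(extra_dirs)
    search_dirs = [d for d in search_dirs if d and os.path.isdir(d)]

    details = {}
    for name, key in (('cert.pem', 'cert_file'), ('key.pem', 'key_file')):
        for d in search_dirs:
            candidate = os.path.join(d, name)
            if os.path.isfile(candidate):
                details[key] = candidate
                break
    # A key file without a matching cert is useless to requests, so drop it.
    if 'key_file' in details and 'cert_file' not in details:
        details.pop('key_file')
    return details

Keeping this lookup separate from readurl() lets callers such as the EC2 and MAAS datasources decide whether to pass ssl_details at all, which is exactly the knob the final patch threads through cc_phone_home, ec2_utils, and read_file_or_url().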