diff options
author | Joshua Harlow <harlowja@yahoo-inc.com> | 2014-01-23 14:41:09 -0800 |
---|---|---|
committer | Joshua Harlow <harlowja@yahoo-inc.com> | 2014-01-23 14:41:09 -0800 |
commit | ba84f51f0143a8ca1ca5113ae932505ce1bfe5e5 (patch) | |
tree | 5250034f09e1063b441c5fa59947402df1ea9bc9 /cloudinit/url_helper.py | |
parent | 5aa7d4ccf984ac296f58fa355bdce17d175dcc7d (diff) | |
download | vyos-cloud-init-ba84f51f0143a8ca1ca5113ae932505ce1bfe5e5.tar.gz vyos-cloud-init-ba84f51f0143a8ca1ca5113ae932505ce1bfe5e5.zip |
Skip retry and continued fetch of userdata when NOT_FOUND
When a 404 http code comes back from the fetching of ec2
data, instead of retrying immediately, stop the fetching process
and in the userdata fetching function handle this case as a
special case of no userdata being fetched (an empty string
in this case).
Diffstat (limited to 'cloudinit/url_helper.py')
-rw-r--r-- | cloudinit/url_helper.py | 17 |
1 files changed, 9 insertions, 8 deletions
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 19a30409..42edf9cf 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -103,7 +103,7 @@ class UrlError(IOError): def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None, headers_cb=None, ssl_details=None, - check_status=True, allow_redirects=True): + check_status=True, allow_redirects=True, exception_cb=None): url = _cleanurl(url) req_args = { 'url': url, @@ -163,14 +163,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, # Handle retrying ourselves since the built-in support # doesn't handle sleeping between tries... for i in range(0, manual_tries): + req_args['headers'] = headers_cb(url) + filtered_req_args = {} + for (k, v) in req_args.items(): + if k == 'data': + continue + filtered_req_args[k] = v try: - req_args['headers'] = headers_cb(url) - filtered_req_args = {} - for (k, v) in req_args.items(): - if k == 'data': - continue - filtered_req_args[k] = v - LOG.debug("[%s/%s] open '%s' with %s configuration", i, manual_tries, url, filtered_req_args) @@ -196,6 +195,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, # ssl exceptions are not going to get fixed by waiting a # few seconds break + if exception_cb and not exception_cb(filtered_req_args, e): + break if i + 1 < manual_tries and sec_between > 0: LOG.debug("Please wait %s seconds while we wait to try again", sec_between) |