summaryrefslogtreecommitdiff
path: root/cloudinit/url_helper.py
diff options
context:
space:
mode:
authorScott Moser <smoser@ubuntu.com>2014-01-23 20:12:02 -0500
committerScott Moser <smoser@ubuntu.com>2014-01-23 20:12:02 -0500
commit83087580ace7c392477a6772bb0cb254012c8a4e (patch)
tree8d18b149a7c180a5e332dbfafca345190952c5b3 /cloudinit/url_helper.py
parent5aa7d4ccf984ac296f58fa355bdce17d175dcc7d (diff)
parente6da32a91c59f33fd72bebc43f8e6beae73fbf39 (diff)
downloadvyos-cloud-init-83087580ace7c392477a6772bb0cb254012c8a4e.tar.gz
vyos-cloud-init-83087580ace7c392477a6772bb0cb254012c8a4e.zip
Skip retry and continued fetch of userdata when NOT_FOUND
When a 404 HTTP code comes back from fetching EC2 data, stop the fetching process instead of retrying, and handle this case in the userdata fetching function as a special case of no userdata being fetched (an empty string in this case).
Diffstat (limited to 'cloudinit/url_helper.py')
-rw-r--r--cloudinit/url_helper.py17
1 file changed, 9 insertions, 8 deletions
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 19a30409..42edf9cf 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -103,7 +103,7 @@ class UrlError(IOError):
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
headers=None, headers_cb=None, ssl_details=None,
- check_status=True, allow_redirects=True):
+ check_status=True, allow_redirects=True, exception_cb=None):
url = _cleanurl(url)
req_args = {
'url': url,
@@ -163,14 +163,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
# Handle retrying ourselves since the built-in support
# doesn't handle sleeping between tries...
for i in range(0, manual_tries):
+ req_args['headers'] = headers_cb(url)
+ filtered_req_args = {}
+ for (k, v) in req_args.items():
+ if k == 'data':
+ continue
+ filtered_req_args[k] = v
try:
- req_args['headers'] = headers_cb(url)
- filtered_req_args = {}
- for (k, v) in req_args.items():
- if k == 'data':
- continue
- filtered_req_args[k] = v
-
LOG.debug("[%s/%s] open '%s' with %s configuration", i,
manual_tries, url, filtered_req_args)
@@ -196,6 +195,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
# ssl exceptions are not going to get fixed by waiting a
# few seconds
break
+ if exception_cb and not exception_cb(filtered_req_args, e):
+ break
if i + 1 < manual_tries and sec_between > 0:
LOG.debug("Please wait %s seconds while we wait to try again",
sec_between)