path: root/cloudinit/url_helper.py
author    Joshua Harlow <harlowja@yahoo-inc.com>  2012-06-08 18:00:52 -0700
committer Joshua Harlow <harlowja@yahoo-inc.com>  2012-06-08 18:00:52 -0700
commit    edff7583b501a0eab5376b88f8378a6b774257c7 (patch)
tree      158cb6148a64551b463db3a1040d00ff50b3c3e3 /cloudinit/url_helper.py
parent    1a415b187119fbdc4e08112a6afb23f1caf2b9ad (diff)
Cleaning this up; the logic around attempts and retries should be more sound now.
Diffstat (limited to 'cloudinit/url_helper.py')
-rw-r--r--  cloudinit/url_helper.py  |  30
1 file changed, 13 insertions, 17 deletions
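
The commit message above describes tightening the attempts/retries logic. As a minimal standalone sketch of the intended behavior (illustrative only, not code from this repository; fetch is a hypothetical callable), asking for N retries yields N + 1 total attempts, and the delay is applied only between attempts, never after the last one:

    import time

    def fetch_with_retries(fetch, retries=0, sec_between=1):
        # "retries" counts re-tries, so the total number of attempts is one more.
        attempts = retries + 1
        last_excp = None
        for i in range(attempts):
            try:
                return fetch()
            except Exception as e:
                last_excp = e
                # Sleep only if another attempt will follow.
                if i + 1 < attempts:
                    time.sleep(sec_between)
        if last_excp is not None:
            raise last_excp
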
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 0f0a9d0c..ed78c92e 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -1,15 +1,11 @@
import errno
-import httplib
import time
import urllib
import urllib2
-from StringIO import StringIO
-
from contextlib import closing
from cloudinit import log as logging
-from cloudinit import shell as sh
LOG = logging.getLogger(__name__)
@@ -18,7 +14,7 @@ def ok_http_code(st):
return st in xrange(200, 400)
-def readurl(url, data=None, timeout=None, retries=0, sec_between=1, read_cb=None, headers=None):
+def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None):
openargs = {}
if timeout is not None:
openargs['timeout'] = int(timeout)
@@ -31,14 +27,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, read_cb=None
if retries <= 0:
retries = 1
+ attempts = retries + 1
last_excp = None
- LOG.debug("Attempting to read from %s with %s attempts to be performed", url, retries)
- for i in range(0, retries):
+ LOG.debug("Attempting to read from %s with %s attempts to be performed", url, attempts)
+ for i in range(0, attempts):
try:
with closing(urllib2.urlopen(req, **openargs)) as rh:
- ofh = StringIO()
- sh.pipe_in_out(rh, ofh, chunk_cb=read_cb)
- return (ofh.getvalue(), rh.getcode())
+ return (rh.read(), rh.getcode())
except urllib2.HTTPError as e:
last_excp = e
LOG.exception("Failed at reading from %s.", url)
@@ -51,11 +46,12 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, read_cb=None
else:
last_excp = e
LOG.exception("Failed at reading from %s.", url)
- LOG.debug("Please wait %s seconds while we wait to try again.", sec_between)
- time.sleep(sec_between)
+ if i + 1 < attempts:
+ LOG.debug("Please wait %s seconds while we wait to try again.", sec_between)
+ time.sleep(sec_between)
# Didn't work out
- LOG.warn("Failed downloading from %s after %s attempts", url, i + 1)
+ LOG.warn("Failed downloading from %s after %s attempts", url, attempts)
if last_excp is not None:
raise last_excp
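
A hedged usage sketch of the updated readurl() signature from the hunk above, which now returns a (content, status_code) tuple directly from the response; the metadata URL, parameter values, and handle_response() are illustrative assumptions, not taken from this commit:

    # Up to 4 total attempts (3 retries), waiting 2 seconds between failures.
    (contents, status) = readurl("http://169.254.169.254/latest/meta-data/instance-id",
                                 timeout=5, retries=3, sec_between=2)
    if ok_http_code(status):
        handle_response(contents)  # hypothetical caller
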
@@ -118,11 +114,11 @@ def wait_for_url(urls, max_wait=None, timeout=None,
else:
headers = {}
- (resp, status_code) = readurl(url, headers=headers, timeout=timeout)
+ (resp, sc) = readurl(url, headers=headers, timeout=timeout)
if not resp:
- reason = "empty response [%s]" % status_code
- elif not ok_http_code(status_code):
- reason = "bad status code [%s]" % status_code
+ reason = "empty response [%s]" % sc
+ elif not ok_http_code(sc):
+ reason = "bad status code [%s]" % sc
else:
return url
except urllib2.HTTPError as e:
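
From the hunk above, wait_for_url() polls a list of URLs via readurl() and returns the first URL that answers with a non-empty body and an OK status code. A hedged calling sketch; the metadata URL and the max_wait/timeout values are illustrative, and a falsy return on overall failure is an assumption, since that code path is outside this hunk:

    urls = ["http://169.254.169.254/2009-04-04/meta-data/instance-id"]
    found = wait_for_url(urls, max_wait=120, timeout=10)
    if found:
        print("metadata service is reachable at %s" % found)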