Diffstat (limited to 'azurelinuxagent/common/utils')
-rw-r--r--   azurelinuxagent/common/utils/fileutil.py |  19
-rw-r--r--   azurelinuxagent/common/utils/restutil.py | 137
-rw-r--r--   azurelinuxagent/common/utils/textutil.py |  41
3 files changed, 141 insertions, 56 deletions
diff --git a/azurelinuxagent/common/utils/fileutil.py b/azurelinuxagent/common/utils/fileutil.py
index 7ef4fef..b0b6fb7 100644
--- a/azurelinuxagent/common/utils/fileutil.py
+++ b/azurelinuxagent/common/utils/fileutil.py
@@ -21,11 +21,11 @@
 File operation util functions
 """
 
+import glob
 import os
 import re
 import shutil
 import pwd
-import tempfile
 import azurelinuxagent.common.logger as logger
 from azurelinuxagent.common.future import ustr
 import azurelinuxagent.common.utils.textutil as textutil
@@ -111,9 +111,11 @@ def chmod(path, mode):
     os.chmod(path, mode)
 
 def rm_files(*args):
-    for path in args:
-        if os.path.isfile(path):
-            os.remove(path)
+    for paths in args:
+        #Find all possible file paths
+        for path in glob.glob(paths):
+            if os.path.isfile(path):
+                os.remove(path)
 
 def rm_dirs(*args):
     """
@@ -169,3 +171,12 @@ def findstr_in_file(file_path, pattern_str):
 
     return None
 
+def get_all_files(root_path):
+    """
+    Find all files under the given root path
+    """
+    result = []
+    for root, dirs, files in os.walk(root_path):
+        result.extend([os.path.join(root, file) for file in files])
+
+    return result
diff --git a/azurelinuxagent/common/utils/restutil.py b/azurelinuxagent/common/utils/restutil.py
index a789650..7c9ee17 100644
--- a/azurelinuxagent/common/utils/restutil.py
+++ b/azurelinuxagent/common/utils/restutil.py
@@ -18,9 +18,7 @@
 #
 
 import time
-import platform
-import os
-import subprocess
+
 import azurelinuxagent.common.conf as conf
 import azurelinuxagent.common.logger as logger
 from azurelinuxagent.common.exception import HttpError
@@ -32,6 +30,7 @@ REST api util functions
 
 RETRY_WAITING_INTERVAL = 10
 
+
 def _parse_url(url):
     o = urlparse(url)
     rel_uri = o.path
@@ -44,6 +43,7 @@ def _parse_url(url):
         secure = True
     return o.hostname, o.port, secure, rel_uri
 
+
 def get_http_proxy():
     """
     Get http_proxy and https_proxy from environment variables.
@@ -51,106 +51,143 @@
     """
     host = conf.get_httpproxy_host()
     port = conf.get_httpproxy_port()
-    return (host, port)
+    return host, port
+
 
 def _http_request(method, host, rel_uri, port=None, data=None, secure=False,
-                  headers=None, proxy_host=None, proxy_port=None):
+                  headers=None, proxy_host=None, proxy_port=None):
     url, conn = None, None
     if secure:
         port = 443 if port is None else port
         if proxy_host is not None and proxy_port is not None:
-            conn = httpclient.HTTPSConnection(proxy_host, proxy_port, timeout=10)
+            conn = httpclient.HTTPSConnection(proxy_host,
+                                              proxy_port,
+                                              timeout=10)
             conn.set_tunnel(host, port)
-            #If proxy is used, full url is needed.
+            # If proxy is used, full url is needed.
             url = "https://{0}:{1}{2}".format(host, port, rel_uri)
         else:
-            conn = httpclient.HTTPSConnection(host, port, timeout=10)
+            conn = httpclient.HTTPSConnection(host,
+                                              port,
+                                              timeout=10)
             url = rel_uri
     else:
         port = 80 if port is None else port
         if proxy_host is not None and proxy_port is not None:
-            conn = httpclient.HTTPConnection(proxy_host, proxy_port, timeout=10)
-            #If proxy is used, full url is needed.
+            conn = httpclient.HTTPConnection(proxy_host,
+                                             proxy_port,
+                                             timeout=10)
+            # If proxy is used, full url is needed.
             url = "http://{0}:{1}{2}".format(host, port, rel_uri)
         else:
-            conn = httpclient.HTTPConnection(host, port, timeout=10)
+            conn = httpclient.HTTPConnection(host,
+                                             port,
+                                             timeout=10)
             url = rel_uri
-    if headers == None:
-        conn.request(method, url, data)
-    else:
-        conn.request(method, url, data, headers)
+
+    logger.verbose("HTTPConnection [{0}] [{1}] [{2}] [{3}]",
+                   method,
+                   url,
+                   data,
+                   headers)
+
+    headers = {} if headers is None else headers
+    conn.request(method=method, url=url, body=data, headers=headers)
     resp = conn.getresponse()
     return resp
 
-def http_request(method, url, data, headers=None, max_retry=3, chk_proxy=False):
+
+def http_request(method, url, data, headers=None, max_retry=3,
+                 chk_proxy=False):
    """
    Sending http request to server
    On error, sleep 10 and retry max_retry times.
    """
-    logger.verbose("HTTP Req: {0} {1}", method, url)
-    logger.verbose("    Data={0}", data)
-    logger.verbose("    Header={0}", headers)
     host, port, secure, rel_uri = _parse_url(url)
 
-    #Check proxy
+    # Check proxy
     proxy_host, proxy_port = (None, None)
     if chk_proxy:
         proxy_host, proxy_port = get_http_proxy()
 
-    #If httplib module is not built with ssl support. Fallback to http
+    # If httplib module is not built with ssl support. Fallback to http
     if secure and not hasattr(httpclient, "HTTPSConnection"):
         logger.warn("httplib is not built with ssl support")
         secure = False
 
-    #If httplib module doesn't support https tunnelling. Fallback to http
-    if secure and \
-            proxy_host is not None and \
-            proxy_port is not None and \
-            not hasattr(httpclient.HTTPSConnection, "set_tunnel"):
-        logger.warn("httplib doesn't support https tunnelling(new in python 2.7)")
+    # If httplib module doesn't support https tunnelling. Fallback to http
+    if secure and proxy_host is not None and proxy_port is not None \
+            and not hasattr(httpclient.HTTPSConnection, "set_tunnel"):
+        logger.warn("httplib does not support https tunnelling "
+                    "(new in python 2.7)")
         secure = False
 
+    logger.verbose("HTTP method: [{0}]", method)
+    logger.verbose("HTTP host: [{0}]", host)
+    logger.verbose("HTTP uri: [{0}]", rel_uri)
+    logger.verbose("HTTP port: [{0}]", port)
+    logger.verbose("HTTP data: [{0}]", data)
+    logger.verbose("HTTP secure: [{0}]", secure)
+    logger.verbose("HTTP headers: [{0}]", headers)
+    logger.verbose("HTTP proxy: [{0}:{1}]", proxy_host, proxy_port)
+
+    retry_msg = ''
+    log_msg = "HTTP {0}".format(method)
     for retry in range(0, max_retry):
+        retry_interval = RETRY_WAITING_INTERVAL
         try:
-            resp = _http_request(method, host, rel_uri, port=port, data=data,
-                                 secure=secure, headers=headers,
-                                 proxy_host=proxy_host, proxy_port=proxy_port)
-            logger.verbose("HTTP Resp: Status={0}", resp.status)
-            logger.verbose("    Header={0}", resp.getheaders())
+            resp = _http_request(method,
+                                 host,
+                                 rel_uri,
+                                 port=port,
+                                 data=data,
+                                 secure=secure,
+                                 headers=headers,
+                                 proxy_host=proxy_host,
+                                 proxy_port=proxy_port)
+            logger.verbose("HTTP response status: [{0}]", resp.status)
             return resp
         except httpclient.HTTPException as e:
-            logger.warn('HTTPException {0}, args:{1}', e, repr(e.args))
+            retry_msg = 'HTTP exception: {0} {1}'.format(log_msg, e)
+            retry_interval = 5
         except IOError as e:
-            logger.warn('Socket IOError {0}, args:{1}', e, repr(e.args))
-
-        if retry < max_retry - 1:
-            logger.info("Retry={0}, {1} {2}", retry, method, url)
-            time.sleep(RETRY_WAITING_INTERVAL)
-
-    if url is not None and len(url) > 100:
-        url_log = url[0: 100] #In case the url is too long
-    else:
-        url_log = url
-    raise HttpError("HTTP Err: {0} {1}".format(method, url_log))
+            retry_msg = 'IO error: {0} {1}'.format(log_msg, e)
+            retry_interval = 0
+            max_retry = 0
+
+        if retry < max_retry:
+            logger.info("Retry [{0}/{1} - {3}]",
+                        retry+1,
+                        max_retry,
+                        retry_interval,
+                        retry_msg)
+            time.sleep(retry_interval)
+
+    raise HttpError("{0} failed".format(log_msg))
+
 
 def http_get(url, headers=None, max_retry=3, chk_proxy=False):
-    return http_request("GET", url, data=None, headers=headers,
+    return http_request("GET", url, data=None, headers=headers,
                         max_retry=max_retry, chk_proxy=chk_proxy)
 
+
 def http_head(url, headers=None, max_retry=3, chk_proxy=False):
-    return http_request("HEAD", url, None, headers=headers,
+    return http_request("HEAD", url, None, headers=headers,
                         max_retry=max_retry, chk_proxy=chk_proxy)
 
+
 def http_post(url, data, headers=None, max_retry=3, chk_proxy=False):
-    return http_request("POST", url, data, headers=headers,
+    return http_request("POST", url, data, headers=headers,
                         max_retry=max_retry, chk_proxy=chk_proxy)
 
+
 def http_put(url, data, headers=None, max_retry=3, chk_proxy=False):
-    return http_request("PUT", url, data, headers=headers,
+    return http_request("PUT", url, data, headers=headers,
                         max_retry=max_retry, chk_proxy=chk_proxy)
 
+
 def http_delete(url, headers=None, max_retry=3, chk_proxy=False):
-    return http_request("DELETE", url, None, headers=headers,
+    return http_request("DELETE", url, None, headers=headers,
                         max_retry=max_retry, chk_proxy=chk_proxy)
 
-#End REST api util functions
+# End REST api util functions
diff --git a/azurelinuxagent/common/utils/textutil.py b/azurelinuxagent/common/utils/textutil.py
index f03c7e6..59b8fe7 100644
--- a/azurelinuxagent/common/utils/textutil.py
+++ b/azurelinuxagent/common/utils/textutil.py
@@ -251,8 +251,14 @@ def set_ini_config(config, name, val):
 
 
 def remove_bom(c):
-    if str_to_ord(c[0]) > 128 and str_to_ord(c[1]) > 128 and \
-        str_to_ord(c[2]) > 128:
+    '''
+    bom is comprised of a sequence of three chars,0xef, 0xbb, 0xbf, in case of utf-8.
+    '''
+    if not is_str_none_or_whitespace(c) and \
+       len(c) > 2 and \
+       str_to_ord(c[0]) > 128 and \
+       str_to_ord(c[1]) > 128 and \
+       str_to_ord(c[2]) > 128:
         c = c[3:]
     return c
 
@@ -277,3 +283,34 @@ def b64encode(s):
     if PY_VERSION_MAJOR > 2:
         return base64.b64encode(bytes(s, 'utf-8')).decode('utf-8')
     return base64.b64encode(s)
+
+
+def b64decode(s):
+    from azurelinuxagent.common.version import PY_VERSION_MAJOR
+    if PY_VERSION_MAJOR > 2:
+        return base64.b64decode(s).decode('utf-8')
+    return base64.b64decode(s)
+
+
+def safe_shlex_split(s):
+    import shlex
+    from azurelinuxagent.common.version import PY_VERSION
+    if PY_VERSION[:2] == (2, 6):
+        return shlex.split(s.encode('utf-8'))
+    return shlex.split(s)
+
+
+def parse_json(json_str):
+    """
+    Parse json string and return a resulting dictionary
+    """
+    # trim null and whitespaces
+    result = None
+    if not is_str_none_or_whitespace(json_str):
+        import json
+        result = json.loads(json_str.rstrip(' \t\r\n\0'))
+
+    return result
+
+def is_str_none_or_whitespace(s):
+    return s is None or len(s) == 0 or s.isspace()
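The fileutil changes make rm_files glob-aware and add get_all_files. Below is a minimal usage sketch, assuming the azurelinuxagent package is importable; the temporary directory and file names are made up for illustration.

    import os
    import tempfile

    from azurelinuxagent.common.utils import fileutil

    tmp = tempfile.mkdtemp()
    for name in ("ext.0.log", "ext.1.log", "keep.txt"):
        with open(os.path.join(tmp, name), "w") as f:
            f.write("example\n")

    # rm_files now expands each argument with glob, so a single pattern
    # removes both log files while leaving keep.txt untouched.
    fileutil.rm_files(os.path.join(tmp, "ext.*.log"))

    # get_all_files walks the tree and returns the full path of every
    # remaining file under the given root.
    print(fileutil.get_all_files(tmp))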
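With the reworked retry loop, http_request retries httplib-level exceptions after a short interval, stops retrying on IOError, and raises HttpError once the retries are exhausted. A hedged sketch of the GET wrapper follows; the URL is a placeholder, not an endpoint the agent necessarily calls.

    from azurelinuxagent.common.exception import HttpError
    from azurelinuxagent.common.utils import restutil

    try:
        # Transient httplib exceptions are retried (5 second interval) up to
        # max_retry times; an IOError aborts immediately by zeroing max_retry.
        resp = restutil.http_get("http://example.invalid/status", max_retry=3)
        print(resp.status, resp.read())
    except HttpError as e:
        print("GET failed after retries: {0}".format(e))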
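The new textutil helpers are small and self-contained; a short sketch with made-up input values:

    from azurelinuxagent.common.utils import textutil

    # parse_json trims trailing nulls/whitespace and returns None for empty
    # or whitespace-only input instead of raising.
    print(textutil.parse_json('{"vmName": "example"}\0\n'))
    print(textutil.parse_json("   "))

    # remove_bom now guards against short or empty input before stripping a
    # UTF-8 byte-order mark read in as raw characters.
    print(textutil.remove_bom('\xef\xbb\xbfhello'))

    # b64decode mirrors the existing b64encode on both Python 2 and 3, and
    # safe_shlex_split works around the Python 2.6 unicode limitation.
    print(textutil.b64decode(textutil.b64encode("agent")))
    print(textutil.safe_shlex_split('mount -o ro /dev/sdc1 /mnt'))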