author      Barry Warsaw <barry@python.org>    2015-01-21 17:56:53 -0500
committer   Barry Warsaw <barry@python.org>    2015-01-21 17:56:53 -0500
commit      f895cb12141281702b34da18f2384deb64c881e7 (patch)
tree        7a7433752ba1317c370dd3dd815c9ee7331a923b /cloudinit
parent      a64bb4febc79fcf641f6471d8cc00c74ca915f3d (diff)
download    vyos-cloud-init-f895cb12141281702b34da18f2384deb64c881e7.tar.gz
            vyos-cloud-init-f895cb12141281702b34da18f2384deb64c881e7.zip
Largely merge lp:~harlowja/cloud-init/py2-3, albeit manually, because it seemed
to be behind trunk. `tox -e py27` passes the full test suite. Now to work on replacing mocker.
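
The diff below repeatedly applies the same Python 2/3 compatibility idioms via the six library: dict.iteritems() becomes dict.items(), basestring checks become six.string_types, bare octal literals become 0o-prefixed, and StringIO/urllib/configparser imports move to six and six.moves. A minimal standalone sketch of those idioms follows; the function name dump_options and the sample values are illustrative only, not taken from the tree.

import six
from six import StringIO
from six.moves.urllib.parse import quote as urlquote


def dump_options(options):
    # Py2-only dict.iteritems() is replaced by .items(), which works on both.
    buf = StringIO()
    for key, val in options.items():
        # basestring is gone in Py3; six.string_types covers str and unicode.
        if not isinstance(val, six.string_types):
            val = str(val)
        buf.write("%s=%s\n" % (key, urlquote(val, safe="/:")))
    return buf.getvalue()


# File modes need the 0o prefix; the bare 0644 form is a SyntaxError on Python 3.
EXAMPLE_MODE = 0o644

if __name__ == "__main__":
    print(dump_options({"retries": 10, "mirror": "http://archive.ubuntu.com/ubuntu"}))
    print(oct(EXAMPLE_MODE))
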
Diffstat (limited to 'cloudinit')
-rw-r--r--  cloudinit/config/cc_apt_configure.py | 2
-rw-r--r--  cloudinit/config/cc_debug.py | 7
-rw-r--r--  cloudinit/config/cc_landscape.py | 2
-rw-r--r--  cloudinit/config/cc_mcollective.py | 15
-rw-r--r--  cloudinit/config/cc_phone_home.py | 4
-rw-r--r--  cloudinit/config/cc_puppet.py | 8
-rw-r--r--  cloudinit/config/cc_resolv_conf.py | 4
-rw-r--r--  cloudinit/config/cc_seed_random.py | 3
-rw-r--r--  cloudinit/config/cc_ssh.py | 16
-rw-r--r--  cloudinit/config/cc_yum_add_repo.py | 7
-rw-r--r--  cloudinit/distros/__init__.py | 55
-rw-r--r--  cloudinit/distros/arch.py | 2
-rw-r--r--  cloudinit/distros/freebsd.py | 12
-rw-r--r--  cloudinit/distros/net_util.py | 2
-rw-r--r--  cloudinit/distros/parsers/hostname.py | 2
-rw-r--r--  cloudinit/distros/parsers/hosts.py | 2
-rw-r--r--  cloudinit/distros/parsers/resolv_conf.py | 2
-rw-r--r--  cloudinit/distros/parsers/sys_conf.py | 5
-rw-r--r--  cloudinit/distros/rhel.py | 2
-rw-r--r--  cloudinit/distros/sles.py | 2
-rw-r--r--  cloudinit/ec2_utils.py | 9
-rw-r--r--  cloudinit/handlers/__init__.py | 2
-rw-r--r--  cloudinit/handlers/boot_hook.py | 2
-rw-r--r--  cloudinit/handlers/cloud_config.py | 2
-rw-r--r--  cloudinit/handlers/shell_script.py | 2
-rw-r--r--  cloudinit/handlers/upstart_job.py | 2
-rw-r--r--  cloudinit/helpers.py | 13
-rw-r--r--  cloudinit/log.py | 7
-rw-r--r--  cloudinit/mergers/__init__.py | 4
-rw-r--r--  cloudinit/mergers/m_dict.py | 4
-rw-r--r--  cloudinit/mergers/m_list.py | 6
-rw-r--r--  cloudinit/mergers/m_str.py | 10
-rw-r--r--  cloudinit/netinfo.py | 4
-rw-r--r--  cloudinit/signal_handler.py | 2
-rw-r--r--  cloudinit/sources/DataSourceConfigDrive.py | 4
-rw-r--r--  cloudinit/sources/DataSourceDigitalOcean.py | 9
-rw-r--r--  cloudinit/sources/DataSourceEc2.py | 4
-rw-r--r--  cloudinit/sources/DataSourceMAAS.py | 2
-rw-r--r--  cloudinit/sources/DataSourceOVF.py | 6
-rw-r--r--  cloudinit/sources/DataSourceSmartOS.py | 15
-rw-r--r--  cloudinit/sources/__init__.py | 10
-rw-r--r--  cloudinit/sources/helpers/openstack.py | 10
-rw-r--r--  cloudinit/ssh_util.py | 6
-rw-r--r--  cloudinit/stages.py | 23
-rw-r--r--  cloudinit/type_utils.py | 32
-rw-r--r--  cloudinit/url_helper.py | 22
-rw-r--r--  cloudinit/user_data.py | 8
-rw-r--r--  cloudinit/util.py | 109
48 files changed, 281 insertions, 202 deletions
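
Many of the cloudinit/util.py changes below lean on two small text/bytes normalization helpers, decode_binary and encode_text, so that gzip, hashing, YAML loading, and file writes behave the same under Python 2 and 3. A self-contained sketch of that pattern, adapted from the util.py hunk further down (the __main__ assertions are only an illustration):

import six


def decode_binary(blob, encoding='utf-8'):
    # Convert a bytes value into text using the given encoding; text passes through.
    if isinstance(blob, six.text_type):
        return blob
    return blob.decode(encoding)


def encode_text(text, encoding='utf-8'):
    # Convert a text value into bytes using the given encoding; bytes pass through.
    if isinstance(text, six.binary_type):
        return text
    return text.encode(encoding)


if __name__ == "__main__":
    # Round-trips identically on Python 2 and 3.
    assert decode_binary(b"hello") == u"hello"
    assert decode_binary(u"hello") == u"hello"
    assert encode_text(u"hello") == b"hello"
    assert encode_text(b"hello") == b"hello"
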
diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py
index f10b76a3..de72903f 100644
--- a/cloudinit/config/cc_apt_configure.py
+++ b/cloudinit/config/cc_apt_configure.py
@@ -126,7 +126,7 @@ def mirror2lists_fileprefix(mirror):
def rename_apt_lists(old_mirrors, new_mirrors, lists_d="/var/lib/apt/lists"):
- for (name, omirror) in old_mirrors.iteritems():
+ for (name, omirror) in old_mirrors.items():
nmirror = new_mirrors.get(name)
if not nmirror:
continue
diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py
index 8c489426..bdc32fe6 100644
--- a/cloudinit/config/cc_debug.py
+++ b/cloudinit/config/cc_debug.py
@@ -34,7 +34,8 @@ It can be configured with the following option structure::
"""
import copy
-from StringIO import StringIO
+
+from six import StringIO
from cloudinit import type_utils
from cloudinit import util
@@ -77,7 +78,7 @@ def handle(name, cfg, cloud, log, args):
dump_cfg = copy.deepcopy(cfg)
for k in SKIP_KEYS:
dump_cfg.pop(k, None)
- all_keys = list(dump_cfg.keys())
+ all_keys = list(dump_cfg)
for k in all_keys:
if k.startswith("_"):
dump_cfg.pop(k, None)
@@ -103,6 +104,6 @@ def handle(name, cfg, cloud, log, args):
line = "ci-info: %s\n" % (line)
content_to_file.append(line)
if out_file:
- util.write_file(out_file, "".join(content_to_file), 0644, "w")
+ util.write_file(out_file, "".join(content_to_file), 0o644, "w")
else:
util.multi_log("".join(content_to_file), console=True, stderr=False)
diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py
index 8a709677..0b9d846e 100644
--- a/cloudinit/config/cc_landscape.py
+++ b/cloudinit/config/cc_landscape.py
@@ -20,7 +20,7 @@
import os
-from StringIO import StringIO
+from six import StringIO
from configobj import ConfigObj
diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py
index b670390d..425420ae 100644
--- a/cloudinit/config/cc_mcollective.py
+++ b/cloudinit/config/cc_mcollective.py
@@ -19,7 +19,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+import six
+from six import StringIO
# Used since this can maintain comments
# and doesn't need a top level section
@@ -51,17 +52,17 @@ def handle(name, cfg, cloud, log, _args):
# original file in order to be able to mix the rest up
mcollective_config = ConfigObj(SERVER_CFG)
# See: http://tiny.cc/jh9agw
- for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems():
+ for (cfg_name, cfg) in mcollective_cfg['conf'].items():
if cfg_name == 'public-cert':
- util.write_file(PUBCERT_FILE, cfg, mode=0644)
+ util.write_file(PUBCERT_FILE, cfg, mode=0o644)
mcollective_config['plugin.ssl_server_public'] = PUBCERT_FILE
mcollective_config['securityprovider'] = 'ssl'
elif cfg_name == 'private-cert':
- util.write_file(PRICERT_FILE, cfg, mode=0600)
+ util.write_file(PRICERT_FILE, cfg, mode=0o600)
mcollective_config['plugin.ssl_server_private'] = PRICERT_FILE
mcollective_config['securityprovider'] = 'ssl'
else:
- if isinstance(cfg, (basestring, str)):
+ if isinstance(cfg, six.string_types):
# Just set it in the 'main' section
mcollective_config[cfg_name] = cfg
elif isinstance(cfg, (dict)):
@@ -69,7 +70,7 @@ def handle(name, cfg, cloud, log, _args):
# if it is needed and then add/or create items as needed
if cfg_name not in mcollective_config.sections:
mcollective_config[cfg_name] = {}
- for (o, v) in cfg.iteritems():
+ for (o, v) in cfg.items():
mcollective_config[cfg_name][o] = v
else:
# Otherwise just try to convert it to a string
@@ -81,7 +82,7 @@ def handle(name, cfg, cloud, log, _args):
contents = StringIO()
mcollective_config.write(contents)
contents = contents.getvalue()
- util.write_file(SERVER_CFG, contents, mode=0644)
+ util.write_file(SERVER_CFG, contents, mode=0o644)
# Start mcollective
util.subp(['service', 'mcollective', 'start'], capture=False)
diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py
index 5bc68b83..18a7ddad 100644
--- a/cloudinit/config/cc_phone_home.py
+++ b/cloudinit/config/cc_phone_home.py
@@ -81,7 +81,7 @@ def handle(name, cfg, cloud, log, args):
'pub_key_ecdsa': '/etc/ssh/ssh_host_ecdsa_key.pub',
}
- for (n, path) in pubkeys.iteritems():
+ for (n, path) in pubkeys.items():
try:
all_keys[n] = util.load_file(path)
except:
@@ -99,7 +99,7 @@ def handle(name, cfg, cloud, log, args):
# Get them read to be posted
real_submit_keys = {}
- for (k, v) in submit_keys.iteritems():
+ for (k, v) in submit_keys.items():
if v is None:
real_submit_keys[k] = 'N/A'
else:
diff --git a/cloudinit/config/cc_puppet.py b/cloudinit/config/cc_puppet.py
index 471a1a8a..6f1b3c57 100644
--- a/cloudinit/config/cc_puppet.py
+++ b/cloudinit/config/cc_puppet.py
@@ -18,7 +18,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+from six import StringIO
import os
import socket
@@ -81,13 +81,13 @@ def handle(name, cfg, cloud, log, _args):
cleaned_contents = '\n'.join(cleaned_lines)
puppet_config.readfp(StringIO(cleaned_contents),
filename=PUPPET_CONF_PATH)
- for (cfg_name, cfg) in puppet_cfg['conf'].iteritems():
+ for (cfg_name, cfg) in puppet_cfg['conf'].items():
# Cert configuration is a special case
# Dump the puppet master ca certificate in the correct place
if cfg_name == 'ca_cert':
# Puppet ssl sub-directory isn't created yet
# Create it with the proper permissions and ownership
- util.ensure_dir(PUPPET_SSL_DIR, 0771)
+ util.ensure_dir(PUPPET_SSL_DIR, 0o771)
util.chownbyname(PUPPET_SSL_DIR, 'puppet', 'root')
util.ensure_dir(PUPPET_SSL_CERT_DIR)
util.chownbyname(PUPPET_SSL_CERT_DIR, 'puppet', 'root')
@@ -96,7 +96,7 @@ def handle(name, cfg, cloud, log, _args):
else:
# Iterate throug the config items, we'll use ConfigParser.set
# to overwrite or create new items as needed
- for (o, v) in cfg.iteritems():
+ for (o, v) in cfg.items():
if o == 'certname':
# Expand %f as the fqdn
# TODO(harlowja) should this use the cloud fqdn??
diff --git a/cloudinit/config/cc_resolv_conf.py b/cloudinit/config/cc_resolv_conf.py
index bbaa6c63..71d9e3a7 100644
--- a/cloudinit/config/cc_resolv_conf.py
+++ b/cloudinit/config/cc_resolv_conf.py
@@ -66,8 +66,8 @@ def generate_resolv_conf(template_fn, params, target_fname="/etc/resolv.conf"):
false_flags = []
if 'options' in params:
- for key, val in params['options'].iteritems():
- if type(val) == bool:
+ for key, val in params['options'].items():
+ if isinstance(val, bool):
if val:
flags.append(key)
else:
diff --git a/cloudinit/config/cc_seed_random.py b/cloudinit/config/cc_seed_random.py
index 49a6b3e8..3b7235bf 100644
--- a/cloudinit/config/cc_seed_random.py
+++ b/cloudinit/config/cc_seed_random.py
@@ -21,7 +21,8 @@
import base64
import os
-from StringIO import StringIO
+
+from six import StringIO
from cloudinit.settings import PER_INSTANCE
from cloudinit import log as logging
diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py
index 4c76581c..ab6940fa 100644
--- a/cloudinit/config/cc_ssh.py
+++ b/cloudinit/config/cc_ssh.py
@@ -34,12 +34,12 @@ DISABLE_ROOT_OPTS = ("no-port-forwarding,no-agent-forwarding,"
"rather than the user \\\"root\\\".\';echo;sleep 10\"")
KEY_2_FILE = {
- "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600),
- "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644),
- "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600),
- "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0644),
- "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0600),
- "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644),
+ "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0o600),
+ "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0o644),
+ "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0o600),
+ "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0o644),
+ "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0o600),
+ "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0o644),
}
PRIV_2_PUB = {
@@ -68,13 +68,13 @@ def handle(_name, cfg, cloud, log, _args):
if "ssh_keys" in cfg:
# if there are keys in cloud-config, use them
- for (key, val) in cfg["ssh_keys"].iteritems():
+ for (key, val) in cfg["ssh_keys"].items():
if key in KEY_2_FILE:
tgt_fn = KEY_2_FILE[key][0]
tgt_perms = KEY_2_FILE[key][1]
util.write_file(tgt_fn, val, tgt_perms)
- for (priv, pub) in PRIV_2_PUB.iteritems():
+ for (priv, pub) in PRIV_2_PUB.items():
if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']:
continue
pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0])
diff --git a/cloudinit/config/cc_yum_add_repo.py b/cloudinit/config/cc_yum_add_repo.py
index 0d836f28..3b821af9 100644
--- a/cloudinit/config/cc_yum_add_repo.py
+++ b/cloudinit/config/cc_yum_add_repo.py
@@ -18,9 +18,10 @@
import os
-from cloudinit import util
-
import configobj
+import six
+
+from cloudinit import util
def _canonicalize_id(repo_id):
@@ -37,7 +38,7 @@ def _format_repo_value(val):
# Can handle 'lists' in certain cases
# See: http://bit.ly/Qqrf1t
return "\n ".join([_format_repo_value(v) for v in val])
- if not isinstance(val, (basestring, str)):
+ if not isinstance(val, six.string_types):
return str(val)
return val
diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py
index 49a0b652..4ebccdda 100644
--- a/cloudinit/distros/__init__.py
+++ b/cloudinit/distros/__init__.py
@@ -21,7 +21,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+import six
+from six import StringIO
import abc
import itertools
@@ -334,7 +335,7 @@ class Distro(object):
redact_opts = ['passwd']
# Check the values and create the command
- for key, val in kwargs.iteritems():
+ for key, val in kwargs.items():
if key in adduser_opts and val and isinstance(val, str):
adduser_cmd.extend([adduser_opts[key], val])
@@ -393,7 +394,7 @@ class Distro(object):
if 'ssh_authorized_keys' in kwargs:
# Try to handle this in a smart manner.
keys = kwargs['ssh_authorized_keys']
- if isinstance(keys, (basestring, str)):
+ if isinstance(keys, six.string_types):
keys = [keys]
if isinstance(keys, dict):
keys = list(keys.values())
@@ -491,7 +492,7 @@ class Distro(object):
if isinstance(rules, (list, tuple)):
for rule in rules:
lines.append("%s %s" % (user, rule))
- elif isinstance(rules, (basestring, str)):
+ elif isinstance(rules, six.string_types):
lines.append("%s %s" % (user, rules))
else:
msg = "Can not create sudoers rule addition with type %r"
@@ -561,10 +562,10 @@ def _get_package_mirror_info(mirror_info, availability_zone=None,
subst['ec2_region'] = "%s" % availability_zone[0:-1]
results = {}
- for (name, mirror) in mirror_info.get('failsafe', {}).iteritems():
+ for (name, mirror) in mirror_info.get('failsafe', {}).items():
results[name] = mirror
- for (name, searchlist) in mirror_info.get('search', {}).iteritems():
+ for (name, searchlist) in mirror_info.get('search', {}).items():
mirrors = []
for tmpl in searchlist:
try:
@@ -604,30 +605,30 @@ def _get_arch_package_mirror_info(package_mirrors, arch):
# is the standard form used in the rest
# of cloud-init
def _normalize_groups(grp_cfg):
- if isinstance(grp_cfg, (str, basestring)):
+ if isinstance(grp_cfg, six.string_types):
grp_cfg = grp_cfg.strip().split(",")
- if isinstance(grp_cfg, (list)):
+ if isinstance(grp_cfg, list):
c_grp_cfg = {}
for i in grp_cfg:
- if isinstance(i, (dict)):
+ if isinstance(i, dict):
for k, v in i.items():
if k not in c_grp_cfg:
- if isinstance(v, (list)):
+ if isinstance(v, list):
c_grp_cfg[k] = list(v)
- elif isinstance(v, (basestring, str)):
+ elif isinstance(v, six.string_types):
c_grp_cfg[k] = [v]
else:
raise TypeError("Bad group member type %s" %
type_utils.obj_name(v))
else:
- if isinstance(v, (list)):
+ if isinstance(v, list):
c_grp_cfg[k].extend(v)
- elif isinstance(v, (basestring, str)):
+ elif isinstance(v, six.string_types):
c_grp_cfg[k].append(v)
else:
raise TypeError("Bad group member type %s" %
type_utils.obj_name(v))
- elif isinstance(i, (str, basestring)):
+ elif isinstance(i, six.string_types):
if i not in c_grp_cfg:
c_grp_cfg[i] = []
else:
@@ -635,7 +636,7 @@ def _normalize_groups(grp_cfg):
type_utils.obj_name(i))
grp_cfg = c_grp_cfg
groups = {}
- if isinstance(grp_cfg, (dict)):
+ if isinstance(grp_cfg, dict):
for (grp_name, grp_members) in grp_cfg.items():
groups[grp_name] = util.uniq_merge_sorted(grp_members)
else:
@@ -661,29 +662,29 @@ def _normalize_groups(grp_cfg):
# entry 'default' which will be marked as true
# all other users will be marked as false.
def _normalize_users(u_cfg, def_user_cfg=None):
- if isinstance(u_cfg, (dict)):
+ if isinstance(u_cfg, dict):
ad_ucfg = []
for (k, v) in u_cfg.items():
- if isinstance(v, (bool, int, basestring, str, float)):
+ if isinstance(v, (bool, int, float) + six.string_types):
if util.is_true(v):
ad_ucfg.append(str(k))
- elif isinstance(v, (dict)):
+ elif isinstance(v, dict):
v['name'] = k
ad_ucfg.append(v)
else:
raise TypeError(("Unmappable user value type %s"
" for key %s") % (type_utils.obj_name(v), k))
u_cfg = ad_ucfg
- elif isinstance(u_cfg, (str, basestring)):
+ elif isinstance(u_cfg, six.string_types):
u_cfg = util.uniq_merge_sorted(u_cfg)
users = {}
for user_config in u_cfg:
- if isinstance(user_config, (str, basestring, list)):
+ if isinstance(user_config, (list,) + six.string_types):
for u in util.uniq_merge(user_config):
if u and u not in users:
users[u] = {}
- elif isinstance(user_config, (dict)):
+ elif isinstance(user_config, dict):
if 'name' in user_config:
n = user_config.pop('name')
prev_config = users.get(n) or {}
@@ -784,11 +785,11 @@ def normalize_users_groups(cfg, distro):
old_user = cfg['user']
# Translate it into the format that is more useful
# going forward
- if isinstance(old_user, (basestring, str)):
+ if isinstance(old_user, six.string_types):
old_user = {
'name': old_user,
}
- if not isinstance(old_user, (dict)):
+ if not isinstance(old_user, dict):
LOG.warn(("Format for 'user' key must be a string or "
"dictionary and not %s"), type_utils.obj_name(old_user))
old_user = {}
@@ -813,7 +814,7 @@ def normalize_users_groups(cfg, distro):
default_user_config = util.mergemanydict([old_user, distro_user_config])
base_users = cfg.get('users', [])
- if not isinstance(base_users, (list, dict, str, basestring)):
+ if not isinstance(base_users, (list, dict) + six.string_types):
LOG.warn(("Format for 'users' key must be a comma separated string"
" or a dictionary or a list and not %s"),
type_utils.obj_name(base_users))
@@ -822,12 +823,12 @@ def normalize_users_groups(cfg, distro):
if old_user:
# Ensure that when user: is provided that this user
# always gets added (as the default user)
- if isinstance(base_users, (list)):
+ if isinstance(base_users, list):
# Just add it on at the end...
base_users.append({'name': 'default'})
- elif isinstance(base_users, (dict)):
+ elif isinstance(base_users, dict):
base_users['default'] = dict(base_users).get('default', True)
- elif isinstance(base_users, (str, basestring)):
+ elif isinstance(base_users, six.string_types):
# Just append it on to be re-parsed later
base_users += ",default"
diff --git a/cloudinit/distros/arch.py b/cloudinit/distros/arch.py
index 68bf1aab..e540e0bc 100644
--- a/cloudinit/distros/arch.py
+++ b/cloudinit/distros/arch.py
@@ -66,7 +66,7 @@ class Distro(distros.Distro):
settings, entries)
dev_names = entries.keys()
# Format for netctl
- for (dev, info) in entries.iteritems():
+ for (dev, info) in entries.items():
nameservers = []
net_fn = self.network_conf_dir + dev
net_cfg = {
diff --git a/cloudinit/distros/freebsd.py b/cloudinit/distros/freebsd.py
index f1b4a256..4c484639 100644
--- a/cloudinit/distros/freebsd.py
+++ b/cloudinit/distros/freebsd.py
@@ -16,7 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+import six
+from six import StringIO
import re
@@ -203,8 +204,9 @@ class Distro(distros.Distro):
redact_opts = ['passwd']
- for key, val in kwargs.iteritems():
- if key in adduser_opts and val and isinstance(val, basestring):
+ for key, val in kwargs.items():
+ if (key in adduser_opts and val
+ and isinstance(val, six.string_types)):
adduser_cmd.extend([adduser_opts[key], val])
# Redact certain fields from the logs
@@ -271,7 +273,7 @@ class Distro(distros.Distro):
nameservers = []
searchdomains = []
dev_names = entries.keys()
- for (device, info) in entries.iteritems():
+ for (device, info) in entries.items():
# Skip the loopback interface.
if device.startswith('lo'):
continue
@@ -323,7 +325,7 @@ class Distro(distros.Distro):
resolvconf.add_search_domain(domain)
except ValueError:
util.logexc(LOG, "Failed to add search domain %s", domain)
- util.write_file(self.resolv_conf_fn, str(resolvconf), 0644)
+ util.write_file(self.resolv_conf_fn, str(resolvconf), 0o644)
return dev_names
diff --git a/cloudinit/distros/net_util.py b/cloudinit/distros/net_util.py
index 8b28e2d1..cadfa6b6 100644
--- a/cloudinit/distros/net_util.py
+++ b/cloudinit/distros/net_util.py
@@ -103,7 +103,7 @@ def translate_network(settings):
consume[cmd] = args
# Check if anything left over to consume
absorb = False
- for (cmd, args) in consume.iteritems():
+ for (cmd, args) in consume.items():
if cmd == 'iface':
absorb = True
if absorb:
diff --git a/cloudinit/distros/parsers/hostname.py b/cloudinit/distros/parsers/hostname.py
index 617b3c36..84a1de42 100644
--- a/cloudinit/distros/parsers/hostname.py
+++ b/cloudinit/distros/parsers/hostname.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+from six import StringIO
from cloudinit.distros.parsers import chop_comment
diff --git a/cloudinit/distros/parsers/hosts.py b/cloudinit/distros/parsers/hosts.py
index 94c97051..3c5498ee 100644
--- a/cloudinit/distros/parsers/hosts.py
+++ b/cloudinit/distros/parsers/hosts.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+from six import StringIO
from cloudinit.distros.parsers import chop_comment
diff --git a/cloudinit/distros/parsers/resolv_conf.py b/cloudinit/distros/parsers/resolv_conf.py
index 5733c25a..8aee03a4 100644
--- a/cloudinit/distros/parsers/resolv_conf.py
+++ b/cloudinit/distros/parsers/resolv_conf.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+from six import StringIO
from cloudinit import util
diff --git a/cloudinit/distros/parsers/sys_conf.py b/cloudinit/distros/parsers/sys_conf.py
index 20ca1871..d795e12f 100644
--- a/cloudinit/distros/parsers/sys_conf.py
+++ b/cloudinit/distros/parsers/sys_conf.py
@@ -16,7 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
+import six
+from six import StringIO
import pipes
import re
@@ -69,7 +70,7 @@ class SysConf(configobj.ConfigObj):
return out_contents.getvalue()
def _quote(self, value, multiline=False):
- if not isinstance(value, (str, basestring)):
+ if not isinstance(value, six.string_types):
raise ValueError('Value "%s" is not a string' % (value))
if len(value) == 0:
return ''
diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py
index d9588632..7408989c 100644
--- a/cloudinit/distros/rhel.py
+++ b/cloudinit/distros/rhel.py
@@ -73,7 +73,7 @@ class Distro(distros.Distro):
searchservers = []
dev_names = entries.keys()
use_ipv6 = False
- for (dev, info) in entries.iteritems():
+ for (dev, info) in entries.items():
net_fn = self.network_script_tpl % (dev)
net_cfg = {
'DEVICE': dev,
diff --git a/cloudinit/distros/sles.py b/cloudinit/distros/sles.py
index 43682a12..0c6d1203 100644
--- a/cloudinit/distros/sles.py
+++ b/cloudinit/distros/sles.py
@@ -62,7 +62,7 @@ class Distro(distros.Distro):
nameservers = []
searchservers = []
dev_names = entries.keys()
- for (dev, info) in entries.iteritems():
+ for (dev, info) in entries.items():
net_fn = self.network_script_tpl % (dev)
mode = info.get('auto')
if mode and mode.lower() == 'true':
diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py
index e69d06ff..e1ed4091 100644
--- a/cloudinit/ec2_utils.py
+++ b/cloudinit/ec2_utils.py
@@ -17,7 +17,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import functools
-import httplib
import json
from cloudinit import log as logging
@@ -25,7 +24,7 @@ from cloudinit import url_helper
from cloudinit import util
LOG = logging.getLogger(__name__)
-SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND])
+SKIP_USERDATA_CODES = frozenset([url_helper.NOT_FOUND])
class MetadataLeafDecoder(object):
@@ -123,7 +122,7 @@ class MetadataMaterializer(object):
leaf_contents = {}
for (field, resource) in leaves.items():
leaf_url = url_helper.combine_url(base_url, resource)
- leaf_blob = str(self._caller(leaf_url))
+ leaf_blob = self._caller(leaf_url).contents
leaf_contents[field] = self._leaf_decoder(field, leaf_blob)
joined = {}
joined.update(child_contents)
@@ -160,7 +159,7 @@ def get_instance_userdata(api_version='latest',
timeout=timeout,
retries=retries,
exception_cb=exception_cb)
- user_data = str(response)
+ user_data = response.contents
except url_helper.UrlError as e:
if e.code not in SKIP_USERDATA_CODES:
util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
@@ -183,7 +182,7 @@ def get_instance_metadata(api_version='latest',
try:
response = caller(md_url)
- materializer = MetadataMaterializer(str(response),
+ materializer = MetadataMaterializer(response.contents,
md_url, caller,
leaf_decoder=leaf_decoder)
md = materializer.materialize()
diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py
index 059d7495..d67a70ea 100644
--- a/cloudinit/handlers/__init__.py
+++ b/cloudinit/handlers/__init__.py
@@ -147,7 +147,7 @@ def walker_handle_handler(pdata, _ctype, _filename, payload):
if not modfname.endswith(".py"):
modfname = "%s.py" % (modfname)
# TODO(harlowja): Check if path exists??
- util.write_file(modfname, payload, 0600)
+ util.write_file(modfname, payload, 0o600)
handlers = pdata['handlers']
try:
mod = fixup_handler(importer.import_module(modname))
diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py
index 3a50cf87..a4ea47ac 100644
--- a/cloudinit/handlers/boot_hook.py
+++ b/cloudinit/handlers/boot_hook.py
@@ -50,7 +50,7 @@ class BootHookPartHandler(handlers.Handler):
filepath = os.path.join(self.boothook_dir, filename)
contents = util.strip_prefix_suffix(util.dos2unix(payload),
prefix=BOOTHOOK_PREFIX)
- util.write_file(filepath, contents.lstrip(), 0700)
+ util.write_file(filepath, contents.lstrip(), 0o700)
return filepath
def handle_part(self, data, ctype, filename, payload, frequency):
diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py
index bf994e33..07b6d0e0 100644
--- a/cloudinit/handlers/cloud_config.py
+++ b/cloudinit/handlers/cloud_config.py
@@ -95,7 +95,7 @@ class CloudConfigPartHandler(handlers.Handler):
lines.append(util.yaml_dumps(self.cloud_buf))
else:
lines = []
- util.write_file(self.cloud_fn, "\n".join(lines), 0600)
+ util.write_file(self.cloud_fn, "\n".join(lines), 0o600)
def _extract_mergers(self, payload, headers):
merge_header_headers = ''
diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py
index 9755ab05..b5087693 100644
--- a/cloudinit/handlers/shell_script.py
+++ b/cloudinit/handlers/shell_script.py
@@ -52,4 +52,4 @@ class ShellScriptPartHandler(handlers.Handler):
filename = util.clean_filename(filename)
payload = util.dos2unix(payload)
path = os.path.join(self.script_dir, filename)
- util.write_file(path, payload, 0700)
+ util.write_file(path, payload, 0o700)
diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py
index 50d193c4..c5bea711 100644
--- a/cloudinit/handlers/upstart_job.py
+++ b/cloudinit/handlers/upstart_job.py
@@ -65,7 +65,7 @@ class UpstartJobPartHandler(handlers.Handler):
payload = util.dos2unix(payload)
path = os.path.join(self.upstart_dir, filename)
- util.write_file(path, payload, 0644)
+ util.write_file(path, payload, 0o644)
if SUITABLE_UPSTART:
util.subp(["initctl", "reload-configuration"], capture=False)
diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py
index e701126e..ed396b5a 100644
--- a/cloudinit/helpers.py
+++ b/cloudinit/helpers.py
@@ -23,10 +23,11 @@
from time import time
import contextlib
-import io
import os
-from ConfigParser import (NoSectionError, NoOptionError, RawConfigParser)
+import six
+from six.moves.configparser import (
+ NoSectionError, NoOptionError, RawConfigParser)
from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,
CFG_ENV_NAME)
@@ -318,10 +319,10 @@ class ContentHandlers(object):
return self.registered[content_type]
def items(self):
- return self.registered.items()
+ return list(self.registered.items())
- def iteritems(self):
- return self.registered.iteritems()
+ # XXX This should really go away.
+ iteritems = items
class Paths(object):
@@ -449,7 +450,7 @@ class DefaultingConfigParser(RawConfigParser):
def stringify(self, header=None):
contents = ''
- with io.BytesIO() as outputstream:
+ with six.StringIO() as outputstream:
self.write(outputstream)
outputstream.flush()
contents = outputstream.getvalue()
diff --git a/cloudinit/log.py b/cloudinit/log.py
index 622c946c..3c79b9c9 100644
--- a/cloudinit/log.py
+++ b/cloudinit/log.py
@@ -28,7 +28,8 @@ import collections
import os
import sys
-from StringIO import StringIO
+import six
+from six import StringIO
# Logging levels for easy access
CRITICAL = logging.CRITICAL
@@ -72,13 +73,13 @@ def setupLogging(cfg=None):
log_cfgs = []
log_cfg = cfg.get('logcfg')
- if log_cfg and isinstance(log_cfg, (str, basestring)):
+ if log_cfg and isinstance(log_cfg, six.string_types):
# If there is a 'logcfg' entry in the config,
# respect it, it is the old keyname
log_cfgs.append(str(log_cfg))
elif "log_cfgs" in cfg:
for a_cfg in cfg['log_cfgs']:
- if isinstance(a_cfg, (basestring, str)):
+ if isinstance(a_cfg, six.string_types):
log_cfgs.append(a_cfg)
elif isinstance(a_cfg, (collections.Iterable)):
cfg_str = [str(c) for c in a_cfg]
diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py
index 03aa1ee1..e13f55ac 100644
--- a/cloudinit/mergers/__init__.py
+++ b/cloudinit/mergers/__init__.py
@@ -18,6 +18,8 @@
import re
+import six
+
from cloudinit import importer
from cloudinit import log as logging
from cloudinit import type_utils
@@ -95,7 +97,7 @@ def dict_extract_mergers(config):
raw_mergers = config.pop('merge_type', None)
if raw_mergers is None:
return parsed_mergers
- if isinstance(raw_mergers, (str, basestring)):
+ if isinstance(raw_mergers, six.string_types):
return string_extract_mergers(raw_mergers)
for m in raw_mergers:
if isinstance(m, (dict)):
diff --git a/cloudinit/mergers/m_dict.py b/cloudinit/mergers/m_dict.py
index a16141fa..87cf1a72 100644
--- a/cloudinit/mergers/m_dict.py
+++ b/cloudinit/mergers/m_dict.py
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import six
+
DEF_MERGE_TYPE = 'no_replace'
MERGE_TYPES = ('replace', DEF_MERGE_TYPE,)
@@ -57,7 +59,7 @@ class Merger(object):
return new_v
if isinstance(new_v, (list, tuple)) and self._recurse_array:
return self._merger.merge(old_v, new_v)
- if isinstance(new_v, (basestring)) and self._recurse_str:
+ if isinstance(new_v, six.string_types) and self._recurse_str:
return self._merger.merge(old_v, new_v)
if isinstance(new_v, (dict)) and self._recurse_dict:
return self._merger.merge(old_v, new_v)
diff --git a/cloudinit/mergers/m_list.py b/cloudinit/mergers/m_list.py
index 3b87b0fc..81e5c580 100644
--- a/cloudinit/mergers/m_list.py
+++ b/cloudinit/mergers/m_list.py
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import six
+
DEF_MERGE_TYPE = 'replace'
MERGE_TYPES = ('append', 'prepend', DEF_MERGE_TYPE, 'no_replace')
@@ -73,7 +75,7 @@ class Merger(object):
return old_v
if isinstance(new_v, (list, tuple)) and self._recurse_array:
return self._merger.merge(old_v, new_v)
- if isinstance(new_v, (str, basestring)) and self._recurse_str:
+ if isinstance(new_v, six.string_types) and self._recurse_str:
return self._merger.merge(old_v, new_v)
if isinstance(new_v, (dict)) and self._recurse_dict:
return self._merger.merge(old_v, new_v)
@@ -82,6 +84,6 @@ class Merger(object):
# Ok now we are replacing same indexes
merged_list.extend(value)
common_len = min(len(merged_list), len(merge_with))
- for i in xrange(0, common_len):
+ for i in range(0, common_len):
merged_list[i] = merge_same_index(merged_list[i], merge_with[i])
return merged_list
diff --git a/cloudinit/mergers/m_str.py b/cloudinit/mergers/m_str.py
index e22ce28a..b00c4bf3 100644
--- a/cloudinit/mergers/m_str.py
+++ b/cloudinit/mergers/m_str.py
@@ -17,6 +17,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import six
+
class Merger(object):
def __init__(self, _merger, opts):
@@ -34,11 +36,11 @@ class Merger(object):
# perform the following action, if appending we will
# merge them together, otherwise we will just return value.
def _on_str(self, value, merge_with):
- if not isinstance(value, (basestring)):
+ if not isinstance(value, six.string_types):
return merge_with
if not self._append:
return merge_with
- if isinstance(value, unicode):
- return value + unicode(merge_with)
+ if isinstance(value, six.text_type):
+ return value + six.text_type(merge_with)
else:
- return value + str(merge_with)
+ return value + six.binary_type(merge_with)
diff --git a/cloudinit/netinfo.py b/cloudinit/netinfo.py
index fb40cc0d..e30d6fb5 100644
--- a/cloudinit/netinfo.py
+++ b/cloudinit/netinfo.py
@@ -87,7 +87,7 @@ def netdev_info(empty=""):
devs[curdev][target] = toks[i][len(field) + 1:]
if empty != "":
- for (_devname, dev) in devs.iteritems():
+ for (_devname, dev) in devs.items():
for field in dev:
if dev[field] == "":
dev[field] = empty
@@ -181,7 +181,7 @@ def netdev_pformat():
else:
fields = ['Device', 'Up', 'Address', 'Mask', 'Scope', 'Hw-Address']
tbl = PrettyTable(fields)
- for (dev, d) in netdev.iteritems():
+ for (dev, d) in netdev.items():
tbl.add_row([dev, d["up"], d["addr"], d["mask"], ".", d["hwaddr"]])
if d.get('addr6'):
tbl.add_row([dev, d["up"],
diff --git a/cloudinit/signal_handler.py b/cloudinit/signal_handler.py
index 40b0c94c..0d95f506 100644
--- a/cloudinit/signal_handler.py
+++ b/cloudinit/signal_handler.py
@@ -22,7 +22,7 @@ import inspect
import signal
import sys
-from StringIO import StringIO
+from six import StringIO
from cloudinit import log as logging
from cloudinit import util
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 15244a0d..eb474079 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -216,11 +216,11 @@ def on_first_boot(data, distro=None):
files = data.get('files', {})
if files:
LOG.debug("Writing %s injected files", len(files))
- for (filename, content) in files.iteritems():
+ for (filename, content) in files.items():
if not filename.startswith(os.sep):
filename = os.sep + filename
try:
- util.write_file(filename, content, mode=0660)
+ util.write_file(filename, content, mode=0o660)
except IOError:
util.logexc(LOG, "Failed writing file: %s", filename)
diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py
index 8f27ee89..b20ce2a1 100644
--- a/cloudinit/sources/DataSourceDigitalOcean.py
+++ b/cloudinit/sources/DataSourceDigitalOcean.py
@@ -18,7 +18,7 @@ from cloudinit import log as logging
from cloudinit import util
from cloudinit import sources
from cloudinit import ec2_utils
-from types import StringType
+
import functools
@@ -72,10 +72,11 @@ class DataSourceDigitalOcean(sources.DataSource):
return "\n".join(self.metadata['vendor-data'])
def get_public_ssh_keys(self):
- if type(self.metadata['public-keys']) is StringType:
- return [self.metadata['public-keys']]
+ public_keys = self.metadata['public-keys']
+ if isinstance(public_keys, list):
+ return public_keys
else:
- return self.metadata['public-keys']
+ return [public_keys]
@property
def availability_zone(self):
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 1b20ecf3..798869b7 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -156,8 +156,8 @@ class DataSourceEc2(sources.DataSource):
# 'ephemeral0': '/dev/sdb',
# 'root': '/dev/sda1'}
found = None
- bdm_items = self.metadata['block-device-mapping'].iteritems()
- for (entname, device) in bdm_items:
+ bdm = self.metadata['block-device-mapping']
+ for (entname, device) in bdm.items():
if entname == name:
found = device
break
diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
index dfe90bc6..9a3e30c5 100644
--- a/cloudinit/sources/DataSourceMAAS.py
+++ b/cloudinit/sources/DataSourceMAAS.py
@@ -262,7 +262,7 @@ def check_seed_contents(content, seed):
userdata = content.get('user-data', "")
md = {}
- for (key, val) in content.iteritems():
+ for (key, val) in content.items():
if key == 'user-data':
continue
md[key] = val
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index 7ba60735..58a4b2a2 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -66,7 +66,7 @@ class DataSourceOVF(sources.DataSource):
np = {'iso': transport_iso9660,
'vmware-guestd': transport_vmware_guestd, }
name = None
- for (name, transfunc) in np.iteritems():
+ for (name, transfunc) in np.items():
(contents, _dev, _fname) = transfunc()
if contents:
break
@@ -138,7 +138,7 @@ def read_ovf_environment(contents):
ud = ""
cfg_props = ['password']
md_props = ['seedfrom', 'local-hostname', 'public-keys', 'instance-id']
- for (prop, val) in props.iteritems():
+ for (prop, val) in props.items():
if prop == 'hostname':
prop = "local-hostname"
if prop in md_props:
@@ -183,7 +183,7 @@ def transport_iso9660(require_iso=True):
# Go through mounts to see if it was already mounted
mounts = util.mounts()
- for (dev, info) in mounts.iteritems():
+ for (dev, info) in mounts.items():
fstype = info['fstype']
if fstype != "iso9660" and require_iso:
continue
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 2733a2f6..7a975d78 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -30,12 +30,12 @@
# Comments with "@datadictionary" are snippets of the definition
import base64
+import os
+import serial
+
from cloudinit import log as logging
from cloudinit import sources
from cloudinit import util
-import os
-import os.path
-import serial
LOG = logging.getLogger(__name__)
@@ -201,7 +201,7 @@ class DataSourceSmartOS(sources.DataSource):
if b64_all is not None:
self.b64_all = util.is_true(b64_all)
- for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems():
+ for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items():
smartos_noun, strip = attribute
md[ci_noun] = self.query(smartos_noun, strip=strip)
@@ -218,11 +218,12 @@ class DataSourceSmartOS(sources.DataSource):
user_script = os.path.join(data_d, 'user-script')
u_script_l = "%s/user-script" % LEGACY_USER_D
write_boot_content(md.get('user-script'), content_f=user_script,
- link=u_script_l, shebang=True, mode=0700)
+ link=u_script_l, shebang=True, mode=0o700)
operator_script = os.path.join(data_d, 'operator-script')
write_boot_content(md.get('operator-script'),
- content_f=operator_script, shebang=False, mode=0700)
+ content_f=operator_script, shebang=False,
+ mode=0o700)
# @datadictionary: This key has no defined format, but its value
# is written to the file /var/db/mdata-user-data on each boot prior
@@ -381,7 +382,7 @@ def dmi_data():
def write_boot_content(content, content_f, link=None, shebang=False,
- mode=0400):
+ mode=0o400):
"""
Write the content to content_f. Under the following rules:
1. If no content, remove the file
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7c7ef9ab..39eab51b 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -23,6 +23,8 @@
import abc
import os
+import six
+
from cloudinit import importer
from cloudinit import log as logging
from cloudinit import type_utils
@@ -130,7 +132,7 @@ class DataSource(object):
# we want to return the correct value for what will actually
# exist in this instance
mappings = {"sd": ("vd", "xvd", "vtb")}
- for (nfrom, tlist) in mappings.iteritems():
+ for (nfrom, tlist) in mappings.items():
if not short_name.startswith(nfrom):
continue
for nto in tlist:
@@ -218,18 +220,18 @@ def normalize_pubkey_data(pubkey_data):
if not pubkey_data:
return keys
- if isinstance(pubkey_data, (basestring, str)):
+ if isinstance(pubkey_data, six.string_types):
return str(pubkey_data).splitlines()
if isinstance(pubkey_data, (list, set)):
return list(pubkey_data)
if isinstance(pubkey_data, (dict)):
- for (_keyname, klist) in pubkey_data.iteritems():
+ for (_keyname, klist) in pubkey_data.items():
# lp:506332 uec metadata service responds with
# data that makes boto populate a string for 'klist' rather
# than a list.
- if isinstance(klist, (str, basestring)):
+ if isinstance(klist, six.string_types):
klist = [klist]
if isinstance(klist, (list, set)):
for pkey in klist:
diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py
index b7e19314..88c7a198 100644
--- a/cloudinit/sources/helpers/openstack.py
+++ b/cloudinit/sources/helpers/openstack.py
@@ -24,6 +24,8 @@ import copy
import functools
import os
+import six
+
from cloudinit import ec2_utils
from cloudinit import log as logging
from cloudinit import sources
@@ -205,7 +207,7 @@ class BaseReader(object):
"""
load_json_anytype = functools.partial(
- util.load_json, root_types=(dict, basestring, list))
+ util.load_json, root_types=(dict, list) + six.string_types)
def datafiles(version):
files = {}
@@ -234,7 +236,7 @@ class BaseReader(object):
'version': 2,
}
data = datafiles(self._find_working_version())
- for (name, (path, required, translator)) in data.iteritems():
+ for (name, (path, required, translator)) in data.items():
path = self._path_join(self.base_path, path)
data = None
found = False
@@ -364,7 +366,7 @@ class ConfigDriveReader(BaseReader):
raise NonReadable("%s: no files found" % (self.base_path))
md = {}
- for (name, (key, translator, default)) in FILES_V1.iteritems():
+ for (name, (key, translator, default)) in FILES_V1.items():
if name in found:
path = found[name]
try:
@@ -478,7 +480,7 @@ def convert_vendordata_json(data, recurse=True):
"""
if not data:
return None
- if isinstance(data, (str, unicode, basestring)):
+ if isinstance(data, six.string_types):
return data
if isinstance(data, list):
return copy.deepcopy(data)
diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py
index 14d0cb0f..9b2f5ed5 100644
--- a/cloudinit/ssh_util.py
+++ b/cloudinit/ssh_util.py
@@ -239,7 +239,7 @@ def setup_user_keys(keys, username, options=None):
# Make sure the users .ssh dir is setup accordingly
(ssh_dir, pwent) = users_ssh_info(username)
if not os.path.isdir(ssh_dir):
- util.ensure_dir(ssh_dir, mode=0700)
+ util.ensure_dir(ssh_dir, mode=0o700)
util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid)
# Turn the 'update' keys given into actual entries
@@ -252,8 +252,8 @@ def setup_user_keys(keys, username, options=None):
(auth_key_fn, auth_key_entries) = extract_authorized_keys(username)
with util.SeLinuxGuard(ssh_dir, recursive=True):
content = update_authorized_keys(auth_key_entries, key_entries)
- util.ensure_dir(os.path.dirname(auth_key_fn), mode=0700)
- util.write_file(auth_key_fn, content, mode=0600)
+ util.ensure_dir(os.path.dirname(auth_key_fn), mode=0o700)
+ util.write_file(auth_key_fn, content, mode=0o600)
util.chownbyid(auth_key_fn, pwent.pw_uid, pwent.pw_gid)
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 67f467f7..f4f4591d 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -20,12 +20,13 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import cPickle as pickle
-
import copy
import os
import sys
+import six
+from six.moves import cPickle as pickle
+
from cloudinit.settings import (PER_INSTANCE, FREQUENCIES, CLOUD_CONFIG)
from cloudinit import handlers
@@ -202,7 +203,7 @@ class Init(object):
util.logexc(LOG, "Failed pickling datasource %s", self.datasource)
return False
try:
- util.write_file(pickled_fn, pk_contents, mode=0400)
+ util.write_file(pickled_fn, pk_contents, mode=0o400)
except Exception:
util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)
return False
@@ -324,15 +325,15 @@ class Init(object):
def _store_userdata(self):
raw_ud = "%s" % (self.datasource.get_userdata_raw())
- util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0600)
+ util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0o600)
processed_ud = "%s" % (self.datasource.get_userdata())
- util.write_file(self._get_ipath('userdata'), processed_ud, 0600)
+ util.write_file(self._get_ipath('userdata'), processed_ud, 0o600)
def _store_vendordata(self):
raw_vd = "%s" % (self.datasource.get_vendordata_raw())
- util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0600)
+ util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0o600)
processed_vd = "%s" % (self.datasource.get_vendordata())
- util.write_file(self._get_ipath('vendordata'), processed_vd, 0600)
+ util.write_file(self._get_ipath('vendordata'), processed_vd, 0o600)
def _default_handlers(self, opts=None):
if opts is None:
@@ -384,7 +385,7 @@ class Init(object):
if not path or not os.path.isdir(path):
return
potential_handlers = util.find_modules(path)
- for (fname, mod_name) in potential_handlers.iteritems():
+ for (fname, mod_name) in potential_handlers.items():
try:
mod_locs, looked_locs = importer.find_module(
mod_name, [''], ['list_types', 'handle_part'])
@@ -422,7 +423,7 @@ class Init(object):
def init_handlers():
# Init the handlers first
- for (_ctype, mod) in c_handlers.iteritems():
+ for (_ctype, mod) in c_handlers.items():
if mod in c_handlers.initialized:
# Avoid initing the same module twice (if said module
# is registered to more than one content-type).
@@ -449,7 +450,7 @@ class Init(object):
def finalize_handlers():
# Give callbacks opportunity to finalize
- for (_ctype, mod) in c_handlers.iteritems():
+ for (_ctype, mod) in c_handlers.items():
if mod not in c_handlers.initialized:
# Said module was never inited in the first place, so lets
# not attempt to finalize those that never got called.
@@ -574,7 +575,7 @@ class Modules(object):
for item in cfg_mods:
if not item:
continue
- if isinstance(item, (str, basestring)):
+ if isinstance(item, six.string_types):
module_list.append({
'mod': item.strip(),
})
diff --git a/cloudinit/type_utils.py b/cloudinit/type_utils.py
index cc3d9495..b93efd6a 100644
--- a/cloudinit/type_utils.py
+++ b/cloudinit/type_utils.py
@@ -22,11 +22,31 @@
import types
+import six
+
+
+if six.PY3:
+ _NAME_TYPES = (
+ types.ModuleType,
+ types.FunctionType,
+ types.LambdaType,
+ type,
+ )
+else:
+ _NAME_TYPES = (
+ types.TypeType,
+ types.ModuleType,
+ types.FunctionType,
+ types.LambdaType,
+ types.ClassType,
+ )
+
def obj_name(obj):
- if isinstance(obj, (types.TypeType,
- types.ModuleType,
- types.FunctionType,
- types.LambdaType)):
- return str(obj.__name__)
- return obj_name(obj.__class__)
+ if isinstance(obj, _NAME_TYPES):
+ return six.text_type(obj.__name__)
+ else:
+ if not hasattr(obj, '__class__'):
+ return repr(obj)
+ else:
+ return obj_name(obj.__class__)
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 3074dd08..62001dff 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -20,21 +20,29 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import httplib
import time
-import urllib
+
+import six
import requests
from requests import exceptions
-from urlparse import (urlparse, urlunparse)
+from six.moves.urllib.parse import (
+ urlparse, urlunparse,
+ quote as urlquote)
from cloudinit import log as logging
from cloudinit import version
LOG = logging.getLogger(__name__)
-NOT_FOUND = httplib.NOT_FOUND
+if six.PY2:
+ import httplib
+ NOT_FOUND = httplib.NOT_FOUND
+else:
+ import http.client
+ NOT_FOUND = http.client.NOT_FOUND
+
# Check if requests has ssl support (added in requests >= 0.8.8)
SSL_ENABLED = False
@@ -70,7 +78,7 @@ def combine_url(base, *add_ons):
path = url_parsed[2]
if path and not path.endswith("/"):
path += "/"
- path += urllib.quote(str(add_on), safe="/:")
+ path += urlquote(str(add_on), safe="/:")
url_parsed[2] = path
return urlunparse(url_parsed)
@@ -111,7 +119,7 @@ class UrlResponse(object):
@property
def contents(self):
- return self._response.content
+ return self._response.text
@property
def url(self):
@@ -135,7 +143,7 @@ class UrlResponse(object):
return self._response.status_code
def __str__(self):
- return self.contents
+ return self._response.text
class UrlError(IOError):
diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py
index de6487d8..9111bd39 100644
--- a/cloudinit/user_data.py
+++ b/cloudinit/user_data.py
@@ -29,6 +29,8 @@ from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.mime.text import MIMEText
+import six
+
from cloudinit import handlers
from cloudinit import log as logging
from cloudinit import util
@@ -235,7 +237,7 @@ class UserDataProcessor(object):
resp = util.read_file_or_url(include_url,
ssl_details=self.ssl_details)
if include_once_on and resp.ok():
- util.write_file(include_once_fn, str(resp), mode=0600)
+ util.write_file(include_once_fn, str(resp), mode=0o600)
if resp.ok():
content = str(resp)
else:
@@ -256,7 +258,7 @@ class UserDataProcessor(object):
# filename and type not be present
# or
# scalar(payload)
- if isinstance(ent, (str, basestring)):
+ if isinstance(ent, six.string_types):
ent = {'content': ent}
if not isinstance(ent, (dict)):
# TODO(harlowja) raise?
@@ -337,7 +339,7 @@ def convert_string(raw_data, headers=None):
data = util.decomp_gzip(raw_data)
if "mime-version:" in data[0:4096].lower():
msg = email.message_from_string(data)
- for (key, val) in headers.iteritems():
+ for (key, val) in headers.items():
_replace_header(msg, key, val)
else:
mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE)
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 9efc704a..434ba7fb 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -20,8 +20,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from StringIO import StringIO
-
import contextlib
import copy as obj_copy
import ctypes
@@ -45,8 +43,10 @@ import subprocess
import sys
import tempfile
import time
-import urlparse
+from six.moves.urllib import parse as urlparse
+
+import six
import yaml
from cloudinit import importer
@@ -69,8 +69,26 @@ FN_REPLACEMENTS = {
}
FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters)
+TRUE_STRINGS = ('true', '1', 'on', 'yes')
+FALSE_STRINGS = ('off', '0', 'no', 'false')
+
+
# Helper utils to see if running in a container
-CONTAINER_TESTS = ['running-in-container', 'lxc-is-container']
+CONTAINER_TESTS = ('running-in-container', 'lxc-is-container')
+
+
+def decode_binary(blob, encoding='utf-8'):
+ # Converts a binary type into a text type using given encoding.
+ if isinstance(blob, six.text_type):
+ return blob
+ return blob.decode(encoding)
+
+
+def encode_text(text, encoding='utf-8'):
+ # Converts a text string into a binary type using given encoding.
+ if isinstance(text, six.binary_type):
+ return text
+ return text.encode(encoding)
class ProcessExecutionError(IOError):
@@ -95,7 +113,7 @@ class ProcessExecutionError(IOError):
else:
self.description = description
- if not isinstance(exit_code, (long, int)):
+ if not isinstance(exit_code, six.integer_types):
self.exit_code = '-'
else:
self.exit_code = exit_code
@@ -151,7 +169,8 @@ class SeLinuxGuard(object):
path = os.path.realpath(self.path)
# path should be a string, not unicode
- path = str(path)
+ if six.PY2:
+ path = str(path)
try:
stats = os.lstat(path)
self.selinux.matchpathcon(path, stats[stat.ST_MODE])
@@ -209,10 +228,10 @@ def fork_cb(child_cb, *args, **kwargs):
def is_true(val, addons=None):
if isinstance(val, (bool)):
return val is True
- check_set = ['true', '1', 'on', 'yes']
+ check_set = TRUE_STRINGS
if addons:
- check_set = check_set + addons
- if str(val).lower().strip() in check_set:
+ check_set = list(check_set) + addons
+ if six.text_type(val).lower().strip() in check_set:
return True
return False
@@ -220,10 +239,10 @@ def is_true(val, addons=None):
def is_false(val, addons=None):
if isinstance(val, (bool)):
return val is False
- check_set = ['off', '0', 'no', 'false']
+ check_set = FALSE_STRINGS
if addons:
- check_set = check_set + addons
- if str(val).lower().strip() in check_set:
+ check_set = list(check_set) + addons
+ if six.text_type(val).lower().strip() in check_set:
return True
return False
@@ -273,7 +292,7 @@ def uniq_merge_sorted(*lists):
def uniq_merge(*lists):
combined_list = []
for a_list in lists:
- if isinstance(a_list, (str, basestring)):
+ if isinstance(a_list, six.string_types):
a_list = a_list.strip().split(",")
# Kickout the empty ones
a_list = [a for a in a_list if len(a)]
@@ -282,7 +301,7 @@ def uniq_merge(*lists):
def clean_filename(fn):
- for (k, v) in FN_REPLACEMENTS.iteritems():
+ for (k, v) in FN_REPLACEMENTS.items():
fn = fn.replace(k, v)
removals = []
for k in fn:
@@ -296,14 +315,14 @@ def clean_filename(fn):
def decomp_gzip(data, quiet=True):
try:
- buf = StringIO(str(data))
+ buf = six.BytesIO(encode_text(data))
with contextlib.closing(gzip.GzipFile(None, "rb", 1, buf)) as gh:
- return gh.read()
+ return decode_binary(gh.read())
except Exception as e:
if quiet:
return data
else:
- raise DecompressionError(str(e))
+ raise DecompressionError(six.text_type(e))
def extract_usergroup(ug_pair):
@@ -362,7 +381,7 @@ def multi_log(text, console=True, stderr=True,
def load_json(text, root_types=(dict,)):
- decoded = json.loads(text)
+ decoded = json.loads(decode_binary(text))
if not isinstance(decoded, tuple(root_types)):
expected_types = ", ".join([str(t) for t in root_types])
raise TypeError("(%s) root types expected, got %s instead"
@@ -394,7 +413,7 @@ def get_cfg_option_str(yobj, key, default=None):
if key not in yobj:
return default
val = yobj[key]
- if not isinstance(val, (str, basestring)):
+ if not isinstance(val, six.string_types):
val = str(val)
return val
@@ -433,7 +452,7 @@ def get_cfg_option_list(yobj, key, default=None):
if isinstance(val, (list)):
cval = [v for v in val]
return cval
- if not isinstance(val, (basestring)):
+ if not isinstance(val, six.string_types):
val = str(val)
return [val]
@@ -708,10 +727,10 @@ def read_file_or_url(url, timeout=5, retries=10,
def load_yaml(blob, default=None, allowed=(dict,)):
loaded = default
+ blob = decode_binary(blob)
try:
- blob = str(blob)
- LOG.debug(("Attempting to load yaml from string "
- "of length %s with allowed root types %s"),
+ LOG.debug("Attempting to load yaml from string "
+ "of length %s with allowed root types %s",
len(blob), allowed)
converted = safeyaml.load(blob)
if not isinstance(converted, allowed):
@@ -746,14 +765,12 @@ def read_seeded(base="", ext="", timeout=5, retries=10, file_retries=0):
md_resp = read_file_or_url(md_url, timeout, retries, file_retries)
md = None
if md_resp.ok():
- md_str = str(md_resp)
- md = load_yaml(md_str, default={})
+ md = load_yaml(md_resp.contents, default={})
ud_resp = read_file_or_url(ud_url, timeout, retries, file_retries)
ud = None
if ud_resp.ok():
- ud_str = str(ud_resp)
- ud = ud_str
+ ud = ud_resp.contents
return (md, ud)
@@ -784,7 +801,7 @@ def read_conf_with_confd(cfgfile):
if "conf_d" in cfg:
confd = cfg['conf_d']
if confd:
- if not isinstance(confd, (str, basestring)):
+ if not isinstance(confd, six.string_types):
raise TypeError(("Config file %s contains 'conf_d' "
"with non-string type %s") %
(cfgfile, type_utils.obj_name(confd)))
@@ -921,8 +938,8 @@ def get_cmdline_url(names=('cloud-config-url', 'url'),
return (None, None, None)
resp = read_file_or_url(url)
- if resp.contents.startswith(starts) and resp.ok():
- return (key, url, str(resp))
+ if resp.ok() and resp.contents.startswith(starts):
+ return (key, url, resp.contents)
return (key, url, None)
@@ -1076,9 +1093,9 @@ def uniq_list(in_list):
return out_list
-def load_file(fname, read_cb=None, quiet=False):
+def load_file(fname, read_cb=None, quiet=False, decode=True):
LOG.debug("Reading from %s (quiet=%s)", fname, quiet)
- ofh = StringIO()
+ ofh = six.BytesIO()
try:
with open(fname, 'rb') as ifh:
pipe_in_out(ifh, ofh, chunk_cb=read_cb)
@@ -1089,7 +1106,10 @@ def load_file(fname, read_cb=None, quiet=False):
raise
contents = ofh.getvalue()
LOG.debug("Read %s bytes from %s", len(contents), fname)
- return contents
+ if decode:
+ return decode_binary(contents)
+ else:
+ return contents
def get_cmdline():
@@ -1219,7 +1239,7 @@ def logexc(log, msg, *args):
def hash_blob(blob, routine, mlen=None):
hasher = hashlib.new(routine)
- hasher.update(blob)
+ hasher.update(encode_text(blob))
digest = hasher.hexdigest()
# Don't get to long now
if mlen is not None:
@@ -1280,8 +1300,7 @@ def yaml_dumps(obj, explicit_start=True, explicit_end=True):
indent=4,
explicit_start=explicit_start,
explicit_end=explicit_end,
- default_flow_style=False,
- allow_unicode=True)
+ default_flow_style=False)
def ensure_dir(path, mode=None):
@@ -1515,11 +1534,17 @@ def write_file(filename, content, mode=0o644, omode="wb"):
@param filename: The full path of the file to write.
@param content: The content to write to the file.
@param mode: The filesystem mode to set on the file.
- @param omode: The open mode used when opening the file (r, rb, a, etc.)
+ @param omode: The open mode used when opening the file (w, wb, a, etc.)
"""
ensure_dir(os.path.dirname(filename))
- LOG.debug("Writing to %s - %s: [%s] %s bytes",
- filename, omode, mode, len(content))
+ if 'b' in omode.lower():
+ content = encode_text(content)
+ write_type = 'bytes'
+ else:
+ content = decode_binary(content)
+ write_type = 'characters'
+ LOG.debug("Writing to %s - %s: [%s] %s %s",
+ filename, omode, mode, len(content), write_type)
with SeLinuxGuard(path=filename):
with open(filename, omode) as fh:
fh.write(content)
@@ -1608,10 +1633,10 @@ def shellify(cmdlist, add_header=True):
if isinstance(args, list):
fixed = []
for f in args:
- fixed.append("'%s'" % (str(f).replace("'", escaped)))
+ fixed.append("'%s'" % (six.text_type(f).replace("'", escaped)))
content = "%s%s\n" % (content, ' '.join(fixed))
cmds_made += 1
- elif isinstance(args, (str, basestring)):
+ elif isinstance(args, six.string_types):
content = "%s%s\n" % (content, args)
cmds_made += 1
else:
@@ -1722,7 +1747,7 @@ def expand_package_list(version_fmt, pkgs):
pkglist = []
for pkg in pkgs:
- if isinstance(pkg, basestring):
+ if isinstance(pkg, six.string_types):
pkglist.append(pkg)
continue