| field | value | date |
|---|---|---|
| author | Barry Warsaw <barry@python.org> | 2015-01-21 17:56:53 -0500 |
| committer | Barry Warsaw <barry@python.org> | 2015-01-21 17:56:53 -0500 |
| commit | f895cb12141281702b34da18f2384deb64c881e7 (patch) | |
| tree | 7a7433752ba1317c370dd3dd815c9ee7331a923b | |
| parent | a64bb4febc79fcf641f6471d8cc00c74ca915f3d (diff) | |
| download | vyos-cloud-init-f895cb12141281702b34da18f2384deb64c881e7.tar.gz, vyos-cloud-init-f895cb12141281702b34da18f2384deb64c881e7.zip | |
Largely merge lp:~harlowja/cloud-init/py2-3, albeit manually, because that branch
seemed to be behind trunk.
`tox -e py27` passes the full test suite. Next step: replace mocker.
60 files changed, 315 insertions, 233 deletions
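
The same few Python 2→3 compatibility idioms recur across the 60 files in the diff below: `dict.iteritems()` becomes `dict.items()`, old-style octal literals such as `0644` become `0o644`, `basestring`/`unicode` checks go through `six.string_types`/`six.text_type`, and `StringIO`, `ConfigParser`, `httplib`, and `urlparse` imports move to `six`/`six.moves`. The following is a minimal, self-contained sketch of those idioms; the variable names and example values are illustrative only and are not taken from any one file changed here.

```python
# Hedged sketch of the py2/py3 idioms this commit applies; example data only.
from __future__ import print_function

import six
from six import StringIO                             # replaces: from StringIO import StringIO
from six.moves.configparser import RawConfigParser   # replaces: from ConfigParser import ...

old_mirrors = {"primary": "http://archive.example.invalid/ubuntu"}  # illustrative value

# iteritems() no longer exists on py3 dicts; items() works on both versions.
for name, mirror in old_mirrors.items():
    print(name, mirror)

# py3 rejects bare octal literals (0644); the 0o prefix is valid on both.
mode = 0o644

# basestring/unicode are py2-only; six.string_types/six.text_type cover both.
val = u"some-config-value"
if isinstance(val, six.string_types):
    print("string-like:", six.text_type(val))

# six.StringIO behaves like the py2 StringIO for in-memory text buffers.
buf = StringIO()
buf.write(u"ci-info: example line\n")
print(buf.getvalue(), end="")

# six.moves.configparser maps to ConfigParser on py2 and configparser on py3.
parser = RawConfigParser()
parser.add_section("example")
parser.set("example", "mode", str(mode))
print(parser.get("example", "mode"))
```
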
| diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py index f10b76a3..de72903f 100644 --- a/cloudinit/config/cc_apt_configure.py +++ b/cloudinit/config/cc_apt_configure.py @@ -126,7 +126,7 @@ def mirror2lists_fileprefix(mirror):  def rename_apt_lists(old_mirrors, new_mirrors, lists_d="/var/lib/apt/lists"): -    for (name, omirror) in old_mirrors.iteritems(): +    for (name, omirror) in old_mirrors.items():          nmirror = new_mirrors.get(name)          if not nmirror:              continue diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py index 8c489426..bdc32fe6 100644 --- a/cloudinit/config/cc_debug.py +++ b/cloudinit/config/cc_debug.py @@ -34,7 +34,8 @@ It can be configured with the following option structure::  """  import copy -from StringIO import StringIO + +from six import StringIO  from cloudinit import type_utils  from cloudinit import util @@ -77,7 +78,7 @@ def handle(name, cfg, cloud, log, args):      dump_cfg = copy.deepcopy(cfg)      for k in SKIP_KEYS:          dump_cfg.pop(k, None) -    all_keys = list(dump_cfg.keys()) +    all_keys = list(dump_cfg)      for k in all_keys:          if k.startswith("_"):              dump_cfg.pop(k, None) @@ -103,6 +104,6 @@ def handle(name, cfg, cloud, log, args):          line = "ci-info: %s\n" % (line)          content_to_file.append(line)      if out_file: -        util.write_file(out_file, "".join(content_to_file), 0644, "w") +        util.write_file(out_file, "".join(content_to_file), 0o644, "w")      else:          util.multi_log("".join(content_to_file), console=True, stderr=False) diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py index 8a709677..0b9d846e 100644 --- a/cloudinit/config/cc_landscape.py +++ b/cloudinit/config/cc_landscape.py @@ -20,7 +20,7 @@  import os -from StringIO import StringIO +from six import StringIO  from configobj import ConfigObj diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py index b670390d..425420ae 100644 --- a/cloudinit/config/cc_mcollective.py +++ b/cloudinit/config/cc_mcollective.py @@ -19,7 +19,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
-from StringIO import StringIO +import six +from six import StringIO  # Used since this can maintain comments  # and doesn't need a top level section @@ -51,17 +52,17 @@ def handle(name, cfg, cloud, log, _args):          # original file in order to be able to mix the rest up          mcollective_config = ConfigObj(SERVER_CFG)          # See: http://tiny.cc/jh9agw -        for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems(): +        for (cfg_name, cfg) in mcollective_cfg['conf'].items():              if cfg_name == 'public-cert': -                util.write_file(PUBCERT_FILE, cfg, mode=0644) +                util.write_file(PUBCERT_FILE, cfg, mode=0o644)                  mcollective_config['plugin.ssl_server_public'] = PUBCERT_FILE                  mcollective_config['securityprovider'] = 'ssl'              elif cfg_name == 'private-cert': -                util.write_file(PRICERT_FILE, cfg, mode=0600) +                util.write_file(PRICERT_FILE, cfg, mode=0o600)                  mcollective_config['plugin.ssl_server_private'] = PRICERT_FILE                  mcollective_config['securityprovider'] = 'ssl'              else: -                if isinstance(cfg, (basestring, str)): +                if isinstance(cfg, six.string_types):                      # Just set it in the 'main' section                      mcollective_config[cfg_name] = cfg                  elif isinstance(cfg, (dict)): @@ -69,7 +70,7 @@ def handle(name, cfg, cloud, log, _args):                      # if it is needed and then add/or create items as needed                      if cfg_name not in mcollective_config.sections:                          mcollective_config[cfg_name] = {} -                    for (o, v) in cfg.iteritems(): +                    for (o, v) in cfg.items():                          mcollective_config[cfg_name][o] = v                  else:                      # Otherwise just try to convert it to a string @@ -81,7 +82,7 @@ def handle(name, cfg, cloud, log, _args):          contents = StringIO()          mcollective_config.write(contents)          contents = contents.getvalue() -        util.write_file(SERVER_CFG, contents, mode=0644) +        util.write_file(SERVER_CFG, contents, mode=0o644)      # Start mcollective      util.subp(['service', 'mcollective', 'start'], capture=False) diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index 5bc68b83..18a7ddad 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -81,7 +81,7 @@ def handle(name, cfg, cloud, log, args):          'pub_key_ecdsa': '/etc/ssh/ssh_host_ecdsa_key.pub',      } -    for (n, path) in pubkeys.iteritems(): +    for (n, path) in pubkeys.items():          try:              all_keys[n] = util.load_file(path)          except: @@ -99,7 +99,7 @@ def handle(name, cfg, cloud, log, args):      # Get them read to be posted      real_submit_keys = {} -    for (k, v) in submit_keys.iteritems(): +    for (k, v) in submit_keys.items():          if v is None:              real_submit_keys[k] = 'N/A'          else: diff --git a/cloudinit/config/cc_puppet.py b/cloudinit/config/cc_puppet.py index 471a1a8a..6f1b3c57 100644 --- a/cloudinit/config/cc_puppet.py +++ b/cloudinit/config/cc_puppet.py @@ -18,7 +18,7 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
-from StringIO import StringIO +from six import StringIO  import os  import socket @@ -81,13 +81,13 @@ def handle(name, cfg, cloud, log, _args):          cleaned_contents = '\n'.join(cleaned_lines)          puppet_config.readfp(StringIO(cleaned_contents),                               filename=PUPPET_CONF_PATH) -        for (cfg_name, cfg) in puppet_cfg['conf'].iteritems(): +        for (cfg_name, cfg) in puppet_cfg['conf'].items():              # Cert configuration is a special case              # Dump the puppet master ca certificate in the correct place              if cfg_name == 'ca_cert':                  # Puppet ssl sub-directory isn't created yet                  # Create it with the proper permissions and ownership -                util.ensure_dir(PUPPET_SSL_DIR, 0771) +                util.ensure_dir(PUPPET_SSL_DIR, 0o771)                  util.chownbyname(PUPPET_SSL_DIR, 'puppet', 'root')                  util.ensure_dir(PUPPET_SSL_CERT_DIR)                  util.chownbyname(PUPPET_SSL_CERT_DIR, 'puppet', 'root') @@ -96,7 +96,7 @@ def handle(name, cfg, cloud, log, _args):              else:                  # Iterate throug the config items, we'll use ConfigParser.set                  # to overwrite or create new items as needed -                for (o, v) in cfg.iteritems(): +                for (o, v) in cfg.items():                      if o == 'certname':                          # Expand %f as the fqdn                          # TODO(harlowja) should this use the cloud fqdn?? diff --git a/cloudinit/config/cc_resolv_conf.py b/cloudinit/config/cc_resolv_conf.py index bbaa6c63..71d9e3a7 100644 --- a/cloudinit/config/cc_resolv_conf.py +++ b/cloudinit/config/cc_resolv_conf.py @@ -66,8 +66,8 @@ def generate_resolv_conf(template_fn, params, target_fname="/etc/resolv.conf"):      false_flags = []      if 'options' in params: -        for key, val in params['options'].iteritems(): -            if type(val) == bool: +        for key, val in params['options'].items(): +            if isinstance(val, bool):                  if val:                      flags.append(key)                  else: diff --git a/cloudinit/config/cc_seed_random.py b/cloudinit/config/cc_seed_random.py index 49a6b3e8..3b7235bf 100644 --- a/cloudinit/config/cc_seed_random.py +++ b/cloudinit/config/cc_seed_random.py @@ -21,7 +21,8 @@  import base64  import os -from StringIO import StringIO + +from six import StringIO  from cloudinit.settings import PER_INSTANCE  from cloudinit import log as logging diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index 4c76581c..ab6940fa 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -34,12 +34,12 @@ DISABLE_ROOT_OPTS = ("no-port-forwarding,no-agent-forwarding,"  "rather than the user \\\"root\\\".\';echo;sleep 10\"")  KEY_2_FILE = { -    "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600), -    "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644), -    "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600), -    "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0644), -    "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0600), -    "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644), +    "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0o600), +    "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0o644), +    "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0o600), +    "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0o644), +    "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0o600), +    "ecdsa_public": 
("/etc/ssh/ssh_host_ecdsa_key.pub", 0o644),  }  PRIV_2_PUB = { @@ -68,13 +68,13 @@ def handle(_name, cfg, cloud, log, _args):      if "ssh_keys" in cfg:          # if there are keys in cloud-config, use them -        for (key, val) in cfg["ssh_keys"].iteritems(): +        for (key, val) in cfg["ssh_keys"].items():              if key in KEY_2_FILE:                  tgt_fn = KEY_2_FILE[key][0]                  tgt_perms = KEY_2_FILE[key][1]                  util.write_file(tgt_fn, val, tgt_perms) -        for (priv, pub) in PRIV_2_PUB.iteritems(): +        for (priv, pub) in PRIV_2_PUB.items():              if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']:                  continue              pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0]) diff --git a/cloudinit/config/cc_yum_add_repo.py b/cloudinit/config/cc_yum_add_repo.py index 0d836f28..3b821af9 100644 --- a/cloudinit/config/cc_yum_add_repo.py +++ b/cloudinit/config/cc_yum_add_repo.py @@ -18,9 +18,10 @@  import os -from cloudinit import util -  import configobj +import six + +from cloudinit import util  def _canonicalize_id(repo_id): @@ -37,7 +38,7 @@ def _format_repo_value(val):          # Can handle 'lists' in certain cases          # See: http://bit.ly/Qqrf1t          return "\n    ".join([_format_repo_value(v) for v in val]) -    if not isinstance(val, (basestring, str)): +    if not isinstance(val, six.string_types):          return str(val)      return val diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 49a0b652..4ebccdda 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -21,7 +21,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -from StringIO import StringIO +import six +from six import StringIO  import abc  import itertools @@ -334,7 +335,7 @@ class Distro(object):          redact_opts = ['passwd']          # Check the values and create the command -        for key, val in kwargs.iteritems(): +        for key, val in kwargs.items():              if key in adduser_opts and val and isinstance(val, str):                  adduser_cmd.extend([adduser_opts[key], val]) @@ -393,7 +394,7 @@ class Distro(object):          if 'ssh_authorized_keys' in kwargs:              # Try to handle this in a smart manner.              
keys = kwargs['ssh_authorized_keys'] -            if isinstance(keys, (basestring, str)): +            if isinstance(keys, six.string_types):                  keys = [keys]              if isinstance(keys, dict):                  keys = list(keys.values()) @@ -491,7 +492,7 @@ class Distro(object):          if isinstance(rules, (list, tuple)):              for rule in rules:                  lines.append("%s %s" % (user, rule)) -        elif isinstance(rules, (basestring, str)): +        elif isinstance(rules, six.string_types):              lines.append("%s %s" % (user, rules))          else:              msg = "Can not create sudoers rule addition with type %r" @@ -561,10 +562,10 @@ def _get_package_mirror_info(mirror_info, availability_zone=None,          subst['ec2_region'] = "%s" % availability_zone[0:-1]      results = {} -    for (name, mirror) in mirror_info.get('failsafe', {}).iteritems(): +    for (name, mirror) in mirror_info.get('failsafe', {}).items():          results[name] = mirror -    for (name, searchlist) in mirror_info.get('search', {}).iteritems(): +    for (name, searchlist) in mirror_info.get('search', {}).items():          mirrors = []          for tmpl in searchlist:              try: @@ -604,30 +605,30 @@ def _get_arch_package_mirror_info(package_mirrors, arch):  # is the standard form used in the rest  # of cloud-init  def _normalize_groups(grp_cfg): -    if isinstance(grp_cfg, (str, basestring)): +    if isinstance(grp_cfg, six.string_types):          grp_cfg = grp_cfg.strip().split(",") -    if isinstance(grp_cfg, (list)): +    if isinstance(grp_cfg, list):          c_grp_cfg = {}          for i in grp_cfg: -            if isinstance(i, (dict)): +            if isinstance(i, dict):                  for k, v in i.items():                      if k not in c_grp_cfg: -                        if isinstance(v, (list)): +                        if isinstance(v, list):                              c_grp_cfg[k] = list(v) -                        elif isinstance(v, (basestring, str)): +                        elif isinstance(v, six.string_types):                              c_grp_cfg[k] = [v]                          else:                              raise TypeError("Bad group member type %s" %                                              type_utils.obj_name(v))                      else: -                        if isinstance(v, (list)): +                        if isinstance(v, list):                              c_grp_cfg[k].extend(v) -                        elif isinstance(v, (basestring, str)): +                        elif isinstance(v, six.string_types):                              c_grp_cfg[k].append(v)                          else:                              raise TypeError("Bad group member type %s" %                                              type_utils.obj_name(v)) -            elif isinstance(i, (str, basestring)): +            elif isinstance(i, six.string_types):                  if i not in c_grp_cfg:                      c_grp_cfg[i] = []              else: @@ -635,7 +636,7 @@ def _normalize_groups(grp_cfg):                                  type_utils.obj_name(i))          grp_cfg = c_grp_cfg      groups = {} -    if isinstance(grp_cfg, (dict)): +    if isinstance(grp_cfg, dict):          for (grp_name, grp_members) in grp_cfg.items():              groups[grp_name] = util.uniq_merge_sorted(grp_members)      else: @@ -661,29 +662,29 @@ def _normalize_groups(grp_cfg):  # entry 'default' which will be marked as true  # all other users will be 
marked as false.  def _normalize_users(u_cfg, def_user_cfg=None): -    if isinstance(u_cfg, (dict)): +    if isinstance(u_cfg, dict):          ad_ucfg = []          for (k, v) in u_cfg.items(): -            if isinstance(v, (bool, int, basestring, str, float)): +            if isinstance(v, (bool, int, float) + six.string_types):                  if util.is_true(v):                      ad_ucfg.append(str(k)) -            elif isinstance(v, (dict)): +            elif isinstance(v, dict):                  v['name'] = k                  ad_ucfg.append(v)              else:                  raise TypeError(("Unmappable user value type %s"                                   " for key %s") % (type_utils.obj_name(v), k))          u_cfg = ad_ucfg -    elif isinstance(u_cfg, (str, basestring)): +    elif isinstance(u_cfg, six.string_types):          u_cfg = util.uniq_merge_sorted(u_cfg)      users = {}      for user_config in u_cfg: -        if isinstance(user_config, (str, basestring, list)): +        if isinstance(user_config, (list,) + six.string_types):              for u in util.uniq_merge(user_config):                  if u and u not in users:                      users[u] = {} -        elif isinstance(user_config, (dict)): +        elif isinstance(user_config, dict):              if 'name' in user_config:                  n = user_config.pop('name')                  prev_config = users.get(n) or {} @@ -784,11 +785,11 @@ def normalize_users_groups(cfg, distro):          old_user = cfg['user']          # Translate it into the format that is more useful          # going forward -        if isinstance(old_user, (basestring, str)): +        if isinstance(old_user, six.string_types):              old_user = {                  'name': old_user,              } -        if not isinstance(old_user, (dict)): +        if not isinstance(old_user, dict):              LOG.warn(("Format for 'user' key must be a string or "                        "dictionary and not %s"), type_utils.obj_name(old_user))              old_user = {} @@ -813,7 +814,7 @@ def normalize_users_groups(cfg, distro):      default_user_config = util.mergemanydict([old_user, distro_user_config])      base_users = cfg.get('users', []) -    if not isinstance(base_users, (list, dict, str, basestring)): +    if not isinstance(base_users, (list, dict) + six.string_types):          LOG.warn(("Format for 'users' key must be a comma separated string"                    " or a dictionary or a list and not %s"),                   type_utils.obj_name(base_users)) @@ -822,12 +823,12 @@ def normalize_users_groups(cfg, distro):      if old_user:          # Ensure that when user: is provided that this user          # always gets added (as the default user) -        if isinstance(base_users, (list)): +        if isinstance(base_users, list):              # Just add it on at the end...              
base_users.append({'name': 'default'}) -        elif isinstance(base_users, (dict)): +        elif isinstance(base_users, dict):              base_users['default'] = dict(base_users).get('default', True) -        elif isinstance(base_users, (str, basestring)): +        elif isinstance(base_users, six.string_types):              # Just append it on to be re-parsed later              base_users += ",default" diff --git a/cloudinit/distros/arch.py b/cloudinit/distros/arch.py index 68bf1aab..e540e0bc 100644 --- a/cloudinit/distros/arch.py +++ b/cloudinit/distros/arch.py @@ -66,7 +66,7 @@ class Distro(distros.Distro):                    settings, entries)          dev_names = entries.keys()          # Format for netctl -        for (dev, info) in entries.iteritems(): +        for (dev, info) in entries.items():              nameservers = []              net_fn = self.network_conf_dir + dev              net_cfg = { diff --git a/cloudinit/distros/freebsd.py b/cloudinit/distros/freebsd.py index f1b4a256..4c484639 100644 --- a/cloudinit/distros/freebsd.py +++ b/cloudinit/distros/freebsd.py @@ -16,7 +16,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -from StringIO import StringIO +import six +from six import StringIO  import re @@ -203,8 +204,9 @@ class Distro(distros.Distro):          redact_opts = ['passwd'] -        for key, val in kwargs.iteritems(): -            if key in adduser_opts and val and isinstance(val, basestring): +        for key, val in kwargs.items(): +            if (key in adduser_opts and val +                    and isinstance(val, six.string_types)):                  adduser_cmd.extend([adduser_opts[key], val])                  # Redact certain fields from the logs @@ -271,7 +273,7 @@ class Distro(distros.Distro):          nameservers = []          searchdomains = []          dev_names = entries.keys() -        for (device, info) in entries.iteritems(): +        for (device, info) in entries.items():              # Skip the loopback interface.              if device.startswith('lo'):                  continue @@ -323,7 +325,7 @@ class Distro(distros.Distro):                  resolvconf.add_search_domain(domain)              except ValueError:                  util.logexc(LOG, "Failed to add search domain %s", domain) -        util.write_file(self.resolv_conf_fn, str(resolvconf), 0644) +        util.write_file(self.resolv_conf_fn, str(resolvconf), 0o644)          return dev_names diff --git a/cloudinit/distros/net_util.py b/cloudinit/distros/net_util.py index 8b28e2d1..cadfa6b6 100644 --- a/cloudinit/distros/net_util.py +++ b/cloudinit/distros/net_util.py @@ -103,7 +103,7 @@ def translate_network(settings):              consume[cmd] = args      # Check if anything left over to consume      absorb = False -    for (cmd, args) in consume.iteritems(): +    for (cmd, args) in consume.items():          if cmd == 'iface':              absorb = True      if absorb: diff --git a/cloudinit/distros/parsers/hostname.py b/cloudinit/distros/parsers/hostname.py index 617b3c36..84a1de42 100644 --- a/cloudinit/distros/parsers/hostname.py +++ b/cloudinit/distros/parsers/hostname.py @@ -16,7 +16,7 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
-from StringIO import StringIO +from six import StringIO  from cloudinit.distros.parsers import chop_comment diff --git a/cloudinit/distros/parsers/hosts.py b/cloudinit/distros/parsers/hosts.py index 94c97051..3c5498ee 100644 --- a/cloudinit/distros/parsers/hosts.py +++ b/cloudinit/distros/parsers/hosts.py @@ -16,7 +16,7 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -from StringIO import StringIO +from six import StringIO  from cloudinit.distros.parsers import chop_comment diff --git a/cloudinit/distros/parsers/resolv_conf.py b/cloudinit/distros/parsers/resolv_conf.py index 5733c25a..8aee03a4 100644 --- a/cloudinit/distros/parsers/resolv_conf.py +++ b/cloudinit/distros/parsers/resolv_conf.py @@ -16,7 +16,7 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -from StringIO import StringIO +from six import StringIO  from cloudinit import util diff --git a/cloudinit/distros/parsers/sys_conf.py b/cloudinit/distros/parsers/sys_conf.py index 20ca1871..d795e12f 100644 --- a/cloudinit/distros/parsers/sys_conf.py +++ b/cloudinit/distros/parsers/sys_conf.py @@ -16,7 +16,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -from StringIO import StringIO +import six +from six import StringIO  import pipes  import re @@ -69,7 +70,7 @@ class SysConf(configobj.ConfigObj):          return out_contents.getvalue()      def _quote(self, value, multiline=False): -        if not isinstance(value, (str, basestring)): +        if not isinstance(value, six.string_types):              raise ValueError('Value "%s" is not a string' % (value))          if len(value) == 0:              return '' diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index d9588632..7408989c 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -73,7 +73,7 @@ class Distro(distros.Distro):          searchservers = []          dev_names = entries.keys()          use_ipv6 = False -        for (dev, info) in entries.iteritems(): +        for (dev, info) in entries.items():              net_fn = self.network_script_tpl % (dev)              net_cfg = {                  'DEVICE': dev, diff --git a/cloudinit/distros/sles.py b/cloudinit/distros/sles.py index 43682a12..0c6d1203 100644 --- a/cloudinit/distros/sles.py +++ b/cloudinit/distros/sles.py @@ -62,7 +62,7 @@ class Distro(distros.Distro):          nameservers = []          searchservers = []          dev_names = entries.keys() -        for (dev, info) in entries.iteritems(): +        for (dev, info) in entries.items():              net_fn = self.network_script_tpl % (dev)              mode = info.get('auto')              if mode and mode.lower() == 'true': diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py index e69d06ff..e1ed4091 100644 --- a/cloudinit/ec2_utils.py +++ b/cloudinit/ec2_utils.py @@ -17,7 +17,6 @@  #    along with this program.  If not, see <http://www.gnu.org/licenses/>.  
import functools -import httplib  import json  from cloudinit import log as logging @@ -25,7 +24,7 @@ from cloudinit import url_helper  from cloudinit import util  LOG = logging.getLogger(__name__) -SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND]) +SKIP_USERDATA_CODES = frozenset([url_helper.NOT_FOUND])  class MetadataLeafDecoder(object): @@ -123,7 +122,7 @@ class MetadataMaterializer(object):          leaf_contents = {}          for (field, resource) in leaves.items():              leaf_url = url_helper.combine_url(base_url, resource) -            leaf_blob = str(self._caller(leaf_url)) +            leaf_blob = self._caller(leaf_url).contents              leaf_contents[field] = self._leaf_decoder(field, leaf_blob)          joined = {}          joined.update(child_contents) @@ -160,7 +159,7 @@ def get_instance_userdata(api_version='latest',                                           timeout=timeout,                                           retries=retries,                                           exception_cb=exception_cb) -        user_data = str(response) +        user_data = response.contents      except url_helper.UrlError as e:          if e.code not in SKIP_USERDATA_CODES:              util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) @@ -183,7 +182,7 @@ def get_instance_metadata(api_version='latest',      try:          response = caller(md_url) -        materializer = MetadataMaterializer(str(response), +        materializer = MetadataMaterializer(response.contents,                                              md_url, caller,                                              leaf_decoder=leaf_decoder)          md = materializer.materialize() diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index 059d7495..d67a70ea 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -147,7 +147,7 @@ def walker_handle_handler(pdata, _ctype, _filename, payload):      if not modfname.endswith(".py"):          modfname = "%s.py" % (modfname)      # TODO(harlowja): Check if path exists?? 
-    util.write_file(modfname, payload, 0600) +    util.write_file(modfname, payload, 0o600)      handlers = pdata['handlers']      try:          mod = fixup_handler(importer.import_module(modname)) diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index 3a50cf87..a4ea47ac 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -50,7 +50,7 @@ class BootHookPartHandler(handlers.Handler):          filepath = os.path.join(self.boothook_dir, filename)          contents = util.strip_prefix_suffix(util.dos2unix(payload),                                              prefix=BOOTHOOK_PREFIX) -        util.write_file(filepath, contents.lstrip(), 0700) +        util.write_file(filepath, contents.lstrip(), 0o700)          return filepath      def handle_part(self, data, ctype, filename, payload, frequency): diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index bf994e33..07b6d0e0 100644 --- a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -95,7 +95,7 @@ class CloudConfigPartHandler(handlers.Handler):              lines.append(util.yaml_dumps(self.cloud_buf))          else:              lines = [] -        util.write_file(self.cloud_fn, "\n".join(lines), 0600) +        util.write_file(self.cloud_fn, "\n".join(lines), 0o600)      def _extract_mergers(self, payload, headers):          merge_header_headers = '' diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py index 9755ab05..b5087693 100644 --- a/cloudinit/handlers/shell_script.py +++ b/cloudinit/handlers/shell_script.py @@ -52,4 +52,4 @@ class ShellScriptPartHandler(handlers.Handler):          filename = util.clean_filename(filename)          payload = util.dos2unix(payload)          path = os.path.join(self.script_dir, filename) -        util.write_file(path, payload, 0700) +        util.write_file(path, payload, 0o700) diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py index 50d193c4..c5bea711 100644 --- a/cloudinit/handlers/upstart_job.py +++ b/cloudinit/handlers/upstart_job.py @@ -65,7 +65,7 @@ class UpstartJobPartHandler(handlers.Handler):          payload = util.dos2unix(payload)          path = os.path.join(self.upstart_dir, filename) -        util.write_file(path, payload, 0644) +        util.write_file(path, payload, 0o644)          if SUITABLE_UPSTART:              util.subp(["initctl", "reload-configuration"], capture=False) diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index e701126e..ed396b5a 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -23,10 +23,11 @@  from time import time  import contextlib -import io  import os -from ConfigParser import (NoSectionError, NoOptionError, RawConfigParser) +import six +from six.moves.configparser import ( +    NoSectionError, NoOptionError, RawConfigParser)  from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,                                  CFG_ENV_NAME) @@ -318,10 +319,10 @@ class ContentHandlers(object):          return self.registered[content_type]      def items(self): -        return self.registered.items() +        return list(self.registered.items()) -    def iteritems(self): -        return self.registered.iteritems() +    # XXX This should really go away. 
+    iteritems = items  class Paths(object): @@ -449,7 +450,7 @@ class DefaultingConfigParser(RawConfigParser):      def stringify(self, header=None):          contents = '' -        with io.BytesIO() as outputstream: +        with six.StringIO() as outputstream:              self.write(outputstream)              outputstream.flush()              contents = outputstream.getvalue() diff --git a/cloudinit/log.py b/cloudinit/log.py index 622c946c..3c79b9c9 100644 --- a/cloudinit/log.py +++ b/cloudinit/log.py @@ -28,7 +28,8 @@ import collections  import os  import sys -from StringIO import StringIO +import six +from six import StringIO  # Logging levels for easy access  CRITICAL = logging.CRITICAL @@ -72,13 +73,13 @@ def setupLogging(cfg=None):      log_cfgs = []      log_cfg = cfg.get('logcfg') -    if log_cfg and isinstance(log_cfg, (str, basestring)): +    if log_cfg and isinstance(log_cfg, six.string_types):          # If there is a 'logcfg' entry in the config,          # respect it, it is the old keyname          log_cfgs.append(str(log_cfg))      elif "log_cfgs" in cfg:          for a_cfg in cfg['log_cfgs']: -            if isinstance(a_cfg, (basestring, str)): +            if isinstance(a_cfg, six.string_types):                  log_cfgs.append(a_cfg)              elif isinstance(a_cfg, (collections.Iterable)):                  cfg_str = [str(c) for c in a_cfg] diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 03aa1ee1..e13f55ac 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -18,6 +18,8 @@  import re +import six +  from cloudinit import importer  from cloudinit import log as logging  from cloudinit import type_utils @@ -95,7 +97,7 @@ def dict_extract_mergers(config):          raw_mergers = config.pop('merge_type', None)      if raw_mergers is None:          return parsed_mergers -    if isinstance(raw_mergers, (str, basestring)): +    if isinstance(raw_mergers, six.string_types):          return string_extract_mergers(raw_mergers)      for m in raw_mergers:          if isinstance(m, (dict)): diff --git a/cloudinit/mergers/m_dict.py b/cloudinit/mergers/m_dict.py index a16141fa..87cf1a72 100644 --- a/cloudinit/mergers/m_dict.py +++ b/cloudinit/mergers/m_dict.py @@ -16,6 +16,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. +import six +  DEF_MERGE_TYPE = 'no_replace'  MERGE_TYPES = ('replace', DEF_MERGE_TYPE,) @@ -57,7 +59,7 @@ class Merger(object):                  return new_v              if isinstance(new_v, (list, tuple)) and self._recurse_array:                  return self._merger.merge(old_v, new_v) -            if isinstance(new_v, (basestring)) and self._recurse_str: +            if isinstance(new_v, six.string_types) and self._recurse_str:                  return self._merger.merge(old_v, new_v)              if isinstance(new_v, (dict)) and self._recurse_dict:                  return self._merger.merge(old_v, new_v) diff --git a/cloudinit/mergers/m_list.py b/cloudinit/mergers/m_list.py index 3b87b0fc..81e5c580 100644 --- a/cloudinit/mergers/m_list.py +++ b/cloudinit/mergers/m_list.py @@ -16,6 +16,8 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
+import six +  DEF_MERGE_TYPE = 'replace'  MERGE_TYPES = ('append', 'prepend', DEF_MERGE_TYPE, 'no_replace') @@ -73,7 +75,7 @@ class Merger(object):                  return old_v              if isinstance(new_v, (list, tuple)) and self._recurse_array:                  return self._merger.merge(old_v, new_v) -            if isinstance(new_v, (str, basestring)) and self._recurse_str: +            if isinstance(new_v, six.string_types) and self._recurse_str:                  return self._merger.merge(old_v, new_v)              if isinstance(new_v, (dict)) and self._recurse_dict:                  return self._merger.merge(old_v, new_v) @@ -82,6 +84,6 @@ class Merger(object):          # Ok now we are replacing same indexes          merged_list.extend(value)          common_len = min(len(merged_list), len(merge_with)) -        for i in xrange(0, common_len): +        for i in range(0, common_len):              merged_list[i] = merge_same_index(merged_list[i], merge_with[i])          return merged_list diff --git a/cloudinit/mergers/m_str.py b/cloudinit/mergers/m_str.py index e22ce28a..b00c4bf3 100644 --- a/cloudinit/mergers/m_str.py +++ b/cloudinit/mergers/m_str.py @@ -17,6 +17,8 @@  # You should have received a copy of the GNU General Public License  # along with this program. If not, see <http://www.gnu.org/licenses/>. +import six +  class Merger(object):      def __init__(self, _merger, opts): @@ -34,11 +36,11 @@ class Merger(object):      # perform the following action, if appending we will      # merge them together, otherwise we will just return value.      def _on_str(self, value, merge_with): -        if not isinstance(value, (basestring)): +        if not isinstance(value, six.string_types):              return merge_with          if not self._append:              return merge_with -        if isinstance(value, unicode): -            return value + unicode(merge_with) +        if isinstance(value, six.text_type): +            return value + six.text_type(merge_with)          else: -            return value + str(merge_with) +            return value + six.binary_type(merge_with) diff --git a/cloudinit/netinfo.py b/cloudinit/netinfo.py index fb40cc0d..e30d6fb5 100644 --- a/cloudinit/netinfo.py +++ b/cloudinit/netinfo.py @@ -87,7 +87,7 @@ def netdev_info(empty=""):                      devs[curdev][target] = toks[i][len(field) + 1:]      if empty != "": -        for (_devname, dev) in devs.iteritems(): +        for (_devname, dev) in devs.items():              for field in dev:                  if dev[field] == "":                      dev[field] = empty @@ -181,7 +181,7 @@ def netdev_pformat():      else:          fields = ['Device', 'Up', 'Address', 'Mask', 'Scope', 'Hw-Address']          tbl = PrettyTable(fields) -        for (dev, d) in netdev.iteritems(): +        for (dev, d) in netdev.items():              tbl.add_row([dev, d["up"], d["addr"], d["mask"], ".", d["hwaddr"]])              if d.get('addr6'):                  tbl.add_row([dev, d["up"], diff --git a/cloudinit/signal_handler.py b/cloudinit/signal_handler.py index 40b0c94c..0d95f506 100644 --- a/cloudinit/signal_handler.py +++ b/cloudinit/signal_handler.py @@ -22,7 +22,7 @@ import inspect  import signal  import sys -from StringIO import StringIO +from six import StringIO  from cloudinit import log as logging  from cloudinit import util diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 15244a0d..eb474079 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ 
b/cloudinit/sources/DataSourceConfigDrive.py @@ -216,11 +216,11 @@ def on_first_boot(data, distro=None):      files = data.get('files', {})      if files:          LOG.debug("Writing %s injected files", len(files)) -        for (filename, content) in files.iteritems(): +        for (filename, content) in files.items():              if not filename.startswith(os.sep):                  filename = os.sep + filename              try: -                util.write_file(filename, content, mode=0660) +                util.write_file(filename, content, mode=0o660)              except IOError:                  util.logexc(LOG, "Failed writing file: %s", filename) diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py index 8f27ee89..b20ce2a1 100644 --- a/cloudinit/sources/DataSourceDigitalOcean.py +++ b/cloudinit/sources/DataSourceDigitalOcean.py @@ -18,7 +18,7 @@ from cloudinit import log as logging  from cloudinit import util  from cloudinit import sources  from cloudinit import ec2_utils -from types import StringType +  import functools @@ -72,10 +72,11 @@ class DataSourceDigitalOcean(sources.DataSource):          return "\n".join(self.metadata['vendor-data'])      def get_public_ssh_keys(self): -        if type(self.metadata['public-keys']) is StringType: -            return [self.metadata['public-keys']] +        public_keys = self.metadata['public-keys'] +        if isinstance(public_keys, list): +            return public_keys          else: -            return self.metadata['public-keys'] +            return [public_keys]      @property      def availability_zone(self): diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 1b20ecf3..798869b7 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -156,8 +156,8 @@ class DataSourceEc2(sources.DataSource):          # 'ephemeral0': '/dev/sdb',          # 'root': '/dev/sda1'}          found = None -        bdm_items = self.metadata['block-device-mapping'].iteritems() -        for (entname, device) in bdm_items: +        bdm = self.metadata['block-device-mapping'] +        for (entname, device) in bdm.items():              if entname == name:                  found = device                  break diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index dfe90bc6..9a3e30c5 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -262,7 +262,7 @@ def check_seed_contents(content, seed):      userdata = content.get('user-data', "")      md = {} -    for (key, val) in content.iteritems(): +    for (key, val) in content.items():          if key == 'user-data':              continue          md[key] = val diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py index 7ba60735..58a4b2a2 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -66,7 +66,7 @@ class DataSourceOVF(sources.DataSource):              np = {'iso': transport_iso9660,                    'vmware-guestd': transport_vmware_guestd, }              name = None -            for (name, transfunc) in np.iteritems(): +            for (name, transfunc) in np.items():                  (contents, _dev, _fname) = transfunc()                  if contents:                      break @@ -138,7 +138,7 @@ def read_ovf_environment(contents):      ud = ""      cfg_props = ['password']      md_props = ['seedfrom', 'local-hostname', 'public-keys', 
'instance-id'] -    for (prop, val) in props.iteritems(): +    for (prop, val) in props.items():          if prop == 'hostname':              prop = "local-hostname"          if prop in md_props: @@ -183,7 +183,7 @@ def transport_iso9660(require_iso=True):      # Go through mounts to see if it was already mounted      mounts = util.mounts() -    for (dev, info) in mounts.iteritems(): +    for (dev, info) in mounts.items():          fstype = info['fstype']          if fstype != "iso9660" and require_iso:              continue diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 2733a2f6..7a975d78 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -30,12 +30,12 @@  #       Comments with "@datadictionary" are snippets of the definition  import base64 +import os +import serial +  from cloudinit import log as logging  from cloudinit import sources  from cloudinit import util -import os -import os.path -import serial  LOG = logging.getLogger(__name__) @@ -201,7 +201,7 @@ class DataSourceSmartOS(sources.DataSource):          if b64_all is not None:              self.b64_all = util.is_true(b64_all) -        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems(): +        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items():              smartos_noun, strip = attribute              md[ci_noun] = self.query(smartos_noun, strip=strip) @@ -218,11 +218,12 @@ class DataSourceSmartOS(sources.DataSource):          user_script = os.path.join(data_d, 'user-script')          u_script_l = "%s/user-script" % LEGACY_USER_D          write_boot_content(md.get('user-script'), content_f=user_script, -                           link=u_script_l, shebang=True, mode=0700) +                           link=u_script_l, shebang=True, mode=0o700)          operator_script = os.path.join(data_d, 'operator-script')          write_boot_content(md.get('operator-script'), -                           content_f=operator_script, shebang=False, mode=0700) +                           content_f=operator_script, shebang=False, +                           mode=0o700)          # @datadictionary:  This key has no defined format, but its value          # is written to the file /var/db/mdata-user-data on each boot prior @@ -381,7 +382,7 @@ def dmi_data():  def write_boot_content(content, content_f, link=None, shebang=False, -                       mode=0400): +                       mode=0o400):      """      Write the content to content_f. Under the following rules:          1. 
If no content, remove the file diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 7c7ef9ab..39eab51b 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -23,6 +23,8 @@  import abc  import os +import six +  from cloudinit import importer  from cloudinit import log as logging  from cloudinit import type_utils @@ -130,7 +132,7 @@ class DataSource(object):          # we want to return the correct value for what will actually          # exist in this instance          mappings = {"sd": ("vd", "xvd", "vtb")} -        for (nfrom, tlist) in mappings.iteritems(): +        for (nfrom, tlist) in mappings.items():              if not short_name.startswith(nfrom):                  continue              for nto in tlist: @@ -218,18 +220,18 @@ def normalize_pubkey_data(pubkey_data):      if not pubkey_data:          return keys -    if isinstance(pubkey_data, (basestring, str)): +    if isinstance(pubkey_data, six.string_types):          return str(pubkey_data).splitlines()      if isinstance(pubkey_data, (list, set)):          return list(pubkey_data)      if isinstance(pubkey_data, (dict)): -        for (_keyname, klist) in pubkey_data.iteritems(): +        for (_keyname, klist) in pubkey_data.items():              # lp:506332 uec metadata service responds with              # data that makes boto populate a string for 'klist' rather              # than a list. -            if isinstance(klist, (str, basestring)): +            if isinstance(klist, six.string_types):                  klist = [klist]              if isinstance(klist, (list, set)):                  for pkey in klist: diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index b7e19314..88c7a198 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -24,6 +24,8 @@ import copy  import functools  import os +import six +  from cloudinit import ec2_utils  from cloudinit import log as logging  from cloudinit import sources @@ -205,7 +207,7 @@ class BaseReader(object):          """          load_json_anytype = functools.partial( -            util.load_json, root_types=(dict, basestring, list)) +            util.load_json, root_types=(dict, list) + six.string_types)          def datafiles(version):              files = {} @@ -234,7 +236,7 @@ class BaseReader(object):              'version': 2,          }          data = datafiles(self._find_working_version()) -        for (name, (path, required, translator)) in data.iteritems(): +        for (name, (path, required, translator)) in data.items():              path = self._path_join(self.base_path, path)              data = None              found = False @@ -364,7 +366,7 @@ class ConfigDriveReader(BaseReader):              raise NonReadable("%s: no files found" % (self.base_path))          md = {} -        for (name, (key, translator, default)) in FILES_V1.iteritems(): +        for (name, (key, translator, default)) in FILES_V1.items():              if name in found:                  path = found[name]                  try: @@ -478,7 +480,7 @@ def convert_vendordata_json(data, recurse=True):      """      if not data:          return None -    if isinstance(data, (str, unicode, basestring)): +    if isinstance(data, six.string_types):          return data      if isinstance(data, list):          return copy.deepcopy(data) diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 14d0cb0f..9b2f5ed5 100644 --- a/cloudinit/ssh_util.py +++ 
b/cloudinit/ssh_util.py @@ -239,7 +239,7 @@ def setup_user_keys(keys, username, options=None):      # Make sure the users .ssh dir is setup accordingly      (ssh_dir, pwent) = users_ssh_info(username)      if not os.path.isdir(ssh_dir): -        util.ensure_dir(ssh_dir, mode=0700) +        util.ensure_dir(ssh_dir, mode=0o700)          util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid)      # Turn the 'update' keys given into actual entries @@ -252,8 +252,8 @@ def setup_user_keys(keys, username, options=None):      (auth_key_fn, auth_key_entries) = extract_authorized_keys(username)      with util.SeLinuxGuard(ssh_dir, recursive=True):          content = update_authorized_keys(auth_key_entries, key_entries) -        util.ensure_dir(os.path.dirname(auth_key_fn), mode=0700) -        util.write_file(auth_key_fn, content, mode=0600) +        util.ensure_dir(os.path.dirname(auth_key_fn), mode=0o700) +        util.write_file(auth_key_fn, content, mode=0o600)          util.chownbyid(auth_key_fn, pwent.pw_uid, pwent.pw_gid) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 67f467f7..f4f4591d 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -20,12 +20,13 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. -import cPickle as pickle -  import copy  import os  import sys +import six +from six.moves import cPickle as pickle +  from cloudinit.settings import (PER_INSTANCE, FREQUENCIES, CLOUD_CONFIG)  from cloudinit import handlers @@ -202,7 +203,7 @@ class Init(object):              util.logexc(LOG, "Failed pickling datasource %s", self.datasource)              return False          try: -            util.write_file(pickled_fn, pk_contents, mode=0400) +            util.write_file(pickled_fn, pk_contents, mode=0o400)          except Exception:              util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)              return False @@ -324,15 +325,15 @@ class Init(object):      def _store_userdata(self):          raw_ud = "%s" % (self.datasource.get_userdata_raw()) -        util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0600) +        util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0o600)          processed_ud = "%s" % (self.datasource.get_userdata()) -        util.write_file(self._get_ipath('userdata'), processed_ud, 0600) +        util.write_file(self._get_ipath('userdata'), processed_ud, 0o600)      def _store_vendordata(self):          raw_vd = "%s" % (self.datasource.get_vendordata_raw()) -        util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0600) +        util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0o600)          processed_vd = "%s" % (self.datasource.get_vendordata()) -        util.write_file(self._get_ipath('vendordata'), processed_vd, 0600) +        util.write_file(self._get_ipath('vendordata'), processed_vd, 0o600)      def _default_handlers(self, opts=None):          if opts is None: @@ -384,7 +385,7 @@ class Init(object):              if not path or not os.path.isdir(path):                  return              potential_handlers = util.find_modules(path) -            for (fname, mod_name) in potential_handlers.iteritems(): +            for (fname, mod_name) in potential_handlers.items():                  try:                      mod_locs, looked_locs = importer.find_module(                          mod_name, [''], ['list_types', 'handle_part']) @@ -422,7 +423,7 @@ class Init(object):          def 
init_handlers():              # Init the handlers first -            for (_ctype, mod) in c_handlers.iteritems(): +            for (_ctype, mod) in c_handlers.items():                  if mod in c_handlers.initialized:                      # Avoid initing the same module twice (if said module                      # is registered to more than one content-type). @@ -449,7 +450,7 @@ class Init(object):          def finalize_handlers():              # Give callbacks opportunity to finalize -            for (_ctype, mod) in c_handlers.iteritems(): +            for (_ctype, mod) in c_handlers.items():                  if mod not in c_handlers.initialized:                      # Said module was never inited in the first place, so lets                      # not attempt to finalize those that never got called. @@ -574,7 +575,7 @@ class Modules(object):          for item in cfg_mods:              if not item:                  continue -            if isinstance(item, (str, basestring)): +            if isinstance(item, six.string_types):                  module_list.append({                      'mod': item.strip(),                  }) diff --git a/cloudinit/type_utils.py b/cloudinit/type_utils.py index cc3d9495..b93efd6a 100644 --- a/cloudinit/type_utils.py +++ b/cloudinit/type_utils.py @@ -22,11 +22,31 @@  import types +import six + + +if six.PY3: +    _NAME_TYPES = ( +        types.ModuleType, +        types.FunctionType, +        types.LambdaType, +        type, +    ) +else: +    _NAME_TYPES = ( +        types.TypeType, +        types.ModuleType, +        types.FunctionType, +        types.LambdaType, +        types.ClassType, +    ) +  def obj_name(obj): -    if isinstance(obj, (types.TypeType, -                        types.ModuleType, -                        types.FunctionType, -                        types.LambdaType)): -        return str(obj.__name__) -    return obj_name(obj.__class__) +    if isinstance(obj, _NAME_TYPES): +        return six.text_type(obj.__name__) +    else: +        if not hasattr(obj, '__class__'): +            return repr(obj) +        else: +            return obj_name(obj.__class__) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 3074dd08..62001dff 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -20,21 +20,29 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
-import httplib  import time -import urllib + +import six  import requests  from requests import exceptions -from urlparse import (urlparse, urlunparse) +from six.moves.urllib.parse import ( +    urlparse, urlunparse, +    quote as urlquote)  from cloudinit import log as logging  from cloudinit import version  LOG = logging.getLogger(__name__) -NOT_FOUND = httplib.NOT_FOUND +if six.PY2: +    import httplib +    NOT_FOUND = httplib.NOT_FOUND +else: +    import http.client +    NOT_FOUND = http.client.NOT_FOUND +  # Check if requests has ssl support (added in requests >= 0.8.8)  SSL_ENABLED = False @@ -70,7 +78,7 @@ def combine_url(base, *add_ons):          path = url_parsed[2]          if path and not path.endswith("/"):              path += "/" -        path += urllib.quote(str(add_on), safe="/:") +        path += urlquote(str(add_on), safe="/:")          url_parsed[2] = path          return urlunparse(url_parsed) @@ -111,7 +119,7 @@ class UrlResponse(object):      @property      def contents(self): -        return self._response.content +        return self._response.text      @property      def url(self): @@ -135,7 +143,7 @@ class UrlResponse(object):          return self._response.status_code      def __str__(self): -        return self.contents +        return self._response.text  class UrlError(IOError): diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index de6487d8..9111bd39 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -29,6 +29,8 @@ from email.mime.multipart import MIMEMultipart  from email.mime.nonmultipart import MIMENonMultipart  from email.mime.text import MIMEText +import six +  from cloudinit import handlers  from cloudinit import log as logging  from cloudinit import util @@ -235,7 +237,7 @@ class UserDataProcessor(object):                  resp = util.read_file_or_url(include_url,                                               ssl_details=self.ssl_details)                  if include_once_on and resp.ok(): -                    util.write_file(include_once_fn, str(resp), mode=0600) +                    util.write_file(include_once_fn, str(resp), mode=0o600)                  if resp.ok():                      content = str(resp)                  else: @@ -256,7 +258,7 @@ class UserDataProcessor(object):              #    filename and type not be present              # or              #  scalar(payload) -            if isinstance(ent, (str, basestring)): +            if isinstance(ent, six.string_types):                  ent = {'content': ent}              if not isinstance(ent, (dict)):                  # TODO(harlowja) raise? @@ -337,7 +339,7 @@ def convert_string(raw_data, headers=None):      data = util.decomp_gzip(raw_data)      if "mime-version:" in data[0:4096].lower():          msg = email.message_from_string(data) -        for (key, val) in headers.iteritems(): +        for (key, val) in headers.items():              _replace_header(msg, key, val)      else:          mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE) diff --git a/cloudinit/util.py b/cloudinit/util.py index 9efc704a..434ba7fb 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -20,8 +20,6 @@  #    You should have received a copy of the GNU General Public License  #    along with this program.  If not, see <http://www.gnu.org/licenses/>. 
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 9efc704a..434ba7fb 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -20,8 +20,6 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
-
 import contextlib
 import copy as obj_copy
 import ctypes
@@ -45,8 +43,10 @@ import subprocess
 import sys
 import tempfile
 import time
-import urlparse
+from six.moves.urllib import parse as urlparse
+
+import six
 import yaml
 
 from cloudinit import importer
@@ -69,8 +69,26 @@ FN_REPLACEMENTS = {
 }
 FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters)
 
+TRUE_STRINGS = ('true', '1', 'on', 'yes')
+FALSE_STRINGS = ('off', '0', 'no', 'false')
+
+
 # Helper utils to see if running in a container
-CONTAINER_TESTS = ['running-in-container', 'lxc-is-container']
+CONTAINER_TESTS = ('running-in-container', 'lxc-is-container')
+
+
+def decode_binary(blob, encoding='utf-8'):
+    # Converts a binary type into a text type using given encoding.
+    if isinstance(blob, six.text_type):
+        return blob
+    return blob.decode(encoding)
+
+
+def encode_text(text, encoding='utf-8'):
+    # Converts a text string into a binary type using given encoding.
+    if isinstance(text, six.binary_type):
+        return text
+    return text.encode(encoding)
 
 
 class ProcessExecutionError(IOError):
@@ -95,7 +113,7 @@ class ProcessExecutionError(IOError):
         else:
             self.description = description
 
-        if not isinstance(exit_code, (long, int)):
+        if not isinstance(exit_code, six.integer_types):
             self.exit_code = '-'
         else:
             self.exit_code = exit_code
@@ -151,7 +169,8 @@ class SeLinuxGuard(object):
         path = os.path.realpath(self.path)
         # path should be a string, not unicode
-        path = str(path)
+        if six.PY2:
+            path = str(path)
         try:
             stats = os.lstat(path)
             self.selinux.matchpathcon(path, stats[stat.ST_MODE])
@@ -209,10 +228,10 @@ def fork_cb(child_cb, *args, **kwargs):
 
 
 def is_true(val, addons=None):
     if isinstance(val, (bool)):
         return val is True
-    check_set = ['true', '1', 'on', 'yes']
+    check_set = TRUE_STRINGS
     if addons:
-        check_set = check_set + addons
-    if str(val).lower().strip() in check_set:
+        check_set = list(check_set) + addons
+    if six.text_type(val).lower().strip() in check_set:
         return True
     return False
 
@@ -220,10 +239,10 @@ def is_false(val, addons=None):
     if isinstance(val, (bool)):
         return val is False
-    check_set = ['off', '0', 'no', 'false']
+    check_set = FALSE_STRINGS
     if addons:
-        check_set = check_set + addons
-    if str(val).lower().strip() in check_set:
+        check_set = list(check_set) + addons
+    if six.text_type(val).lower().strip() in check_set:
         return True
     return False
 
@@ -273,7 +292,7 @@ def uniq_merge_sorted(*lists):
 
 
 def uniq_merge(*lists):
     combined_list = []
     for a_list in lists:
-        if isinstance(a_list, (str, basestring)):
+        if isinstance(a_list, six.string_types):
             a_list = a_list.strip().split(",")
             # Kickout the empty ones
             a_list = [a for a in a_list if len(a)]
@@ -282,7 +301,7 @@ def uniq_merge(*lists):
 
 
 def clean_filename(fn):
-    for (k, v) in FN_REPLACEMENTS.iteritems():
+    for (k, v) in FN_REPLACEMENTS.items():
         fn = fn.replace(k, v)
     removals = []
     for k in fn:
@@ -296,14 +315,14 @@ def clean_filename(fn):
 
 
 def decomp_gzip(data, quiet=True):
     try:
-        buf = StringIO(str(data))
+        buf = six.BytesIO(encode_text(data))
         with contextlib.closing(gzip.GzipFile(None, "rb", 1, buf)) as gh:
-            return gh.read()
+            return decode_binary(gh.read())
     except Exception as e:
         if quiet:
             return data
         else:
-            raise DecompressionError(str(e))
+            raise DecompressionError(six.text_type(e))
 
 
 def extract_usergroup(ug_pair):
@@ -362,7 +381,7 @@ def multi_log(text, console=True, stderr=True,
 
 
 def load_json(text, root_types=(dict,)):
-    decoded = json.loads(text)
+    decoded = json.loads(decode_binary(text))
     if not isinstance(decoded, tuple(root_types)):
         expected_types = ", ".join([str(t) for t in root_types])
         raise TypeError("(%s) root types expected, got %s instead"
@@ -394,7 +413,7 @@ def get_cfg_option_str(yobj, key, default=None):
     if key not in yobj:
         return default
     val = yobj[key]
-    if not isinstance(val, (str, basestring)):
+    if not isinstance(val, six.string_types):
         val = str(val)
     return val
 
@@ -433,7 +452,7 @@ def get_cfg_option_list(yobj, key, default=None):
     if isinstance(val, (list)):
         cval = [v for v in val]
         return cval
-    if not isinstance(val, (basestring)):
+    if not isinstance(val, six.string_types):
         val = str(val)
     return [val]
 
@@ -708,10 +727,10 @@ def read_file_or_url(url, timeout=5, retries=10,
 
 
 def load_yaml(blob, default=None, allowed=(dict,)):
     loaded = default
+    blob = decode_binary(blob)
     try:
-        blob = str(blob)
-        LOG.debug(("Attempting to load yaml from string "
-                 "of length %s with allowed root types %s"),
+        LOG.debug("Attempting to load yaml from string "
+                 "of length %s with allowed root types %s",
                  len(blob), allowed)
         converted = safeyaml.load(blob)
         if not isinstance(converted, allowed):
@@ -746,14 +765,12 @@ def read_seeded(base="", ext="", timeout=5, retries=10, file_retries=0):
 
     md_resp = read_file_or_url(md_url, timeout, retries, file_retries)
     md = None
     if md_resp.ok():
-        md_str = str(md_resp)
-        md = load_yaml(md_str, default={})
+        md = load_yaml(md_resp.contents, default={})
 
     ud_resp = read_file_or_url(ud_url, timeout, retries, file_retries)
     ud = None
     if ud_resp.ok():
-        ud_str = str(ud_resp)
-        ud = ud_str
+        ud = ud_resp.contents
 
     return (md, ud)
 
@@ -784,7 +801,7 @@ def read_conf_with_confd(cfgfile):
     if "conf_d" in cfg:
         confd = cfg['conf_d']
         if confd:
-            if not isinstance(confd, (str, basestring)):
+            if not isinstance(confd, six.string_types):
                 raise TypeError(("Config file %s contains 'conf_d' "
                                  "with non-string type %s") %
                                  (cfgfile, type_utils.obj_name(confd)))
@@ -921,8 +938,8 @@ def get_cmdline_url(names=('cloud-config-url', 'url'),
         return (None, None, None)
 
     resp = read_file_or_url(url)
-    if resp.contents.startswith(starts) and resp.ok():
-        return (key, url, str(resp))
+    if resp.ok() and resp.contents.startswith(starts):
+        return (key, url, resp.contents)
 
     return (key, url, None)
 
@@ -1076,9 +1093,9 @@ def uniq_list(in_list):
     return out_list
 
 
-def load_file(fname, read_cb=None, quiet=False):
+def load_file(fname, read_cb=None, quiet=False, decode=True):
     LOG.debug("Reading from %s (quiet=%s)", fname, quiet)
-    ofh = StringIO()
+    ofh = six.BytesIO()
     try:
         with open(fname, 'rb') as ifh:
             pipe_in_out(ifh, ofh, chunk_cb=read_cb)
@@ -1089,7 +1106,10 @@ def load_file(fname, read_cb=None, quiet=False):
             raise
     contents = ofh.getvalue()
     LOG.debug("Read %s bytes from %s", len(contents), fname)
-    return contents
+    if decode:
+        return decode_binary(contents)
+    else:
+        return contents
 
 
 def get_cmdline():
@@ -1219,7 +1239,7 @@ def logexc(log, msg, *args):
 
 
 def hash_blob(blob, routine, mlen=None):
     hasher = hashlib.new(routine)
-    hasher.update(blob)
+    hasher.update(encode_text(blob))
     digest = hasher.hexdigest()
     # Don't get to long now
     if mlen is not None:
@@ -1280,8 +1300,7 @@ def yaml_dumps(obj, explicit_start=True, explicit_end=True):
                           indent=4,
                           explicit_start=explicit_start,
                           explicit_end=explicit_end,
-                          default_flow_style=False,
-                          allow_unicode=True)
+                          default_flow_style=False)
 
 
 def ensure_dir(path, mode=None):
@@ -1515,11 +1534,17 @@ def write_file(filename, content, mode=0o644, omode="wb"):
     @param filename: The full path of the file to write.
     @param content: The content to write to the file.
     @param mode: The filesystem mode to set on the file.
-    @param omode: The open mode used when opening the file (r, rb, a, etc.)
+    @param omode: The open mode used when opening the file (w, wb, a, etc.)
     """
     ensure_dir(os.path.dirname(filename))
-    LOG.debug("Writing to %s - %s: [%s] %s bytes",
-               filename, omode, mode, len(content))
+    if 'b' in omode.lower():
+        content = encode_text(content)
+        write_type = 'bytes'
+    else:
+        content = decode_binary(content)
+        write_type = 'characters'
+    LOG.debug("Writing to %s - %s: [%s] %s %s",
+               filename, omode, mode, len(content), write_type)
     with SeLinuxGuard(path=filename):
         with open(filename, omode) as fh:
             fh.write(content)
@@ -1608,10 +1633,10 @@ def shellify(cmdlist, add_header=True):
         if isinstance(args, list):
             fixed = []
             for f in args:
-                fixed.append("'%s'" % (str(f).replace("'", escaped)))
+                fixed.append("'%s'" % (six.text_type(f).replace("'", escaped)))
             content = "%s%s\n" % (content, ' '.join(fixed))
             cmds_made += 1
-        elif isinstance(args, (str, basestring)):
+        elif isinstance(args, six.string_types):
             content = "%s%s\n" % (content, args)
             cmds_made += 1
         else:
@@ -1722,7 +1747,7 @@ def expand_package_list(version_fmt, pkgs):
 
     pkglist = []
     for pkg in pkgs:
-        if isinstance(pkg, basestring):
+        if isinstance(pkg, six.string_types):
             pkglist.append(pkg)
             continue
diff --git a/packages/bddeb b/packages/bddeb
index 9d264f92..83ca68bb 100755
--- a/packages/bddeb
+++ b/packages/bddeb
@@ -38,6 +38,7 @@ PKG_MP = {
     'pyserial': 'python-serial',
     'pyyaml': 'python-yaml',
     'requests': 'python-requests',
+    'six': 'python-six',
 }
 
 DEBUILD_ARGS = ["-S", "-d"]
diff --git a/packages/brpm b/packages/brpm
index 9657b1dd..72bfca08 100755
--- a/packages/brpm
+++ b/packages/brpm
@@ -45,6 +45,7 @@ PKG_MP = {
         'pyserial': 'pyserial',
         'pyyaml': 'PyYAML',
         'requests': 'python-requests',
+        'six': 'python-six',
     },
     'suse': {
         'argparse': 'python-argparse',
@@ -56,6 +57,7 @@ PKG_MP = {
         'pyserial': 'python-pyserial',
         'pyyaml': 'python-yaml',
         'requests': 'python-requests',
+        'six': 'python-six',
     }
 }
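Not part of the patch: the two text/bytes helpers added to cloudinit/util.py above, shown on their own. They are idempotent, so write_file() can accept either str or bytes and coerce the content to match the open mode ('b' in omode means encode, otherwise decode); the assert lines are only a usage sketch.

import six


def decode_binary(blob, encoding='utf-8'):
    # Converts a binary type into a text type using given encoding.
    if isinstance(blob, six.text_type):
        return blob
    return blob.decode(encoding)


def encode_text(text, encoding='utf-8'):
    # Converts a text string into a binary type using given encoding.
    if isinstance(text, six.binary_type):
        return text
    return text.encode(encoding)


# write_file(filename, content, mode=0o644, omode="wb") now routes content
# through these helpers, so both of the following inputs are acceptable:
assert encode_text(u'ci-info: ok\n') == b'ci-info: ok\n'
assert decode_binary(b'ci-info: ok\n') == u'ci-info: ok\n'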
diff --git a/tests/unittests/test_data.py b/tests/unittests/test_data.py
index 03296e62..a35afc27 100644
--- a/tests/unittests/test_data.py
+++ b/tests/unittests/test_data.py
@@ -1,11 +1,11 @@
 """Tests for handling of userdata within cloud init."""
 
-import StringIO
-
 import gzip
 import logging
 import os
 
+from six import BytesIO, StringIO
+
 from email.mime.application import MIMEApplication
 from email.mime.base import MIMEBase
 from email.mime.multipart import MIMEMultipart
@@ -53,7 +53,7 @@ class TestConsumeUserData(helpers.FilesystemMockingTestCase):
         self.patchUtils(root)
 
     def capture_log(self, lvl=logging.DEBUG):
-        log_file = StringIO.StringIO()
+        log_file = StringIO()
         self._log_handler = logging.StreamHandler(log_file)
         self._log_handler.setLevel(lvl)
         self._log = log.getLogger()
@@ -351,9 +351,9 @@ p: 1
         """Tests that individual message gzip encoding works."""
 
         def gzip_part(text):
-            contents = StringIO.StringIO()
-            f = gzip.GzipFile(fileobj=contents, mode='w')
-            f.write(str(text))
+            contents = BytesIO()
+            f = gzip.GzipFile(fileobj=contents, mode='wb')
+            f.write(util.encode_text(text))
             f.flush()
             f.close()
             return MIMEApplication(contents.getvalue(), 'gzip')
diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py
index e9235951..ae9e6c22 100644
--- a/tests/unittests/test_datasource/test_nocloud.py
+++ b/tests/unittests/test_datasource/test_nocloud.py
@@ -85,7 +85,7 @@ class TestNoCloudDataSource(MockerTestCase):
 
         data = {
             'fs_label': None,
-            'meta-data': {'instance-id': 'IID'},
+            'meta-data': yaml.safe_dump({'instance-id': 'IID'}),
             'user-data': "USER_DATA_RAW",
         }
 
diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py
index 49894e51..81ef1546 100644
--- a/tests/unittests/test_datasource/test_openstack.py
+++ b/tests/unittests/test_datasource/test_openstack.py
@@ -20,12 +20,11 @@ import copy
 import json
 import re
 
-from StringIO import StringIO
-
-from urlparse import urlparse
-
 from .. import helpers as test_helpers
 
+from six import StringIO
+from six.moves.urllib.parse import urlparse
+
 from cloudinit import helpers
 from cloudinit import settings
 from cloudinit.sources import DataSourceOpenStack as ds
diff --git a/tests/unittests/test_distros/test_netconfig.py b/tests/unittests/test_distros/test_netconfig.py
index 33a1d6e1..6e1a0b69 100644
--- a/tests/unittests/test_distros/test_netconfig.py
+++ b/tests/unittests/test_distros/test_netconfig.py
@@ -4,6 +4,8 @@ import mocker
 
 import os
 
+from six import StringIO
+
 from cloudinit import distros
 from cloudinit import helpers
 from cloudinit import settings
@@ -11,8 +13,6 @@ from cloudinit import util
 
 from cloudinit.distros.parsers.sys_conf import SysConf
 
-from StringIO import StringIO
-
 BASE_NET_CFG = '''
 auto lo
diff --git a/tests/unittests/test_handler/test_handler_apt_configure.py b/tests/unittests/test_handler/test_handler_apt_configure.py
index 203dd2aa..f5832365 100644
--- a/tests/unittests/test_handler/test_handler_apt_configure.py
+++ b/tests/unittests/test_handler/test_handler_apt_configure.py
@@ -16,12 +16,12 @@ class TestAptProxyConfig(MockerTestCase):
         self.cfile = os.path.join(self.tmp, "config.cfg")
 
     def _search_apt_config(self, contents, ptype, value):
-        print(
+        ## print(
+        ##     r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+        ##     contents, "flags=re.IGNORECASE")
+        return re.search(
             r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
-            contents, "flags=re.IGNORECASE")
-        return(re.search(
-            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
-            contents, flags=re.IGNORECASE))
+            contents, flags=re.IGNORECASE)
 
     def test_apt_proxy_written(self):
         cfg = {'apt_proxy': 'myproxy'}
diff --git a/tests/unittests/test_handler/test_handler_locale.py b/tests/unittests/test_handler/test_handler_locale.py
index eb251636..690ef86f 100644
--- a/tests/unittests/test_handler/test_handler_locale.py
+++ b/tests/unittests/test_handler/test_handler_locale.py
@@ -29,7 +29,7 @@ from .. import helpers as t_help
 
 from configobj import ConfigObj
 
-from StringIO import StringIO
+from six import BytesIO
 
 import logging
 
@@ -59,6 +59,6 @@ class TestLocale(t_help.FilesystemMockingTestCase):
         cc = self._get_cloud('sles')
         cc_locale.handle('cc_locale', cfg, cc, LOG, [])
 
-        contents = util.load_file('/etc/sysconfig/language')
-        n_cfg = ConfigObj(StringIO(contents))
+        contents = util.load_file('/etc/sysconfig/language', decode=False)
+        n_cfg = ConfigObj(BytesIO(contents))
         self.assertEquals({'RC_LANG': cfg['locale']}, dict(n_cfg))
diff --git a/tests/unittests/test_handler/test_handler_seed_random.py b/tests/unittests/test_handler/test_handler_seed_random.py
index 40481f16..579377fb 100644
--- a/tests/unittests/test_handler/test_handler_seed_random.py
+++ b/tests/unittests/test_handler/test_handler_seed_random.py
@@ -22,7 +22,7 @@ import base64
 import gzip
 import tempfile
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit import cloud
 from cloudinit import distros
diff --git a/tests/unittests/test_handler/test_handler_set_hostname.py b/tests/unittests/test_handler/test_handler_set_hostname.py
index e1530e30..a9f7829b 100644
--- a/tests/unittests/test_handler/test_handler_set_hostname.py
+++ b/tests/unittests/test_handler/test_handler_set_hostname.py
@@ -9,7 +9,7 @@ from .. import helpers as t_help
 
 import logging
 
-from StringIO import StringIO
+from six import BytesIO
 
 from configobj import ConfigObj
 
@@ -38,8 +38,8 @@ class TestHostname(t_help.FilesystemMockingTestCase):
         cc_set_hostname.handle('cc_set_hostname',
                                cfg, cc, LOG, [])
         if not distro.uses_systemd():
-            contents = util.load_file("/etc/sysconfig/network")
-            n_cfg = ConfigObj(StringIO(contents))
+            contents = util.load_file("/etc/sysconfig/network", decode=False)
+            n_cfg = ConfigObj(BytesIO(contents))
             self.assertEquals({'HOSTNAME': 'blah.blah.blah.yahoo.com'},
                               dict(n_cfg))
diff --git a/tests/unittests/test_handler/test_handler_timezone.py b/tests/unittests/test_handler/test_handler_timezone.py
index 874db340..10ea2040 100644
--- a/tests/unittests/test_handler/test_handler_timezone.py
+++ b/tests/unittests/test_handler/test_handler_timezone.py
@@ -29,7 +29,7 @@ from .. import helpers as t_help
 
 from configobj import ConfigObj
 
-from StringIO import StringIO
+from six import BytesIO
 
 import logging
 
@@ -67,8 +67,8 @@ class TestTimezone(t_help.FilesystemMockingTestCase):
 
         cc_timezone.handle('cc_timezone', cfg, cc, LOG, [])
 
-        contents = util.load_file('/etc/sysconfig/clock')
-        n_cfg = ConfigObj(StringIO(contents))
+        contents = util.load_file('/etc/sysconfig/clock', decode=False)
+        n_cfg = ConfigObj(BytesIO(contents))
         self.assertEquals({'TIMEZONE': cfg['timezone']}, dict(n_cfg))
 
         contents = util.load_file('/etc/localtime')
diff --git a/tests/unittests/test_handler/test_handler_yum_add_repo.py b/tests/unittests/test_handler/test_handler_yum_add_repo.py
index 435c9787..81806ad1 100644
--- a/tests/unittests/test_handler/test_handler_yum_add_repo.py
+++ b/tests/unittests/test_handler/test_handler_yum_add_repo.py
@@ -6,7 +6,7 @@ from .. import helpers
 
 import logging
 
-from StringIO import StringIO
+from six import BytesIO
 
 import configobj
 
@@ -52,8 +52,9 @@ class TestConfig(helpers.FilesystemMockingTestCase):
         }
         self.patchUtils(self.tmp)
         cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
-        contents = util.load_file("/etc/yum.repos.d/epel_testing.repo")
-        contents = configobj.ConfigObj(StringIO(contents))
+        contents = util.load_file("/etc/yum.repos.d/epel_testing.repo",
+                                  decode=False)
+        contents = configobj.ConfigObj(BytesIO(contents))
         expected = {
             'epel_testing': {
                 'name': 'Extra Packages for Enterprise Linux 5 - Testing',
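Not part of the patch: the adjusted handler-test pattern in one place. Because util.load_file() now decodes to text by default, the locale/hostname/timezone/yum tests above request raw bytes with decode=False and wrap them in six.BytesIO rather than StringIO before handing them to ConfigObj. The literal bytes below merely stand in for a file written by the handler under test.

from six import BytesIO

from configobj import ConfigObj

raw = b'TIMEZONE="America/Chicago"\n'   # stand-in for util.load_file(path, decode=False)
n_cfg = ConfigObj(BytesIO(raw))
assert dict(n_cfg) == {'TIMEZONE': 'America/Chicago'}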
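Also not part of the patch: a round-trip sketch of the gzip handling that decomp_gzip() in cloudinit/util.py and gzip_part() in tests/unittests/test_data.py converge on. GzipFile only deals in bytes on Python 3, so text is encoded going in and decoded coming out; the helper names below are local to this sketch.

import contextlib
import gzip

from six import BytesIO


def gzip_text(text):
    # Compress text to bytes, as gzip_part() now does for MIME payloads.
    buf = BytesIO()
    with contextlib.closing(gzip.GzipFile(fileobj=buf, mode='wb')) as gh:
        gh.write(text.encode('utf-8'))      # the patch calls util.encode_text(text)
    return buf.getvalue()


def gunzip_text(data):
    # Decompress bytes back to text, as decomp_gzip() now does.
    with contextlib.closing(gzip.GzipFile(None, 'rb', 1, BytesIO(data))) as gh:
        return gh.read().decode('utf-8')    # the patch calls util.decode_binary(...)


assert gunzip_text(gzip_text(u'#cloud-config\n')) == u'#cloud-config\n'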
