Diffstat (limited to 'cloudinit/sources')
-rw-r--r-- | cloudinit/sources/DataSourceConfigDrive.py  |  4
-rw-r--r-- | cloudinit/sources/DataSourceDigitalOcean.py |  9
-rw-r--r-- | cloudinit/sources/DataSourceEc2.py          |  4
-rw-r--r-- | cloudinit/sources/DataSourceMAAS.py         |  2
-rw-r--r-- | cloudinit/sources/DataSourceOVF.py          |  6
-rw-r--r-- | cloudinit/sources/DataSourceSmartOS.py      | 15
-rw-r--r-- | cloudinit/sources/__init__.py               | 10
-rw-r--r-- | cloudinit/sources/helpers/openstack.py      | 10
8 files changed, 33 insertions, 27 deletions
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 15244a0d..eb474079 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -216,11 +216,11 @@ def on_first_boot(data, distro=None):
 
     files = data.get('files', {})
     if files:
         LOG.debug("Writing %s injected files", len(files))
-        for (filename, content) in files.iteritems():
+        for (filename, content) in files.items():
             if not filename.startswith(os.sep):
                 filename = os.sep + filename
             try:
-                util.write_file(filename, content, mode=0660)
+                util.write_file(filename, content, mode=0o660)
             except IOError:
                 util.logexc(LOG, "Failed writing file: %s", filename)
diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py
index 8f27ee89..b20ce2a1 100644
--- a/cloudinit/sources/DataSourceDigitalOcean.py
+++ b/cloudinit/sources/DataSourceDigitalOcean.py
@@ -18,7 +18,7 @@
 from cloudinit import log as logging
 from cloudinit import util
 from cloudinit import sources
 from cloudinit import ec2_utils
-from types import StringType
+
 import functools
 
@@ -72,10 +72,11 @@ class DataSourceDigitalOcean(sources.DataSource):
         return "\n".join(self.metadata['vendor-data'])
 
     def get_public_ssh_keys(self):
-        if type(self.metadata['public-keys']) is StringType:
-            return [self.metadata['public-keys']]
+        public_keys = self.metadata['public-keys']
+        if isinstance(public_keys, list):
+            return public_keys
         else:
-            return self.metadata['public-keys']
+            return [public_keys]
 
     @property
     def availability_zone(self):
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 1b20ecf3..798869b7 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -156,8 +156,8 @@ class DataSourceEc2(sources.DataSource):
         # 'ephemeral0': '/dev/sdb',
         # 'root': '/dev/sda1'}
         found = None
-        bdm_items = self.metadata['block-device-mapping'].iteritems()
-        for (entname, device) in bdm_items:
+        bdm = self.metadata['block-device-mapping']
+        for (entname, device) in bdm.items():
             if entname == name:
                 found = device
                 break
diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
index dfe90bc6..9a3e30c5 100644
--- a/cloudinit/sources/DataSourceMAAS.py
+++ b/cloudinit/sources/DataSourceMAAS.py
@@ -262,7 +262,7 @@ def check_seed_contents(content, seed):
 
     userdata = content.get('user-data', "")
     md = {}
-    for (key, val) in content.iteritems():
+    for (key, val) in content.items():
         if key == 'user-data':
             continue
         md[key] = val
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index 7ba60735..58a4b2a2 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -66,7 +66,7 @@ class DataSourceOVF(sources.DataSource):
             np = {'iso': transport_iso9660,
                   'vmware-guestd': transport_vmware_guestd, }
             name = None
-            for (name, transfunc) in np.iteritems():
+            for (name, transfunc) in np.items():
                 (contents, _dev, _fname) = transfunc()
                 if contents:
                     break
@@ -138,7 +138,7 @@ def read_ovf_environment(contents):
     ud = ""
     cfg_props = ['password']
     md_props = ['seedfrom', 'local-hostname', 'public-keys', 'instance-id']
-    for (prop, val) in props.iteritems():
+    for (prop, val) in props.items():
         if prop == 'hostname':
             prop = "local-hostname"
         if prop in md_props:
@@ -183,7 +183,7 @@ def transport_iso9660(require_iso=True):
 
     # Go through mounts to see if it was already mounted
     mounts = util.mounts()
-    for (dev, info) in mounts.iteritems():
+    for (dev, info) in mounts.items():
         fstype = info['fstype']
         if fstype != "iso9660" and require_iso:
             continue
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 2733a2f6..7a975d78 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -30,12 +30,12 @@
 #    Comments with "@datadictionary" are snippets of the definition
 
 import base64
+import os
+import serial
+
 from cloudinit import log as logging
 from cloudinit import sources
 from cloudinit import util
-import os
-import os.path
-import serial
 
 
 LOG = logging.getLogger(__name__)
@@ -201,7 +201,7 @@ class DataSourceSmartOS(sources.DataSource):
         if b64_all is not None:
             self.b64_all = util.is_true(b64_all)
 
-        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems():
+        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items():
             smartos_noun, strip = attribute
             md[ci_noun] = self.query(smartos_noun, strip=strip)
 
@@ -218,11 +218,12 @@ class DataSourceSmartOS(sources.DataSource):
         user_script = os.path.join(data_d, 'user-script')
         u_script_l = "%s/user-script" % LEGACY_USER_D
         write_boot_content(md.get('user-script'), content_f=user_script,
-                           link=u_script_l, shebang=True, mode=0700)
+                           link=u_script_l, shebang=True, mode=0o700)
 
         operator_script = os.path.join(data_d, 'operator-script')
         write_boot_content(md.get('operator-script'),
-                           content_f=operator_script, shebang=False, mode=0700)
+                           content_f=operator_script, shebang=False,
+                           mode=0o700)
 
         # @datadictionary: This key has no defined format, but its value
         # is written to the file /var/db/mdata-user-data on each boot prior
@@ -381,7 +382,7 @@ def dmi_data():
 
 
 def write_boot_content(content, content_f, link=None, shebang=False,
-                       mode=0400):
+                       mode=0o400):
     """
     Write the content to content_f. Under the following rules:
         1. If no content, remove the file
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7c7ef9ab..39eab51b 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -23,6 +23,8 @@
 import abc
 import os
 
+import six
+
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import type_utils
@@ -130,7 +132,7 @@ class DataSource(object):
         # we want to return the correct value for what will actually
         # exist in this instance
         mappings = {"sd": ("vd", "xvd", "vtb")}
-        for (nfrom, tlist) in mappings.iteritems():
+        for (nfrom, tlist) in mappings.items():
             if not short_name.startswith(nfrom):
                 continue
             for nto in tlist:
@@ -218,18 +220,18 @@ def normalize_pubkey_data(pubkey_data):
     if not pubkey_data:
         return keys
 
-    if isinstance(pubkey_data, (basestring, str)):
+    if isinstance(pubkey_data, six.string_types):
         return str(pubkey_data).splitlines()
 
     if isinstance(pubkey_data, (list, set)):
         return list(pubkey_data)
 
     if isinstance(pubkey_data, (dict)):
-        for (_keyname, klist) in pubkey_data.iteritems():
+        for (_keyname, klist) in pubkey_data.items():
             # lp:506332 uec metadata service responds with
             # data that makes boto populate a string for 'klist' rather
             # than a list.
-            if isinstance(klist, (str, basestring)):
+            if isinstance(klist, six.string_types):
                 klist = [klist]
             if isinstance(klist, (list, set)):
                 for pkey in klist:
diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py
index b7e19314..88c7a198 100644
--- a/cloudinit/sources/helpers/openstack.py
+++ b/cloudinit/sources/helpers/openstack.py
@@ -24,6 +24,8 @@ import copy
 import functools
 import os
 
+import six
+
 from cloudinit import ec2_utils
 from cloudinit import log as logging
 from cloudinit import sources
@@ -205,7 +207,7 @@ class BaseReader(object):
         """
 
         load_json_anytype = functools.partial(
-            util.load_json, root_types=(dict, basestring, list))
+            util.load_json, root_types=(dict, list) + six.string_types)
 
         def datafiles(version):
             files = {}
@@ -234,7 +236,7 @@ class BaseReader(object):
             'version': 2,
         }
         data = datafiles(self._find_working_version())
-        for (name, (path, required, translator)) in data.iteritems():
+        for (name, (path, required, translator)) in data.items():
             path = self._path_join(self.base_path, path)
             data = None
             found = False
@@ -364,7 +366,7 @@ class ConfigDriveReader(BaseReader):
             raise NonReadable("%s: no files found" % (self.base_path))
 
         md = {}
-        for (name, (key, translator, default)) in FILES_V1.iteritems():
+        for (name, (key, translator, default)) in FILES_V1.items():
             if name in found:
                 path = found[name]
                 try:
@@ -478,7 +480,7 @@ def convert_vendordata_json(data, recurse=True):
     """
     if not data:
         return None
-    if isinstance(data, (str, unicode, basestring)):
+    if isinstance(data, six.string_types):
         return data
     if isinstance(data, list):
         return copy.deepcopy(data)