Diffstat (limited to 'cloudinit/sources'):

 cloudinit/sources/DataSourceAltCloud.py     |  8
 cloudinit/sources/DataSourceAzure.py        |  7
 cloudinit/sources/DataSourceConfigDrive.py  |  4
 cloudinit/sources/DataSourceDigitalOcean.py |  9
 cloudinit/sources/DataSourceEc2.py          |  4
 cloudinit/sources/DataSourceMAAS.py         | 48
 cloudinit/sources/DataSourceOVF.py          |  6
 cloudinit/sources/DataSourceOpenNebula.py   | 12
 cloudinit/sources/DataSourceSmartOS.py      | 21
 cloudinit/sources/__init__.py               | 10
 cloudinit/sources/helpers/openstack.py      | 10

 11 files changed, 70 insertions(+), 69 deletions(-)
diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py
index 1b0f72a1..fb528ae5 100644
--- a/cloudinit/sources/DataSourceAltCloud.py
+++ b/cloudinit/sources/DataSourceAltCloud.py
@@ -200,11 +200,11 @@ class DataSourceAltCloud(sources.DataSource):
cmd = CMD_PROBE_FLOPPY
(cmd_out, _err) = util.subp(cmd)
LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
- except ProcessExecutionError, _err:
+ except ProcessExecutionError as _err:
util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
_err.message)
return False
- except OSError, _err:
+ except OSError as _err:
util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
_err.message)
return False
@@ -217,11 +217,11 @@ class DataSourceAltCloud(sources.DataSource):
cmd.append('--exit-if-exists=' + floppy_dev)
(cmd_out, _err) = util.subp(cmd)
LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
- except ProcessExecutionError, _err:
+ except ProcessExecutionError as _err:
util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
_err.message)
return False
- except OSError, _err:
+ except OSError as _err:
util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd),
_err.message)
return False
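The `except ExceptionType, err` comma form is Python 2-only syntax and a hard `SyntaxError` on Python 3; `except ExceptionType as err` works on Python 2.6+ and 3 alike. One caveat worth noting: the hunk keeps `_err.message`, but the `message` attribute was removed from `BaseException` in Python 3, so `str(err)` is the portable spelling. A minimal sketch of the fully portable pattern (the `probe_floppy` helper and use of `subprocess` here are illustrative, not the module's actual code):

```python
import logging
import subprocess

LOG = logging.getLogger(__name__)


def probe_floppy(cmd):
    """Hypothetical helper showing the Python 2/3-compatible pattern."""
    try:
        out = subprocess.check_output(cmd)
    except (subprocess.CalledProcessError, OSError) as err:
        # str(err) is portable; err.message was removed in Python 3.
        LOG.warning('Failed command: %s\n%s', ' '.join(cmd), str(err))
        return False
    LOG.debug('Command: %s\nOutput: %s', ' '.join(cmd), out)
    return True
```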
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 09bc196d..c599d50f 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -124,7 +124,8 @@ class DataSourceAzureNet(sources.DataSource):
LOG.debug("using files cached in %s", ddir)
# azure / hyper-v provides random data here
- seed = util.load_file("/sys/firmware/acpi/tables/OEM0", quiet=True)
+ seed = util.load_file("/sys/firmware/acpi/tables/OEM0",
+ quiet=True, decode=False)
if seed:
self.metadata['random_seed'] = seed
@@ -151,7 +152,7 @@ class DataSourceAzureNet(sources.DataSource):
# walinux agent writes files world readable, but expects
# the directory to be protected.
- write_files(ddir, files, dirmode=0700)
+ write_files(ddir, files, dirmode=0o700)
# handle the hostname 'publishing'
try:
@@ -390,7 +391,7 @@ def write_files(datadir, files, dirmode=None):
util.ensure_dir(datadir, dirmode)
for (name, content) in files.items():
util.write_file(filename=os.path.join(datadir, name),
- content=content, mode=0600)
+ content=content, mode=0o600)
def invoke_agent(cmd):
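The bare `0700` octal notation is a `SyntaxError` on Python 3; the `0o700` form, valid since Python 2.6, denotes the same integer. A quick standalone illustration, assuming nothing beyond the standard library:

```python
import os
import stat
import tempfile

# 0o700 is the Python 2.6+/3 spelling of octal; bare 0700 no longer parses.
path = tempfile.mkdtemp()
os.chmod(path, 0o700)
mode = stat.S_IMODE(os.stat(path).st_mode)
assert mode == 0o700 == 448  # same integer value, new notation
os.rmdir(path)
```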
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index 15244a0d..eb474079 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -216,11 +216,11 @@ def on_first_boot(data, distro=None):
files = data.get('files', {})
if files:
LOG.debug("Writing %s injected files", len(files))
- for (filename, content) in files.iteritems():
+ for (filename, content) in files.items():
if not filename.startswith(os.sep):
filename = os.sep + filename
try:
- util.write_file(filename, content, mode=0660)
+ util.write_file(filename, content, mode=0o660)
except IOError:
util.logexc(LOG, "Failed writing file: %s", filename)
diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py
index 8f27ee89..b20ce2a1 100644
--- a/cloudinit/sources/DataSourceDigitalOcean.py
+++ b/cloudinit/sources/DataSourceDigitalOcean.py
@@ -18,7 +18,7 @@ from cloudinit import log as logging
from cloudinit import util
from cloudinit import sources
from cloudinit import ec2_utils
-from types import StringType
+
import functools
@@ -72,10 +72,11 @@ class DataSourceDigitalOcean(sources.DataSource):
return "\n".join(self.metadata['vendor-data'])
def get_public_ssh_keys(self):
- if type(self.metadata['public-keys']) is StringType:
- return [self.metadata['public-keys']]
+ public_keys = self.metadata['public-keys']
+ if isinstance(public_keys, list):
+ return public_keys
else:
- return self.metadata['public-keys']
+ return [public_keys]
@property
def availability_zone(self):
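`types.StringType` no longer exists in Python 3, and `type(x) is StringType` also misses `unicode` and string subclasses. Inverting the test to `isinstance(x, list)` sidesteps string types entirely and behaves identically on both interpreters. Roughly, the normalization being expressed is:

```python
def normalize_keys(public_keys):
    # A list passes through unchanged; a single string (or any other
    # scalar) is wrapped so callers always receive a list.
    if isinstance(public_keys, list):
        return public_keys
    return [public_keys]


assert normalize_keys(['a', 'b']) == ['a', 'b']
assert normalize_keys('ssh-rsa AAAA...') == ['ssh-rsa AAAA...']
```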
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 1b20ecf3..798869b7 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -156,8 +156,8 @@ class DataSourceEc2(sources.DataSource):
# 'ephemeral0': '/dev/sdb',
# 'root': '/dev/sda1'}
found = None
- bdm_items = self.metadata['block-device-mapping'].iteritems()
- for (entname, device) in bdm_items:
+ bdm = self.metadata['block-device-mapping']
+ for (entname, device) in bdm.items():
if entname == name:
found = device
break
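`dict.iteritems()` is gone in Python 3, where `items()` already returns a lazy view; on Python 2, `items()` builds a list, a negligible cost for small metadata dicts like these. The pattern this commit applies throughout, in miniature:

```python
from __future__ import print_function

bdm = {'ami': '/dev/sda1', 'ephemeral0': '/dev/sdb'}

# items() works on Python 2 (returns a list) and Python 3 (returns a
# view); iteritems() is Python 2-only.
for entname, device in bdm.items():
    print(entname, device)
```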
diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
index dfe90bc6..082cc58f 100644
--- a/cloudinit/sources/DataSourceMAAS.py
+++ b/cloudinit/sources/DataSourceMAAS.py
@@ -18,12 +18,15 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
from email.utils import parsedate
import errno
-import oauth.oauth as oauth
+import oauthlib
import os
import time
-import urllib2
+
+from six.moves.urllib_request import Request, urlopen
from cloudinit import log as logging
from cloudinit import sources
@@ -262,7 +265,7 @@ def check_seed_contents(content, seed):
userdata = content.get('user-data', "")
md = {}
- for (key, val) in content.iteritems():
+ for (key, val) in content.items():
if key == 'user-data':
continue
md[key] = val
@@ -272,25 +275,14 @@ def check_seed_contents(content, seed):
def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret,
timestamp=None):
- consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
- token = oauth.OAuthToken(token_key, token_secret)
-
- if timestamp is None:
- ts = int(time.time())
- else:
- ts = timestamp
-
- params = {
- 'oauth_version': "1.0",
- 'oauth_nonce': oauth.generate_nonce(),
- 'oauth_timestamp': ts,
- 'oauth_token': token.key,
- 'oauth_consumer_key': consumer.key,
- }
- req = oauth.OAuthRequest(http_url=url, parameters=params)
- req.sign_request(oauth.OAuthSignatureMethod_PLAINTEXT(),
- consumer, token)
- return req.to_header()
+ client = oauthlib.oauth1.Client(
+ consumer_key,
+ client_secret=consumer_secret,
+ resource_owner_key=token_key,
+ resource_owner_secret=token_secret,
+ signature_method=oauthlib.SIGNATURE_PLAINTEXT)
+ uri, signed_headers, body = client.sign(url)
+ return signed_headers
class MAASSeedDirNone(Exception):
@@ -357,11 +349,11 @@ if __name__ == "__main__":
creds[key] = cfg[key]
def geturl(url, headers_cb):
- req = urllib2.Request(url, data=None, headers=headers_cb(url))
- return (urllib2.urlopen(req).read())
+ req = Request(url, data=None, headers=headers_cb(url))
+ return urlopen(req).read()
def printurl(url, headers_cb):
- print "== %s ==\n%s\n" % (url, geturl(url, headers_cb))
+ print("== %s ==\n%s\n" % (url, geturl(url, headers_cb)))
def crawl(url, headers_cb=None):
if url.endswith("/"):
@@ -386,9 +378,9 @@ if __name__ == "__main__":
version=args.apiver)
else:
(userdata, metadata) = read_maas_seed_url(args.url)
- print "=== userdata ==="
- print userdata
- print "=== metadata ==="
+ print("=== userdata ===")
+ print(userdata)
+ print("=== metadata ===")
pprint.pprint(metadata)
elif args.subcmd == "get":
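The old `oauth` package is Python 2-only; `oauthlib` supports both and replaces the manual parameter assembly with a single `Client.sign()` call. One detail worth hedging: in oauthlib, both the `Client` class and the `SIGNATURE_PLAINTEXT` constant live in the `oauthlib.oauth1` namespace, so a bare `import oauthlib` may not suffice as written in the hunk above. A sketch of the equivalent signing call under that assumption:

```python
from oauthlib import oauth1


def oauth_headers(url, consumer_key, token_key, token_secret,
                  consumer_secret):
    """Sign a MAAS metadata URL with OAuth 1.0 PLAINTEXT."""
    client = oauth1.Client(
        consumer_key,
        client_secret=consumer_secret,
        resource_owner_key=token_key,
        resource_owner_secret=token_secret,
        signature_method=oauth1.SIGNATURE_PLAINTEXT)
    # sign() returns (uri, headers, body); only the headers (an
    # Authorization header) are needed for a GET request.
    _uri, signed_headers, _body = client.sign(url)
    return signed_headers
```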
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index 7ba60735..58a4b2a2 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -66,7 +66,7 @@ class DataSourceOVF(sources.DataSource):
np = {'iso': transport_iso9660,
'vmware-guestd': transport_vmware_guestd, }
name = None
- for (name, transfunc) in np.iteritems():
+ for (name, transfunc) in np.items():
(contents, _dev, _fname) = transfunc()
if contents:
break
@@ -138,7 +138,7 @@ def read_ovf_environment(contents):
ud = ""
cfg_props = ['password']
md_props = ['seedfrom', 'local-hostname', 'public-keys', 'instance-id']
- for (prop, val) in props.iteritems():
+ for (prop, val) in props.items():
if prop == 'hostname':
prop = "local-hostname"
if prop in md_props:
@@ -183,7 +183,7 @@ def transport_iso9660(require_iso=True):
# Go through mounts to see if it was already mounted
mounts = util.mounts()
- for (dev, info) in mounts.iteritems():
+ for (dev, info) in mounts.items():
fstype = info['fstype']
if fstype != "iso9660" and require_iso:
continue
diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
index e2469f6e..61709c1b 100644
--- a/cloudinit/sources/DataSourceOpenNebula.py
+++ b/cloudinit/sources/DataSourceOpenNebula.py
@@ -34,6 +34,7 @@ from cloudinit import log as logging
from cloudinit import sources
from cloudinit import util
+
LOG = logging.getLogger(__name__)
DEFAULT_IID = "iid-dsopennebula"
@@ -280,7 +281,7 @@ def parse_shell_config(content, keylist=None, bash=None, asuser=None,
# allvars expands to all existing variables by using '${!x*}' notation
# where x is lower or upper case letters or '_'
- allvars = ["${!%s*}" % x for x in string.letters + "_"]
+ allvars = ["${!%s*}" % x for x in string.ascii_letters + "_"]
keylist_in = keylist
if keylist is None:
@@ -379,9 +380,8 @@ def read_context_disk_dir(source_dir, asuser=None):
raise BrokenContextDiskDir("configured user '%s' "
"does not exist", asuser)
try:
- with open(os.path.join(source_dir, 'context.sh'), 'r') as f:
- content = f.read().strip()
-
+ path = os.path.join(source_dir, 'context.sh')
+ content = util.load_file(path)
context = parse_shell_config(content, asuser=asuser)
except util.ProcessExecutionError as e:
raise BrokenContextDiskDir("Error processing context.sh: %s" % (e))
@@ -426,14 +426,14 @@ def read_context_disk_dir(source_dir, asuser=None):
context.get('USER_DATA_ENCODING'))
if encoding == "base64":
try:
- results['userdata'] = base64.b64decode(results['userdata'])
+ results['userdata'] = util.b64d(results['userdata'])
except TypeError:
LOG.warn("Failed base64 decoding of userdata")
# generate static /etc/network/interfaces
# only if there are any required context variables
# http://opennebula.org/documentation:rel3.8:cong#network_configuration
- for k in context.keys():
+ for k in context:
if re.match(r'^ETH\d+_IP$', k):
(out, _) = util.subp(['/sbin/ip', 'link'])
net = OpenNebulaNetwork(out, context)
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 86b8775a..9d48beab 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -30,12 +30,13 @@
# Comments with "@datadictionary" are snippets of the definition
import base64
+import binascii
+import os
+import serial
+
from cloudinit import log as logging
from cloudinit import sources
from cloudinit import util
-import os
-import os.path
-import serial
LOG = logging.getLogger(__name__)
@@ -201,7 +202,7 @@ class DataSourceSmartOS(sources.DataSource):
if b64_all is not None:
self.b64_all = util.is_true(b64_all)
- for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems():
+ for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items():
smartos_noun, strip = attribute
md[ci_noun] = self.query(smartos_noun, strip=strip)
@@ -218,11 +219,12 @@ class DataSourceSmartOS(sources.DataSource):
user_script = os.path.join(data_d, 'user-script')
u_script_l = "%s/user-script" % LEGACY_USER_D
write_boot_content(md.get('user-script'), content_f=user_script,
- link=u_script_l, shebang=True, mode=0700)
+ link=u_script_l, shebang=True, mode=0o700)
operator_script = os.path.join(data_d, 'operator-script')
write_boot_content(md.get('operator-script'),
- content_f=operator_script, shebang=False, mode=0700)
+ content_f=operator_script, shebang=False,
+ mode=0o700)
# @datadictionary: This key has no defined format, but its value
# is written to the file /var/db/mdata-user-data on each boot prior
@@ -349,8 +351,9 @@ def query_data(noun, seed_device, seed_timeout, strip=False, default=None,
if b64:
try:
- return base64.b64decode(resp)
- except TypeError:
+ return util.b64d(resp)
+ # Bogus input produces different errors in Python 2 and 3; catch both.
+ except (TypeError, binascii.Error):
LOG.warn("Failed base64 decoding key '%s'", noun)
return resp
@@ -368,7 +371,7 @@ def dmi_data():
def write_boot_content(content, content_f, link=None, shebang=False,
- mode=0400):
+ mode=0o400):
"""
Write the content to content_f. Under the following rules:
1. If no content, remove the file
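The widened `except` clause reflects a real Python 2/3 split: `base64.b64decode` raises `TypeError` for malformed input on Python 2 but `binascii.Error` on Python 3 (where it also returns `bytes` that usually need decoding to text). That pair of concerns is what cloud-init's `util.b64d` helper wraps; a rough stand-in, assuming UTF-8 payloads:

```python
import base64
import binascii


def b64d(source):
    """Decode base64 to text; a rough stand-in for util.b64d."""
    decoded = base64.b64decode(source)
    try:
        return decoded.decode('utf-8')  # bytes -> str on Python 3
    except UnicodeDecodeError:
        return decoded  # genuinely binary payload: hand back bytes


try:
    print(b64d('aGVsbG8='))          # -> hello
except (TypeError, binascii.Error):  # py2 vs. py3 error class
    print('invalid base64')
```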
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 7c7ef9ab..39eab51b 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -23,6 +23,8 @@
import abc
import os
+import six
+
from cloudinit import importer
from cloudinit import log as logging
from cloudinit import type_utils
@@ -130,7 +132,7 @@ class DataSource(object):
# we want to return the correct value for what will actually
# exist in this instance
mappings = {"sd": ("vd", "xvd", "vtb")}
- for (nfrom, tlist) in mappings.iteritems():
+ for (nfrom, tlist) in mappings.items():
if not short_name.startswith(nfrom):
continue
for nto in tlist:
@@ -218,18 +220,18 @@ def normalize_pubkey_data(pubkey_data):
if not pubkey_data:
return keys
- if isinstance(pubkey_data, (basestring, str)):
+ if isinstance(pubkey_data, six.string_types):
return str(pubkey_data).splitlines()
if isinstance(pubkey_data, (list, set)):
return list(pubkey_data)
if isinstance(pubkey_data, (dict)):
- for (_keyname, klist) in pubkey_data.iteritems():
+ for (_keyname, klist) in pubkey_data.items():
# lp:506332 uec metadata service responds with
# data that makes boto populate a string for 'klist' rather
# than a list.
- if isinstance(klist, (str, basestring)):
+ if isinstance(klist, six.string_types):
klist = [klist]
if isinstance(klist, (list, set)):
for pkey in klist:
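`basestring` does not exist in Python 3, while on Python 2 a bare `str` check misses `unicode`. `six.string_types` is `(str, unicode)` on Python 2 and `(str,)` on Python 3, so a single `isinstance` covers both. For example:

```python
import six


def normalize(pubkey_data):
    # One string becomes a list of lines; a list/set passes through.
    if isinstance(pubkey_data, six.string_types):
        return str(pubkey_data).splitlines()
    if isinstance(pubkey_data, (list, set)):
        return list(pubkey_data)
    return []


assert normalize("key1\nkey2") == ['key1', 'key2']
assert normalize(['key1']) == ['key1']
```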
diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py
index b7e19314..88c7a198 100644
--- a/cloudinit/sources/helpers/openstack.py
+++ b/cloudinit/sources/helpers/openstack.py
@@ -24,6 +24,8 @@ import copy
import functools
import os
+import six
+
from cloudinit import ec2_utils
from cloudinit import log as logging
from cloudinit import sources
@@ -205,7 +207,7 @@ class BaseReader(object):
"""
load_json_anytype = functools.partial(
- util.load_json, root_types=(dict, basestring, list))
+ util.load_json, root_types=(dict, list) + six.string_types)
def datafiles(version):
files = {}
@@ -234,7 +236,7 @@ class BaseReader(object):
'version': 2,
}
data = datafiles(self._find_working_version())
- for (name, (path, required, translator)) in data.iteritems():
+ for (name, (path, required, translator)) in data.items():
path = self._path_join(self.base_path, path)
data = None
found = False
@@ -364,7 +366,7 @@ class ConfigDriveReader(BaseReader):
raise NonReadable("%s: no files found" % (self.base_path))
md = {}
- for (name, (key, translator, default)) in FILES_V1.iteritems():
+ for (name, (key, translator, default)) in FILES_V1.items():
if name in found:
path = found[name]
try:
@@ -478,7 +480,7 @@ def convert_vendordata_json(data, recurse=True):
"""
if not data:
return None
- if isinstance(data, (str, unicode, basestring)):
+ if isinstance(data, six.string_types):
return data
if isinstance(data, list):
return copy.deepcopy(data)
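Because `six.string_types` is itself a tuple, it concatenates cleanly into a larger `isinstance()` tuple, which is exactly what the `root_types` change above relies on. A small illustration:

```python
import six

# (str, unicode) on Python 2, (str,) on Python 3 -- either way a tuple,
# so it can be merged with other accepted root types.
root_types = (dict, list) + six.string_types
assert isinstance({}, root_types)
assert isinstance('vendor-data', root_types)
```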