Diffstat (limited to 'cloudinit/sources')
-rw-r--r--  cloudinit/sources/DataSourceAltCloud.py              |  2
-rw-r--r--  cloudinit/sources/DataSourceAzure.py                 | 23
-rw-r--r--  cloudinit/sources/DataSourceConfigDrive.py           |  2
-rw-r--r--  cloudinit/sources/DataSourceDigitalOcean.py          |  4
-rw-r--r--  cloudinit/sources/DataSourceEc2.py                   | 12
-rw-r--r--  cloudinit/sources/DataSourceMAAS.py                  | 17
-rw-r--r--  cloudinit/sources/DataSourceNoCloud.py               |  4
-rw-r--r--  cloudinit/sources/DataSourceNone.py                  |  4
-rw-r--r--  cloudinit/sources/DataSourceOVF.py                   | 36
-rw-r--r--  cloudinit/sources/DataSourceOpenNebula.py            |  7
-rw-r--r--  cloudinit/sources/DataSourceSmartOS.py               |  9
-rw-r--r--  cloudinit/sources/helpers/vmware/imc/config_nic.py   | 10
12 files changed, 70 insertions, 60 deletions
diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py
index 60d58d6d..cd61df31 100644
--- a/cloudinit/sources/DataSourceAltCloud.py
+++ b/cloudinit/sources/DataSourceAltCloud.py
@@ -284,7 +284,7 @@ class DataSourceAltCloud(sources.DataSource):
# In the future 'dsmode' like behavior can be added to offer user
# the ability to run before networking.
datasources = [
- (DataSourceAltCloud, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceAltCloud, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]
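
Review note: the datasources list re-indented here (and the matching lists in the files below) is what cloud-init's loader consults when pairing a data source with the dependencies available at run time. Each module conventionally exposes it through a small helper along these lines; a sketch of the common pattern, not part of this diff:

    # Conventional companion to the `datasources` list above: given the set
    # of dependencies that are currently satisfied, return matching classes.
    def get_datasource_list(depends):
        return sources.list_from_depends(depends, datasources)
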
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index bd80a8a6..2af0ad9b 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -38,7 +38,8 @@ LOG = logging.getLogger(__name__)
DS_NAME = 'Azure'
DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"}
AGENT_START = ['service', 'walinuxagent', 'start']
-BOUNCE_COMMAND = ['sh', '-xc',
+BOUNCE_COMMAND = [
+ 'sh', '-xc',
"i=$interface; x=0; ifdown $i || x=$?; ifup $i || x=$?; exit $x"]
BUILTIN_DS_CONFIG = {
@@ -91,9 +92,9 @@ def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
"""
policy = cfg['hostname_bounce']['policy']
previous_hostname = get_hostname(hostname_command)
- if (not util.is_true(cfg.get('set_hostname'))
- or util.is_false(policy)
- or (previous_hostname == temp_hostname and policy != 'force')):
+ if (not util.is_true(cfg.get('set_hostname')) or
+ util.is_false(policy) or
+ (previous_hostname == temp_hostname and policy != 'force')):
yield None
return
set_hostname(temp_hostname, hostname_command)
@@ -123,8 +124,8 @@ class DataSourceAzureNet(sources.DataSource):
with temporary_hostname(temp_hostname, self.ds_cfg,
hostname_command=hostname_command) \
as previous_hostname:
- if (previous_hostname is not None
- and util.is_true(self.ds_cfg.get('set_hostname'))):
+ if (previous_hostname is not None and
+ util.is_true(self.ds_cfg.get('set_hostname'))):
cfg = self.ds_cfg['hostname_bounce']
try:
perform_hostname_bounce(hostname=temp_hostname,
@@ -152,7 +153,8 @@ class DataSourceAzureNet(sources.DataSource):
else:
bname = str(pk['fingerprint'] + ".crt")
fp_files += [os.path.join(ddir, bname)]
- LOG.debug("ssh authentication: using fingerprint from fabirc")
+ LOG.debug("ssh authentication: "
+ "using fingerprint from fabirc")
missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
func=wait_for_files,
@@ -506,7 +508,7 @@ def read_azure_ovf(contents):
raise BrokenAzureDataSource("invalid xml: %s" % e)
results = find_child(dom.documentElement,
- lambda n: n.localName == "ProvisioningSection")
+ lambda n: n.localName == "ProvisioningSection")
if len(results) == 0:
raise NonAzureDataSource("No ProvisioningSection")
@@ -516,7 +518,8 @@ def read_azure_ovf(contents):
provSection = results[0]
lpcs_nodes = find_child(provSection,
- lambda n: n.localName == "LinuxProvisioningConfigurationSet")
+ lambda n:
+ n.localName == "LinuxProvisioningConfigurationSet")
if len(results) == 0:
raise NonAzureDataSource("No LinuxProvisioningConfigurationSet")
@@ -633,7 +636,7 @@ class NonAzureDataSource(Exception):
# Used to match classes to dependencies
datasources = [
- (DataSourceAzureNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceAzureNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]
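
Review note: the hostname-bounce logic reworked above reads its settings from the datasource config. The shape it expects looks roughly like the following; keys are taken from the hunks above, values are illustrative placeholders rather than Azure defaults:

    ds_cfg = {
        'set_hostname': True,            # checked by temporary_hostname()
        'hostname_bounce': {
            'policy': 'yes',             # false-y skips the bounce; 'force' always bounces
            'interface': 'eth0',         # substituted for $interface in BOUNCE_COMMAND
            'command': BOUNCE_COMMAND,   # the shell one-liner defined in the hunk above
            'hostname_command': 'hostname',
        },
    }
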
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index eb474079..e3916208 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -39,7 +39,7 @@ FS_TYPES = ('vfat', 'iso9660')
LABEL_TYPES = ('config-2',)
POSSIBLE_MOUNTS = ('sr', 'cd')
OPTICAL_DEVICES = tuple(('/dev/%s%s' % (z, i) for z in POSSIBLE_MOUNTS
- for i in range(0, 2)))
+ for i in range(0, 2)))
class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py
index 5d47564d..12e863d2 100644
--- a/cloudinit/sources/DataSourceDigitalOcean.py
+++ b/cloudinit/sources/DataSourceDigitalOcean.py
@@ -101,8 +101,8 @@ class DataSourceDigitalOcean(sources.DataSource):
# Used to match classes to dependencies
datasources = [
- (DataSourceDigitalOcean, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
- ]
+ (DataSourceDigitalOcean, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+]
# Return a list of data sources that match this set of dependencies
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 0032d06c..3ef2c6af 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -61,12 +61,12 @@ class DataSourceEc2(sources.DataSource):
if not self.wait_for_metadata_service():
return False
start_time = time.time()
- self.userdata_raw = ec2.get_instance_userdata(self.api_ver,
- self.metadata_address)
+ self.userdata_raw = \
+ ec2.get_instance_userdata(self.api_ver, self.metadata_address)
self.metadata = ec2.get_instance_metadata(self.api_ver,
self.metadata_address)
LOG.debug("Crawl of metadata service took %s seconds",
- int(time.time() - start_time))
+ int(time.time() - start_time))
return True
except Exception:
util.logexc(LOG, "Failed reading from metadata address %s",
@@ -132,13 +132,13 @@ class DataSourceEc2(sources.DataSource):
start_time = time.time()
url = uhelp.wait_for_url(urls=urls, max_wait=max_wait,
- timeout=timeout, status_cb=LOG.warn)
+ timeout=timeout, status_cb=LOG.warn)
if url:
LOG.debug("Using metadata source: '%s'", url2base[url])
else:
LOG.critical("Giving up on md from %s after %s seconds",
- urls, int(time.time() - start_time))
+ urls, int(time.time() - start_time))
self.metadata_address = url2base.get(url)
return bool(url)
@@ -206,7 +206,7 @@ class DataSourceEc2(sources.DataSource):
# Used to match classes to dependencies
datasources = [
- (DataSourceEc2, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceEc2, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]
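
Review note: the re-wrapped wait_for_url() call above polls a list of candidate metadata URLs and returns the first one that responds, or a false-y value on timeout. A hedged usage sketch based only on the call shape visible in this hunk; the address is the standard EC2 link-local endpoint and will only answer from inside an instance:

    from cloudinit import url_helper as uhelp

    urls = ['http://169.254.169.254/2009-04-04/meta-data/instance-id']
    url = uhelp.wait_for_url(urls=urls, max_wait=120, timeout=50)
    print('responding:', url if url else 'none')
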
diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
index cfc59ca5..d828f078 100644
--- a/cloudinit/sources/DataSourceMAAS.py
+++ b/cloudinit/sources/DataSourceMAAS.py
@@ -254,7 +254,7 @@ class MAASSeedDirMalformed(Exception):
# Used to match classes to dependencies
datasources = [
- (DataSourceMAAS, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceMAAS, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]
@@ -275,17 +275,18 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Interact with MAAS DS')
parser.add_argument("--config", metavar="file",
- help="specify DS config file", default=None)
+ help="specify DS config file", default=None)
parser.add_argument("--ckey", metavar="key",
- help="the consumer key to auth with", default=None)
+ help="the consumer key to auth with", default=None)
parser.add_argument("--tkey", metavar="key",
- help="the token key to auth with", default=None)
+ help="the token key to auth with", default=None)
parser.add_argument("--csec", metavar="secret",
- help="the consumer secret (likely '')", default="")
+ help="the consumer secret (likely '')", default="")
parser.add_argument("--tsec", metavar="secret",
- help="the token secret to auth with", default=None)
+ help="the token secret to auth with", default=None)
parser.add_argument("--apiver", metavar="version",
- help="the apiver to use ("" can be used)", default=MD_VERSION)
+ help="the apiver to use ("" can be used)",
+ default=MD_VERSION)
subcmds = parser.add_subparsers(title="subcommands", dest="subcmd")
subcmds.add_parser('crawl', help="crawl the datasource")
@@ -297,7 +298,7 @@ if __name__ == "__main__":
args = parser.parse_args()
creds = {'consumer_key': args.ckey, 'token_key': args.tkey,
- 'token_secret': args.tsec, 'consumer_secret': args.csec}
+ 'token_secret': args.tsec, 'consumer_secret': args.csec}
if args.config:
cfg = util.read_conf(args.config)
diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py
index 4dffe6e6..4cad6877 100644
--- a/cloudinit/sources/DataSourceNoCloud.py
+++ b/cloudinit/sources/DataSourceNoCloud.py
@@ -263,8 +263,8 @@ class DataSourceNoCloudNet(DataSourceNoCloud):
# Used to match classes to dependencies
datasources = [
- (DataSourceNoCloud, (sources.DEP_FILESYSTEM, )),
- (DataSourceNoCloudNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceNoCloud, (sources.DEP_FILESYSTEM, )),
+ (DataSourceNoCloudNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]
diff --git a/cloudinit/sources/DataSourceNone.py b/cloudinit/sources/DataSourceNone.py
index 12a8a992..d1a62b2a 100644
--- a/cloudinit/sources/DataSourceNone.py
+++ b/cloudinit/sources/DataSourceNone.py
@@ -47,8 +47,8 @@ class DataSourceNone(sources.DataSource):
# Used to match classes to dependencies
datasources = [
- (DataSourceNone, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
- (DataSourceNone, []),
+ (DataSourceNone, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceNone, []),
]
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index d12601a4..8e97e51a 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -66,18 +66,21 @@ class DataSourceOVF(sources.DataSource):
system_type = util.read_dmi_data("system-product-name")
if system_type is None:
- LOG.debug("No system-product-name found")
+ LOG.debug("No system-product-name found")
elif 'vmware' in system_type.lower():
LOG.debug("VMware Virtual Platform found")
- deployPkgPluginPath = search_file("/usr/lib/vmware-tools", "libdeployPkgPlugin.so")
+ deployPkgPluginPath = search_file("/usr/lib/vmware-tools",
+ "libdeployPkgPlugin.so")
if deployPkgPluginPath:
- vmwareImcConfigFilePath = util.log_time(logfunc=LOG.debug,
+ vmwareImcConfigFilePath = \
+ util.log_time(logfunc=LOG.debug,
msg="waiting for configuration file",
func=wait_for_imc_cfg_file,
args=("/tmp", "cust.cfg"))
if vmwareImcConfigFilePath:
- LOG.debug("Found VMware DeployPkg Config File Path at %s" % vmwareImcConfigFilePath)
+ LOG.debug("Found VMware DeployPkg Config File Path at %s" %
+ vmwareImcConfigFilePath)
else:
LOG.debug("Didn't find VMware DeployPkg Config File Path")
@@ -147,7 +150,7 @@ class DataSourceOVF(sources.DataSource):
def get_public_ssh_keys(self):
if 'public-keys' not in self.metadata:
- return []
+ return []
pks = self.metadata['public-keys']
if isinstance(pks, (list)):
return pks
@@ -170,7 +173,7 @@ class DataSourceOVFNet(DataSourceOVF):
def wait_for_imc_cfg_file(dirpath, filename, maxwait=180, naplen=5):
waited = 0
-
+
while waited < maxwait:
fileFullPath = search_file(dirpath, filename)
if fileFullPath:
@@ -179,6 +182,7 @@ def wait_for_imc_cfg_file(dirpath, filename, maxwait=180, naplen=5):
waited += naplen
return None
+
# This will return a dict with some content
# meta-data, user-data, some config
def read_vmware_imc(config):
@@ -186,13 +190,14 @@ def read_vmware_imc(config):
cfg = {}
ud = ""
if config.host_name:
- if config.domain_name:
- md['local-hostname'] = config.host_name + "." + config.domain_name
- else:
- md['local-hostname'] = config.host_name
+ if config.domain_name:
+ md['local-hostname'] = config.host_name + "." + config.domain_name
+ else:
+ md['local-hostname'] = config.host_name
return (md, ud, cfg)
+
# This will return a dict with some content
# meta-data, user-data, some config
def read_ovf_environment(contents):
@@ -328,14 +333,14 @@ def get_properties(contents):
# could also check here that elem.namespaceURI ==
# "http://schemas.dmtf.org/ovf/environment/1"
propSections = find_child(dom.documentElement,
- lambda n: n.localName == "PropertySection")
+ lambda n: n.localName == "PropertySection")
if len(propSections) == 0:
raise XmlError("No 'PropertySection's")
props = {}
propElems = find_child(propSections[0],
- (lambda n: n.localName == "Property"))
+ (lambda n: n.localName == "Property"))
for elem in propElems:
key = elem.attributes.getNamedItemNS(envNsURI, "key").value
@@ -347,7 +352,7 @@ def get_properties(contents):
def search_file(dirpath, filename):
if not dirpath or not filename:
- return None
+ return None
for root, dirs, files in os.walk(dirpath):
if filename in files:
@@ -355,14 +360,15 @@ def search_file(dirpath, filename):
return None
+
class XmlError(Exception):
pass
# Used to match classes to dependencies
datasources = (
- (DataSourceOVF, (sources.DEP_FILESYSTEM, )),
- (DataSourceOVFNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+ (DataSourceOVF, (sources.DEP_FILESYSTEM, )),
+ (DataSourceOVFNet, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
)
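
Review note: the two OVF helpers touched above, wait_for_imc_cfg_file() and search_file(), compose into a simple poll-until-found loop. A self-contained restatement of that behavior, under a hypothetical name:

    import os
    import time

    def wait_for_file(dirpath, filename, maxwait=180, naplen=5):
        # Walk dirpath every naplen seconds until filename shows up or
        # maxwait seconds have elapsed; mirrors the helpers in this file.
        waited = 0
        while waited < maxwait:
            for root, _dirs, files in os.walk(dirpath):
                if filename in files:
                    return os.path.join(root, filename)
            time.sleep(naplen)
            waited += naplen
        return None
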
diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
index ac2c3b45..681f3a96 100644
--- a/cloudinit/sources/DataSourceOpenNebula.py
+++ b/cloudinit/sources/DataSourceOpenNebula.py
@@ -149,8 +149,8 @@ class BrokenContextDiskDir(Exception):
class OpenNebulaNetwork(object):
REG_DEV_MAC = re.compile(
- r'^\d+: (eth\d+):.*?link\/ether (..:..:..:..:..:..) ?',
- re.MULTILINE | re.DOTALL)
+ r'^\d+: (eth\d+):.*?link\/ether (..:..:..:..:..:..) ?',
+ re.MULTILINE | re.DOTALL)
def __init__(self, ip, context):
self.ip = ip
@@ -404,7 +404,8 @@ def read_context_disk_dir(source_dir, asuser=None):
if ssh_key_var:
lines = context.get(ssh_key_var).splitlines()
results['metadata']['public-keys'] = [l for l in lines
- if len(l) and not l.startswith("#")]
+ if len(l) and not
+ l.startswith("#")]
# custom hostname -- try hostname or leave cloud-init
# itself create hostname from IP address later
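
Review note: the REG_DEV_MAC pattern whose continuation lines are re-indented above extracts (device, MAC) pairs from `ip link`-style output. A quick standalone check; the sample text is illustrative:

    import re

    REG_DEV_MAC = re.compile(
        r'^\d+: (eth\d+):.*?link\/ether (..:..:..:..:..:..) ?',
        re.MULTILINE | re.DOTALL)

    sample = ("2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500\n"
              "    link/ether 02:00:0a:12:01:01 brd ff:ff:ff:ff:ff:ff")
    print(REG_DEV_MAC.findall(sample))  # [('eth0', '02:00:0a:12:01:01')]
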
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 7453379a..5edab152 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -90,8 +90,7 @@ BUILTIN_DS_CONFIG = {
'user-data',
'user-script',
'sdc:datacenter_name',
- 'sdc:uuid',
- ],
+ 'sdc:uuid'],
'base64_keys': [],
'base64_all': False,
'disk_aliases': {'ephemeral0': '/dev/vdb'},
@@ -102,7 +101,7 @@ BUILTIN_CLOUD_CONFIG = {
'ephemeral0': {'table_type': 'mbr',
'layout': False,
'overwrite': False}
- },
+ },
'fs_setup': [{'label': 'ephemeral0',
'filesystem': 'ext3',
'device': 'ephemeral0'}],
@@ -450,7 +449,7 @@ class JoyentMetadataClient(object):
response = bytearray()
response.extend(self.metasource.read(1))
- while response[-1:] != b'\n':
+ while response[-1:] != b'\n':
response.extend(self.metasource.read(1))
response = response.rstrip().decode('ascii')
LOG.debug('Read "%s" from metadata transport.', response)
@@ -513,7 +512,7 @@ def write_boot_content(content, content_f, link=None, shebang=False,
except Exception as e:
util.logexc(LOG, ("Failed to identify script type for %s" %
- content_f, e))
+ content_f, e))
if link:
try:
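
Review note: the loop re-spaced above reads one byte at a time from the metadata transport until a newline arrives. A self-contained sketch of the same idiom against an in-memory stream; the payload is a dummy line, not a real Joyent protocol frame:

    import io

    metasource = io.BytesIO(b'hello metadata line\nrest of stream')
    response = bytearray()
    response.extend(metasource.read(1))
    while response[-1:] != b'\n':
        response.extend(metasource.read(1))
    print(response.rstrip().decode('ascii'))  # hello metadata line
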
diff --git a/cloudinit/sources/helpers/vmware/imc/config_nic.py b/cloudinit/sources/helpers/vmware/imc/config_nic.py
index 6d721134..8c5c08cf 100644
--- a/cloudinit/sources/helpers/vmware/imc/config_nic.py
+++ b/cloudinit/sources/helpers/vmware/imc/config_nic.py
@@ -46,12 +46,12 @@ class NicConfigurator:
"""
primary_nics = [nic for nic in self.nics if nic.primary]
if not primary_nics:
- return None
+ return None
elif len(primary_nics) > 1:
- raise Exception('There can only be one primary nic',
+ raise Exception('There can only be one primary nic',
[nic.mac for nic in primary_nics])
else:
- return primary_nics[0]
+ return primary_nics[0]
def find_devices(self):
"""
@@ -185,8 +185,8 @@ class NicConfigurator:
lines = []
for addr in addrs:
- lines.append(' up route -A inet6 add default gw %s metric 10000' %
- addr.gateway)
+ lines.append(' up route -A inet6 add default gw '
+ '%s metric 10000' % addr.gateway)
return lines
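
Review note: taken together, these hunks apply a single convention across cloudinit/sources: continuation lines are re-indented to pycodestyle-friendly positions (hanging indents or alignment with the opening bracket), and multi-line boolean expressions now break after the operator instead of before it. A condensed, self-contained illustration of the target style, not taken verbatim from any one file:

    # Hanging indent for a wrapped collection literal:
    datasources = [
        ('DataSourceEc2', ('FILESYSTEM', 'NETWORK')),
    ]

    # Operator at the end of the line for a wrapped condition
    # (dummy values, style only):
    set_hostname, policy = True, 'force'
    if (not set_hostname or
            policy == 'off'):
        datasources = []
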