Diffstat (limited to 'cloudinit/sources')
-rw-r--r--   cloudinit/sources/DataSourceAzure.py        | 21
-rw-r--r--   cloudinit/sources/DataSourceConfigDrive.py  |  2
-rw-r--r--   cloudinit/sources/DataSourceEc2.py          | 10
-rw-r--r--   cloudinit/sources/DataSourceMAAS.py         | 15
-rw-r--r--   cloudinit/sources/DataSourceOVF.py          |  4
-rw-r--r--   cloudinit/sources/DataSourceOpenNebula.py   |  3
-rw-r--r--   cloudinit/sources/DataSourceSmartOS.py      |  7
7 files changed, 33 insertions, 29 deletions
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index bd80a8a6..b03ab895 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -38,7 +38,8 @@ LOG = logging.getLogger(__name__)
 DS_NAME = 'Azure'
 DEFAULT_METADATA = {"instance-id": "iid-AZURE-NODE"}
 AGENT_START = ['service', 'walinuxagent', 'start']
-BOUNCE_COMMAND = ['sh', '-xc',
+BOUNCE_COMMAND = [
+    'sh', '-xc',
     "i=$interface; x=0; ifdown $i || x=$?; ifup $i || x=$?; exit $x"]

 BUILTIN_DS_CONFIG = {
@@ -91,9 +92,9 @@ def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
     """
     policy = cfg['hostname_bounce']['policy']
     previous_hostname = get_hostname(hostname_command)
-    if (not util.is_true(cfg.get('set_hostname'))
-            or util.is_false(policy)
-            or (previous_hostname == temp_hostname and policy != 'force')):
+    if (not util.is_true(cfg.get('set_hostname')) or
+       util.is_false(policy) or
+       (previous_hostname == temp_hostname and policy != 'force')):
         yield None
         return
     set_hostname(temp_hostname, hostname_command)
@@ -123,8 +124,8 @@ class DataSourceAzureNet(sources.DataSource):
         with temporary_hostname(temp_hostname, self.ds_cfg,
                                 hostname_command=hostname_command) \
                 as previous_hostname:
-            if (previous_hostname is not None
-                    and util.is_true(self.ds_cfg.get('set_hostname'))):
+            if (previous_hostname is not None and
+               util.is_true(self.ds_cfg.get('set_hostname'))):
                 cfg = self.ds_cfg['hostname_bounce']
                 try:
                     perform_hostname_bounce(hostname=temp_hostname,
@@ -152,7 +153,8 @@ class DataSourceAzureNet(sources.DataSource):
             else:
                 bname = str(pk['fingerprint'] + ".crt")
                 fp_files += [os.path.join(ddir, bname)]
-                LOG.debug("ssh authentication: using fingerprint from fabirc")
+                LOG.debug("ssh authentication: "
+                          "using fingerprint from fabirc")

         missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
                                 func=wait_for_files,
@@ -506,7 +508,7 @@ def read_azure_ovf(contents):
         raise BrokenAzureDataSource("invalid xml: %s" % e)

     results = find_child(dom.documentElement,
-        lambda n: n.localName == "ProvisioningSection")
+                         lambda n: n.localName == "ProvisioningSection")

     if len(results) == 0:
         raise NonAzureDataSource("No ProvisioningSection")
@@ -516,7 +518,8 @@ def read_azure_ovf(contents):
     provSection = results[0]

     lpcs_nodes = find_child(provSection,
-        lambda n: n.localName == "LinuxProvisioningConfigurationSet")
+                            lambda n:
+                            n.localName == "LinuxProvisioningConfigurationSet")

     if len(results) == 0:
         raise NonAzureDataSource("No LinuxProvisioningConfigurationSet")
diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
index eb474079..e3916208 100644
--- a/cloudinit/sources/DataSourceConfigDrive.py
+++ b/cloudinit/sources/DataSourceConfigDrive.py
@@ -39,7 +39,7 @@ FS_TYPES = ('vfat', 'iso9660')
 LABEL_TYPES = ('config-2',)
 POSSIBLE_MOUNTS = ('sr', 'cd')
 OPTICAL_DEVICES = tuple(('/dev/%s%s' % (z, i) for z in POSSIBLE_MOUNTS
-                        for i in range(0, 2)))
+                         for i in range(0, 2)))


 class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 0032d06c..6a897f7d 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -61,12 +61,12 @@ class DataSourceEc2(sources.DataSource):
             if not self.wait_for_metadata_service():
                 return False
             start_time = time.time()
-            self.userdata_raw = ec2.get_instance_userdata(self.api_ver,
-                self.metadata_address)
+            self.userdata_raw = \
+                ec2.get_instance_userdata(self.api_ver, self.metadata_address)
             self.metadata = ec2.get_instance_metadata(self.api_ver,
                                                       self.metadata_address)
             LOG.debug("Crawl of metadata service took %s seconds",
-                int(time.time() - start_time))
+                      int(time.time() - start_time))
             return True
         except Exception:
             util.logexc(LOG, "Failed reading from metadata address %s",
@@ -132,13 +132,13 @@ class DataSourceEc2(sources.DataSource):

         start_time = time.time()
         url = uhelp.wait_for_url(urls=urls, max_wait=max_wait,
-                                timeout=timeout, status_cb=LOG.warn)
+                                 timeout=timeout, status_cb=LOG.warn)

         if url:
             LOG.debug("Using metadata source: '%s'", url2base[url])
         else:
             LOG.critical("Giving up on md from %s after %s seconds",
-                urls, int(time.time() - start_time))
+                         urls, int(time.time() - start_time))

         self.metadata_address = url2base.get(url)
         return bool(url)
diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
index cfc59ca5..f18c4cee 100644
--- a/cloudinit/sources/DataSourceMAAS.py
+++ b/cloudinit/sources/DataSourceMAAS.py
@@ -275,17 +275,18 @@ if __name__ == "__main__":

         parser = argparse.ArgumentParser(description='Interact with MAAS DS')
         parser.add_argument("--config", metavar="file",
-            help="specify DS config file", default=None)
+                            help="specify DS config file", default=None)
         parser.add_argument("--ckey", metavar="key",
-            help="the consumer key to auth with", default=None)
+                            help="the consumer key to auth with", default=None)
         parser.add_argument("--tkey", metavar="key",
-            help="the token key to auth with", default=None)
+                            help="the token key to auth with", default=None)
         parser.add_argument("--csec", metavar="secret",
-            help="the consumer secret (likely '')", default="")
+                            help="the consumer secret (likely '')", default="")
         parser.add_argument("--tsec", metavar="secret",
-            help="the token secret to auth with", default=None)
+                            help="the token secret to auth with", default=None)
         parser.add_argument("--apiver", metavar="version",
-            help="the apiver to use ("" can be used)", default=MD_VERSION)
+                            help="the apiver to use ("" can be used)",
+                            default=MD_VERSION)

         subcmds = parser.add_subparsers(title="subcommands", dest="subcmd")
         subcmds.add_parser('crawl', help="crawl the datasource")
@@ -297,7 +298,7 @@ if __name__ == "__main__":
         args = parser.parse_args()

         creds = {'consumer_key': args.ckey, 'token_key': args.tkey,
-            'token_secret': args.tsec, 'consumer_secret': args.csec}
+                 'token_secret': args.tsec, 'consumer_secret': args.csec}

         if args.config:
             cfg = util.read_conf(args.config)
diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
index 58a4b2a2..adf9b12e 100644
--- a/cloudinit/sources/DataSourceOVF.py
+++ b/cloudinit/sources/DataSourceOVF.py
@@ -264,14 +264,14 @@ def get_properties(contents):
     # could also check here that elem.namespaceURI ==
     #   "http://schemas.dmtf.org/ovf/environment/1"
     propSections = find_child(dom.documentElement,
-        lambda n: n.localName == "PropertySection")
+                              lambda n: n.localName == "PropertySection")

     if len(propSections) == 0:
         raise XmlError("No 'PropertySection's")

     props = {}
     propElems = find_child(propSections[0],
-        (lambda n: n.localName == "Property"))
+                           (lambda n: n.localName == "Property"))

     for elem in propElems:
         key = elem.attributes.getNamedItemNS(envNsURI, "key").value
diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
index ac2c3b45..b26940d1 100644
--- a/cloudinit/sources/DataSourceOpenNebula.py
+++ b/cloudinit/sources/DataSourceOpenNebula.py
@@ -404,7 +404,8 @@ def read_context_disk_dir(source_dir, asuser=None):
     if ssh_key_var:
         lines = context.get(ssh_key_var).splitlines()
         results['metadata']['public-keys'] = [l for l in lines
-            if len(l) and not l.startswith("#")]
+                                              if len(l) and not
+                                              l.startswith("#")]

     # custom hostname -- try hostname or leave cloud-init
     # itself create hostname from IP address later
diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
index 7453379a..139ee52c 100644
--- a/cloudinit/sources/DataSourceSmartOS.py
+++ b/cloudinit/sources/DataSourceSmartOS.py
@@ -90,8 +90,7 @@ BUILTIN_DS_CONFIG = {
                          'user-data',
                          'user-script',
                          'sdc:datacenter_name',
-                         'sdc:uuid',
-                         ],
+                         'sdc:uuid'],
     'base64_keys': [],
     'base64_all': False,
     'disk_aliases': {'ephemeral0': '/dev/vdb'},
@@ -450,7 +449,7 @@ class JoyentMetadataClient(object):

         response = bytearray()
         response.extend(self.metasource.read(1))
-        while response[-1:] !=  b'\n':
+        while response[-1:] != b'\n':
             response.extend(self.metasource.read(1))
         response = response.rstrip().decode('ascii')
         LOG.debug('Read "%s" from metadata transport.', response)
@@ -513,7 +512,7 @@ def write_boot_content(content, content_f, link=None, shebang=False,
     except Exception as e:
         util.logexc(LOG, ("Failed to identify script type for %s" %
-                         content_f, e))
+                          content_f, e))

     if link:
         try: