diff options
Diffstat (limited to 'cloudinit/sources')
| -rw-r--r-- | cloudinit/sources/DataSourceAltCloud.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceAzure.py | 22 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceCloudSigma.py | 2 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceCloudStack.py | 7 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceConfigDrive.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceDigitalOcean.py | 2 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceEc2.py | 10 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceGCE.py | 5 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceMAAS.py | 10 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceNoCloud.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceOVF.py | 10 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceOpenNebula.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceOpenStack.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/DataSourceSmartOS.py | 2 | ||||
| -rw-r--r-- | cloudinit/sources/__init__.py | 4 | ||||
| -rw-r--r-- | cloudinit/sources/helpers/azure.py | 2 | ||||
| -rw-r--r-- | cloudinit/sources/helpers/digitalocean.py | 64 | ||||
| -rw-r--r-- | cloudinit/sources/helpers/openstack.py | 2 | ||||
| -rw-r--r-- | cloudinit/sources/helpers/vmware/imc/config_file.py | 8 |
19 files changed, 88 insertions(+), 82 deletions(-)
diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index 8528fa10..ed1d691a 100644 --- a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -181,7 +181,7 @@ class DataSourceAltCloud(sources.DataSource): try: cmd = CMD_PROBE_FLOPPY (cmd_out, _err) = util.subp(cmd) - LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) + LOG.debug('Command: %s\nOutput%s', ' '.join(cmd), cmd_out) except ProcessExecutionError as _err: util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), _err) return False @@ -196,7 +196,7 @@ class DataSourceAltCloud(sources.DataSource): cmd = CMD_UDEVADM_SETTLE cmd.append('--exit-if-exists=' + floppy_dev) (cmd_out, _err) = util.subp(cmd) - LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) + LOG.debug('Command: %s\nOutput%s', ' '.join(cmd), cmd_out) except ProcessExecutionError as _err: util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), _err) return False diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 48a3e1df..04358b73 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -116,7 +116,7 @@ class DataSourceAzureNet(sources.DataSource): # the metadata and "bounce" the network to force DDNS to update via # dhclient azure_hostname = self.metadata.get('local-hostname') - LOG.debug("Hostname in metadata is {}".format(azure_hostname)) + LOG.debug("Hostname in metadata is %s", azure_hostname) hostname_command = self.ds_cfg['hostname_bounce']['hostname_command'] with temporary_hostname(azure_hostname, self.ds_cfg, @@ -132,7 +132,7 @@ class DataSourceAzureNet(sources.DataSource): cfg=cfg, prev_hostname=previous_hostname) except Exception as e: - LOG.warn("Failed publishing hostname: %s", e) + LOG.warning("Failed publishing hostname: %s", e) util.logexc(LOG, "handling set_hostname failed") def get_metadata_from_agent(self): @@ -168,7 +168,7 @@ class 
DataSourceAzureNet(sources.DataSource): func=wait_for_files, args=(fp_files,)) if len(missing): - LOG.warn("Did not find files, but going on: %s", missing) + LOG.warning("Did not find files, but going on: %s", missing) metadata = {} metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files) @@ -199,7 +199,7 @@ class DataSourceAzureNet(sources.DataSource): except BrokenAzureDataSource as exc: raise exc except util.MountFailedError: - LOG.warn("%s was not mountable", cdev) + LOG.warning("%s was not mountable", cdev) continue (md, self.userdata_raw, cfg, files) = ret @@ -331,8 +331,8 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120, log_pre="Azure ephemeral disk: ") if missing: - LOG.warn("ephemeral device '%s' did not appear after %d seconds.", - devpath, maxwait) + LOG.warning("ephemeral device '%s' did not appear after %d seconds.", + devpath, maxwait) return result = False @@ -342,7 +342,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120, else: result, msg = can_dev_be_reformatted(devpath) - LOG.debug("reformattable=%s: %s" % (result, msg)) + LOG.debug("reformattable=%s: %s", result, msg) if not result: return @@ -355,7 +355,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120, LOG.debug(bmsg + " removed.") except Exception as e: # python3 throws FileNotFoundError, python2 throws OSError - LOG.warn(bmsg + ": remove failed! (%s)" % e) + LOG.warning(bmsg + ": remove failed! 
(%s)", e) else: LOG.debug(bmsg + " did not exist.") return @@ -405,7 +405,7 @@ def pubkeys_from_crt_files(flist): errors.append(fname) if errors: - LOG.warn("failed to convert the crt files to pubkey: %s", errors) + LOG.warning("failed to convert the crt files to pubkey: %s", errors) return pubkeys @@ -427,8 +427,8 @@ def wait_for_files(flist, maxwait=60, naplen=.5, log_pre=""): time.sleep(naplen) waited += naplen - LOG.warn("%sStill missing files after %s seconds: %s", - log_pre, maxwait, need) + LOG.warning("%sStill missing files after %s seconds: %s", + log_pre, maxwait, need) return need diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py index ffc23e3d..19df16b1 100644 --- a/cloudinit/sources/DataSourceCloudSigma.py +++ b/cloudinit/sources/DataSourceCloudSigma.py @@ -43,7 +43,7 @@ class DataSourceCloudSigma(sources.DataSource): LOG.debug("detected hypervisor as %s", sys_product_name) return 'cloudsigma' in sys_product_name.lower() - LOG.warn("failed to query dmi data for system product name") + LOG.warning("failed to query dmi data for system product name") return False def get_data(self): diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index b0ab275c..0188d894 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -178,9 +178,10 @@ def get_default_gateway(): def get_dhclient_d(): # find lease files directory - supported_dirs = ["/var/lib/dhclient", "/var/lib/dhcp"] + supported_dirs = ["/var/lib/dhclient", "/var/lib/dhcp", + "/var/lib/NetworkManager"] for d in supported_dirs: - if os.path.exists(d): + if os.path.exists(d) and len(os.listdir(d)) > 0: LOG.debug("Using %s lease directory", d) return d return None @@ -207,8 +208,8 @@ def get_latest_lease(): def get_vr_address(): # Get the address of the virtual router via dhcp leases - # see http://bit.ly/T76eKC for documentation on the virtual router. 
# If no virtual router is detected, fallback on default gateway. + # See http://docs.cloudstack.apache.org/projects/cloudstack-administration/en/4.8/virtual_machines/user-data.html # noqa lease_file = get_latest_lease() if not lease_file: LOG.debug("No lease file found, using default gateway") diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 46dd89e0..ef374f3f 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -127,7 +127,7 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): try: self.vendordata_raw = sources.convert_vendordata(vd) except ValueError as e: - LOG.warn("Invalid content in vendor-data: %s", e) + LOG.warning("Invalid content in vendor-data: %s", e) self.vendordata_raw = None # network_config is an /etc/network/interfaces formated file and is @@ -190,7 +190,7 @@ def on_first_boot(data, distro=None, network=True): if network: net_conf = data.get("network_config", '') if net_conf and distro: - LOG.warn("Updating network interfaces from config drive") + LOG.warning("Updating network interfaces from config drive") distro.apply_network(net_conf) write_injected_files(data.get('files')) diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py index d052c4c3..5e7e66be 100644 --- a/cloudinit/sources/DataSourceDigitalOcean.py +++ b/cloudinit/sources/DataSourceDigitalOcean.py @@ -51,7 +51,7 @@ class DataSourceDigitalOcean(sources.DataSource): if not is_do: return False - LOG.info("Running on digital ocean. droplet_id=%s" % droplet_id) + LOG.info("Running on digital ocean. 
droplet_id=%s", droplet_id) ipv4LL_nic = None if self.use_ip4LL: diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 6f01a139..2f9c7edf 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -125,7 +125,7 @@ class DataSourceEc2(sources.DataSource): if len(filtered): mdurls = filtered else: - LOG.warn("Empty metadata url list! using default list") + LOG.warning("Empty metadata url list! using default list") mdurls = self.metadata_urls urls = [] @@ -232,7 +232,7 @@ def read_strict_mode(cfgval, default): try: return parse_strict_mode(cfgval) except ValueError as e: - LOG.warn(e) + LOG.warning(e) return default @@ -270,7 +270,7 @@ def warn_if_necessary(cfgval, cfg): try: mode, sleep = parse_strict_mode(cfgval) except ValueError as e: - LOG.warn(e) + LOG.warning(e) return if mode == "false": @@ -304,8 +304,8 @@ def identify_platform(): if result: return result except Exception as e: - LOG.warn("calling %s with %s raised exception: %s", - checker, data, e) + LOG.warning("calling %s with %s raised exception: %s", + checker, data, e) def _collect_platform_data(): diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py index 637c9505..e9afda9c 100644 --- a/cloudinit/sources/DataSourceGCE.py +++ b/cloudinit/sources/DataSourceGCE.py @@ -98,7 +98,7 @@ class DataSourceGCE(sources.DataSource): if not running_on_gce: LOG.debug(msg, mkey) else: - LOG.warn(msg, mkey) + LOG.warning(msg, mkey) return False self.metadata[mkey] = value @@ -116,7 +116,8 @@ class DataSourceGCE(sources.DataSource): self.metadata['user-data'] = b64decode( self.metadata['user-data']) else: - LOG.warn('unknown user-data-encoding: %s, ignoring', encoding) + LOG.warning('unknown user-data-encoding: %s, ignoring', + encoding) return running_on_gce diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index 41179b02..77df5a51 100644 --- a/cloudinit/sources/DataSourceMAAS.py 
+++ b/cloudinit/sources/DataSourceMAAS.py @@ -71,7 +71,7 @@ class DataSourceMAAS(sources.DataSource): except MAASSeedDirNone: pass except MAASSeedDirMalformed as exc: - LOG.warn("%s was malformed: %s" % (self.seed_dir, exc)) + LOG.warning("%s was malformed: %s", self.seed_dir, exc) raise # If there is no metadata_url, then we're not configured @@ -107,7 +107,7 @@ class DataSourceMAAS(sources.DataSource): try: self.vendordata_raw = sources.convert_vendordata(vd) except ValueError as e: - LOG.warn("Invalid content in vendor-data: %s", e) + LOG.warning("Invalid content in vendor-data: %s", e) self.vendordata_raw = None def wait_for_metadata_service(self, url): @@ -126,7 +126,7 @@ class DataSourceMAAS(sources.DataSource): if timeout in mcfg: timeout = int(mcfg.get("timeout", timeout)) except Exception: - LOG.warn("Failed to get timeout, using %s" % timeout) + LOG.warning("Failed to get timeout, using %s", timeout) starttime = time.time() if url.endswith("/"): @@ -190,8 +190,8 @@ def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None, else: md[path] = util.decode_binary(resp.contents) else: - LOG.warn(("Fetching from %s resulted in" - " an invalid http code %s"), url, resp.code) + LOG.warning(("Fetching from %s resulted in" + " an invalid http code %s"), url, resp.code) except url_helper.UrlError as e: if e.code == 404 and not optional: raise MAASSeedDirMalformed( diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 5924b828..c68f6b8c 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -104,8 +104,8 @@ class DataSourceNoCloud(sources.DataSource): pp2d_kwargs) except ValueError as e: if dev in label_list: - LOG.warn("device %s with label=%s not a" - "valid seed.", dev, label) + LOG.warning("device %s with label=%s not a" + "valid seed.", dev, label) continue mydata = _merge_new_seed(mydata, seeded) diff --git a/cloudinit/sources/DataSourceOVF.py 
b/cloudinit/sources/DataSourceOVF.py index d70784ac..f20c9a65 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -225,12 +225,12 @@ def get_max_wait_from_cfg(cfg): try: max_wait = int(cfg.get(max_wait_cfg_option, default_max_wait)) except ValueError: - LOG.warn("Failed to get '%s', using %s", - max_wait_cfg_option, default_max_wait) + LOG.warning("Failed to get '%s', using %s", + max_wait_cfg_option, default_max_wait) if max_wait <= 0: - LOG.warn("Invalid value '%s' for '%s', using '%s' instead", - max_wait, max_wait_cfg_option, default_max_wait) + LOG.warning("Invalid value '%s' for '%s', using '%s' instead", + max_wait, max_wait_cfg_option, default_max_wait) max_wait = default_max_wait return max_wait @@ -355,7 +355,7 @@ def transport_iso9660(require_iso=True): try: (fname, contents) = util.mount_cb(fullp, get_ovf_env, mtype=mtype) except util.MountFailedError: - LOG.debug("%s not mountable as iso9660" % fullp) + LOG.debug("%s not mountable as iso9660", fullp) continue if contents is not False: diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py index cd75e6ea..5fdac192 100644 --- a/cloudinit/sources/DataSourceOpenNebula.py +++ b/cloudinit/sources/DataSourceOpenNebula.py @@ -64,7 +64,7 @@ class DataSourceOpenNebula(sources.DataSource): except BrokenContextDiskDir as exc: raise exc except util.MountFailedError: - LOG.warn("%s was not mountable" % cdev) + LOG.warning("%s was not mountable", cdev) if results: seed = cdev @@ -381,7 +381,7 @@ def read_context_disk_dir(source_dir, asuser=None): try: results['userdata'] = util.b64d(results['userdata']) except TypeError: - LOG.warn("Failed base64 decoding of userdata") + LOG.warning("Failed base64 decoding of userdata") # generate static /etc/network/interfaces # only if there are any required context variables diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index e1ea21f8..f0a6bfce 
100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -73,7 +73,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): if len(filtered): urls = filtered else: - LOG.warn("Empty metadata url list! using default list") + LOG.warning("Empty metadata url list! using default list") urls = [DEF_MD_URL] md_urls = [] @@ -137,7 +137,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): try: self.vendordata_raw = sources.convert_vendordata(vd) except ValueError as e: - LOG.warn("Invalid content in vendor-data: %s", e) + LOG.warning("Invalid content in vendor-data: %s", e) self.vendordata_raw = None return True diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 5e668947..6c6902fd 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -555,7 +555,7 @@ class JoyentMetadataLegacySerialClient(JoyentMetadataSerialClient): val = base64.b64decode(val.encode()).decode() # Bogus input produces different errors in Python 2 and 3 except (TypeError, binascii.Error): - LOG.warn("Failed base64 decoding key '%s': %s", key, val) + LOG.warning("Failed base64 decoding key '%s': %s", key, val) if strip: val = val.strip() diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 5c99437e..c3ce36d6 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -237,8 +237,8 @@ class DataSource(object): if candidate in valid: return candidate else: - LOG.warn("invalid dsmode '%s', using default=%s", - candidate, default) + LOG.warning("invalid dsmode '%s', using default=%s", + candidate, default) return default return default diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py index f32dac9a..6e01aa47 100644 --- a/cloudinit/sources/helpers/azure.py +++ b/cloudinit/sources/helpers/azure.py @@ -289,7 +289,7 @@ class WALinuxAgentShim(object): 
LOG.debug("Unable to find endpoint in dhclient logs. " " Falling back to check lease files") if fallback_lease_file is None: - LOG.warn("No fallback lease file was specified.") + LOG.warning("No fallback lease file was specified.") value = None else: LOG.debug("Looking for endpoint in lease file %s", diff --git a/cloudinit/sources/helpers/digitalocean.py b/cloudinit/sources/helpers/digitalocean.py index 72f7bde4..257989e8 100644 --- a/cloudinit/sources/helpers/digitalocean.py +++ b/cloudinit/sources/helpers/digitalocean.py @@ -23,11 +23,8 @@ def assign_ipv4_link_local(nic=None): """ if not nic: - for cdev in sorted(cloudnet.get_devicelist()): - if cloudnet.is_physical(cdev): - nic = cdev - LOG.debug("assigned nic '%s' for link-local discovery", nic) - break + nic = get_link_local_nic() + LOG.debug("selected interface '%s' for reading metadata", nic) if not nic: raise RuntimeError("unable to find interfaces to access the" @@ -57,6 +54,13 @@ def assign_ipv4_link_local(nic=None): return nic +def get_link_local_nic(): + nics = [f for f in cloudnet.get_devicelist() if cloudnet.is_physical(f)] + if not nics: + return None + return min(nics, key=lambda d: cloudnet.read_sys_net_int(d, 'ifindex')) + + def del_ipv4_link_local(nic=None): """Remove the ip4LL address. While this is not necessary, the ip4LL address is extraneous and confusing to users. 
@@ -107,15 +111,12 @@ def convert_network_configuration(config, dns_servers): } """ - def _get_subnet_part(pcfg, nameservers=None): + def _get_subnet_part(pcfg): subpart = {'type': 'static', 'control': 'auto', 'address': pcfg.get('ip_address'), 'gateway': pcfg.get('gateway')} - if nameservers: - subpart['dns_nameservers'] = nameservers - if ":" in pcfg.get('ip_address'): subpart['address'] = "{0}/{1}".format(pcfg.get('ip_address'), pcfg.get('cidr')) @@ -124,27 +125,31 @@ def convert_network_configuration(config, dns_servers): return subpart - all_nics = [] - for k in ('public', 'private'): - if k in config: - all_nics.extend(config[k]) - - macs_to_nics = cloudnet.get_interfaces_by_mac() nic_configs = [] + macs_to_nics = cloudnet.get_interfaces_by_mac() + LOG.debug("nic mapping: %s", macs_to_nics) - for nic in all_nics: + for n in config: + nic = config[n][0] + LOG.debug("considering %s", nic) mac_address = nic.get('mac') + if mac_address not in macs_to_nics: + raise RuntimeError("Did not find network interface on system " + "with mac '%s'. Cannot apply configuration: %s" + % (mac_address, nic)) + sysfs_name = macs_to_nics.get(mac_address) nic_type = nic.get('type', 'unknown') - # Note: the entry 'public' above contains a list, but - # the list will only ever have one nic inside it per digital ocean. - # If it ever had more than one nic, then this code would - # assign all 'public' the same name. - if_name = NIC_MAP.get(nic_type, sysfs_name) - LOG.debug("mapped %s interface to %s, assigning name of %s", - mac_address, sysfs_name, if_name) + if_name = NIC_MAP.get(nic_type, sysfs_name) + if if_name != sysfs_name: + LOG.debug("Found %s interface '%s' on '%s', assigned name of '%s'", + nic_type, mac_address, sysfs_name, if_name) + else: + msg = ("Found interface '%s' on '%s', which is not a public " + "or private interface. 
Using default system naming.") + LOG.debug(msg, mac_address, sysfs_name) ncfg = {'type': 'physical', 'mac_address': mac_address, @@ -157,13 +162,8 @@ def convert_network_configuration(config, dns_servers): continue sub_part = _get_subnet_part(raw_subnet) - if nic_type == 'public' and 'anchor' not in netdef: - # add DNS resolvers to the public interfaces only - sub_part = _get_subnet_part(raw_subnet, dns_servers) - else: - # remove the gateway any non-public interfaces - if 'gateway' in sub_part: - del sub_part['gateway'] + if netdef in ('private', 'anchor_ipv4', 'anchor_ipv6'): + del sub_part['gateway'] subnets.append(sub_part) @@ -171,6 +171,10 @@ def convert_network_configuration(config, dns_servers): nic_configs.append(ncfg) LOG.debug("nic '%s' configuration: %s", if_name, ncfg) + if dns_servers: + LOG.debug("added dns servers: %s", dns_servers) + nic_configs.append({'type': 'nameserver', 'address': dns_servers}) + return {'version': 1, 'config': nic_configs} diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 61cd36bd..26f3168d 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -21,7 +21,7 @@ from cloudinit import sources from cloudinit import url_helper from cloudinit import util -# For reference: http://tinyurl.com/laora4c +# See https://docs.openstack.org/user-guide/cli-config-drive.html LOG = logging.getLogger(__name__) diff --git a/cloudinit/sources/helpers/vmware/imc/config_file.py b/cloudinit/sources/helpers/vmware/imc/config_file.py index 14293f3c..602af078 100644 --- a/cloudinit/sources/helpers/vmware/imc/config_file.py +++ b/cloudinit/sources/helpers/vmware/imc/config_file.py @@ -43,9 +43,9 @@ class ConfigFile(ConfigSource, dict): # "sensitive" settings shall not be logged if canLog: - logger.debug("ADDED KEY-VAL :: '%s' = '%s'" % (key, val)) + logger.debug("ADDED KEY-VAL :: '%s' = '%s'", key, val) else: - logger.debug("ADDED KEY-VAL :: '%s' = 
'*****************'" % key) + logger.debug("ADDED KEY-VAL :: '%s' = '*****************'", key) self[key] = val @@ -60,7 +60,7 @@ class ConfigFile(ConfigSource, dict): Keyword arguments: filename - The full path to the config file. """ - logger.info('Parsing the config file %s.' % filename) + logger.info('Parsing the config file %s.', filename) config = configparser.ConfigParser() config.optionxform = str @@ -69,7 +69,7 @@ class ConfigFile(ConfigSource, dict): self.clear() for category in config.sections(): - logger.debug("FOUND CATEGORY = '%s'" % category) + logger.debug("FOUND CATEGORY = '%s'", category) for (key, value) in config.items(category): self._insertKey(category + '|' + key, value) |
