From a59eba66ede19aa85f53a1d2b67c3ffd8860b449 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 21 Aug 2014 13:46:41 -0400 Subject: use url_helper to combine url This seems cleaner, to avoid duplicate '/' being added. --- cloudinit/sources/DataSourceCloudStack.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index 08f661e4..1bbeca59 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -78,7 +78,8 @@ class DataSourceCloudStack(sources.DataSource): (max_wait, timeout) = self._get_url_settings() - urls = [self.metadata_address + "/latest/meta-data/instance-id"] + urls = [uhelp.combine_url(self.metadata_address, + 'latest/meta-data/instance-id')] start_time = time.time() url = uhelp.wait_for_url(urls=urls, max_wait=max_wait, timeout=timeout, status_cb=LOG.warn) -- cgit v1.2.3 From b3216b56f3fea3259c290faa2dd496215b625904 Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Tue, 26 Aug 2014 11:50:11 -0700 Subject: fix(pep8): Fix various pep8 violations and version-lock pep8 Fixed all complaints from running "make pep8". Also version locked pep8 in test-requirements.txt to ensure that pep8 requirements don't change without an explicit commit. --- cloudinit/config/cc_apt_configure.py | 2 +- cloudinit/config/cc_byobu.py | 2 +- cloudinit/config/cc_chef.py | 3 +- cloudinit/config/cc_disk_setup.py | 4 +-- cloudinit/config/cc_grub_dpkg.py | 3 +- cloudinit/config/cc_phone_home.py | 2 +- cloudinit/config/cc_resizefs.py | 2 +- cloudinit/config/cc_rightscale_userdata.py | 34 +++++++++++----------- cloudinit/config/cc_rsyslog.py | 4 +-- cloudinit/config/cc_ssh.py | 2 +- cloudinit/config/cc_ssh_authkey_fingerprints.py | 2 +- cloudinit/config/cc_yum_add_repo.py | 4 +-- cloudinit/distros/__init__.py | 4 +-- cloudinit/mergers/m_list.py | 2 +- cloudinit/sources/DataSourceAzure.py | 4 +-- cloudinit/sources/DataSourceNoCloud.py | 2 +- cloudinit/sources/DataSourceOVF.py | 4 +-- cloudinit/sources/DataSourceSmartOS.py | 32 ++++++++++---------- cloudinit/sources/__init__.py | 2 +- cloudinit/stages.py | 2 +- cloudinit/url_helper.py | 4 +-- cloudinit/util.py | 6 ++-- test-requirements.txt | 2 +- tests/unittests/test_datasource/test_azure.py | 2 +- tests/unittests/test_datasource/test_nocloud.py | 4 +-- tests/unittests/test_distros/test_generic.py | 2 +- .../test_handler/test_handler_growpart.py | 8 ++--- .../test_handler/test_handler_power_state.py | 2 +- .../test_handler/test_handler_yum_add_repo.py | 2 +- tools/hacking.py | 2 +- 30 files changed, 76 insertions(+), 74 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py index 29c13a3d..f10b76a3 100644 --- a/cloudinit/config/cc_apt_configure.py +++ b/cloudinit/config/cc_apt_configure.py @@ -235,7 +235,7 @@ def find_apt_mirror_info(cloud, cfg): mirror = util.search_for_mirror(search) if (not mirror and - util.get_cfg_option_bool(cfg, "apt_mirror_search_dns", False)): + util.get_cfg_option_bool(cfg, "apt_mirror_search_dns", False)): mydom = "" doms = [] diff --git a/cloudinit/config/cc_byobu.py b/cloudinit/config/cc_byobu.py index 92d428b7..ef0ce7ab 100644 --- a/cloudinit/config/cc_byobu.py +++ b/cloudinit/config/cc_byobu.py @@ -43,7 +43,7 @@ def handle(name, cfg, cloud, log, args): valid = ("enable-user", "enable-system", "enable", "disable-user", "disable-system", "disable") - if not value in 
valid: + if value not in valid: log.warn("Unknown value %s for byobu_by_default", value) mod_user = value.endswith("-user") diff --git a/cloudinit/config/cc_chef.py b/cloudinit/config/cc_chef.py index 727769cd..806deed9 100644 --- a/cloudinit/config/cc_chef.py +++ b/cloudinit/config/cc_chef.py @@ -87,7 +87,8 @@ def handle(name, cfg, cloud, log, _args): # If chef is not installed, we install chef based on 'install_type' if (not os.path.isfile('/usr/bin/chef-client') or - util.get_cfg_option_bool(chef_cfg, 'force_install', default=False)): + util.get_cfg_option_bool(chef_cfg, + 'force_install', default=False)): install_type = util.get_cfg_option_str(chef_cfg, 'install_type', 'packages') diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py index 0b970e4e..a5209268 100644 --- a/cloudinit/config/cc_disk_setup.py +++ b/cloudinit/config/cc_disk_setup.py @@ -271,7 +271,7 @@ def find_device_node(device, fs_type=None, label=None, valid_targets=None, return ('/dev/%s' % d['name'], False) if (d['fstype'] == fs_type and - ((label_match and d['label'] == label) or not label_match)): + ((label_match and d['label'] == label) or not label_match)): # If we find a matching device, we return that return ('/dev/%s' % d['name'], True) @@ -447,7 +447,7 @@ def get_partition_mbr_layout(size, layout): return "0," if ((len(layout) == 0 and isinstance(layout, list)) or - not isinstance(layout, list)): + not isinstance(layout, list)): raise Exception("Partition layout is invalid") last_part_num = len(layout) diff --git a/cloudinit/config/cc_grub_dpkg.py b/cloudinit/config/cc_grub_dpkg.py index b3ce6fb6..85716a91 100644 --- a/cloudinit/config/cc_grub_dpkg.py +++ b/cloudinit/config/cc_grub_dpkg.py @@ -36,7 +36,8 @@ def handle(_name, cfg, _cloud, log, _args): "grub-pc/install_devices_empty", None) if ((os.path.exists("/dev/sda1") and not os.path.exists("/dev/sda")) or - (os.path.exists("/dev/xvda1") and not os.path.exists("/dev/xvda"))): + (os.path.exists("/dev/xvda1") + and not os.path.exists("/dev/xvda"))): if idevs is None: idevs = "" if idevs_empty is None: diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index 2e058ccd..5bc68b83 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -47,7 +47,7 @@ def handle(name, cfg, cloud, log, args): if len(args) != 0: ph_cfg = util.read_conf(args[0]) else: - if not 'phone_home' in cfg: + if 'phone_home' not in cfg: log.debug(("Skipping module named %s, " "no 'phone_home' configuration found"), name) return diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index be406034..667d5977 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -105,7 +105,7 @@ def handle(name, cfg, _cloud, log, args): container = util.is_container() if (devpth == "/dev/root" and not os.path.exists(devpth) and - not container): + not container): devpth = rootdev_from_cmdline(util.get_cmdline()) if devpth is None: log.warn("Unable to find device '/dev/root'") diff --git a/cloudinit/config/cc_rightscale_userdata.py b/cloudinit/config/cc_rightscale_userdata.py index c771728d..7d2ec10a 100644 --- a/cloudinit/config/cc_rightscale_userdata.py +++ b/cloudinit/config/cc_rightscale_userdata.py @@ -18,22 +18,22 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -## -## The purpose of this script is to allow cloud-init to consume -## rightscale style userdata. 
rightscale user data is key-value pairs -## in a url-query-string like format. -## -## for cloud-init support, there will be a key named -## 'CLOUD_INIT_REMOTE_HOOK'. -## -## This cloud-config module will -## - read the blob of data from raw user data, and parse it as key/value -## - for each key that is found, download the content to -## the local instance/scripts directory and set them executable. -## - the files in that directory will be run by the user-scripts module -## Therefore, this must run before that. -## -## +# +# The purpose of this script is to allow cloud-init to consume +# rightscale style userdata. rightscale user data is key-value pairs +# in a url-query-string like format. +# +# for cloud-init support, there will be a key named +# 'CLOUD_INIT_REMOTE_HOOK'. +# +# This cloud-config module will +# - read the blob of data from raw user data, and parse it as key/value +# - for each key that is found, download the content to +# the local instance/scripts directory and set them executable. +# - the files in that directory will be run by the user-scripts module +# Therefore, this must run before that. +# +# import os @@ -58,7 +58,7 @@ def handle(name, _cfg, cloud, log, _args): try: mdict = parse_qs(ud) - if not mdict or not MY_HOOKNAME in mdict: + if not mdict or MY_HOOKNAME not in mdict: log.debug(("Skipping module %s, " "did not find %s in parsed" " raw userdata"), name, MY_HOOKNAME) diff --git a/cloudinit/config/cc_rsyslog.py b/cloudinit/config/cc_rsyslog.py index 0c2c6880..57486edc 100644 --- a/cloudinit/config/cc_rsyslog.py +++ b/cloudinit/config/cc_rsyslog.py @@ -35,7 +35,7 @@ def handle(name, cfg, cloud, log, _args): # *.* @@syslogd.example.com # process 'rsyslog' - if not 'rsyslog' in cfg: + if 'rsyslog' not in cfg: log.debug(("Skipping module named %s," " no 'rsyslog' key in configuration"), name) return @@ -46,7 +46,7 @@ def handle(name, cfg, cloud, log, _args): files = [] for i, ent in enumerate(cfg['rsyslog']): if isinstance(ent, dict): - if not "content" in ent: + if "content" not in ent: log.warn("No 'content' entry in config entry %s", i + 1) continue content = ent['content'] diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index 64a5e3cb..4c76581c 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -75,7 +75,7 @@ def handle(_name, cfg, cloud, log, _args): util.write_file(tgt_fn, val, tgt_perms) for (priv, pub) in PRIV_2_PUB.iteritems(): - if pub in cfg['ssh_keys'] or not priv in cfg['ssh_keys']: + if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']: continue pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0]) cmd = ['sh', '-xc', KEY_GEN_TPL % pair] diff --git a/cloudinit/config/cc_ssh_authkey_fingerprints.py b/cloudinit/config/cc_ssh_authkey_fingerprints.py index 7da62589..51580633 100644 --- a/cloudinit/config/cc_ssh_authkey_fingerprints.py +++ b/cloudinit/config/cc_ssh_authkey_fingerprints.py @@ -55,7 +55,7 @@ def _gen_fingerprint(b64_text, hash_meth='md5'): def _is_printable_key(entry): if any([entry.keytype, entry.base64, entry.comment, entry.options]): if (entry.keytype and - entry.keytype.lower().strip() in ['ssh-dss', 'ssh-rsa']): + entry.keytype.lower().strip() in ['ssh-dss', 'ssh-rsa']): return True return False diff --git a/cloudinit/config/cc_yum_add_repo.py b/cloudinit/config/cc_yum_add_repo.py index 5c273825..0d836f28 100644 --- a/cloudinit/config/cc_yum_add_repo.py +++ b/cloudinit/config/cc_yum_add_repo.py @@ -42,7 +42,7 @@ def _format_repo_value(val): return val -## TODO(harlowja): move to distro? 
+# TODO(harlowja): move to distro? # See man yum.conf def _format_repository_config(repo_id, repo_config): to_be = configobj.ConfigObj() @@ -89,7 +89,7 @@ def handle(name, cfg, _cloud, log, _args): repo_config = n_repo_config missing_required = 0 for req_field in ['baseurl']: - if not req_field in repo_config: + if req_field not in repo_config: log.warn(("Repository %s does not contain a %s" " configuration 'required' entry"), repo_id, req_field) diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 1a56dfb3..4b41220e 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -167,7 +167,7 @@ class Distro(object): def expand_osfamily(family_list): distros = [] for family in family_list: - if not family in OSFAMILIES: + if family not in OSFAMILIES: raise ValueError("No distibutions found for osfamily %s" % (family)) distros.extend(OSFAMILIES[family]) @@ -218,7 +218,7 @@ class Distro(object): fn) if (sys_hostname and prev_hostname and - sys_hostname != prev_hostname): + sys_hostname != prev_hostname): LOG.debug("%s differs from %s, assuming user maintained hostname.", prev_hostname_fn, sys_fn) diff --git a/cloudinit/mergers/m_list.py b/cloudinit/mergers/m_list.py index 62999b4e..3b87b0fc 100644 --- a/cloudinit/mergers/m_list.py +++ b/cloudinit/mergers/m_list.py @@ -53,7 +53,7 @@ class Merger(object): def _on_list(self, value, merge_with): if (self._method == 'replace' and - not isinstance(merge_with, (tuple, list))): + not isinstance(merge_with, (tuple, list))): return merge_with # Ok we now know that what we are merging with is a list or tuple. diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index bd75e6d8..09bc196d 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -452,7 +452,7 @@ def load_azure_ovf_pubkeys(sshnode): continue if (len(child.childNodes) != 1 or - child.childNodes[0].nodeType != text_node): + child.childNodes[0].nodeType != text_node): continue cur[name] = child.childNodes[0].wholeText.strip() @@ -521,7 +521,7 @@ def read_azure_ovf(contents): simple = False value = "" if (len(child.childNodes) == 1 and - child.childNodes[0].nodeType == dom.TEXT_NODE): + child.childNodes[0].nodeType == dom.TEXT_NODE): simple = True value = child.childNodes[0].wholeText diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index a315aae0..c26a645c 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -181,7 +181,7 @@ class DataSourceNoCloud(sources.DataSource): # and the source of the seed was self.dsmode # ('local' for NoCloud, 'net' for NoCloudNet') if ('network-interfaces' in mydata['meta-data'] and - (self.dsmode in ("local", seeded_interfaces))): + (self.dsmode in ("local", seeded_interfaces))): LOG.debug("Updating network interfaces from %s", self) self.distro.apply_network( mydata['meta-data']['network-interfaces']) diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py index 77b43e17..2f53c1ba 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -107,7 +107,7 @@ class DataSourceOVF(sources.DataSource): return True def get_public_ssh_keys(self): - if not 'public-keys' in self.metadata: + if 'public-keys' not in self.metadata: return [] pks = self.metadata['public-keys'] if isinstance(pks, (list)): @@ -205,7 +205,7 @@ def transport_iso9660(require_iso=True): fullp = os.path.join("/dev/", dev) if (fullp 
in mounts or - not cdmatch.match(dev) or os.path.isdir(fullp)): + not cdmatch.match(dev) or os.path.isdir(fullp)): continue try: diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 65ec0339..2733a2f6 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -41,7 +41,7 @@ import serial LOG = logging.getLogger(__name__) SMARTOS_ATTRIB_MAP = { - #Cloud-init Key : (SmartOS Key, Strip line endings) + # Cloud-init Key : (SmartOS Key, Strip line endings) 'local-hostname': ('hostname', True), 'public-keys': ('root_authorized_keys', True), 'user-script': ('user-script', False), @@ -96,21 +96,21 @@ BUILTIN_CLOUD_CONFIG = { 'device': 'ephemeral0'}], } -## builtin vendor-data is a boothook that writes a script into -## /var/lib/cloud/scripts/per-boot. *That* script then handles -## executing the 'operator-script' and 'user-script' files -## that cloud-init writes into /var/lib/cloud/instance/data/ -## if they exist. -## -## This is all very indirect, but its done like this so that at -## some point in the future, perhaps cloud-init wouldn't do it at -## all, but rather the vendor actually provide vendor-data that accomplished -## their desires. (That is the point of vendor-data). -## -## cloud-init does cheat a bit, and write the operator-script and user-script -## itself. It could have the vendor-script do that, but it seems better -## to not require the image to contain a tool (mdata-get) to read those -## keys when we have a perfectly good one inside cloud-init. +# builtin vendor-data is a boothook that writes a script into +# /var/lib/cloud/scripts/per-boot. *That* script then handles +# executing the 'operator-script' and 'user-script' files +# that cloud-init writes into /var/lib/cloud/instance/data/ +# if they exist. +# +# This is all very indirect, but its done like this so that at +# some point in the future, perhaps cloud-init wouldn't do it at +# all, but rather the vendor actually provide vendor-data that accomplished +# their desires. (That is the point of vendor-data). +# +# cloud-init does cheat a bit, and write the operator-script and user-script +# itself. It could have the vendor-script do that, but it seems better +# to not require the image to contain a tool (mdata-get) to read those +# keys when we have a perfectly good one inside cloud-init. BUILTIN_VENDOR_DATA = """\ #cloud-boothook #!/bin/sh diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index fef4d460..7d52a2e6 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -166,7 +166,7 @@ class DataSource(object): defhost = "localhost" domain = defdomain - if not self.metadata or not 'local-hostname' in self.metadata: + if not self.metadata or 'local-hostname' not in self.metadata: # this is somewhat questionable really. # the cloud datasource was asked for a hostname # and didn't have one. 
raising error might be more appropriate diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 9e071fc4..d29d480a 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -642,7 +642,7 @@ class Modules(object): # Try the modules frequency, otherwise fallback to a known one if not freq: freq = mod.frequency - if not freq in FREQUENCIES: + if freq not in FREQUENCIES: freq = PER_INSTANCE LOG.debug("Running module %s (%s) with frequency %s", name, mod, freq) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 4a83169a..73c1fa4e 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -237,8 +237,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, return UrlResponse(r) except exceptions.RequestException as e: if (isinstance(e, (exceptions.HTTPError)) - and hasattr(e, 'response') # This appeared in v 0.10.8 - and hasattr(e.response, 'status_code')): + and hasattr(e, 'response') # This appeared in v 0.10.8 + and hasattr(e.response, 'status_code')): excps.append(UrlError(e, code=e.response.status_code, headers=e.response.headers)) else: diff --git a/cloudinit/util.py b/cloudinit/util.py index bc681f4a..0821901a 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -423,7 +423,7 @@ def get_cfg_option_list(yobj, key, default=None): @return: The configuration option as a list of strings or default if key is not found. """ - if not key in yobj: + if key not in yobj: return default if yobj[key] is None: return [] @@ -1148,7 +1148,7 @@ def chownbyname(fname, user=None, group=None): # this returns the specific 'mode' entry, cleanly formatted, with value def get_output_cfg(cfg, mode): ret = [None, None] - if not cfg or not 'output' in cfg: + if not cfg or 'output' not in cfg: return ret outcfg = cfg['output'] @@ -1745,7 +1745,7 @@ def parse_mount_info(path, mountinfo_lines, log=LOG): # Ignore mount points higher than an already seen mount # point. 
if (match_mount_point_elements is not None and - len(match_mount_point_elements) > len(mount_point_elements)): + len(match_mount_point_elements) > len(mount_point_elements)): continue # Find the '-' which terminates a list of optional columns to diff --git a/test-requirements.txt b/test-requirements.txt index 4be0211d..47d45d47 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,6 @@ httpretty>=0.7.1 mocker nose -pep8 +pep8==1.5.7 pyflakes pylint diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py index 88c82d5e..e992a006 100644 --- a/tests/unittests/test_datasource/test_azure.py +++ b/tests/unittests/test_datasource/test_azure.py @@ -235,7 +235,7 @@ class TestAzureDataSource(MockerTestCase): self.assertEqual(dsrc.userdata_raw, mydata) def test_no_datasource_expected(self): - #no source should be found if no seed_dir and no devs + # no source should be found if no seed_dir and no devs data = {} dsrc = self._get_ds({}) ret = dsrc.get_data() diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py index 14274562..8bcc026c 100644 --- a/tests/unittests/test_datasource/test_nocloud.py +++ b/tests/unittests/test_datasource/test_nocloud.py @@ -50,7 +50,7 @@ class TestNoCloudDataSource(MockerTestCase): self.assertTrue(ret) def test_fs_label(self): - #find_devs_with should not be called ff fs_label is None + # find_devs_with should not be called ff fs_label is None ds = DataSourceNoCloud.DataSourceNoCloud class PsuedoException(Exception): @@ -74,7 +74,7 @@ class TestNoCloudDataSource(MockerTestCase): self.assertFalse(ret) def test_no_datasource_expected(self): - #no source should be found if no cmdline, config, and fs_label=None + # no source should be found if no cmdline, config, and fs_label=None sys_cfg = {'datasource': {'NoCloud': {'fs_label': None}}} ds = DataSourceNoCloud.DataSourceNoCloud diff --git a/tests/unittests/test_distros/test_generic.py b/tests/unittests/test_distros/test_generic.py index c24c790e..a972568f 100644 --- a/tests/unittests/test_distros/test_generic.py +++ b/tests/unittests/test_distros/test_generic.py @@ -193,7 +193,7 @@ class TestGenericDistro(helpers.FilesystemMockingTestCase): 'security': 'http://security-mirror2-intel'}) -#def _get_package_mirror_info(mirror_info, availability_zone=None, +# def _get_package_mirror_info(mirror_info, availability_zone=None, # mirror_filter=util.search_for_mirror): diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py index f6dc4521..fa624197 100644 --- a/tests/unittests/test_handler/test_handler_growpart.py +++ b/tests/unittests/test_handler/test_handler_growpart.py @@ -53,7 +53,7 @@ class TestDisabled(MockerTestCase): self.handle = cc_growpart.handle def test_mode_off(self): - #Test that nothing is done if mode is off. + # Test that nothing is done if mode is off. 
# this really only verifies that resizer_factory isn't called config = {'growpart': {'mode': 'off'}} @@ -109,7 +109,7 @@ class TestConfig(MockerTestCase): self.assertTrue(isinstance(ret, cc_growpart.ResizeGrowPart)) def test_handle_with_no_growpart_entry(self): - #if no 'growpart' entry in config, then mode=auto should be used + # if no 'growpart' entry in config, then mode=auto should be used myresizer = object() @@ -141,7 +141,7 @@ class TestResize(MockerTestCase): self.mocker.order() def test_simple_devices(self): - #test simple device list + # test simple device list # this patches out devent2dev, os.stat, and device_part_info # so in the end, doesn't test a lot devs = ["/dev/XXda1", "/dev/YYda2"] @@ -187,7 +187,7 @@ class TestResize(MockerTestCase): find("/dev/YYda2", resized)[1]) self.assertEqual(cc_growpart.RESIZE.SKIPPED, find(enoent[0], resized)[1]) - #self.assertEqual(resize_calls, + # self.assertEqual(resize_calls, # [("/dev/XXda", "1", "/dev/XXda1"), # ("/dev/YYda", "2", "/dev/YYda2")]) finally: diff --git a/tests/unittests/test_handler/test_handler_power_state.py b/tests/unittests/test_handler/test_handler_power_state.py index 4b7b2112..2f86b8f8 100644 --- a/tests/unittests/test_handler/test_handler_power_state.py +++ b/tests/unittests/test_handler/test_handler_power_state.py @@ -67,7 +67,7 @@ def check_lps_ret(psc_return, mode=None): cmd = psc_return[0] timeout = psc_return[1] - if not 'shutdown' in psc_return[0][0]: + if 'shutdown' not in psc_return[0][0]: errs.append("string 'shutdown' not in cmd") if mode is not None: diff --git a/tests/unittests/test_handler/test_handler_yum_add_repo.py b/tests/unittests/test_handler/test_handler_yum_add_repo.py index 156441c7..21b89c34 100644 --- a/tests/unittests/test_handler/test_handler_yum_add_repo.py +++ b/tests/unittests/test_handler/test_handler_yum_add_repo.py @@ -24,7 +24,7 @@ class TestConfig(helpers.FilesystemMockingTestCase): 'epel-testing': { 'name': 'Extra Packages for Enterprise Linux 5 - Testing', # Missing this should cause the repo not to be written - #'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch', + # 'baseurl': 'http://blah.org/pub/epel/testing/5/$basearch', 'enabled': False, 'gpgcheck': True, 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL', diff --git a/tools/hacking.py b/tools/hacking.py index 26a07c53..14bd0cda 100755 --- a/tools/hacking.py +++ b/tools/hacking.py @@ -71,7 +71,7 @@ def cloud_import_alphabetical(physical_line, line_number, lines): # with or without "as y" length = [2, 4] if (len(split_line) in length and len(split_previous) in length and - split_line[0] == "import" and split_previous[0] == "import"): + split_line[0] == "import" and split_previous[0] == "import"): if split_line[1] < split_previous[1]: return (0, "N306: imports not in alphabetical order (%s, %s)" % (split_previous[1], split_line[1])) -- cgit v1.2.3 From 92c04cc87387e244485ad502ecacd46b3f150366 Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Tue, 26 Aug 2014 12:38:41 -0700 Subject: Upgrade configdrive version - Upgrade configdrive to use 2013-10-17 - Fix issue with vendor_data.json parsing Co-Authored-By: Paul Querna --- cloudinit/sources/DataSourceConfigDrive.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 0c35f83a..8e65fc46 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -125,7 +125,19 @@ class 
DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): self.userdata_raw = results.get('userdata') self.version = results['version'] self.files.update(results.get('files', {})) - self.vendordata_raw = results.get('vendordata') + + # if vendordata includes 'cloud-init', then read that explicitly + # for cloud-init (for namespacing). + vd = results.get('vendordata') + if isinstance(vd, dict): + if 'cloud-init' in vd: + self.vendordata_raw = vd['cloud-init'] + else: + # TODO(pquerna): this is so wrong. + self.vendordata_raw = json.dumps(vd) + else: + self.vendordata_raw = vd + return True @@ -160,7 +172,7 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None): return "net" -def read_config_drive(source_dir, version="2012-08-10"): +def read_config_drive(source_dir, version="2013-10-17"): reader = openstack.ConfigDriveReader(source_dir) finders = [ (reader.read_v2, [], {'version': version}), -- cgit v1.2.3 From 5fb6482692cfffba5ba45102858b14ba3acc5bc7 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 26 Aug 2014 15:53:41 -0400 Subject: further remove evidence of pylint. This just removes comments '# pylint:' things and other code remnents of pylint. --- Makefile | 2 +- cloudinit/config/cc_disk_setup.py | 2 +- cloudinit/config/cc_mounts.py | 2 +- cloudinit/config/cc_power_state_change.py | 4 ++-- cloudinit/config/cc_resizefs.py | 8 +++---- cloudinit/config/cc_set_passwords.py | 2 +- cloudinit/distros/parsers/resolv_conf.py | 4 ++-- cloudinit/handlers/boot_hook.py | 3 +-- cloudinit/handlers/cloud_config.py | 3 +-- cloudinit/handlers/shell_script.py | 3 +-- cloudinit/handlers/upstart_job.py | 3 +-- cloudinit/patcher.py | 2 +- cloudinit/sources/DataSourceOpenNebula.py | 2 +- cloudinit/type_utils.py | 2 -- cloudinit/url_helper.py | 13 +++++------ cloudinit/util.py | 16 ++++++------- packages/debian/control.in | 1 - pylintrc | 19 ---------------- setup.py | 2 +- tests/unittests/test__init__.py | 3 +-- tests/unittests/test_distros/test_generic.py | 4 ++-- .../test_handler/test_handler_growpart.py | 2 -- tests/unittests/test_merging.py | 2 +- tests/unittests/test_util.py | 6 ++--- tools/hacking.py | 4 ++-- tools/mock-meta.py | 8 +++---- tools/run-pep8 | 2 +- tools/run-pylint | 26 ---------------------- 28 files changed, 45 insertions(+), 105 deletions(-) delete mode 100644 pylintrc delete mode 100755 tools/run-pylint (limited to 'cloudinit/sources') diff --git a/Makefile b/Makefile index d96e6488..009257ca 100644 --- a/Makefile +++ b/Makefile @@ -58,5 +58,5 @@ rpm: deb: ./packages/bddeb -.PHONY: test pylint pyflakes 2to3 clean pep8 rpm deb yaml check_version +.PHONY: test pyflakes 2to3 clean pep8 rpm deb yaml check_version .PHONY: pip-test-requirements pip-requirements clean_pyc diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py index a5209268..1660832b 100644 --- a/cloudinit/config/cc_disk_setup.py +++ b/cloudinit/config/cc_disk_setup.py @@ -484,7 +484,7 @@ def get_partition_mbr_layout(size, layout): def purge_disk_ptable(device): # wipe the first and last megabyte of a disk (or file) # gpt stores partition table both at front and at end. - null = '\0' # pylint: disable=W1401 + null = '\0' start_len = 1024 * 1024 end_len = 1024 * 1024 with open(device, "rb+") as fp: diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 80590118..ba1303d1 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -18,7 +18,7 @@ # You should have received a copy of the GNU General Public License # along with this program. 
If not, see . -from string import whitespace # pylint: disable=W0402 +from string import whitespace import logging import os.path diff --git a/cloudinit/config/cc_power_state_change.py b/cloudinit/config/cc_power_state_change.py index 638daef8..09d37371 100644 --- a/cloudinit/config/cc_power_state_change.py +++ b/cloudinit/config/cc_power_state_change.py @@ -119,7 +119,7 @@ def load_power_state(cfg): def doexit(sysexit): - os._exit(sysexit) # pylint: disable=W0212 + os._exit(sysexit) def execmd(exe_args, output=None, data_in=None): @@ -127,7 +127,7 @@ def execmd(exe_args, output=None, data_in=None): proc = subprocess.Popen(exe_args, stdin=subprocess.PIPE, stdout=output, stderr=subprocess.STDOUT) proc.communicate(data_in) - ret = proc.returncode # pylint: disable=E1101 + ret = proc.returncode except Exception: doexit(EXIT_FAIL) doexit(ret) diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index 667d5977..b9655749 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -28,19 +28,19 @@ from cloudinit import util frequency = PER_ALWAYS -def _resize_btrfs(mount_point, devpth): # pylint: disable=W0613 +def _resize_btrfs(mount_point, devpth): return ('btrfs', 'filesystem', 'resize', 'max', mount_point) -def _resize_ext(mount_point, devpth): # pylint: disable=W0613 +def _resize_ext(mount_point, devpth): return ('resize2fs', devpth) -def _resize_xfs(mount_point, devpth): # pylint: disable=W0613 +def _resize_xfs(mount_point, devpth): return ('xfs_growfs', devpth) -def _resize_ufs(mount_point, devpth): # pylint: disable=W0613 +def _resize_ufs(mount_point, devpth): return ('growfs', devpth) # Do not use a dictionary as these commands should be able to be used diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index 4a3b21af..24e33915 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -28,7 +28,7 @@ from cloudinit import distros as ds from cloudinit import ssh_util from cloudinit import util -from string import letters, digits # pylint: disable=W0402 +from string import letters, digits # We are removing certain 'painful' letters/numbers PW_SET = (letters.translate(None, 'loLOI') + diff --git a/cloudinit/distros/parsers/resolv_conf.py b/cloudinit/distros/parsers/resolv_conf.py index 1be9d46b..5733c25a 100644 --- a/cloudinit/distros/parsers/resolv_conf.py +++ b/cloudinit/distros/parsers/resolv_conf.py @@ -137,8 +137,8 @@ class ResolvConf(object): self._contents.append(('option', ['search', s_list, ''])) return flat_sds - @local_domain.setter # pl51222 pylint: disable=E1101 - def local_domain(self, domain): # pl51222 pylint: disable=E0102 + @local_domain.setter + def local_domain(self, domain): self.parse() self._remove_option('domain') self._contents.append(('option', ['domain', str(domain), ''])) diff --git a/cloudinit/handlers/boot_hook.py b/cloudinit/handlers/boot_hook.py index 1848ce2c..3a50cf87 100644 --- a/cloudinit/handlers/boot_hook.py +++ b/cloudinit/handlers/boot_hook.py @@ -53,8 +53,7 @@ class BootHookPartHandler(handlers.Handler): util.write_file(filepath, contents.lstrip(), 0700) return filepath - def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 - payload, frequency): # pylint: disable=W0613 + def handle_part(self, data, ctype, filename, payload, frequency): if ctype in handlers.CONTENT_SIGNALS: return diff --git a/cloudinit/handlers/cloud_config.py b/cloudinit/handlers/cloud_config.py index 4232700f..bf994e33 100644 --- 
a/cloudinit/handlers/cloud_config.py +++ b/cloudinit/handlers/cloud_config.py @@ -138,8 +138,7 @@ class CloudConfigPartHandler(handlers.Handler): self.file_names = [] self.cloud_buf = None - def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 - payload, _frequency, headers): # pylint: disable=W0613 + def handle_part(self, data, ctype, filename, payload, frequency, headers): if ctype == handlers.CONTENT_START: self._reset() return diff --git a/cloudinit/handlers/shell_script.py b/cloudinit/handlers/shell_script.py index 30c1ed89..9755ab05 100644 --- a/cloudinit/handlers/shell_script.py +++ b/cloudinit/handlers/shell_script.py @@ -44,8 +44,7 @@ class ShellScriptPartHandler(handlers.Handler): handlers.type_from_starts_with(SHELL_PREFIX), ] - def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 - payload, frequency): # pylint: disable=W0613 + def handle_part(self, data, ctype, filename, payload, frequency): if ctype in handlers.CONTENT_SIGNALS: # TODO(harlowja): maybe delete existing things here return diff --git a/cloudinit/handlers/upstart_job.py b/cloudinit/handlers/upstart_job.py index bac4cad2..50d193c4 100644 --- a/cloudinit/handlers/upstart_job.py +++ b/cloudinit/handlers/upstart_job.py @@ -44,8 +44,7 @@ class UpstartJobPartHandler(handlers.Handler): handlers.type_from_starts_with(UPSTART_PREFIX), ] - def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 - payload, frequency): + def handle_part(self, data, ctype, filename, payload, frequency): if ctype in handlers.CONTENT_SIGNALS: return diff --git a/cloudinit/patcher.py b/cloudinit/patcher.py index 0f3c034e..f6609d6f 100644 --- a/cloudinit/patcher.py +++ b/cloudinit/patcher.py @@ -41,7 +41,7 @@ def _patch_logging(): fallback_handler = QuietStreamHandler(sys.stderr) fallback_handler.setFormatter(logging.Formatter(FALL_FORMAT)) - def handleError(self, record): # pylint: disable=W0613 + def handleError(self, record): try: fallback_handler.handle(record) fallback_handler.flush() diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py index 34557f8b..e2469f6e 100644 --- a/cloudinit/sources/DataSourceOpenNebula.py +++ b/cloudinit/sources/DataSourceOpenNebula.py @@ -28,7 +28,7 @@ import base64 import os import pwd import re -import string # pylint: disable=W0402 +import string from cloudinit import log as logging from cloudinit import sources diff --git a/cloudinit/type_utils.py b/cloudinit/type_utils.py index 2decbfc5..cc3d9495 100644 --- a/cloudinit/type_utils.py +++ b/cloudinit/type_utils.py @@ -19,8 +19,6 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-# -# pylint: disable=C0302 import types diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 73c1fa4e..3074dd08 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -44,7 +44,7 @@ try: from distutils.version import LooseVersion import pkg_resources _REQ = pkg_resources.get_distribution('requests') - _REQ_VER = LooseVersion(_REQ.version) # pylint: disable=E1103 + _REQ_VER = LooseVersion(_REQ.version) if _REQ_VER >= LooseVersion('0.8.8'): SSL_ENABLED = True if _REQ_VER >= LooseVersion('0.7.0') and _REQ_VER < LooseVersion('1.0.0'): @@ -54,7 +54,7 @@ except: def _cleanurl(url): - parsed_url = list(urlparse(url, scheme='http')) # pylint: disable=E1123 + parsed_url = list(urlparse(url, scheme='http')) if not parsed_url[1] and parsed_url[2]: # Swap these since this seems to be a common # occurrence when given urls like 'www.google.com' @@ -90,7 +90,7 @@ class StringResponse(object): self.contents = contents self.url = None - def ok(self, *args, **kwargs): # pylint: disable=W0613 + def ok(self, *args, **kwargs): if self.code != 200: return False return True @@ -150,7 +150,7 @@ class UrlError(IOError): def _get_ssl_args(url, ssl_details): ssl_args = {} - scheme = urlparse(url).scheme # pylint: disable=E1101 + scheme = urlparse(url).scheme if scheme == 'https' and ssl_details: if not SSL_ENABLED: LOG.warn("SSL is not supported in requests v%s, " @@ -227,10 +227,9 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, r = requests.request(**req_args) if check_status: - r.raise_for_status() # pylint: disable=E1103 + r.raise_for_status() LOG.debug("Read from %s (%s, %sb) after %s attempts", url, - r.status_code, len(r.content), # pylint: disable=E1103 - (i + 1)) + r.status_code, len(r.content), (i + 1)) # Doesn't seem like we can make it use a different # subclass for responses, so add our own backward-compat # attrs diff --git a/cloudinit/util.py b/cloudinit/util.py index 0821901a..bdb0f268 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -19,8 +19,6 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . -# -# pylint: disable=C0302 from StringIO import StringIO @@ -42,7 +40,7 @@ import re import shutil import socket import stat -import string # pylint: disable=W0402 +import string import subprocess import sys import tempfile @@ -198,11 +196,11 @@ def fork_cb(child_cb, *args): if fid == 0: try: child_cb(*args) - os._exit(0) # pylint: disable=W0212 + os._exit(0) except: logexc(LOG, "Failed forking and calling callback %s", type_utils.obj_name(child_cb)) - os._exit(1) # pylint: disable=W0212 + os._exit(1) else: LOG.debug("Forked child %s who will run callback %s", fid, type_utils.obj_name(child_cb)) @@ -487,7 +485,7 @@ def redirect_output(outfmt, errfmt, o_out=None, o_err=None): new_fp = open(arg, owith) elif mode == "|": proc = subprocess.Popen(arg, shell=True, stdin=subprocess.PIPE) - new_fp = proc.stdin # pylint: disable=E1101 + new_fp = proc.stdin else: raise TypeError("Invalid type for output format: %s" % outfmt) @@ -509,7 +507,7 @@ def redirect_output(outfmt, errfmt, o_out=None, o_err=None): new_fp = open(arg, owith) elif mode == "|": proc = subprocess.Popen(arg, shell=True, stdin=subprocess.PIPE) - new_fp = proc.stdin # pylint: disable=E1101 + new_fp = proc.stdin else: raise TypeError("Invalid type for error format: %s" % errfmt) @@ -937,7 +935,7 @@ def is_resolvable(name): should also not exist. The random entry will be resolved inside the search list. 
""" - global _DNS_REDIRECT_IP # pylint: disable=W0603 + global _DNS_REDIRECT_IP if _DNS_REDIRECT_IP is None: badips = set() badnames = ("does-not-exist.example.com.", "example.invalid.", @@ -1532,7 +1530,7 @@ def subp(args, data=None, rcs=None, env=None, capture=True, shell=False, (out, err) = sp.communicate(data) except OSError as e: raise ProcessExecutionError(cmd=args, reason=e) - rc = sp.returncode # pylint: disable=E1101 + rc = sp.returncode if rc not in rcs: raise ProcessExecutionError(stdout=out, stderr=err, exit_code=rc, diff --git a/packages/debian/control.in b/packages/debian/control.in index c892747c..9207e5f4 100644 --- a/packages/debian/control.in +++ b/packages/debian/control.in @@ -9,7 +9,6 @@ Build-Depends: debhelper (>= 9), python (>= 2.6.6-3~), python-nose, pyflakes, - pylint, python-setuptools, python-selinux, python-cheetah, diff --git a/pylintrc b/pylintrc deleted file mode 100644 index ee886510..00000000 --- a/pylintrc +++ /dev/null @@ -1,19 +0,0 @@ -[General] -init-hook='import sys; sys.path.append("tests/")' - -[MESSAGES CONTROL] -# See: http://pylint-messages.wikidot.com/all-codes -# W0142: *args and **kwargs are fine. -# W0511: TODOs in code comments are fine. -# W0702: No exception type(s) specified -# W0703: Catch "Exception" -# C0103: Invalid name -# C0111: Missing docstring -disable=W0142,W0511,W0702,W0703,C0103,C0111 - -[REPORTS] -reports=no -include-ids=yes - -[FORMAT] -max-line-length=79 diff --git a/setup.py b/setup.py index 556103b9..2fb191c0 100755 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ def tiny_p(cmd, capture=True): sp = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, stdin=None) (out, err) = sp.communicate() - ret = sp.returncode # pylint: disable=E1101 + ret = sp.returncode if ret not in [0]: raise RuntimeError("Failed running %s [rc=%s] (%s, %s)" % (cmd, ret, out, err)) diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py index 03065c8b..17965488 100644 --- a/tests/unittests/test__init__.py +++ b/tests/unittests/test__init__.py @@ -18,8 +18,7 @@ class FakeModule(handlers.Handler): def list_types(self): return self.types - def handle_part(self, _data, ctype, filename, # pylint: disable=W0221 - payload, frequency): + def handle_part(self, data, ctype, filename, payload, frequency): pass diff --git a/tests/unittests/test_distros/test_generic.py b/tests/unittests/test_distros/test_generic.py index a972568f..db6aa0e8 100644 --- a/tests/unittests/test_distros/test_generic.py +++ b/tests/unittests/test_distros/test_generic.py @@ -26,8 +26,8 @@ package_mirrors = [ unknown_arch_info ] -gpmi = distros._get_package_mirror_info # pylint: disable=W0212 -gapmi = distros._get_arch_package_mirror_info # pylint: disable=W0212 +gpmi = distros._get_package_mirror_info +gapmi = distros._get_arch_package_mirror_info class TestGenericDistro(helpers.FilesystemMockingTestCase): diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py index fa624197..5d0636d1 100644 --- a/tests/unittests/test_handler/test_handler_growpart.py +++ b/tests/unittests/test_handler/test_handler_growpart.py @@ -203,8 +203,6 @@ def simple_device_part_info(devpath): class Bunch(object): - st_mode = None # fix pylint complaint - def __init__(self, **kwds): self.__dict__.update(kwds) diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py index 17704f8e..07b610f7 100644 --- a/tests/unittests/test_merging.py +++ b/tests/unittests/test_merging.py @@ -11,7 +11,7 @@ import glob 
import os import random import re -import string # pylint: disable=W0402 +import string SOURCE_PAT = "source*.*yaml" EXPECTED_PAT = "expected%s.yaml" diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py index 0cb41520..35e92445 100644 --- a/tests/unittests/test_util.py +++ b/tests/unittests/test_util.py @@ -1,5 +1,3 @@ -# pylint: disable=C0301 -# the mountinfo data lines are too long import os import stat import yaml @@ -18,7 +16,7 @@ class FakeSelinux(object): self.match_what = match_what self.restored = [] - def matchpathcon(self, path, mode): # pylint: disable=W0613 + def matchpathcon(self, path, mode): if path == self.match_what: return else: @@ -27,7 +25,7 @@ class FakeSelinux(object): def is_selinux_enabled(self): return True - def restorecon(self, path, recursive): # pylint: disable=W0613 + def restorecon(self, path, recursive): self.restored.append(path) diff --git a/tools/hacking.py b/tools/hacking.py index 14bd0cda..e7797564 100755 --- a/tools/hacking.py +++ b/tools/hacking.py @@ -154,7 +154,7 @@ def add_cloud(): if not inspect.isfunction(function): continue if name.startswith("cloud_"): - exec("pep8.%s = %s" % (name, name)) # pylint: disable=W0122 + exec("pep8.%s = %s" % (name, name)) if __name__ == "__main__": # NOVA based 'hacking.py' error codes start with an N @@ -163,7 +163,7 @@ if __name__ == "__main__": pep8.current_file = current_file pep8.readlines = readlines try: - pep8._main() # pylint: disable=W0212 + pep8._main() finally: if len(_missingImport) > 0: print >> sys.stderr, ("%i imports missing in this test environment" diff --git a/tools/mock-meta.py b/tools/mock-meta.py index c79f0598..dfbc2a71 100755 --- a/tools/mock-meta.py +++ b/tools/mock-meta.py @@ -23,7 +23,7 @@ import json import logging import os import random -import string # pylint: disable=W0402 +import string import sys import yaml @@ -306,7 +306,7 @@ class UserDataHandler(object): blob = "\n".join(lines) return blob.strip() - def get_data(self, params, who, **kwargs): # pylint: disable=W0613 + def get_data(self, params, who, **kwargs): if not params: return self._get_user_blob(who=who) return NOT_IMPL_RESPONSE @@ -427,8 +427,8 @@ def extract_opts(): def setup_fetchers(opts): - global meta_fetcher # pylint: disable=W0603 - global user_fetcher # pylint: disable=W0603 + global meta_fetcher + global user_fetcher meta_fetcher = MetaDataHandler(opts) user_fetcher = UserDataHandler(opts) diff --git a/tools/run-pep8 b/tools/run-pep8 index cfce5edd..d0a131f6 100755 --- a/tools/run-pep8 +++ b/tools/run-pep8 @@ -13,7 +13,7 @@ else base=`pwd`/tools/ fi -IGNORE="E501" # Line too long (these are caught by pylint) +IGNORE="" # King Arthur: Be quiet! ... Be Quiet! I Order You to Be Quiet. IGNORE="$IGNORE,E121" # Continuation line indentation is not a multiple of four diff --git a/tools/run-pylint b/tools/run-pylint deleted file mode 100755 index 0fe0c64a..00000000 --- a/tools/run-pylint +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -if [ $# -eq 0 ]; then - files=( bin/cloud-init $(find * -name "*.py" -type f) ) -else - files=( "$@" ); -fi - -RC_FILE="pylintrc" -if [ ! 
-f $RC_FILE ]; then - RC_FILE="../pylintrc" -fi - -cmd=( - pylint - --rcfile=$RC_FILE - --disable=R - --disable=I - --dummy-variables-rgx="_" - "${files[@]}" -) - -echo -e "\nRunning pylint:" -echo "${cmd[@]}" -"${cmd[@]}" - -- cgit v1.2.3 From 5fc46193d3e65d0eaaaa45bcf41c5b35b4e80df7 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 26 Aug 2014 13:41:46 -0700 Subject: Optimize away most of the path_exists checks --- cloudinit/sources/helpers/openstack.py | 101 +++++++++++++++++---------------- 1 file changed, 52 insertions(+), 49 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 0fac0335..361c9994 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -149,10 +149,6 @@ class BaseReader(object): def _path_join(self, base, *add_ons): pass - @abc.abstractmethod - def _path_exists(self, path): - pass - @abc.abstractmethod def _path_read(self, path): pass @@ -170,22 +166,9 @@ class BaseReader(object): path = self._path_join(self.base_path, "openstack", *path_pieces) return self._path_read(path) + @abc.abstractmethod def _find_working_version(self, version): - search_versions = [version] + list(OS_VERSIONS) - for potential_version in search_versions: - if not potential_version: - continue - path = self._path_join(self.base_path, "openstack", - potential_version) - if self._path_exists(path): - if potential_version != version: - LOG.debug("Version '%s' not available, attempting to use" - " version '%s' instead", version, - potential_version) - return potential_version - LOG.debug("Version '%s' not available, attempting to use '%s'" - " instead", version, OS_LATEST) - return OS_LATEST + pass def read_v2(self, version=None): """Reads a version 2 formatted location. 
@@ -228,15 +211,14 @@ class BaseReader(object): path = self._path_join(self.base_path, path) data = None found = False - if self._path_exists(path): - try: - data = self._path_read(path) - except IOError: - raise NonReadable("Failed to read: %s" % path) - found = True + try: + data = self._path_read(path) + except IOError: + pass else: - if required: - raise NonReadable("Missing mandatory path: %s" % path) + found = True + if required and not found: + raise NonReadable("Missing mandatory path: %s" % path) if found and translator: try: data = translator(data) @@ -315,6 +297,23 @@ class ConfigDriveReader(BaseReader): def _path_read(self, path): return util.load_file(path) + def _find_working_version(self, version): + search_versions = [version] + list(OS_VERSIONS) + for potential_version in search_versions: + if not potential_version: + continue + path = self._path_join(self.base_path, "openstack", + potential_version) + if self._path_exists(path): + if potential_version != version: + LOG.debug("Version '%s' not available, attempting to use" + " version '%s' instead", version, + potential_version) + return potential_version + LOG.debug("Version '%s' not available, attempting to use '%s'" + " instead", version, OS_LATEST) + return OS_LATEST + def _read_ec2_metadata(self): path = self._path_join(self.base_path, 'ec2', 'latest', 'meta-data.json') @@ -401,6 +400,32 @@ class MetadataReader(BaseReader): self.timeout = float(timeout) self.retries = int(retries) + def _find_working_version(self, version): + search_versions = [version] + list(OS_VERSIONS) + version_path = self._path_join(self.base_path, "openstack") + versions_available = [] + try: + versions = self._path_read(version_path) + except IOError: + pass + else: + for line in versions.splitlines(): + line = line.strip() + if not line: + continue + versions_available.append(line) + for potential_version in search_versions: + if potential_version not in versions_available: + continue + if potential_version != version: + LOG.debug("Version '%s' not available, attempting to use" + " version '%s' instead", version, + potential_version) + return potential_version + LOG.debug("Version '%s' not available, searched %s, attempting to" + " use '%s' instead", version, search_versions, OS_LATEST) + return OS_LATEST + def _path_read(self, path): response = url_helper.readurl(path, retries=self.retries, @@ -408,28 +433,6 @@ class MetadataReader(BaseReader): timeout=self.timeout) return response.contents - def _path_exists(self, path): - - def should_retry_cb(request, cause): - try: - code = int(cause.code) - if code >= 400: - return False - except (TypeError, ValueError): - # Older versions of requests didn't have a code. 
- pass - return True - - try: - response = url_helper.readurl(path, - retries=self.retries, - ssl_details=self.ssl_details, - timeout=self.timeout, - exception_cb=should_retry_cb) - return response.ok() - except IOError: - return False - def _path_join(self, base, *add_ons): return url_helper.combine_url(base, *add_ons) -- cgit v1.2.3 From df7b6c5564e6ef8f5ceae9de8ff79410a4894f5f Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Tue, 26 Aug 2014 14:45:08 -0700 Subject: Add failback for older Openstack configdrive versions - Also utilizing the constants defined in cloudinit/sources/helpers/openstack.py for configdrive versions --- cloudinit/sources/DataSourceConfigDrive.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 8e65fc46..c55f7d4a 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -172,10 +172,12 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None): return "net" -def read_config_drive(source_dir, version="2013-10-17"): +def read_config_drive(source_dir, version=openstack.OS_HAVANA): reader = openstack.ConfigDriveReader(source_dir) finders = [ (reader.read_v2, [], {'version': version}), + (reader.read_v2, [], {'version': openstack.OS_GRIZZLY}), + (reader.read_v2, [], {'version': openstack.OS_FOLSOM}), (reader.read_v1, [], {}), ] excps = [] -- cgit v1.2.3 From 1e019cc001a8f31fa978d76b86fbe7813b2a076f Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Tue, 26 Aug 2014 15:12:35 -0700 Subject: Refactor vendor_data handling vendor_data is guaranteed to be a dict if it exists; if it doesn't exist ensure it's represented by an empty dict to avoid checking it to see if it's a dict. --- cloudinit/sources/DataSourceConfigDrive.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index c55f7d4a..65477184 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -126,15 +126,12 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): self.version = results['version'] self.files.update(results.get('files', {})) + # If there is no vendordata, set vd to an empty dict instead of None + vd = results.get('vendordata', {}) # if vendordata includes 'cloud-init', then read that explicitly # for cloud-init (for namespacing). - vd = results.get('vendordata') - if isinstance(vd, dict): - if 'cloud-init' in vd: - self.vendordata_raw = vd['cloud-init'] - else: - # TODO(pquerna): this is so wrong. 
- self.vendordata_raw = json.dumps(vd) + if 'cloud-init' in vd: + self.vendordata_raw = vd['cloud-init'] else: self.vendordata_raw = vd -- cgit v1.2.3 From cc9e3af6c95b3263a49d4590d9dd176bdc570c99 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 26 Aug 2014 23:02:58 -0700 Subject: Add logging around failure to read optional/mandatory paths --- cloudinit/sources/helpers/openstack.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 361c9994..e5a38de0 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -213,8 +213,12 @@ class BaseReader(object): found = False try: data = self._path_read(path) - except IOError: - pass + except IOError as e: + if not required: + LOG.debug("Failed reading optional path %s due" + " to: %s", path, e) + else: + LOG.exception("Failed reading mandatory path %s", path) else: found = True if required and not found: -- cgit v1.2.3 From 6a6d3a499c2327b03993bbaea2b9d0df5dc7eb64 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 26 Aug 2014 23:04:56 -0700 Subject: Just use os.path.exists directly --- cloudinit/sources/helpers/openstack.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index e5a38de0..418ab4d1 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -295,9 +295,6 @@ class ConfigDriveReader(BaseReader): components = [base] + list(add_ons) return os.path.join(*components) - def _path_exists(self, path): - return os.path.exists(path) - def _path_read(self, path): return util.load_file(path) @@ -308,7 +305,7 @@ class ConfigDriveReader(BaseReader): continue path = self._path_join(self.base_path, "openstack", potential_version) - if self._path_exists(path): + if os.path.exists(path): if potential_version != version: LOG.debug("Version '%s' not available, attempting to use" " version '%s' instead", version, @@ -321,7 +318,7 @@ class ConfigDriveReader(BaseReader): def _read_ec2_metadata(self): path = self._path_join(self.base_path, 'ec2', 'latest', 'meta-data.json') - if not self._path_exists(path): + if not os.path.exists(path): return {} else: try: @@ -341,7 +338,7 @@ class ConfigDriveReader(BaseReader): found = {} for name in FILES_V1.keys(): path = self._path_join(self.base_path, name) - if self._path_exists(path): + if os.path.exists(path): found[name] = path if len(found) == 0: raise NonReadable("%s: no files found" % (self.base_path)) -- cgit v1.2.3 From 9a7587f35f327b9f8cafce8687832e9e77c1afde Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 26 Aug 2014 23:08:14 -0700 Subject: Log a warning when unable to fetch the openstack versions --- cloudinit/sources/helpers/openstack.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 418ab4d1..56986a94 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -407,8 +407,9 @@ class MetadataReader(BaseReader): versions_available = [] try: versions = self._path_read(version_path) - except IOError: - pass + except IOError as e: + LOG.warn("Unable to read openstack versions from %s due" + " to: %s", version_path, e) else: for line in 
versions.splitlines(): line = line.strip() -- cgit v1.2.3 From fb482ce4a36d9b4be75a8bf5b428189548a205d9 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 26 Aug 2014 23:10:50 -0700 Subject: Show the available versions in the debug log message --- cloudinit/sources/helpers/openstack.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 56986a94..3ceec837 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -424,8 +424,9 @@ class MetadataReader(BaseReader): " version '%s' instead", version, potential_version) return potential_version - LOG.debug("Version '%s' not available, searched %s, attempting to" - " use '%s' instead", version, search_versions, OS_LATEST) + LOG.debug("Version '%s' not available, searched for %s (with available" + " versions being %s), attempting to use '%s' instead", + version, search_versions, versions_available, OS_LATEST) return OS_LATEST def _path_read(self, path): -- cgit v1.2.3 From d701035265c765bc42cb3bc358f2bfd0b41f484b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 27 Aug 2014 12:30:23 -0700 Subject: Fixed more of the slowness around fetching and retrying --- cloudinit/sources/helpers/openstack.py | 112 +++++++++++++--------- tests/unittests/test_datasource/test_openstack.py | 23 ++++- 2 files changed, 84 insertions(+), 51 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 3ceec837..2d0fc70e 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -21,6 +21,7 @@ import abc import base64 import copy +import httplib import os from cloudinit import ec2_utils @@ -153,10 +154,31 @@ class BaseReader(object): def _path_read(self, path): pass + @abc.abstractmethod + def _fetch_available_versions(self): + pass + @abc.abstractmethod def _read_ec2_metadata(self): pass + def _find_working_version(self, version): + search_versions = [version] + list(OS_VERSIONS) + available_versions = self._fetch_available_versions() + for potential_version in search_versions: + if not potential_version: + continue + if potential_version not in available_versions: + continue + if potential_version != version: + LOG.debug("Version '%s' not available, attempting to use" + " version '%s' instead", version, + potential_version) + return potential_version + LOG.debug("Version '%s' not available, attempting to use '%s'" + " instead", version, OS_LATEST) + return OS_LATEST + def _read_content_path(self, item): path = item.get('content_path', '').lstrip("/") path_pieces = path.split("/") @@ -166,10 +188,6 @@ class BaseReader(object): path = self._path_join(self.base_path, "openstack", *path_pieces) return self._path_read(path) - @abc.abstractmethod - def _find_working_version(self, version): - pass - def read_v2(self, version=None): """Reads a version 2 formatted location. 
@@ -290,6 +308,7 @@ class BaseReader(object): class ConfigDriveReader(BaseReader): def __init__(self, base_path): super(ConfigDriveReader, self).__init__(base_path) + self._versions = None def _path_join(self, base, *add_ons): components = [base] + list(add_ons) @@ -298,22 +317,21 @@ class ConfigDriveReader(BaseReader): def _path_read(self, path): return util.load_file(path) - def _find_working_version(self, version): - search_versions = [version] + list(OS_VERSIONS) - for potential_version in search_versions: - if not potential_version: - continue - path = self._path_join(self.base_path, "openstack", - potential_version) - if os.path.exists(path): - if potential_version != version: - LOG.debug("Version '%s' not available, attempting to use" - " version '%s' instead", version, - potential_version) - return potential_version - LOG.debug("Version '%s' not available, attempting to use '%s'" - " instead", version, OS_LATEST) - return OS_LATEST + def _fetch_available_versions(self): + if self._versions is not None: + return self._versions + else: + versions_available = [] + path = self._path_join(self.base_path, 'openstack') + try: + for child in os.listdir(path): + child_path = os.path.join(path, child) + if os.path.isdir(child_path): + versions_available.append(child) + except (OSError, IOError): + pass + self._versions = tuple(versions_available) + return self._versions def _read_ec2_metadata(self): path = self._path_join(self.base_path, @@ -400,40 +418,40 @@ class MetadataReader(BaseReader): self.ssl_details = ssl_details self.timeout = float(timeout) self.retries = int(retries) + self._versions = None - def _find_working_version(self, version): - search_versions = [version] + list(OS_VERSIONS) - version_path = self._path_join(self.base_path, "openstack") - versions_available = [] - try: - versions = self._path_read(version_path) - except IOError as e: - LOG.warn("Unable to read openstack versions from %s due" - " to: %s", version_path, e) + def _fetch_available_versions(self): + if self._versions is not None: + return self._versions else: - for line in versions.splitlines(): - line = line.strip() - if not line: - continue - versions_available.append(line) - for potential_version in search_versions: - if potential_version not in versions_available: - continue - if potential_version != version: - LOG.debug("Version '%s' not available, attempting to use" - " version '%s' instead", version, - potential_version) - return potential_version - LOG.debug("Version '%s' not available, searched for %s (with available" - " versions being %s), attempting to use '%s' instead", - version, search_versions, versions_available, OS_LATEST) - return OS_LATEST + path = self._path_join(self.base_path, "openstack") + versions_available = [] + try: + versions = self._path_read(path) + except IOError as e: + LOG.warn("Unable to read openstack versions from %s due" + " to: %s", path, e) + else: + for line in versions.splitlines(): + line = line.strip() + if not line: + continue + versions_available.append(line) + self._versions = tuple(versions_available) + return self._versions def _path_read(self, path): + + def should_retry(_request_args, cause): + if cause.code == httplib.NOT_FOUND: + return False + return True + response = url_helper.readurl(path, retries=self.retries, ssl_details=self.ssl_details, - timeout=self.timeout) + timeout=self.timeout, + exception_cb=should_retry) return response.contents def _path_join(self, base, *add_ons): diff --git a/tests/unittests/test_datasource/test_openstack.py 
b/tests/unittests/test_datasource/test_openstack.py index f43cbec8..412ae5a4 100644 --- a/tests/unittests/test_datasource/test_openstack.py +++ b/tests/unittests/test_datasource/test_openstack.py @@ -67,8 +67,8 @@ OSTACK_META = { CONTENT_0 = 'This is contents of /etc/foo.cfg\n' CONTENT_1 = '# this is /etc/bar/bar.cfg\n' OS_FILES = { - 'openstack/2012-08-10/meta_data.json': json.dumps(OSTACK_META), - 'openstack/2012-08-10/user_data': USER_DATA, + 'openstack/latest/meta_data.json': json.dumps(OSTACK_META), + 'openstack/latest/user_data': USER_DATA, 'openstack/content/0000': CONTENT_0, 'openstack/content/0001': CONTENT_1, 'openstack/latest/meta_data.json': json.dumps(OSTACK_META), @@ -78,6 +78,9 @@ OS_FILES = { EC2_FILES = { 'latest/user-data': USER_DATA, } +EC2_VERSIONS = [ + 'lastest', +] def _register_uris(version, ec2_files, ec2_meta, os_files): @@ -85,6 +88,9 @@ def _register_uris(version, ec2_files, ec2_meta, os_files): same data returned by the openstack metadata service (and ec2 service).""" def match_ec2_url(uri, headers): + path = uri.path.strip("/") + if len(path) == 0: + return (200, headers, "\n".join(EC2_VERSIONS)) path = uri.path.lstrip("/") if path in ec2_files: return (200, headers, ec2_files.get(path)) @@ -110,11 +116,20 @@ def _register_uris(version, ec2_files, ec2_meta, os_files): return (200, headers, str(value)) return (404, headers, '') - def get_request_callback(method, uri, headers): - uri = urlparse(uri) + def match_os_uri(uri, headers): + path = uri.path.strip("/") + if path == 'openstack': + return (200, headers, "\n".join([openstack.OS_LATEST])) path = uri.path.lstrip("/") if path in os_files: return (200, headers, os_files.get(path)) + return (404, headers, '') + + def get_request_callback(method, uri, headers): + uri = urlparse(uri) + path = uri.path.lstrip("/").split("/") + if path[0] == 'openstack': + return match_os_uri(uri, headers) return match_ec2_url(uri, headers) hp.register_uri(hp.GET, re.compile(r'http://169.254.169.254/.*'), -- cgit v1.2.3 From 984d36fb8c5feb82c31121ffd5d6a72b4f593499 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 27 Aug 2014 12:56:50 -0700 Subject: Fix retry cb to reflect what used to exist --- cloudinit/sources/helpers/openstack.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 2d0fc70e..025a2404 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -21,7 +21,6 @@ import abc import base64 import copy -import httplib import os from cloudinit import ec2_utils @@ -442,16 +441,21 @@ class MetadataReader(BaseReader): def _path_read(self, path): - def should_retry(_request_args, cause): - if cause.code == httplib.NOT_FOUND: - return False + def should_retry_cb(_request_args, cause): + try: + code = int(cause.code) + if code >= 400: + return False + except (TypeError, ValueError): + # Older versions of requests didn't have a code. + pass return True response = url_helper.readurl(path, retries=self.retries, ssl_details=self.ssl_details, timeout=self.timeout, - exception_cb=should_retry) + exception_cb=should_retry_cb) return response.contents def _path_join(self, base, *add_ons): -- cgit v1.2.3 From 419e0caab7e005827485460372c7f0d54ac0e9c9 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 27 Aug 2014 17:03:43 -0400 Subject: no functional changes, but some minor changes. 
--- cloudinit/sources/helpers/openstack.py | 75 ++++++++++------------- tests/unittests/test_datasource/test_openstack.py | 2 +- 2 files changed, 34 insertions(+), 43 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 025a2404..3c6bb6aa 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -162,21 +162,25 @@ class BaseReader(object): pass def _find_working_version(self, version): + try: + versions_available = self._fetch_available_versions(self) + except Exception as e: + LOG.warn("Unable to read openstack versions from %s due to: %s", + self.base_path, e) + versions_available = [] + search_versions = [version] + list(OS_VERSIONS) - available_versions = self._fetch_available_versions() + selected_version = OS_LATEST for potential_version in search_versions: - if not potential_version: + if potential_version not in versions_available: continue - if potential_version not in available_versions: - continue - if potential_version != version: - LOG.debug("Version '%s' not available, attempting to use" - " version '%s' instead", version, - potential_version) - return potential_version - LOG.debug("Version '%s' not available, attempting to use '%s'" - " instead", version, OS_LATEST) - return OS_LATEST + selected_version = potential_version + break + + if selected_version != version: + LOG.warn("Version '%s' not available, attempting to use" + " version '%s' instead", version, selected_version) + return selected_version def _read_content_path(self, item): path = item.get('content_path', '').lstrip("/") @@ -317,20 +321,12 @@ class ConfigDriveReader(BaseReader): return util.load_file(path) def _fetch_available_versions(self): - if self._versions is not None: - return self._versions - else: - versions_available = [] + if self._versions is None: path = self._path_join(self.base_path, 'openstack') - try: - for child in os.listdir(path): - child_path = os.path.join(path, child) - if os.path.isdir(child_path): - versions_available.append(child) - except (OSError, IOError): - pass - self._versions = tuple(versions_available) - return self._versions + found = [d for d in os.listdir(path) + if os.path.isdir(os.path.join(path))] + self._versions = tuple(found) + return self._versions def _read_ec2_metadata(self): path = self._path_join(self.base_path, @@ -420,24 +416,19 @@ class MetadataReader(BaseReader): self._versions = None def _fetch_available_versions(self): + # /openstack/ returns a newline separated list of versions if self._versions is not None: - return self._versions - else: - path = self._path_join(self.base_path, "openstack") - versions_available = [] - try: - versions = self._path_read(path) - except IOError as e: - LOG.warn("Unable to read openstack versions from %s due" - " to: %s", path, e) - else: - for line in versions.splitlines(): - line = line.strip() - if not line: - continue - versions_available.append(line) - self._versions = tuple(versions_available) - return self._versions + return self.os_versions + found = [] + content = self._path_read(version_path) + for line in content.splitlines(): + line = line.strip() + if not line: + continue + found.append(line) + self._versions = tuple(found) + return self._versions + def _path_read(self, path): diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py index 412ae5a4..530fba20 100644 --- a/tests/unittests/test_datasource/test_openstack.py +++ 
b/tests/unittests/test_datasource/test_openstack.py @@ -79,7 +79,7 @@ EC2_FILES = { 'latest/user-data': USER_DATA, } EC2_VERSIONS = [ - 'lastest', + 'latest', ] -- cgit v1.2.3 From 141caf7f3b224c0265c7bb0014b96ca08aa67193 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 2 Sep 2014 13:31:18 -0700 Subject: Remove/adjust the verbose 'failed at attempted import of' log Instead of using this log (which really isn't a failure) we should instead of just return the looked up locations and then if there really is an error the caller can handle the usage of the looked up locations as they choose fit. --- cloudinit/distros/__init__.py | 8 +++----- cloudinit/importer.py | 21 ++++++++------------- cloudinit/mergers/__init__.py | 10 ++++++---- cloudinit/sources/__init__.py | 6 +++--- cloudinit/stages.py | 18 +++++++++--------- 5 files changed, 29 insertions(+), 34 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 4b41220e..9c9211fe 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -847,12 +847,10 @@ def extract_default(users, default_name=None, default_config=None): def fetch(name): - locs = importer.find_module(name, - ['', __name__], - ['Distro']) + locs, looked_locs = importer.find_module(name, ['', __name__], ['Distro']) if not locs: - raise ImportError("No distribution found for distro %s" - % (name)) + raise ImportError("No distribution found for distro %s (searched %s)" + % (name, looked_locs)) mod = importer.import_module(locs[0]) cls = getattr(mod, 'Distro') return cls diff --git a/cloudinit/importer.py b/cloudinit/importer.py index a1929137..fb57253c 100644 --- a/cloudinit/importer.py +++ b/cloudinit/importer.py @@ -22,10 +22,6 @@ import sys -from cloudinit import log as logging - -LOG = logging.getLogger(__name__) - def import_module(module_name): __import__(module_name) @@ -33,25 +29,24 @@ def import_module(module_name): def find_module(base_name, search_paths, required_attrs=None): - found_places = [] if not required_attrs: required_attrs = [] # NOTE(harlowja): translate the search paths to include the base name. 
- real_paths = [] + lookup_paths = [] for path in search_paths: real_path = [] if path: real_path.extend(path.split(".")) real_path.append(base_name) full_path = '.'.join(real_path) - real_paths.append(full_path) - for full_path in real_paths: + lookup_paths.append(full_path) + found_paths = [] + for full_path in lookup_paths: mod = None try: mod = import_module(full_path) - except ImportError as e: - LOG.debug("Failed at attempted import of '%s' due to: %s", - full_path, e) + except ImportError: + pass if not mod: continue found_attrs = 0 @@ -59,5 +54,5 @@ def find_module(base_name, search_paths, required_attrs=None): if hasattr(mod, attr): found_attrs += 1 if found_attrs == len(required_attrs): - found_places.append(full_path) - return found_places + found_paths.append(full_path) + return (found_paths, lookup_paths) diff --git a/cloudinit/mergers/__init__.py b/cloudinit/mergers/__init__.py index 650b42a9..03aa1ee1 100644 --- a/cloudinit/mergers/__init__.py +++ b/cloudinit/mergers/__init__.py @@ -143,12 +143,14 @@ def construct(parsed_mergers): for (m_name, m_ops) in parsed_mergers: if not m_name.startswith(MERGER_PREFIX): m_name = MERGER_PREFIX + str(m_name) - merger_locs = importer.find_module(m_name, - [__name__], - [MERGER_ATTR]) + merger_locs, looked_locs = importer.find_module(m_name, + [__name__], + [MERGER_ATTR]) if not merger_locs: msg = ("Could not find merger module named '%s' " - "with attribute '%s'") % (m_name, MERGER_ATTR) + "with attribute '%s' (searched %s)") % (m_name, + MERGER_ATTR, + looked_locs) raise ImportError(msg) else: mod = importer.import_module(merger_locs[0]) diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 7d52a2e6..5d58bebd 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -272,9 +272,9 @@ def list_sources(cfg_list, depends, pkg_list): for ds_name in cfg_list: if not ds_name.startswith(DS_PREFIX): ds_name = '%s%s' % (DS_PREFIX, ds_name) - m_locs = importer.find_module(ds_name, - pkg_list, - ['get_datasource_list']) + m_locs, _looked_locs = importer.find_module(ds_name, + pkg_list, + ['get_datasource_list']) for m_loc in m_locs: mod = importer.import_module(m_loc) lister = getattr(mod, "get_datasource_list") diff --git a/cloudinit/stages.py b/cloudinit/stages.py index d29d480a..67f467f7 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -386,12 +386,12 @@ class Init(object): potential_handlers = util.find_modules(path) for (fname, mod_name) in potential_handlers.iteritems(): try: - mod_locs = importer.find_module(mod_name, [''], - ['list_types', - 'handle_part']) + mod_locs, looked_locs = importer.find_module( + mod_name, [''], ['list_types', 'handle_part']) if not mod_locs: - LOG.warn(("Could not find a valid user-data handler" - " named %s in file %s"), mod_name, fname) + LOG.warn("Could not find a valid user-data handler" + " named %s in file %s (searched %s)", + mod_name, fname, looked_locs) continue mod = importer.import_module(mod_locs[0]) mod = handlers.fixup_handler(mod) @@ -621,11 +621,11 @@ class Modules(object): " has an unknown frequency %s"), raw_name, freq) # Reset it so when ran it will get set to a known value freq = None - mod_locs = importer.find_module(mod_name, - ['', type_utils.obj_name(config)], - ['handle']) + mod_locs, looked_locs = importer.find_module( + mod_name, ['', type_utils.obj_name(config)], ['handle']) if not mod_locs: - LOG.warn("Could not find module named %s", mod_name) + LOG.warn("Could not find module named %s (searched %s)", + mod_name, 
looked_locs) continue mod = config.fixup_module(importer.import_module(mod_locs[0])) mostly_mods.append([mod, raw_name, freq, run_args]) -- cgit v1.2.3 From 7dade55ae5e429e0abb1687fb94799226c211e18 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 2 Sep 2014 14:17:38 -0700 Subject: Fix logic statement and pep8 issue --- cloudinit/sources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 7d52a2e6..a2a8f492 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -66,7 +66,7 @@ class DataSource(object): name = name[0:-3] self.ds_cfg = util.get_cfg_by_path(self.sys_cfg, - ("datasource", name), {}) + ("datasource", name), {}) if not ud_proc: self.ud_proc = ud.UserDataProcessor(self.paths) else: @@ -166,7 +166,7 @@ class DataSource(object): defhost = "localhost" domain = defdomain - if self.metadata or 'local-hostname' not in self.metadata: + if not self.metadata or 'local-hostname' not in self.metadata: # this is somewhat questionable really. # the cloud datasource was asked for a hostname # and didn't have one. raising error might be more appropriate -- cgit v1.2.3 From 6f8bd54650e78bd383b29a59bee95a887ff13c81 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 4 Sep 2014 10:33:37 -0700 Subject: Reduce logging levels and fix broken call to _fetch_available_versions --- cloudinit/sources/helpers/openstack.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 3c6bb6aa..61c61570 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -163,10 +163,10 @@ class BaseReader(object): def _find_working_version(self, version): try: - versions_available = self._fetch_available_versions(self) + versions_available = self._fetch_available_versions() except Exception as e: - LOG.warn("Unable to read openstack versions from %s due to: %s", - self.base_path, e) + LOG.debug("Unable to read openstack versions from %s due to: %s", + self.base_path, e) versions_available = [] search_versions = [version] + list(OS_VERSIONS) @@ -178,8 +178,8 @@ class BaseReader(object): break if selected_version != version: - LOG.warn("Version '%s' not available, attempting to use" - " version '%s' instead", version, selected_version) + LOG.debug("Version '%s' not available, attempting to use" + " version '%s' instead", version, selected_version) return selected_version def _read_content_path(self, item): @@ -239,7 +239,8 @@ class BaseReader(object): LOG.debug("Failed reading optional path %s due" " to: %s", path, e) else: - LOG.exception("Failed reading mandatory path %s", path) + LOG.debug("Failed reading mandatory path %s due" + " to: %s", path, e) else: found = True if required and not found: -- cgit v1.2.3 From 67efb99bc8e4628e86aef76fed3411c1cc763571 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 4 Sep 2014 12:33:41 -0700 Subject: Fix a couple more cases of exceptional logs --- cloudinit/sources/DataSourceOpenStack.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 0970d07b..221759e1 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -64,13 +64,13 @@ class 
DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): try: max_wait = int(self.ds_cfg.get("max_wait", max_wait)) - except Exception: - util.logexc(LOG, "Failed to get max wait. using %s", max_wait) + except Exception as e: + LOG.debug("Failed to get max wait. using %s: %s", max_wait, e) try: timeout = max(0, int(self.ds_cfg.get("timeout", timeout))) - except Exception: - util.logexc(LOG, "Failed to get timeout, using %s", timeout) + except Exception as e: + LOG.debug("Failed to get timeout, using %s: %s", timeout, e) return (max_wait, timeout) def wait_for_metadata_service(self): @@ -82,7 +82,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): if len(filtered): urls = filtered else: - LOG.warn("Empty metadata url list! using default list") + LOG.debug("Empty metadata url list! using default list") urls = [DEF_MD_URL] md_urls = [] @@ -123,9 +123,9 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): 'version': openstack.OS_HAVANA}) except openstack.NonReadable: return False - except (openstack.BrokenMetadata, IOError): - util.logexc(LOG, "Broken metadata address %s", - self.metadata_address) + except (openstack.BrokenMetadata, IOError) as e: + LOG.debug("Broken metadata address %s: %s", + self.metadata_address, e) return False user_dsmode = results.get('dsmode', None) -- cgit v1.2.3 From 822000955100179f9f5944e0b2403c95fac39458 Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Mon, 8 Sep 2014 21:32:21 -0700 Subject: Update read_config_drive to use OS_VERSIONS tuple for readers Updated read_config_drive: removed the unused version kwarg, used the OS_VERSIONS tuple from the openstack helper to avoid hardcoding versions. Added a comment to the tuple in helpers/openstack.py asking for it to be kept in chronological order. 
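For illustration, a minimal sketch of the resulting lookup order (version
strings and the OS_VERSIONS tuple are taken from
cloudinit/sources/helpers/openstack.py; the actual finder wiring is in the
diff below):

    # Illustrative only: order in which read_config_drive now tries readers,
    # assuming the helper's chronological OS_VERSIONS tuple.
    OS_VERSIONS = ('2012-08-10', '2013-04-04', '2013-10-17')

    finders = []
    for version in reversed(OS_VERSIONS):      # newest supported first
        finders.append(('read_v2', {'version': version}))
    finders.append(('read_v1', {}))            # legacy v1 fallback last

    # -> read_v2('2013-10-17'), read_v2('2013-04-04'),
    #    read_v2('2012-08-10'), then read_v1()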
--- cloudinit/sources/DataSourceConfigDrive.py | 18 ++++++++++-------- cloudinit/sources/helpers/openstack.py | 1 + 2 files changed, 11 insertions(+), 8 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 65477184..2456bc2e 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -169,15 +169,17 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None): return "net" -def read_config_drive(source_dir, version=openstack.OS_HAVANA): - reader = openstack.ConfigDriveReader(source_dir) - finders = [ - (reader.read_v2, [], {'version': version}), - (reader.read_v2, [], {'version': openstack.OS_GRIZZLY}), - (reader.read_v2, [], {'version': openstack.OS_FOLSOM}), - (reader.read_v1, [], {}), - ] +def read_config_drive(source_dir): excps = [] + finders = [] + reader = openstack.ConfigDriveReader(source_dir) + + # openstack.OS_VERSIONS is stored in chronological order, so to check the + # newest first, use reversed() + for version in reversed(openstack.OS_VERSIONS): + finders.append((reader.read_v2, [], {'version': version})) + finders.append((reader.read_v1, [], {})) + for (functor, args, kwargs) in finders: try: return functor(*args, **kwargs) diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 0fac0335..ed102c4c 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -48,6 +48,7 @@ OS_LATEST = 'latest' OS_FOLSOM = '2012-08-10' OS_GRIZZLY = '2013-04-04' OS_HAVANA = '2013-10-17' +# keep this in chronological order by time: add new entries to the end OS_VERSIONS = ( OS_FOLSOM, OS_GRIZZLY, -- cgit v1.2.3 From 6386accbf4933ecb368d2f32b7db95583e03b525 Mon Sep 17 00:00:00 2001 From: Jay Faulkner Date: Wed, 10 Sep 2014 09:01:42 -0700 Subject: Only use vendordata under cloud-init key for ConfigDrive This data will be treated the same as vendordata from other sources. --- cloudinit/sources/DataSourceConfigDrive.py | 2 -- 1 file changed, 2 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 2456bc2e..83cc6b25 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -132,8 +132,6 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): # for cloud-init (for namespacing). if 'cloud-init' in vd: self.vendordata_raw = vd['cloud-init'] - else: - self.vendordata_raw = vd return True -- cgit v1.2.3 From 1b975098ea2cc82a168203c720ef936db4864467 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 14:39:59 -0400 Subject: log as warn if things are obviously wrong If something is broken as in a built in config, or code just broken, then logging warning during search for metadata is ok. 
--- cloudinit/sources/DataSourceOpenStack.py | 16 ++++++++-------- cloudinit/sources/helpers/openstack.py | 6 ++++-- 2 files changed, 12 insertions(+), 10 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 221759e1..0970d07b 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -64,13 +64,13 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): try: max_wait = int(self.ds_cfg.get("max_wait", max_wait)) - except Exception as e: - LOG.debug("Failed to get max wait. using %s: %s", max_wait, e) + except Exception: + util.logexc(LOG, "Failed to get max wait. using %s", max_wait) try: timeout = max(0, int(self.ds_cfg.get("timeout", timeout))) - except Exception as e: - LOG.debug("Failed to get timeout, using %s: %s", timeout, e) + except Exception: + util.logexc(LOG, "Failed to get timeout, using %s", timeout) return (max_wait, timeout) def wait_for_metadata_service(self): @@ -82,7 +82,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): if len(filtered): urls = filtered else: - LOG.debug("Empty metadata url list! using default list") + LOG.warn("Empty metadata url list! using default list") urls = [DEF_MD_URL] md_urls = [] @@ -123,9 +123,9 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): 'version': openstack.OS_HAVANA}) except openstack.NonReadable: return False - except (openstack.BrokenMetadata, IOError) as e: - LOG.debug("Broken metadata address %s: %s", - self.metadata_address, e) + except (openstack.BrokenMetadata, IOError): + util.logexc(LOG, "Broken metadata address %s", + self.metadata_address) return False user_dsmode = results.get('dsmode', None) diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 7b27621c..0c6b9b5a 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -178,8 +178,10 @@ class BaseReader(object): break if selected_version != version: - LOG.debug("Version '%s' not available, attempting to use" - " version '%s' instead", version, selected_version) + LOG.warn("Version '%s' not available, attempting to use " + "version '%s' instead", version, selected_version) + else: + LOG.debug("Version '%s' was available.", version) return selected_version def _read_content_path(self, item): -- cgit v1.2.3 From 64f4a63c174a4f7c21e6e372927906099103d64c Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 14:42:33 -0400 Subject: fix a bug, use a list instead of tuple for _versions using tuple for _versions was just not necessary. fix reference to undefined os_versions. 
--- cloudinit/sources/helpers/openstack.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 0c6b9b5a..3c3de7e4 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -328,7 +328,7 @@ class ConfigDriveReader(BaseReader): path = self._path_join(self.base_path, 'openstack') found = [d for d in os.listdir(path) if os.path.isdir(os.path.join(path))] - self._versions = tuple(found) + self._versions = found return self._versions def _read_ec2_metadata(self): @@ -421,7 +421,7 @@ class MetadataReader(BaseReader): def _fetch_available_versions(self): # /openstack/ returns a newline separated list of versions if self._versions is not None: - return self.os_versions + return self._versions found = [] version_path = self._path_join(self.base_path, "openstack") content = self._path_read(version_path) @@ -430,7 +430,7 @@ class MetadataReader(BaseReader): if not line: continue found.append(line) - self._versions = tuple(found) + self._versions = found return self._versions -- cgit v1.2.3 From c4b09a27239bc88bcf6b4ec536410bcc02cdf11c Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 15:37:09 -0400 Subject: make BaseReader select latest supported version --- cloudinit/sources/DataSourceConfigDrive.py | 16 ++++++---------- cloudinit/sources/DataSourceOpenStack.py | 3 +-- cloudinit/sources/helpers/openstack.py | 19 +++++++++++++------ 3 files changed, 20 insertions(+), 18 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 83cc6b25..b8c16361 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -167,17 +167,13 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None): return "net" -def read_config_drive(source_dir): - excps = [] - finders = [] +def read_config_drive(source_dir, version=None): reader = openstack.ConfigDriveReader(source_dir) - - # openstack.OS_VERSIONS is stored in chronological order, so to check the - # newest first, use reversed() - for version in reversed(openstack.OS_VERSIONS): - finders.append((reader.read_v2, [], {'version': version})) - finders.append((reader.read_v1, [], {})) - + finders = [ + (reader.read_v2, [], {'version': version}), + (reader.read_v1, [], {}), + ] + excps = [] for (functor, args, kwargs) in finders: try: return functor(*args, **kwargs) diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 0970d07b..ce8e8364 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -119,8 +119,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): 'Crawl of openstack metadata service', read_metadata_service, args=[self.metadata_address], - kwargs={'ssl_details': self.ssl_details, - 'version': openstack.OS_HAVANA}) + kwargs={'ssl_details': self.ssl_details}) except openstack.NonReadable: return False except (openstack.BrokenMetadata, IOError): diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index ed91a55c..ed9cdbaa 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -48,7 +48,7 @@ OS_LATEST = 'latest' OS_FOLSOM = '2012-08-10' OS_GRIZZLY = '2013-04-04' OS_HAVANA = '2013-10-17' -# keep this in chronological order by time: add new 
entries to the end +# keep this in chronological order. new supported versions go at the end. OS_VERSIONS = ( OS_FOLSOM, OS_GRIZZLY, @@ -162,7 +162,7 @@ class BaseReader(object): def _read_ec2_metadata(self): pass - def _find_working_version(self, version): + def _find_working_version(self, version=None): try: versions_available = self._fetch_available_versions() except Exception as e: @@ -170,7 +170,14 @@ class BaseReader(object): self.base_path, e) versions_available = [] - search_versions = [version] + list(OS_VERSIONS) + # openstack.OS_VERSIONS is stored in chronological order, so + # reverse it to check newest first. + supported = [v for v in reversed(list(OS_VERSIONS))] + if version is not None: + search_versions = [version] + supported + else: + search_versions = supported + selected_version = OS_LATEST for potential_version in search_versions: if potential_version not in versions_available: @@ -178,11 +185,12 @@ class BaseReader(object): selected_version = potential_version break - if selected_version != version: + if version is not None and selected_version != version: LOG.warn("Version '%s' not available, attempting to use " "version '%s' instead", version, selected_version) else: - LOG.debug("Version '%s' was available.", version) + LOG.debug("Selected version '%s' from %s", version, + versions_available) return selected_version def _read_content_path(self, item): @@ -434,7 +442,6 @@ class MetadataReader(BaseReader): self._versions = found return self._versions - def _path_read(self, path): def should_retry_cb(_request_args, cause): -- cgit v1.2.3 From c548fdf3201519c1c30c815ba9feec643b87e0bf Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 15:40:30 -0400 Subject: fix log message --- cloudinit/sources/helpers/openstack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index ed9cdbaa..3d903a3c 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -189,7 +189,7 @@ class BaseReader(object): LOG.warn("Version '%s' not available, attempting to use " "version '%s' instead", version, selected_version) else: - LOG.debug("Selected version '%s' from %s", version, + LOG.debug("Selected version '%s' from %s", selected_version, versions_available) return selected_version -- cgit v1.2.3 From 44113217719ebee756325b40a5d14045ba8f3a3a Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 16:00:00 -0400 Subject: drop version= from readers instead of taking a version that they should look for, the readers now just select the highest supported version. definitely a use case later for having version= but nothing is using it now. 
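A minimal sketch of the selection the readers now perform (this mirrors
_find_working_version in the diff below; the error handling around
_fetch_available_versions is omitted):

    OS_LATEST = 'latest'
    OS_VERSIONS = ('2012-08-10', '2013-04-04', '2013-10-17')

    def pick_version(versions_available):
        # Walk supported versions newest-first and take the first one the
        # metadata source actually provides; otherwise fall back to 'latest'.
        for candidate in reversed(OS_VERSIONS):
            if candidate in versions_available:
                return candidate
        return OS_LATEST

    # pick_version(['2012-08-10', '2013-04-04']) -> '2013-04-04'
    # pick_version([])                           -> 'latest'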
--- cloudinit/sources/DataSourceConfigDrive.py | 4 ++-- cloudinit/sources/DataSourceOpenStack.py | 4 ++-- cloudinit/sources/helpers/openstack.py | 23 +++++++---------------- tests/unittests/test_datasource/test_openstack.py | 16 ++++++++-------- 4 files changed, 19 insertions(+), 28 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index b8c16361..a27d07fb 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -167,10 +167,10 @@ def get_ds_mode(cfgdrv_ver, ds_cfg=None, user=None): return "net" -def read_config_drive(source_dir, version=None): +def read_config_drive(source_dir): reader = openstack.ConfigDriveReader(source_dir) finders = [ - (reader.read_v2, [], {'version': version}), + (reader.read_v2, [], {}), (reader.read_v1, [], {}), ] excps = [] diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index ce8e8364..466de8f4 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -153,9 +153,9 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): return True -def read_metadata_service(base_url, version=None, ssl_details=None): +def read_metadata_service(base_url, ssl_details=None): reader = openstack.MetadataReader(base_url, ssl_details=ssl_details) - return reader.read_v2(version=version) + return reader.read_v2() # Used to match classes to dependencies diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 3d903a3c..a7dd05df 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -162,7 +162,7 @@ class BaseReader(object): def _read_ec2_metadata(self): pass - def _find_working_version(self, version=None): + def _find_working_version(self): try: versions_available = self._fetch_available_versions() except Exception as e: @@ -173,24 +173,16 @@ class BaseReader(object): # openstack.OS_VERSIONS is stored in chronological order, so # reverse it to check newest first. supported = [v for v in reversed(list(OS_VERSIONS))] - if version is not None: - search_versions = [version] + supported - else: - search_versions = supported - selected_version = OS_LATEST - for potential_version in search_versions: + + for potential_version in supported: if potential_version not in versions_available: continue selected_version = potential_version break - if version is not None and selected_version != version: - LOG.warn("Version '%s' not available, attempting to use " - "version '%s' instead", version, selected_version) - else: - LOG.debug("Selected version '%s' from %s", selected_version, - versions_available) + LOG.debug("Selected version '%s' from %s", selected_version, + versions_available) return selected_version def _read_content_path(self, item): @@ -202,7 +194,7 @@ class BaseReader(object): path = self._path_join(self.base_path, "openstack", *path_pieces) return self._path_read(path) - def read_v2(self, version=None): + def read_v2(self): """Reads a version 2 formatted location. 
Return a dict with metadata, userdata, ec2-metadata, dsmode, @@ -233,12 +225,11 @@ class BaseReader(object): ) return files - version = self._find_working_version(version) results = { 'userdata': '', 'version': 2, } - data = datafiles(version) + data = datafiles(self._find_working_version()) for (name, (path, required, translator)) in data.iteritems(): path = self._path_join(self.base_path, path) data = None diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py index 530fba20..6809823e 100644 --- a/tests/unittests/test_datasource/test_openstack.py +++ b/tests/unittests/test_datasource/test_openstack.py @@ -142,7 +142,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): @hp.activate def test_successful(self): _register_uris(self.VERSION, EC2_FILES, EC2_META, OS_FILES) - f = ds.read_metadata_service(BASE_URL, version=self.VERSION) + f = ds.read_metadata_service(BASE_URL) self.assertEquals(VENDOR_DATA, f.get('vendordata')) self.assertEquals(CONTENT_0, f['files']['/etc/foo.cfg']) self.assertEquals(CONTENT_1, f['files']['/etc/bar/bar.cfg']) @@ -164,7 +164,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): @hp.activate def test_no_ec2(self): _register_uris(self.VERSION, {}, {}, OS_FILES) - f = ds.read_metadata_service(BASE_URL, version=self.VERSION) + f = ds.read_metadata_service(BASE_URL) self.assertEquals(VENDOR_DATA, f.get('vendordata')) self.assertEquals(CONTENT_0, f['files']['/etc/foo.cfg']) self.assertEquals(CONTENT_1, f['files']['/etc/bar/bar.cfg']) @@ -180,7 +180,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): os_files.pop(k, None) _register_uris(self.VERSION, {}, {}, os_files) self.assertRaises(openstack.NonReadable, ds.read_metadata_service, - BASE_URL, version=self.VERSION) + BASE_URL) @hp.activate def test_bad_uuid(self): @@ -192,7 +192,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): os_files[k] = json.dumps(os_meta) _register_uris(self.VERSION, {}, {}, os_files) self.assertRaises(openstack.BrokenMetadata, ds.read_metadata_service, - BASE_URL, version=self.VERSION) + BASE_URL) @hp.activate def test_userdata_empty(self): @@ -201,7 +201,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): if k.endswith('user_data'): os_files.pop(k, None) _register_uris(self.VERSION, {}, {}, os_files) - f = ds.read_metadata_service(BASE_URL, version=self.VERSION) + f = ds.read_metadata_service(BASE_URL) self.assertEquals(VENDOR_DATA, f.get('vendordata')) self.assertEquals(CONTENT_0, f['files']['/etc/foo.cfg']) self.assertEquals(CONTENT_1, f['files']['/etc/bar/bar.cfg']) @@ -214,7 +214,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): if k.endswith('vendor_data.json'): os_files.pop(k, None) _register_uris(self.VERSION, {}, {}, os_files) - f = ds.read_metadata_service(BASE_URL, version=self.VERSION) + f = ds.read_metadata_service(BASE_URL) self.assertEquals(CONTENT_0, f['files']['/etc/foo.cfg']) self.assertEquals(CONTENT_1, f['files']['/etc/bar/bar.cfg']) self.assertFalse(f.get('vendordata')) @@ -227,7 +227,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): os_files[k] = '{' # some invalid json _register_uris(self.VERSION, {}, {}, os_files) self.assertRaises(openstack.BrokenMetadata, ds.read_metadata_service, - BASE_URL, version=self.VERSION) + BASE_URL) @hp.activate def test_metadata_invalid(self): @@ -237,7 +237,7 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): os_files[k] = '{' # some invalid json 
_register_uris(self.VERSION, {}, {}, os_files) self.assertRaises(openstack.BrokenMetadata, ds.read_metadata_service, - BASE_URL, version=self.VERSION) + BASE_URL) @hp.activate def test_datasource(self): -- cgit v1.2.3 From 2755274f34ef11651a5ee57a31955f3e413cdfc4 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 16:46:11 -0400 Subject: OpenStack: search less urls to determine if MD service is there. We were checking for presense of meta_data.json for each supported metadata version. Instead just check that /openstack is there. This reduces the time to check on EC2 or any other cloud. --- cloudinit/sources/DataSourceOpenStack.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 466de8f4..765137c6 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -88,11 +88,9 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): md_urls = [] url2base = {} for url in urls: - for version in openstack.OS_VERSIONS + (openstack.OS_LATEST,): - md_url = url_helper.combine_url(url, 'openstack', - version, 'meta_data.json') - md_urls.append(md_url) - url2base[md_url] = url + md_url = url_helper.combine_url(url, 'openstack') + md_urls.append(md_url) + url2base[md_url] = url (max_wait, timeout) = self._get_url_settings() start_time = time.time() -- cgit v1.2.3 From 932c073db79e667623d27174c55e5b16ea439578 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Sep 2014 21:17:40 -0400 Subject: Openstack: Vendor data cleanup For now, this vendor data handling is just added to openstack. However, in an effort to allow sanely handling of multi-part vendor-data that is namespaced, we add openstack.convert_vendordata_json . That basically takes whatever was loaded from vendordata and takes the 'cloud-init' key if it is a dict. This way the author can namespace cloud-init, basically telling it to ignore everything else. --- cloudinit/sources/DataSourceConfigDrive.py | 13 +++++---- cloudinit/sources/DataSourceOpenStack.py | 12 ++++---- cloudinit/sources/helpers/openstack.py | 25 ++++++++++++++++ tests/unittests/test_datasource/test_openstack.py | 35 ++++++++++++++++++++++- 4 files changed, 72 insertions(+), 13 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index a27d07fb..4e5d90de 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -126,12 +126,13 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): self.version = results['version'] self.files.update(results.get('files', {})) - # If there is no vendordata, set vd to an empty dict instead of None - vd = results.get('vendordata', {}) - # if vendordata includes 'cloud-init', then read that explicitly - # for cloud-init (for namespacing). 
- if 'cloud-init' in vd: - self.vendordata_raw = vd['cloud-init'] + vd = results.get('vendordata') + self.vendordata_pure = vd + try: + self.vendordata_raw = openstack.convert_vendordata_json(vd) + except ValueError as e: + LOG.warn("Invalid content in vendor-data: %s", e) + self.vendordata_raw = None return True diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 765137c6..469c2e2a 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -140,13 +140,13 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource): self.version = results['version'] self.files.update(results.get('files', {})) - # if vendordata includes 'cloud-init', then read that explicitly - # for cloud-init (for namespacing). vd = results.get('vendordata') - if isinstance(vd, dict) and 'cloud-init' in vd: - self.vendordata_raw = vd['cloud-init'] - else: - self.vendordata_raw = vd + self.vendordata_pure = vd + try: + self.vendordata_raw = openstack.convert_vendordata_json(vd) + except ValueError as e: + LOG.warn("Invalid content in vendor-data: %s", e) + self.vendordata_raw = None return True diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index a7dd05df..9718b0be 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -459,3 +459,28 @@ class MetadataReader(BaseReader): return ec2_utils.get_instance_metadata(ssl_details=self.ssl_details, timeout=self.timeout, retries=self.retries) + + +def convert_vendordata_json(data, recurse=True): + """ data: a loaded json *object* (strings, arrays, dicts). + return something suitable for cloudinit vendordata_raw. + + if data is: + None: return None + string: return string + list: return data + the list is then processed in UserDataProcessor + dict: return convert_vendordata_json(data.get('cloud-init')) + """ + if not data: + return None + if isinstance(data, (str, unicode, basestring)): + return data + if isinstance(data, list): + return copy.deepcopy(data) + if isinstance(data, dict): + if recurse is True: + return convert_vendordata_json(data.get('cloud-init'), + recurse=False) + raise ValueError("vendordata['cloud-init'] cannot be dict") + raise ValueError("Unknown data type for vendordata: %s" % type(data)) diff --git a/tests/unittests/test_datasource/test_openstack.py b/tests/unittests/test_datasource/test_openstack.py index 6809823e..7b4e651a 100644 --- a/tests/unittests/test_datasource/test_openstack.py +++ b/tests/unittests/test_datasource/test_openstack.py @@ -19,6 +19,7 @@ import copy import json import re +import unittest from StringIO import StringIO @@ -256,7 +257,8 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): self.assertEquals(EC2_META, ds_os.ec2_metadata) self.assertEquals(USER_DATA, ds_os.userdata_raw) self.assertEquals(2, len(ds_os.files)) - self.assertEquals(VENDOR_DATA, ds_os.vendordata_raw) + self.assertEquals(VENDOR_DATA, ds_os.vendordata_pure) + self.assertEquals(ds_os.vendordata_raw, None) @hp.activate def test_bad_datasource_meta(self): @@ -314,3 +316,34 @@ class TestOpenStackDataSource(test_helpers.HttprettyTestCase): found = ds_os.get_data() self.assertFalse(found) self.assertIsNone(ds_os.version) + + +class TestVendorDataLoading(unittest.TestCase): + def cvj(self, data): + return openstack.convert_vendordata_json(data) + + def test_vd_load_none(self): + # non-existant vendor-data should return none + self.assertIsNone(self.cvj(None)) + + def 
test_vd_load_string(self): + self.assertEqual(self.cvj("foobar"), "foobar") + + def test_vd_load_list(self): + data = [{'foo': 'bar'}, 'mystring', list(['another', 'list'])] + self.assertEqual(self.cvj(data), data) + + def test_vd_load_dict_no_ci(self): + self.assertEqual(self.cvj({'foo': 'bar'}), None) + + def test_vd_load_dict_ci_dict(self): + self.assertRaises(ValueError, self.cvj, + {'foo': 'bar', 'cloud-init': {'x': 1}}) + + def test_vd_load_dict_ci_string(self): + data = {'foo': 'bar', 'cloud-init': 'VENDOR_DATA'} + self.assertEqual(self.cvj(data), data['cloud-init']) + + def test_vd_load_dict_ci_list(self): + data = {'foo': 'bar', 'cloud-init': ['VD_1', 'VD_2']} + self.assertEqual(self.cvj(data), data['cloud-init']) -- cgit v1.2.3 From b2ba18dcc48b4721b097dc108f5a5eceba6b87ab Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 11 Sep 2014 10:41:10 -0400 Subject: when loading vendordata allow it to be string or list --- cloudinit/sources/helpers/openstack.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 9718b0be..b7e19314 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -21,6 +21,7 @@ import abc import base64 import copy +import functools import os from cloudinit import ec2_utils @@ -203,6 +204,9 @@ class BaseReader(object): If not a valid location, raise a NonReadable exception. """ + load_json_anytype = functools.partial( + util.load_json, root_types=(dict, basestring, list)) + def datafiles(version): files = {} files['metadata'] = ( @@ -221,7 +225,7 @@ class BaseReader(object): files['vendordata'] = ( self._path_join("openstack", version, 'vendor_data.json'), False, - util.load_json, + load_json_anytype, ) return files -- cgit v1.2.3
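For reference, a short usage sketch of the vendor-data conversion introduced
in "Openstack: Vendor data cleanup" above (behaviour taken from the
convert_vendordata_json docstring and the unit tests; illustrative calls
only, Python 2 era code):

    from cloudinit.sources.helpers import openstack

    openstack.convert_vendordata_json(None)                  # -> None
    openstack.convert_vendordata_json("#cloud-config\n")     # -> the string
    openstack.convert_vendordata_json(['part-1', 'part-2'])  # -> copy of list
    openstack.convert_vendordata_json({'cloud-init': 'VD'})  # -> 'VD'
    openstack.convert_vendordata_json({'other': 'x'})        # -> None
    # a dict under 'cloud-init' raises ValueError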