| field | value | date |
|---|---|---|
| author | Ben Howard <ben.howard@canonical.com> | 2013-08-20 09:58:30 -0600 |
| committer | Ben Howard <ben.howard@canonical.com> | 2013-08-20 09:58:30 -0600 |
| commit | 89d58c94d59c62dddea3f77b7a805c1afa2ca955 (patch) | |
| tree | 6b7cd86a373706730f0ec11f06517fc2a003f7c9 | |
| parent | e683ab2baa3e67614edcd409122bd1aec99737e0 (diff) | |
| parent | b2ee0966793f3a9c7d8e92ce1c13b9583a9a76e7 (diff) | |
| download | vyos-cloud-init-89d58c94d59c62dddea3f77b7a805c1afa2ca955.tar.gz, vyos-cloud-init-89d58c94d59c62dddea3f77b7a805c1afa2ca955.zip | |
Updated merge proposal with current tree
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | ChangeLog | 4 |
| -rwxr-xr-x | bin/cloud-init | 4 |
| -rw-r--r-- | cloudinit/config/cc_apt_configure.py | 38 |
| -rw-r--r-- | cloudinit/config/cc_growpart.py | 9 |
| -rw-r--r-- | cloudinit/config/cc_resizefs.py | 11 |
| -rw-r--r-- | cloudinit/distros/__init__.py | 9 |
| -rw-r--r-- | cloudinit/distros/debian.py | 8 |
| -rw-r--r-- | cloudinit/distros/rhel.py | 10 |
| -rw-r--r-- | cloudinit/distros/sles.py | 10 |
| -rw-r--r-- | cloudinit/sources/DataSourceAzure.py | 22 |
| -rw-r--r-- | cloudinit/util.py | 35 |
| -rw-r--r-- | doc/examples/cloud-config-TODO.txt | 20 |
| -rw-r--r-- | doc/examples/cloud-config.txt | 3 |
| -rw-r--r-- | tests/unittests/test_datasource/test_azure.py | 12 |
| -rw-r--r-- | tests/unittests/test_handler/test_handler_apt_configure.py | 106 |
| -rw-r--r-- | tests/unittests/test_handler/test_handler_growpart.py | 3 |
16 files changed, 223 insertions, 81 deletions
@@ -11,6 +11,10 @@
  - always finalize handlers even if processing failed (LP: #1203368)
  - support merging into cloud-config via jsonp. (LP: #1200476)
  - add datasource 'SmartOS' for Joyent Cloud.  Adds a dependency on serial.
+ - add 'log_time' helper to util for timing how long things take
+   which also reads from uptime. uptime is useful as clock may change during
+   boot due to ntp.
+ - prefer growpart resizer to 'parted resizepart' (LP: #1212492)
 0.7.2:
  - add a debian watch file
  - add 'sudo' entry to ubuntu's default user (LP: #1080717)
diff --git a/bin/cloud-init b/bin/cloud-init
index c5a5b949..b4f9fd07 100755
--- a/bin/cloud-init
+++ b/bin/cloud-init
@@ -502,7 +502,9 @@ def main():
     signal_handler.attach_handlers()
 
     (name, functor) = args.action
-    return functor(name, args)
+
+    return util.log_time(logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
+                         get_uptime=True, func=functor, args=(name, args))
 
 
 if __name__ == '__main__':
diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py
index 3ce3b351..5a407016 100644
--- a/cloudinit/config/cc_apt_configure.py
+++ b/cloudinit/config/cc_apt_configure.py
@@ -27,7 +27,8 @@ from cloudinit import util
 distros = ['ubuntu', 'debian']
 
 PROXY_TPL = "Acquire::HTTP::Proxy \"%s\";\n"
-PROXY_FN = "/etc/apt/apt.conf.d/95cloud-init-proxy"
+APT_CONFIG_FN = "/etc/apt/apt.conf.d/94cloud-init-config"
+APT_PROXY_FN = "/etc/apt/apt.conf.d/95cloud-init-proxy"
 
 # A temporary shell program to get a given gpg key
 # from a given keyserver
@@ -67,18 +68,10 @@ def handle(name, cfg, cloud, log, _args):
                        "security": "security.ubuntu.com/ubuntu"})
         rename_apt_lists(old_mirrors, mirrors)
 
-    # Set up any apt proxy
-    proxy = cfg.get("apt_proxy", None)
-    proxy_filename = PROXY_FN
-    if proxy:
-        try:
-            # See man 'apt.conf'
-            contents = PROXY_TPL % (proxy)
-            util.write_file(proxy_filename, contents)
-        except Exception as e:
-            util.logexc(log, "Failed to write proxy to %s", proxy_filename)
-    elif os.path.isfile(proxy_filename):
-        util.del_file(proxy_filename)
+    try:
+        apply_apt_config(cfg, APT_PROXY_FN, APT_CONFIG_FN)
+    except Exception as e:
+        log.warn("failed to proxy or apt config info: %s", e)
 
     # Process 'apt_sources'
     if 'apt_sources' in cfg:
@@ -256,3 +249,22 @@ def find_apt_mirror_info(cloud, cfg):
         mirror_info.update({'primary': mirror})
 
     return mirror_info
+
+
+def apply_apt_config(cfg, proxy_fname, config_fname):
+    # Set up any apt proxy
+    cfgs = (('apt_proxy', 'Acquire::HTTP::Proxy "%s";'),
+            ('apt_http_proxy', 'Acquire::HTTP::Proxy "%s";'),
+            ('apt_ftp_proxy', 'Acquire::FTP::Proxy "%s";'),
+            ('apt_https_proxy', 'Acquire::HTTPS::Proxy "%s";'))
+
+    proxies = [fmt % cfg.get(name) for (name, fmt) in cfgs if cfg.get(name)]
+    if len(proxies):
+        util.write_file(proxy_fname, '\n'.join(proxies) + '\n')
+    elif os.path.isfile(proxy_fname):
+        util.del_file(proxy_fname)
+
+    if cfg.get('apt_config', None):
+        util.write_file(config_fname, cfg.get('apt_config'))
+    elif os.path.isfile(config_fname):
+        util.del_file(config_fname)
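A quick illustration of what the new apply_apt_config() above does with the added cloud-config keys. This is a simplified, standalone sketch: the render_proxy_lines helper and the sample cfg values are invented for the example, and the real function also writes or removes the files via util.write_file / util.del_file.

```python
# Simplified sketch of the key -> apt.conf template mapping added above.
CFG_TEMPLATES = (('apt_proxy', 'Acquire::HTTP::Proxy "%s";'),
                 ('apt_http_proxy', 'Acquire::HTTP::Proxy "%s";'),
                 ('apt_ftp_proxy', 'Acquire::FTP::Proxy "%s";'),
                 ('apt_https_proxy', 'Acquire::HTTPS::Proxy "%s";'))

def render_proxy_lines(cfg):
    # mirrors the list comprehension in apply_apt_config()
    return [fmt % cfg.get(name) for (name, fmt) in CFG_TEMPLATES if cfg.get(name)]

# hypothetical user-data values
cfg = {'apt_proxy': 'http://proxy.example.org:3128',
       'apt_ftp_proxy': 'ftp://proxy.example.org:2121'}

print('\n'.join(render_proxy_lines(cfg)))
# Acquire::HTTP::Proxy "http://proxy.example.org:3128";
# Acquire::FTP::Proxy "ftp://proxy.example.org:2121";
```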
diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py
index 4f8c8f80..2d54aabf 100644
--- a/cloudinit/config/cc_growpart.py
+++ b/cloudinit/config/cc_growpart.py
@@ -96,7 +96,7 @@ class ResizeParted(object):
     def resize(self, diskdev, partnum, partdev):
         before = get_size(partdev)
         try:
-            util.subp(["parted", "resizepart", diskdev, partnum])
+            util.subp(["parted", diskdev, "resizepart", partnum])
         except util.ProcessExecutionError as e:
             raise ResizeFailedException(e)
 
@@ -264,11 +264,14 @@ def handle(_name, cfg, _cloud, log, _args):
             raise e
         return
 
-    resized = resize_devices(resizer, devices)
+    resized = util.log_time(logfunc=log.debug, msg="resize_devices",
+                            func=resize_devices, args=(resizer, devices))
     for (entry, action, msg) in resized:
         if action == RESIZE.CHANGED:
             log.info("'%s' resized: %s" % (entry, msg))
         else:
             log.debug("'%s' %s: %s" % (entry, action, msg))
 
-RESIZERS = (('parted', ResizeParted), ('growpart', ResizeGrowPart))
+# LP: 1212444 FIXME re-order and favor ResizeParted
+#RESIZERS = (('growpart', ResizeGrowPart),)
+RESIZERS = (('growpart', ResizeGrowPart), ('parted', ResizeParted))
diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py
index b4ee16b2..56040fdd 100644
--- a/cloudinit/config/cc_resizefs.py
+++ b/cloudinit/config/cc_resizefs.py
@@ -21,7 +21,6 @@
 import errno
 import os
 import stat
-import time
 
 from cloudinit.settings import PER_ALWAYS
 from cloudinit import util
@@ -120,9 +119,12 @@ def handle(name, cfg, _cloud, log, args):
     if resize_root == NOBLOCK:
         # Fork to a child that will run
         # the resize command
-        util.fork_cb(do_resize, resize_cmd, log)
+        util.fork_cb(
+            util.log_time(logfunc=log.debug, msg="backgrounded Resizing",
+                          func=do_resize, args=(resize_cmd, log)))
     else:
-        do_resize(resize_cmd, log)
+        util.log_time(logfunc=log.debug, msg="Resizing",
+                      func=do_resize, args=(resize_cmd, log))
 
     action = 'Resized'
     if resize_root == NOBLOCK:
@@ -132,13 +134,10 @@ def handle(name, cfg, _cloud, log, args):
 
 
 def do_resize(resize_cmd, log):
-    start = time.time()
     try:
         util.subp(resize_cmd)
     except util.ProcessExecutionError:
         util.logexc(log, "Failed to resize filesystem (cmd=%s)", resize_cmd)
         raise
-    tot_time = time.time() - start
-    log.debug("Resizing took %.3f seconds", tot_time)
     # TODO(harlowja): Should we add a fsck check after this to make
     # sure we didn't corrupt anything?
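The RESIZERS reordering above is the behavioral heart of the growpart change: in 'auto' mode the first usable entry wins, so growpart is now preferred over parted. A standalone sketch of that selection idea (the stub classes and their available() results are invented for illustration; the real module probes the installed tools before choosing):

```python
# Stub classes stand in for the real ResizeGrowPart / ResizeParted.
class ResizeGrowPart(object):
    def available(self):
        return True    # pretend 'growpart' is installed

class ResizeParted(object):
    def available(self):
        return False   # pretend parted lacks a usable 'resizepart'

RESIZERS = (('growpart', ResizeGrowPart), ('parted', ResizeParted))

def resizer_for(mode='auto'):
    # walk the tuple in order; an explicit mode picks by name,
    # 'auto' picks the first resizer that reports itself available
    for (name, cls) in RESIZERS:
        resizer = cls()
        if mode == name or (mode == 'auto' and resizer.available()):
            return resizer
    raise ValueError("no resizer found for mode %r" % mode)

print(type(resizer_for('auto')).__name__)   # -> ResizeGrowPart
```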
diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py
index 249e1b19..74e95797 100644
--- a/cloudinit/distros/__init__.py
+++ b/cloudinit/distros/__init__.py
@@ -47,9 +47,11 @@ LOG = logging.getLogger(__name__)
 class Distro(object):
     __metaclass__ = abc.ABCMeta
 
+    hosts_fn = "/etc/hosts"
     ci_sudoers_fn = "/etc/sudoers.d/90-cloud-init-users"
     hostname_conf_fn = "/etc/hostname"
+    tz_zone_dir = "/usr/share/zoneinfo"
 
     def __init__(self, name, cfg, paths):
         self._paths = paths
         self._cfg = cfg
@@ -66,6 +68,13 @@ class Distro(object):
         # to write this blob out in a distro format
         raise NotImplementedError()
 
+    def _find_tz_file(self, tz):
+        tz_file = os.path.join(self.tz_zone_dir, str(tz))
+        if not os.path.isfile(tz_file):
+            raise IOError(("Invalid timezone %s,"
+                           " no file found at %s") % (tz, tz_file))
+        return tz_file
+
     def get_option(self, opt_name, default=None):
         return self._cfg.get(opt_name, default)
 
diff --git a/cloudinit/distros/debian.py b/cloudinit/distros/debian.py
index 0811eefd..8fe49cbe 100644
--- a/cloudinit/distros/debian.py
+++ b/cloudinit/distros/debian.py
@@ -44,7 +44,6 @@ class Distro(distros.Distro):
     network_conf_fn = "/etc/network/interfaces"
     tz_conf_fn = "/etc/timezone"
     tz_local_fn = "/etc/localtime"
-    tz_zone_dir = "/usr/share/zoneinfo"
 
     def __init__(self, name, cfg, paths):
         distros.Distro.__init__(self, name, cfg, paths)
@@ -130,12 +129,7 @@ class Distro(distros.Distro):
         return "127.0.1.1"
 
     def set_timezone(self, tz):
-        # TODO(harlowja): move this code into
-        # the parent distro...
-        tz_file = os.path.join(self.tz_zone_dir, str(tz))
-        if not os.path.isfile(tz_file):
-            raise RuntimeError(("Invalid timezone %s,"
-                                " no file found at %s") % (tz, tz_file))
+        tz_file = self._find_tz_file(tz)
         # Note: "" provides trailing newline during join
         tz_lines = [
             util.make_header(),
diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py
index a022ca60..30195384 100644
--- a/cloudinit/distros/rhel.py
+++ b/cloudinit/distros/rhel.py
@@ -20,8 +20,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import os
-
 from cloudinit import distros
 from cloudinit import helpers
 from cloudinit import log as logging
@@ -51,7 +49,6 @@ class Distro(distros.Distro):
     network_script_tpl = '/etc/sysconfig/network-scripts/ifcfg-%s'
     resolve_conf_fn = "/etc/resolv.conf"
     tz_local_fn = "/etc/localtime"
-    tz_zone_dir = "/usr/share/zoneinfo"
 
     def __init__(self, name, cfg, paths):
         distros.Distro.__init__(self, name, cfg, paths)
@@ -164,12 +161,7 @@ class Distro(distros.Distro):
         return distros.Distro._bring_up_interfaces(self, device_names)
 
     def set_timezone(self, tz):
-        # TODO(harlowja): move this code into
-        # the parent distro...
-        tz_file = os.path.join(self.tz_zone_dir, str(tz))
-        if not os.path.isfile(tz_file):
-            raise RuntimeError(("Invalid timezone %s,"
-                                " no file found at %s") % (tz, tz_file))
+        tz_file = self._find_tz_file(tz)
         if self._dist_uses_systemd():
             # Currently, timedatectl complains if invoked during startup
             # so for compatibility, create the link manually.
diff --git a/cloudinit/distros/sles.py b/cloudinit/distros/sles.py
index 904e931a..f2ac4efc 100644
--- a/cloudinit/distros/sles.py
+++ b/cloudinit/distros/sles.py
@@ -18,8 +18,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import os
-
 from cloudinit import distros
 
 from cloudinit.distros.parsers.hostname import HostnameConf
@@ -42,7 +40,6 @@ class Distro(distros.Distro):
     network_script_tpl = '/etc/sysconfig/network/ifcfg-%s'
     resolve_conf_fn = '/etc/resolv.conf'
     tz_local_fn = '/etc/localtime'
-    tz_zone_dir = '/usr/share/zoneinfo'
 
     def __init__(self, name, cfg, paths):
         distros.Distro.__init__(self, name, cfg, paths)
@@ -151,12 +148,7 @@ class Distro(distros.Distro):
         return distros.Distro._bring_up_interfaces(self, device_names)
 
     def set_timezone(self, tz):
-        # TODO(harlowja): move this code into
-        # the parent distro...
-        tz_file = os.path.join(self.tz_zone_dir, str(tz))
-        if not os.path.isfile(tz_file):
-            raise RuntimeError(("Invalid timezone %s,"
-                                " no file found at %s") % (tz, tz_file))
+        tz_file = self._find_tz_file(tz)
         # Adjust the sysconfig clock zone setting
         clock_cfg = {
             'TIMEZONE': str(tz),
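The three distro changes above all collapse to the same pattern: the duplicated timezone lookup moves into the Distro base class and each subclass calls the shared helper. A minimal self-contained sketch of that pattern (BaseDistro, FakeDebian and the trimmed set_timezone body are placeholders, not the real distro code):

```python
import os

class BaseDistro(object):
    tz_zone_dir = "/usr/share/zoneinfo"

    def _find_tz_file(self, tz):
        # same validation as the new Distro._find_tz_file above
        tz_file = os.path.join(self.tz_zone_dir, str(tz))
        if not os.path.isfile(tz_file):
            raise IOError("Invalid timezone %s, no file found at %s"
                          % (tz, tz_file))
        return tz_file

class FakeDebian(BaseDistro):
    def set_timezone(self, tz):
        tz_file = self._find_tz_file(tz)   # shared lookup, IOError on bad tz
        return tz_file                     # real code then writes /etc/timezone etc.

# prints the zoneinfo path on a typical Linux box; raises IOError where
# /usr/share/zoneinfo is absent
print(FakeDebian().set_timezone("Europe/London"))
```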
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 2f28702e..7ec622bf 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -17,6 +17,7 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import base64
+import crypt
 import os
 import os.path
 import time
@@ -138,13 +139,11 @@ class DataSourceAzureNet(sources.DataSource):
                 bname = pk['fingerprint'] + ".crt"
                 fp_files += [os.path.join(mycfg['data_dir'], bname)]
 
-        start = time.time()
-        missing = wait_for_files(wait_for + fp_files)
+        missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
+                                func=wait_for_files,
+                                args=(wait_for + fp_files,))
         if len(missing):
             LOG.warn("Did not find files, but going on: %s", missing)
-        else:
-            LOG.debug("waited %.3f seconds for %d files to appear",
-                      time.time() - start, len(wait_for))
 
         if shcfgxml in missing:
             LOG.warn("SharedConfig.xml missing, using static instance-id")
@@ -206,11 +205,12 @@ def apply_hostname_bounce(hostname, policy, interface, command,
         command = BOUNCE_COMMAND
 
     LOG.debug("pubhname: publishing hostname [%s]", msg)
-    start = time.time()
     shell = not isinstance(command, (list, tuple))
     # capture=False, see comments in bug 1202758 and bug 1206164.
-    (output, err) = util.subp(command, shell=shell, capture=False, env=env)
-    LOG.debug("publishing hostname took %.3f seconds", time.time() - start)
+    util.log_time(logfunc=LOG.debug, msg="publishing hostname",
+                  get_uptime=True, func=util.subp,
+                  kwargs={'command': command, 'shell': shell, 'capture': False,
+                          'env': env})
 
 
 def crtfile_to_pubkey(fname):
@@ -425,7 +425,7 @@ def read_azure_ovf(contents):
     if username:
         defuser['name'] = username
     if password:
-        defuser['password'] = password
+        defuser['passwd'] = encrypt_pass(password)
         defuser['lock_passwd'] = False
 
     if defuser:
@@ -437,6 +437,10 @@ def read_azure_ovf(contents):
     return (md, ud, cfg)
 
 
+def encrypt_pass(password, salt_id="$6$"):
+    return crypt.crypt(password, salt_id + util.rand_str(strlen=16))
+
+
 def list_possible_azure_ds_devs():
     # return a sorted list of devices that might have a azure datasource
     devlist = []
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 8542fe27..4a74ba57 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -1770,3 +1770,38 @@ def which(program):
                 return exe_file
 
     return None
+
+
+def log_time(logfunc, msg, func, args=None, kwargs=None, get_uptime=False):
+    if args is None:
+        args = []
+    if kwargs is None:
+        kwargs = {}
+
+    start = time.time()
+
+    ustart = None
+    if get_uptime:
+        try:
+            ustart = float(uptime())
+        except ValueError:
+            pass
+
+    try:
+        ret = func(*args, **kwargs)
+    finally:
+        delta = time.time() - start
+        if ustart is not None:
+            try:
+                udelta = float(uptime()) - ustart
+            except ValueError:
+                udelta = "N/A"
+
+        tmsg = " took %0.3f seconds" % delta
+        if get_uptime:
+            tmsg += "(%0.2f)" % udelta
+        try:
+            logfunc(msg + tmsg)
+        except:
+            pass
+    return ret
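For reference, a small usage sketch of the new util.log_time() helper defined just above. The timed function and the logger name are invented for the example; inside cloud-init the call sites shown earlier in this diff (bin/cloud-init, cc_growpart, cc_resizefs, DataSourceAzure) pass their own logfunc and func. It assumes the cloud-init tree with this patch applied is importable.

```python
import logging
import time

from cloudinit import util

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("log-time-demo")

def slow_add(a, b):            # stand-in for any call worth timing
    time.sleep(0.2)
    return a + b

# logs roughly "adding two numbers took 0.200 seconds" at DEBUG level
# and still returns the wrapped function's result
result = util.log_time(logfunc=LOG.debug, msg="adding two numbers",
                       func=slow_add, args=(1, 2))
print(result)                  # -> 3
```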
diff --git a/doc/examples/cloud-config-TODO.txt b/doc/examples/cloud-config-TODO.txt
deleted file mode 100644
index c7ed54ab..00000000
--- a/doc/examples/cloud-config-TODO.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# Add apt configuration files
-# Add an apt.conf.d/ file with the relevant content
-#
-# See apt.conf man page for more information.
-#
-# Defaults:
-#   + filename: 00-boot-conf
-#
-apt_conf:
-
-  # Creates an apt proxy configuration in /etc/apt/apt.conf.d/01-proxy
-  - filename: "01-proxy"
-    content: |
-      Acquire::http::Proxy "http://proxy.example.org:3142/ubuntu";
-
-  # Add the following line to /etc/apt/apt.conf.d/00-boot-conf
-  # (run debconf at a critical priority)
-  - content: |
-      DPkg::Pre-Install-Pkgs:: "/usr/sbin/dpkg-preconfigure --apt -p critical|| true";
-
diff --git a/doc/examples/cloud-config.txt b/doc/examples/cloud-config.txt
index 24b4b36c..bcfd7917 100644
--- a/doc/examples/cloud-config.txt
+++ b/doc/examples/cloud-config.txt
@@ -53,6 +53,9 @@ apt_mirror_search:
 apt_mirror_search_dns: False
 
 # apt_proxy (configure Acquire::HTTP::Proxy)
+# 'apt_http_proxy' is an alias for 'apt_proxy'.
+# Also, available are 'apt_ftp_proxy' and 'apt_https_proxy'.
+# These affect Acquire::FTP::Proxy and Acquire::HTTPS::Proxy respectively
 apt_proxy: http://my.apt.proxy:3128
 
 # apt_pipelining (configure Acquire::http::Pipeline-Depth)
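The Azure change earlier in this diff stops storing the plaintext password and keeps a crypt(3)-style hash instead; the test_azure.py hunk below verifies it by re-hashing the plaintext with the salt portion of the stored value. A standalone illustration of that round-trip (the password literal and the local salt generator are made up; the real code uses util.rand_str, and this assumes a Unix crypt(3) with SHA-512 '$6$' support):

```python
import crypt
import random
import string

def encrypt_pass(password, salt_id="$6$"):
    # same shape as DataSourceAzure.encrypt_pass, with a local salt generator
    salt = ''.join(random.choice(string.ascii_letters) for _ in range(16))
    return crypt.crypt(password, salt_id + salt)

hashed = encrypt_pass("s3cret")          # e.g. "$6$AbCdEfGh...$<digest>"
# everything up to and including the final '$' is the "$id$salt$" prefix;
# re-hashing the plaintext with that prefix must reproduce the stored value
prefix = hashed[:hashed.rfind("$") + 1]
assert hashed == crypt.crypt("s3cret", prefix)
```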
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 4cd3f213..1ca6a79d 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -2,6 +2,7 @@ from cloudinit import helpers
 from cloudinit.sources import DataSourceAzure
 from tests.unittests.helpers import populate_dir
 
+import crypt
 import base64
 from mocker import MockerTestCase
 import os
@@ -207,11 +208,15 @@ class TestAzureDataSource(MockerTestCase):
         self.assertTrue('default_user' in dsrc.cfg['system_info'])
         defuser = dsrc.cfg['system_info']['default_user']
 
-        # default user shoudl be updated for password and username
-        # and should not be locked.
+        # default user should be updated username and should not be locked.
         self.assertEqual(defuser['name'], odata['UserName'])
-        self.assertEqual(defuser['password'], odata['UserPassword'])
         self.assertFalse(defuser['lock_passwd'])
+        # passwd is crypt formated string $id$salt$encrypted
+        # encrypting plaintext with salt value of everything up to final '$'
+        # should equal that after the '$'
+        pos = defuser['passwd'].rfind("$") + 1
+        self.assertEqual(defuser['passwd'],
+            crypt.crypt(odata['UserPassword'], defuser['passwd'][0:pos]))
 
     def test_userdata_found(self):
         mydata = "FOOBAR"
@@ -249,7 +254,6 @@ class TestAzureDataSource(MockerTestCase):
 
     def test_apply_bounce_call_1(self):
         # hostname needs to get through to apply_hostname_bounce
-        mydata = "FOOBAR"
         odata = {'HostName': 'my-random-hostname'}
         data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
 
diff --git a/tests/unittests/test_handler/test_handler_apt_configure.py b/tests/unittests/test_handler/test_handler_apt_configure.py
new file mode 100644
index 00000000..203dd2aa
--- /dev/null
+++ b/tests/unittests/test_handler/test_handler_apt_configure.py
@@ -0,0 +1,106 @@
+from mocker import MockerTestCase
+
+from cloudinit import util
+
+from cloudinit.config import cc_apt_configure
+
+import os
+import re
+
+
+class TestAptProxyConfig(MockerTestCase):
+    def setUp(self):
+        super(TestAptProxyConfig, self).setUp()
+        self.tmp = self.makeDir()
+        self.pfile = os.path.join(self.tmp, "proxy.cfg")
+        self.cfile = os.path.join(self.tmp, "config.cfg")
+
+    def _search_apt_config(self, contents, ptype, value):
+        print(
+            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+            contents, "flags=re.IGNORECASE")
+        return(re.search(
+            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+            contents, flags=re.IGNORECASE))
+
+    def test_apt_proxy_written(self):
+        cfg = {'apt_proxy': 'myproxy'}
+        cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+        self.assertTrue(os.path.isfile(self.pfile))
+        self.assertFalse(os.path.isfile(self.cfile))
+
+        contents = str(util.read_file_or_url(self.pfile))
+        self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
+
+    def test_apt_http_proxy_written(self):
+        cfg = {'apt_http_proxy': 'myproxy'}
+        cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+        self.assertTrue(os.path.isfile(self.pfile))
+        self.assertFalse(os.path.isfile(self.cfile))
+
+        contents = str(util.read_file_or_url(self.pfile))
+        self.assertTrue(self._search_apt_config(contents, "http", "myproxy"))
+
+    def test_apt_all_proxy_written(self):
+        cfg = {'apt_http_proxy': 'myproxy_http_proxy',
+               'apt_https_proxy': 'myproxy_https_proxy',
+               'apt_ftp_proxy': 'myproxy_ftp_proxy'}
+
+        values = {'http': cfg['apt_http_proxy'],
+                  'https': cfg['apt_https_proxy'],
+                  'ftp': cfg['apt_ftp_proxy'],
+                  }
+
+        cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+        self.assertTrue(os.path.isfile(self.pfile))
+        self.assertFalse(os.path.isfile(self.cfile))
+
+        contents = str(util.read_file_or_url(self.pfile))
+
+        for ptype, pval in values.iteritems():
+            self.assertTrue(self._search_apt_config(contents, ptype, pval))
+
+    def test_proxy_deleted(self):
+        util.write_file(self.cfile, "content doesnt matter")
+        cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
+        self.assertFalse(os.path.isfile(self.pfile))
+        self.assertFalse(os.path.isfile(self.cfile))
+
+    def test_proxy_replaced(self):
+        util.write_file(self.cfile, "content doesnt matter")
+        cc_apt_configure.apply_apt_config({'apt_proxy': "foo"},
+                                          self.pfile, self.cfile)
+        self.assertTrue(os.path.isfile(self.pfile))
+        contents = str(util.read_file_or_url(self.pfile))
+        self.assertTrue(self._search_apt_config(contents, "http", "foo"))
+
+    def test_config_written(self):
+        payload = 'this is my apt config'
+        cfg = {'apt_config': payload}
+
+        cc_apt_configure.apply_apt_config(cfg, self.pfile, self.cfile)
+
+        self.assertTrue(os.path.isfile(self.cfile))
+        self.assertFalse(os.path.isfile(self.pfile))
+
+        self.assertEqual(str(util.read_file_or_url(self.cfile)), payload)
+
+    def test_config_replaced(self):
+        util.write_file(self.pfile, "content doesnt matter")
+        cc_apt_configure.apply_apt_config({'apt_config': "foo"},
+                                          self.pfile, self.cfile)
+        self.assertTrue(os.path.isfile(self.cfile))
+        self.assertEqual(str(util.read_file_or_url(self.cfile)), "foo")
+
+    def test_config_deleted(self):
+        # if no 'apt_config' is provided, delete any previously written file
+        util.write_file(self.pfile, "content doesnt matter")
+        cc_apt_configure.apply_apt_config({}, self.pfile, self.cfile)
+        self.assertFalse(os.path.isfile(self.pfile))
+        self.assertFalse(os.path.isfile(self.cfile))
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/test_handler/test_handler_growpart.py b/tests/unittests/test_handler/test_handler_growpart.py
index b1b872b0..c0497e08 100644
--- a/tests/unittests/test_handler/test_handler_growpart.py
+++ b/tests/unittests/test_handler/test_handler_growpart.py
@@ -9,6 +9,7 @@ import errno
 import logging
 import os
 import re
+import unittest
 
 # growpart:
 #   mode: auto  # off, on, auto, 'growpart', 'parted'
@@ -121,6 +122,7 @@ class TestConfig(MockerTestCase):
         # Order must be correct
         self.mocker.order()
 
+    @unittest.skip("until LP: #1212444 fixed")
     def test_no_resizers_auto_is_fine(self):
         subp = self.mocker.replace(util.subp, passthrough=False)
         subp(['parted', '--help'], env={'LANG': 'C'})
@@ -142,6 +144,7 @@ class TestConfig(MockerTestCase):
         self.assertRaises(ValueError, self.handle, self.name, config,
                           self.cloud_init, self.log, self.args)
 
+    @unittest.skip("until LP: #1212444 fixed")
     def test_mode_auto_prefers_parted(self):
         subp = self.mocker.replace(util.subp, passthrough=False)
         subp(['parted', '--help'], env={'LANG': 'C'})