From 8900f9cba622eeaf3810003c5a6ff7522312277b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 7 Jun 2012 12:42:38 -0700 Subject: 1. Adding some new helper files that split off file inclusion, templating, importing, constant usage. 1. Move all datasources to a new sources directory 1. Rename some files to be more consistent with python file/module naming. --- cloudinit/ssh_util.py | 227 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 227 insertions(+) create mode 100644 cloudinit/ssh_util.py (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py new file mode 100644 index 00000000..a081fbe8 --- /dev/null +++ b/cloudinit/ssh_util.py @@ -0,0 +1,227 @@ +#!/usr/bin/python +# vi: ts=4 expandtab +# +# Copyright (C) 2012 Canonical Ltd. +# Copyright (C) 2012 Hewlett-Packard Development Company, L.P. +# +# Author: Scott Moser +# Author: Juerg Hafliger +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3, as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os +import os.path +import cloudinit.util as util + + +class AuthKeyEntry(): + # lines are options, keytype, base64-encoded key, comment + # man page says the following which I did not understand: + # The options field is optional; its presence is determined by whether + # the line starts with a number or not (the options field never starts + # with a number) + options = None + keytype = None + base64 = None + comment = None + is_comment = False + line_in = "" + + def __init__(self, line, def_opt=None): + line = line.rstrip("\n\r") + self.line_in = line + if line.startswith("#") or line.strip() == "": + self.is_comment = True + else: + ent = line.strip() + toks = ent.split(None, 3) + if len(toks) == 1: + self.base64 = toks[0] + elif len(toks) == 2: + (self.base64, self.comment) = toks + elif len(toks) == 3: + (self.keytype, self.base64, self.comment) = toks + elif len(toks) == 4: + i = 0 + ent = line.strip() + quoted = False + # taken from auth_rsa_key_allowed in auth-rsa.c + try: + while (i < len(ent) and + ((quoted) or (ent[i] not in (" ", "\t")))): + curc = ent[i] + nextc = ent[i + 1] + if curc == "\\" and nextc == '"': + i = i + 1 + elif curc == '"': + quoted = not quoted + i = i + 1 + except IndexError: + self.is_comment = True + return + + try: + self.options = ent[0:i] + (self.keytype, self.base64, self.comment) = \ + ent[i + 1:].split(None, 3) + except ValueError: + # we did not understand this line + self.is_comment = True + + if self.options == None and def_opt: + self.options = def_opt + + return + + def debug(self): + print("line_in=%s\ncomment: %s\noptions=%s\nkeytype=%s\nbase64=%s\n" + "comment=%s\n" % (self.line_in, self.is_comment, self.options, + self.keytype, self.base64, self.comment)), + + def __repr__(self): + if self.is_comment: + return(self.line_in) + else: + toks = [] + for e in (self.options, self.keytype, self.base64, self.comment): + if e: + toks.append(e) + + return(' '.join(toks)) + + +def update_authorized_keys(fname, keys): + # keys is a list of AuthKeyEntries + # key_prefix is the prefix (options) to 
prepend + try: + fp = open(fname, "r") + lines = fp.readlines() # lines have carriage return + fp.close() + except IOError: + lines = [] + + ka_stats = {} # keys_added status + for k in keys: + ka_stats[k] = False + + to_add = [] + for key in keys: + to_add.append(key) + + for i in range(0, len(lines)): + ent = AuthKeyEntry(lines[i]) + for k in keys: + if k.base64 == ent.base64 and not k.is_comment: + ent = k + try: + to_add.remove(k) + except ValueError: + pass + lines[i] = str(ent) + + # now append any entries we did not match above + for key in to_add: + lines.append(str(key)) + + if len(lines) == 0: + return("") + else: + return('\n'.join(lines) + "\n") + + +def setup_user_keys(keys, user, key_prefix, log=None): + import pwd + saved_umask = os.umask(077) + + pwent = pwd.getpwnam(user) + + ssh_dir = '%s/.ssh' % pwent.pw_dir + if not os.path.exists(ssh_dir): + os.mkdir(ssh_dir) + os.chown(ssh_dir, pwent.pw_uid, pwent.pw_gid) + + try: + ssh_cfg = parse_ssh_config() + akeys = ssh_cfg.get("AuthorizedKeysFile", "%h/.ssh/authorized_keys") + akeys = akeys.replace("%h", pwent.pw_dir) + akeys = akeys.replace("%u", user) + if not akeys.startswith('/'): + akeys = os.path.join(pwent.pw_dir, akeys) + authorized_keys = akeys + except Exception: + authorized_keys = '%s/.ssh/authorized_keys' % pwent.pw_dir + if log: + util.logexc(log) + + key_entries = [] + for k in keys: + ke = AuthKeyEntry(k, def_opt=key_prefix) + key_entries.append(ke) + + content = update_authorized_keys(authorized_keys, key_entries) + util.write_file(authorized_keys, content, 0600) + + os.chown(authorized_keys, pwent.pw_uid, pwent.pw_gid) + util.restorecon_if_possible(ssh_dir, recursive=True) + + os.umask(saved_umask) + + +def parse_ssh_config(fname="/etc/ssh/sshd_config"): + ret = {} + fp = open(fname) + for l in fp.readlines(): + l = l.strip() + if not l or l.startswith("#"): + continue + key, val = l.split(None, 1) + ret[key] = val + fp.close() + return(ret) + +if __name__ == "__main__": + def main(): + import sys + # usage: orig_file, new_keys, [key_prefix] + # prints out merged, where 'new_keys' will trump old + ## example + ## ### begin auth_keys ### + # ssh-rsa AAAAB3NzaC1xxxxxxxxxV3csgm8cJn7UveKHkYjJp8= smoser-work + # ssh-rsa AAAAB3NzaC1xxxxxxxxxCmXp5Kt5/82cD/VN3NtHw== smoser@brickies + # ### end authorized_keys ### + # + # ### begin new_keys ### + # ssh-rsa nonmatch smoser@newhost + # ssh-rsa AAAAB3NzaC1xxxxxxxxxV3csgm8cJn7UveKHkYjJp8= new_comment + # ### end new_keys ### + # + # Then run as: + # program auth_keys new_keys \ + # 'no-port-forwarding,command=\"echo hi world;\"' + def_prefix = None + orig_key_file = sys.argv[1] + new_key_file = sys.argv[2] + if len(sys.argv) > 3: + def_prefix = sys.argv[3] + fp = open(new_key_file) + + newkeys = [] + for line in fp.readlines(): + newkeys.append(AuthKeyEntry(line, def_prefix)) + + fp.close() + print update_authorized_keys(orig_key_file, newkeys) + + main() + +# vi: ts=4 expandtab -- cgit v1.2.3 From 707e310c2aade8bd1cd024b008f5ecfeb4155063 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 8 Jun 2012 18:09:24 -0700 Subject: Remove the main function from this, seems like that should be in a test if needed. 
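As an illustration of the kind of test that could replace the removed __main__ block, here is a minimal sketch (AuthKeyEntry and update_authorized_keys() are the helpers the module provides at this point; the test class, temporary file and fake key strings are invented for this example):

    import tempfile
    import unittest

    from cloudinit import ssh_util


    class TestUpdateAuthorizedKeys(unittest.TestCase):
        def test_matching_base64_is_replaced(self):
            # Seed an authorized_keys file with one existing entry.
            tmp = tempfile.NamedTemporaryFile(mode="w", delete=False)
            tmp.write("ssh-rsa AAAAB3Nza_fake_key smoser-work\n")
            tmp.close()

            # A new entry carrying the same base64 blob but a different
            # comment should replace the old line, not be appended to it.
            new_key = ssh_util.AuthKeyEntry(
                "ssh-rsa AAAAB3Nza_fake_key new_comment")
            merged = ssh_util.update_authorized_keys(tmp.name, [new_key])

            self.assertTrue("new_comment" in merged)
            self.assertFalse("smoser-work" in merged)


    if __name__ == '__main__':
        unittest.main()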
--- cloudinit/ssh_util.py | 37 ------------------------------------- 1 file changed, 37 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index a081fbe8..1483f718 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -188,40 +188,3 @@ def parse_ssh_config(fname="/etc/ssh/sshd_config"): ret[key] = val fp.close() return(ret) - -if __name__ == "__main__": - def main(): - import sys - # usage: orig_file, new_keys, [key_prefix] - # prints out merged, where 'new_keys' will trump old - ## example - ## ### begin auth_keys ### - # ssh-rsa AAAAB3NzaC1xxxxxxxxxV3csgm8cJn7UveKHkYjJp8= smoser-work - # ssh-rsa AAAAB3NzaC1xxxxxxxxxCmXp5Kt5/82cD/VN3NtHw== smoser@brickies - # ### end authorized_keys ### - # - # ### begin new_keys ### - # ssh-rsa nonmatch smoser@newhost - # ssh-rsa AAAAB3NzaC1xxxxxxxxxV3csgm8cJn7UveKHkYjJp8= new_comment - # ### end new_keys ### - # - # Then run as: - # program auth_keys new_keys \ - # 'no-port-forwarding,command=\"echo hi world;\"' - def_prefix = None - orig_key_file = sys.argv[1] - new_key_file = sys.argv[2] - if len(sys.argv) > 3: - def_prefix = sys.argv[3] - fp = open(new_key_file) - - newkeys = [] - for line in fp.readlines(): - newkeys.append(AuthKeyEntry(line, def_prefix)) - - fp.close() - print update_authorized_keys(orig_key_file, newkeys) - - main() - -# vi: ts=4 expandtab -- cgit v1.2.3 From 2d831d8a0e0c57bc85de1e1e2def2788fa6ac525 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 9 Jun 2012 12:35:39 -0700 Subject: Cleanup this and add refactoring around large constructors (add a parse method). Handle error cases better... --- cloudinit/ssh_util.py | 277 ++++++++++++++++++++++++++++---------------------- 1 file changed, 155 insertions(+), 122 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 1483f718..93fd55dd 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -20,42 +20,70 @@ # along with this program. If not, see . import os -import os.path -import cloudinit.util as util - - -class AuthKeyEntry(): - # lines are options, keytype, base64-encoded key, comment - # man page says the following which I did not understand: - # The options field is optional; its presence is determined by whether - # the line starts with a number or not (the options field never starts - # with a number) - options = None - keytype = None - base64 = None - comment = None - is_comment = False - line_in = "" +import pwd + +from cloudinit import log as logging +from cloudinit import util + +LOG = logging.getLogger(__name__) + + +class AuthKeyEntry(object): + """ + AUTHORIZED_KEYS FILE FORMAT + AuthorizedKeysFile specifies the file containing public keys for public + key authentication; if none is specified, the default is + ~/.ssh/authorized_keys. Each line of the file contains one key (empty + (because of the size of the public key encoding) up to a limit of 8 kilo- + bytes, which permits DSA keys up to 8 kilobits and RSA keys up to 16 + kilobits. You don't want to type them in; instead, copy the + identity.pub, id_dsa.pub, or the id_rsa.pub file and edit it. + + sshd enforces a minimum RSA key modulus size for protocol 1 and protocol + 2 keys of 768 bits. + + The options (if present) consist of comma-separated option specifica- + tions. No spaces are permitted, except within double quotes. 
The fol- + lowing option specifications are supported (note that option keywords are + case-insensitive): + """ def __init__(self, line, def_opt=None): - line = line.rstrip("\n\r") - self.line_in = line - if line.startswith("#") or line.strip() == "": - self.is_comment = True + self.line = str(line) + (self.value, self.components) = self._parse(self.line, def_opt) + + def _form_components(self, toks): + components = {} + if len(toks) == 1: + components['base64'] = toks[0] + elif len(toks) == 2: + components['base64'] = toks[0] + components['comment'] = toks[1] + elif len(toks) == 3: + components['keytype'] = toks[0] + components['base64'] = toks[1] + components['comment'] = toks[2] + return components + + def get(self, piece): + return self.components.get(piece) + + def _parse(self, in_line, def_opt): + line = in_line.rstrip("\r\n") + if line.startswith("#") or line.strip() == '': + return (False, {}) else: ent = line.strip() toks = ent.split(None, 3) - if len(toks) == 1: - self.base64 = toks[0] - elif len(toks) == 2: - (self.base64, self.comment) = toks - elif len(toks) == 3: - (self.keytype, self.base64, self.comment) = toks - elif len(toks) == 4: + tmp_components = {} + if def_opt: + tmp_components['options'] = def_opt + if len(toks) < 4: + tmp_components.update(self._form_components(toks)) + else: + # taken from auth_rsa_key_allowed in auth-rsa.c i = 0 - ent = line.strip() quoted = False - # taken from auth_rsa_key_allowed in auth-rsa.c try: while (i < len(ent) and ((quoted) or (ent[i] not in (" ", "\t")))): @@ -67,124 +95,129 @@ class AuthKeyEntry(): quoted = not quoted i = i + 1 except IndexError: - self.is_comment = True - return - + return (False, {}) try: - self.options = ent[0:i] - (self.keytype, self.base64, self.comment) = \ - ent[i + 1:].split(None, 3) - except ValueError: - # we did not understand this line - self.is_comment = True - - if self.options == None and def_opt: - self.options = def_opt - - return - - def debug(self): - print("line_in=%s\ncomment: %s\noptions=%s\nkeytype=%s\nbase64=%s\n" - "comment=%s\n" % (self.line_in, self.is_comment, self.options, - self.keytype, self.base64, self.comment)), - - def __repr__(self): - if self.is_comment: - return(self.line_in) + options = ent[0:i] + toks = ent[i + 1:].split(None, 3) + if options: + tmp_components['options'] = options + tmp_components.update(self._form_components(toks)) + except (IndexError, ValueError): + return (False, {}) + # We got some useful value! 
+ return (True, tmp_components) + + def __str__(self): + if not self.value: + return self.line else: toks = [] - for e in (self.options, self.keytype, self.base64, self.comment): - if e: - toks.append(e) - - return(' '.join(toks)) + if 'options' in self.components: + toks.append(self.components['options']) + if 'keytype' in self.components: + toks.append(self.components['keytype']) + if 'base64' in self.components: + toks.append(self.components['base64']) + if 'comment' in self.components: + toks.append(self.components['comment']) + if not toks: + return '' + return ' '.join(toks) def update_authorized_keys(fname, keys): - # keys is a list of AuthKeyEntries - # key_prefix is the prefix (options) to prepend + lines = [] try: - fp = open(fname, "r") - lines = fp.readlines() # lines have carriage return - fp.close() - except IOError: + if os.path.isfile(fname): + lines = util.load_file(fname).splitlines() + except (IOError, OSError): + LOG.exception("Error reading lines from %s", fname) lines = [] - ka_stats = {} # keys_added status - for k in keys: - ka_stats[k] = False - - to_add = [] - for key in keys: - to_add.append(key) - + to_add = list(keys) for i in range(0, len(lines)): ent = AuthKeyEntry(lines[i]) + if not ent.value: + continue + # Replace those with the same base64 for k in keys: - if k.base64 == ent.base64 and not k.is_comment: + if not k.value: + continue + if k.get('base64') == ent.get('base64'): + # Replace it with our better one ent = k - try: - to_add.remove(k) - except ValueError: - pass + # Don't add it later + to_add.remove(k) lines[i] = str(ent) - # now append any entries we did not match above + # Now append any entries we did not match above for key in to_add: lines.append(str(key)) - if len(lines) == 0: - return("") - else: - return('\n'.join(lines) + "\n") + # Ensure it ends with a newline + lines.append('') + return '\n'.join(lines) -def setup_user_keys(keys, user, key_prefix, log=None): - import pwd - saved_umask = os.umask(077) - +def setup_user_keys(keys, user, key_prefix, sshd_config_fn="/etc/ssh/sshd_config"): pwent = pwd.getpwnam(user) - ssh_dir = '%s/.ssh' % pwent.pw_dir + ssh_dir = os.path.join(pwent.pw_dir, '.ssh') if not os.path.exists(ssh_dir): - os.mkdir(ssh_dir) - os.chown(ssh_dir, pwent.pw_uid, pwent.pw_gid) - - try: - ssh_cfg = parse_ssh_config() - akeys = ssh_cfg.get("AuthorizedKeysFile", "%h/.ssh/authorized_keys") - akeys = akeys.replace("%h", pwent.pw_dir) - akeys = akeys.replace("%u", user) - if not akeys.startswith('/'): - akeys = os.path.join(pwent.pw_dir, akeys) - authorized_keys = akeys - except Exception: - authorized_keys = '%s/.ssh/authorized_keys' % pwent.pw_dir - if log: - util.logexc(log) + util.ensure_dir(ssh_dir, mode=0700) + util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid) key_entries = [] for k in keys: - ke = AuthKeyEntry(k, def_opt=key_prefix) - key_entries.append(ke) - - content = update_authorized_keys(authorized_keys, key_entries) - util.write_file(authorized_keys, content, 0600) - - os.chown(authorized_keys, pwent.pw_uid, pwent.pw_gid) - util.restorecon_if_possible(ssh_dir, recursive=True) - - os.umask(saved_umask) - - -def parse_ssh_config(fname="/etc/ssh/sshd_config"): + key_entries.append(AuthKeyEntry(k, def_opt=key_prefix)) + + with util.SeLinuxGuard(ssh_dir, recursive=True): + try: + """ + AuthorizedKeysFile may contain tokens + of the form %T which are substituted during connection set-up. 
+ The following tokens are defined: %% is replaced by a literal + '%', %h is replaced by the home directory of the user being + authenticated and %u is replaced by the username of that user. + """ + ssh_cfg = parse_ssh_config(sshd_config_fn) + akeys = ssh_cfg.get("authorizedkeysfile", '') + akeys = akeys.strip() + if not akeys: + akeys = "%h/.ssh/authorized_keys" + akeys = akeys.replace("%h", pwent.pw_dir) + akeys = akeys.replace("%u", user) + akeys = akeys.replace("%%", '%') + if not akeys.startswith('/'): + akeys = os.path.join(pwent.pw_dir, akeys) + authorized_keys = akeys + except (IOError, OSError): + authorized_keys = os.path.join(ssh_dir, 'authorized_keys') + LOG.exception(("Failed extracting 'AuthorizedKeysFile' in ssh config" + " from %s, using 'AuthorizedKeysFile' file %s instead."), + sshd_config_fn, authorized_keys) + + content = update_authorized_keys(authorized_keys, key_entries) + util.ensure_dir(os.path.dirname(authorized_keys), mode=0700) + util.write_file(authorized_keys, content, mode=0600) + util.chownbyid(authorized_keys, pwent.pw_uid, pwent.pw_gid) + + +def parse_ssh_config(fname): + """ + The file contains keyword-argu-ment pairs, one per line. + Lines starting with '#' and empty lines are interpreted as comments. + Note: key-words are case-insensitive and arguments are case-sensitive + """ ret = {} - fp = open(fname) - for l in fp.readlines(): - l = l.strip() - if not l or l.startswith("#"): + if not os.path.isfile(fname): + return ret + for line in util.load_file(fname).splitlines(): + line = line.strip() + if not line or line.startswith("#"): continue - key, val = l.split(None, 1) - ret[key] = val - fp.close() - return(ret) + (key, val) = line.split(None, 1) + key = key.strip().lower() + if key: + ret[key] = val + return ret -- cgit v1.2.3 From 7a719072faac3b0947d163968bd6e311859ceb3b Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Mon, 11 Jun 2012 17:17:51 -0700 Subject: Add comments on formats, add in function that handles option extraction + other pylint cleanups. --- cloudinit/ssh_util.py | 105 +++++++++++++++++++++++++++++--------------------- 1 file changed, 62 insertions(+), 43 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 93fd55dd..c97b3819 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -19,6 +19,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +from StringIO import StringIO + +import csv import os import pwd @@ -26,6 +29,7 @@ from cloudinit import log as logging from cloudinit import util LOG = logging.getLogger(__name__) +DEF_SSHD_CFG = "/etc/ssh/sshd_config" class AuthKeyEntry(object): @@ -52,6 +56,40 @@ class AuthKeyEntry(object): self.line = str(line) (self.value, self.components) = self._parse(self.line, def_opt) + def _extract_options(self, ent): + """ + The options (if present) consist of comma-separated option specifica- + tions. No spaces are permitted, except within double quotes. + Note that option keywords are case-insensitive. 
+ """ + quoted = False + i = 0 + while (i < len(ent) and + ((quoted) or (ent[i] not in (" ", "\t")))): + curc = ent[i] + if i + 1 >= len(ent): + i = i + 1 + break + nextc = ent[i + 1] + if curc == "\\" and nextc == '"': + i = i + 1 + elif curc == '"': + quoted = not quoted + i = i + 1 + + options = ent[0:i] + options_lst = [] + reader = csv.reader(StringIO(options), quoting=csv.QUOTE_NONE) + for row in reader: + for e in row: + e = e.strip() + if e: + options_lst.append(e) + toks = [] + if i + 1 < len(ent): + toks = ent[i + 1:].split(None, 3) + return (options_lst, toks) + def _form_components(self, toks): components = {} if len(toks) == 1: @@ -81,29 +119,10 @@ class AuthKeyEntry(object): if len(toks) < 4: tmp_components.update(self._form_components(toks)) else: - # taken from auth_rsa_key_allowed in auth-rsa.c - i = 0 - quoted = False - try: - while (i < len(ent) and - ((quoted) or (ent[i] not in (" ", "\t")))): - curc = ent[i] - nextc = ent[i + 1] - if curc == "\\" and nextc == '"': - i = i + 1 - elif curc == '"': - quoted = not quoted - i = i + 1 - except IndexError: - return (False, {}) - try: - options = ent[0:i] - toks = ent[i + 1:].split(None, 3) - if options: - tmp_components['options'] = options - tmp_components.update(self._form_components(toks)) - except (IndexError, ValueError): - return (False, {}) + (options, toks) = self._extract_options(ent) + if options: + tmp_components['options'] = ",".join(options) + tmp_components.update(self._form_components(toks)) # We got some useful value! return (True, tmp_components) @@ -125,7 +144,7 @@ class AuthKeyEntry(object): return ' '.join(toks) -def update_authorized_keys(fname, keys): +def _update_authorized_keys(fname, keys): lines = [] try: if os.path.isfile(fname): @@ -159,9 +178,11 @@ def update_authorized_keys(fname, keys): return '\n'.join(lines) -def setup_user_keys(keys, user, key_prefix, sshd_config_fn="/etc/ssh/sshd_config"): - pwent = pwd.getpwnam(user) +def setup_user_keys(keys, user, key_prefix, sshd_config_fn=None): + if not sshd_config_fn: + sshd_config_fn = DEF_SSHD_CFG + pwent = pwd.getpwnam(user) ssh_dir = os.path.join(pwent.pw_dir, '.ssh') if not os.path.exists(ssh_dir): util.ensure_dir(ssh_dir, mode=0700) @@ -173,14 +194,12 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn="/etc/ssh/sshd_config with util.SeLinuxGuard(ssh_dir, recursive=True): try: - """ - AuthorizedKeysFile may contain tokens - of the form %T which are substituted during connection set-up. - The following tokens are defined: %% is replaced by a literal - '%', %h is replaced by the home directory of the user being - authenticated and %u is replaced by the username of that user. - """ - ssh_cfg = parse_ssh_config(sshd_config_fn) + # AuthorizedKeysFile may contain tokens + # of the form %T which are substituted during connection set-up. + # The following tokens are defined: %% is replaced by a literal + # '%', %h is replaced by the home directory of the user being + # authenticated and %u is replaced by the username of that user. 
+ ssh_cfg = _parse_ssh_config(sshd_config_fn) akeys = ssh_cfg.get("authorizedkeysfile", '') akeys = akeys.strip() if not akeys: @@ -193,22 +212,22 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn="/etc/ssh/sshd_config authorized_keys = akeys except (IOError, OSError): authorized_keys = os.path.join(ssh_dir, 'authorized_keys') - LOG.exception(("Failed extracting 'AuthorizedKeysFile' in ssh config" - " from %s, using 'AuthorizedKeysFile' file %s instead."), + LOG.exception(("Failed extracting 'AuthorizedKeysFile'" + " in ssh config" + " from %s, using 'AuthorizedKeysFile' file" + " %s instead"), sshd_config_fn, authorized_keys) - content = update_authorized_keys(authorized_keys, key_entries) + content = _update_authorized_keys(authorized_keys, key_entries) util.ensure_dir(os.path.dirname(authorized_keys), mode=0700) util.write_file(authorized_keys, content, mode=0600) util.chownbyid(authorized_keys, pwent.pw_uid, pwent.pw_gid) -def parse_ssh_config(fname): - """ - The file contains keyword-argu-ment pairs, one per line. - Lines starting with '#' and empty lines are interpreted as comments. - Note: key-words are case-insensitive and arguments are case-sensitive - """ +def _parse_ssh_config(fname): + # The file contains keyword-argument pairs, one per line. + # Lines starting with '#' and empty lines are interpreted as comments. + # Note: key-words are case-insensitive and arguments are case-sensitive ret = {} if not os.path.isfile(fname): return ret -- cgit v1.2.3 From 7b3bf46487e599b375acfdf99176294810805ef0 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 15 Jun 2012 18:24:04 -0700 Subject: Ensure when an exception is captured that we use the util.logexc helper. --- cloudinit/ssh_util.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index c97b3819..f6941a29 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -144,13 +144,13 @@ class AuthKeyEntry(object): return ' '.join(toks) -def _update_authorized_keys(fname, keys): +def update_authorized_keys(fname, keys): lines = [] try: if os.path.isfile(fname): lines = util.load_file(fname).splitlines() except (IOError, OSError): - LOG.exception("Error reading lines from %s", fname) + util.logexc(LOG, "Error reading lines from %s", fname) lines = [] to_add = list(keys) @@ -199,7 +199,7 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=None): # The following tokens are defined: %% is replaced by a literal # '%', %h is replaced by the home directory of the user being # authenticated and %u is replaced by the username of that user. 
- ssh_cfg = _parse_ssh_config(sshd_config_fn) + ssh_cfg = parse_ssh_config(sshd_config_fn) akeys = ssh_cfg.get("authorizedkeysfile", '') akeys = akeys.strip() if not akeys: @@ -212,19 +212,19 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=None): authorized_keys = akeys except (IOError, OSError): authorized_keys = os.path.join(ssh_dir, 'authorized_keys') - LOG.exception(("Failed extracting 'AuthorizedKeysFile'" - " in ssh config" - " from %s, using 'AuthorizedKeysFile' file" - " %s instead"), - sshd_config_fn, authorized_keys) + util.logexc(LOG, ("Failed extracting 'AuthorizedKeysFile'" + " in ssh config" + " from %s, using 'AuthorizedKeysFile' file" + " %s instead"), + sshd_config_fn, authorized_keys) - content = _update_authorized_keys(authorized_keys, key_entries) + content = update_authorized_keys(authorized_keys, key_entries) util.ensure_dir(os.path.dirname(authorized_keys), mode=0700) util.write_file(authorized_keys, content, mode=0600) util.chownbyid(authorized_keys, pwent.pw_uid, pwent.pw_gid) -def _parse_ssh_config(fname): +def parse_ssh_config(fname): # The file contains keyword-argument pairs, one per line. # Lines starting with '#' and empty lines are interpreted as comments. # Note: key-words are case-insensitive and arguments are case-sensitive -- cgit v1.2.3 From 54bbf3b7c59352ca8482abab5728d8621c9888fe Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 16 Jun 2012 13:43:31 -0700 Subject: Comments as to why we are using a csv parser and do some of the logic checks that are done for option extraction --- cloudinit/ssh_util.py | 10 ++++++++++ 1 file changed, 10 insertions(+) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index f6941a29..ba252e7f 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -79,12 +79,22 @@ class AuthKeyEntry(object): options = ent[0:i] options_lst = [] + + # Now use a csv parser to pull the options + # out of the above string that we just found an endpoint for. + # + # No quoting so we don't mess up any of the quoting that + # is already there. reader = csv.reader(StringIO(options), quoting=csv.QUOTE_NONE) for row in reader: for e in row: + # Only keep non-empty csv options e = e.strip() if e: options_lst.append(e) + + # Now take the rest of the items before the string + # as long as there is room to do this... toks = [] if i + 1 < len(ent): toks = ent[i + 1:].split(None, 3) -- cgit v1.2.3 From b71b42c9f99b97d6340a8274a249ed2c62ec7bfe Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 16 Jun 2012 19:18:29 -0700 Subject: Update ssh_util to have a parser class and a line entry class. Also have a method that will parse the authorized key file. This allows: 1. Testing of parsing seperate from testing of entry construction. 1. Testing of authorized key file parsing, separate from updating. 
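To make the parsing/construction split described above concrete, a short usage sketch (the class and function names are the ones this patch introduces; the option string, key material and file path are made up):

    from cloudinit import ssh_util

    parser = ssh_util.AuthKeyLineParser()

    # 1. Parsing alone: one raw authorized_keys line becomes an AuthKeyLine.
    parsed = parser.parse('command="echo hi" ssh-rsa AAAAB3Nza_fake_key smoser@brickies')
    # parsed.options -> 'command="echo hi"'
    # parsed.keytype -> 'ssh-rsa'
    # parsed.comment -> 'smoser@brickies'

    # 2. Entry construction alone: an AuthKeyLine can be built directly and
    #    serialized back without ever going through the parser.
    entry = ssh_util.AuthKeyLine('', keytype='ssh-rsa',
                                 base64='AAAAB3Nza_fake_key',
                                 comment='test@host')
    # str(entry) -> 'ssh-rsa AAAAB3Nza_fake_key test@host'

    # 3. File parsing alone: returns a list of AuthKeyLine objects
    #    (an empty list if the file does not exist).
    entries = ssh_util.parse_authorized_keys('/tmp/example_authorized_keys')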
--- cloudinit/ssh_util.py | 118 ++++++++++++++++++++++++++++++-------------------- 1 file changed, 71 insertions(+), 47 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index ba252e7f..663afd92 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -32,7 +32,38 @@ LOG = logging.getLogger(__name__) DEF_SSHD_CFG = "/etc/ssh/sshd_config" -class AuthKeyEntry(object): +class AuthKeyLine(object): + def __init__(self, source, keytype=None, base64=None, + comment=None, options=None): + self.base64 = base64 + self.comment = comment + self.options = options + self.keytype = keytype + self.source = source + + def empty(self): + if (not self.base64 and + not self.comment and not self.keytype and not self.options): + return True + return False + + def __str__(self): + toks = [] + if self.options: + toks.append(self.options) + if self.keytype: + toks.append(self.keytype) + if self.base64: + toks.append(self.base64) + if self.comment: + toks.append(self.comment) + if not toks: + return self.source + else: + return ' '.join(toks) + + +class AuthKeyLineParser(object): """ AUTHORIZED_KEYS FILE FORMAT AuthorizedKeysFile specifies the file containing public keys for public @@ -52,10 +83,6 @@ class AuthKeyEntry(object): case-insensitive): """ - def __init__(self, line, def_opt=None): - self.line = str(line) - (self.value, self.components) = self._parse(self.line, def_opt) - def _extract_options(self, ent): """ The options (if present) consist of comma-separated option specifica- @@ -97,10 +124,11 @@ class AuthKeyEntry(object): # as long as there is room to do this... toks = [] if i + 1 < len(ent): - toks = ent[i + 1:].split(None, 3) + rest = ent[i + 1:] + toks = rest.split(None, 2) return (options_lst, toks) - def _form_components(self, toks): + def _form_components(self, line, toks, options=None): components = {} if len(toks) == 1: components['base64'] = toks[0] @@ -111,50 +139,31 @@ class AuthKeyEntry(object): components['keytype'] = toks[0] components['base64'] = toks[1] components['comment'] = toks[2] - return components - - def get(self, piece): - return self.components.get(piece) + components['options'] = options + if not components: + return AuthKeyLine(line) + else: + return AuthKeyLine(line, **components) - def _parse(self, in_line, def_opt): + def parse(self, in_line, def_opt=None): line = in_line.rstrip("\r\n") if line.startswith("#") or line.strip() == '': - return (False, {}) + return AuthKeyLine(source=line) else: ent = line.strip() toks = ent.split(None, 3) - tmp_components = {} - if def_opt: - tmp_components['options'] = def_opt if len(toks) < 4: - tmp_components.update(self._form_components(toks)) + return self._form_components(line, toks, def_opt) else: (options, toks) = self._extract_options(ent) if options: - tmp_components['options'] = ",".join(options) - tmp_components.update(self._form_components(toks)) - # We got some useful value! 
- return (True, tmp_components) + options = ",".join(options) + else: + options = def_opt + return self._form_components(line, toks, options) - def __str__(self): - if not self.value: - return self.line - else: - toks = [] - if 'options' in self.components: - toks.append(self.components['options']) - if 'keytype' in self.components: - toks.append(self.components['keytype']) - if 'base64' in self.components: - toks.append(self.components['base64']) - if 'comment' in self.components: - toks.append(self.components['comment']) - if not toks: - return '' - return ' '.join(toks) - -def update_authorized_keys(fname, keys): +def parse_authorized_keys(fname): lines = [] try: if os.path.isfile(fname): @@ -163,25 +172,38 @@ def update_authorized_keys(fname, keys): util.logexc(LOG, "Error reading lines from %s", fname) lines = [] + parser = AuthKeyLineParser() + contents = [] + for line in lines: + contents.append(parser.parse(line)) + return contents + + +def update_authorized_keys(fname, keys): + entries = parse_authorized_keys(fname) to_add = list(keys) - for i in range(0, len(lines)): - ent = AuthKeyEntry(lines[i]) - if not ent.value: + + for i in range(0, len(entries)): + ent = entries[i] + if ent.empty() or not ent.base64: continue # Replace those with the same base64 for k in keys: - if not k.value: + if k.empty() or not k.base64: continue - if k.get('base64') == ent.get('base64'): + if k.base64 == ent.base64: # Replace it with our better one ent = k # Don't add it later to_add.remove(k) - lines[i] = str(ent) + entries[i] = ent # Now append any entries we did not match above for key in to_add: - lines.append(str(key)) + entries.append(key) + + # Now format them back to strings... + lines = [str(b) for b in entries] # Ensure it ends with a newline lines.append('') @@ -198,9 +220,11 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=None): util.ensure_dir(ssh_dir, mode=0700) util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid) + # Turn the keys given into actual entries + parser = AuthKeyLineParser() key_entries = [] for k in keys: - key_entries.append(AuthKeyEntry(k, def_opt=key_prefix)) + key_entries.append(parser.parse(str(k), def_opt=key_prefix)) with util.SeLinuxGuard(ssh_dir, recursive=True): try: -- cgit v1.2.3 From 8034ab85b03f31dce06b9c66ed03132c95b7875c Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 16 Jun 2012 19:24:30 -0700 Subject: Send in src line instead of stripped line. 
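A small round-trip example (not from the repository) of what passing the unstripped source line buys: comment lines and otherwise unparseable lines now serialize back exactly as they were read, so rewriting an authorized_keys file leaves them untouched.

    from cloudinit.ssh_util import AuthKeyLineParser

    parser = AuthKeyLineParser()

    # A comment produces an "empty" entry, so __str__ falls back to the
    # stored source line and the text survives unchanged.
    src = "# added by cloud-init, do not edit"
    assert str(parser.parse(src)) == src

    # A real key line is still re-serialized from its parsed fields.
    key = "ssh-rsa AAAAB3Nza_fake_key root@host"
    assert str(parser.parse(key)) == key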
--- cloudinit/ssh_util.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 663afd92..d58707d0 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -128,7 +128,7 @@ class AuthKeyLineParser(object): toks = rest.split(None, 2) return (options_lst, toks) - def _form_components(self, line, toks, options=None): + def _form_components(self, src_line, toks, options=None): components = {} if len(toks) == 1: components['base64'] = toks[0] @@ -141,26 +141,26 @@ class AuthKeyLineParser(object): components['comment'] = toks[2] components['options'] = options if not components: - return AuthKeyLine(line) + return AuthKeyLine(src_line) else: - return AuthKeyLine(line, **components) + return AuthKeyLine(src_line, **components) - def parse(self, in_line, def_opt=None): - line = in_line.rstrip("\r\n") + def parse(self, src_line, def_opt=None): + line = src_line.rstrip("\r\n") if line.startswith("#") or line.strip() == '': - return AuthKeyLine(source=line) + return AuthKeyLine(src_line) else: ent = line.strip() toks = ent.split(None, 3) if len(toks) < 4: - return self._form_components(line, toks, def_opt) + return self._form_components(src_line, toks, def_opt) else: (options, toks) = self._extract_options(ent) if options: options = ",".join(options) else: options = def_opt - return self._form_components(line, toks, options) + return self._form_components(src_line, toks, options) def parse_authorized_keys(fname): -- cgit v1.2.3 From 4a9bfce6a5dedd7702507e8810c39677c532e42f Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Sat, 16 Jun 2012 19:57:01 -0700 Subject: Remove default since it will never be called that way and apply default since its not currently being called that way --- cloudinit/ssh_util.py | 5 +---- cloudinit/transforms/ssh.py | 3 +-- 2 files changed, 2 insertions(+), 6 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index d58707d0..13adbb09 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -210,10 +210,7 @@ def update_authorized_keys(fname, keys): return '\n'.join(lines) -def setup_user_keys(keys, user, key_prefix, sshd_config_fn=None): - if not sshd_config_fn: - sshd_config_fn = DEF_SSHD_CFG - +def setup_user_keys(keys, user, key_prefix, sshd_config_fn=DEF_SSHD_CFG): pwent = pwd.getpwnam(user) ssh_dir = os.path.join(pwent.pw_dir, '.ssh') if not os.path.exists(ssh_dir): diff --git a/cloudinit/transforms/ssh.py b/cloudinit/transforms/ssh.py index f5c22e28..b1f2ce89 100644 --- a/cloudinit/transforms/ssh.py +++ b/cloudinit/transforms/ssh.py @@ -112,8 +112,7 @@ def handle(_name, cfg, cloud, log, _args): util.logexc(log, "Applying ssh credentials failed!") -def apply_credentials(keys, user, disable_root, - disable_root_opts=DISABLE_ROOT_OPTS): +def apply_credentials(keys, user, disable_root, disable_root_opts): keys = set(keys) if user: -- cgit v1.2.3 From 43eb6d5aace53bef2116dde0796807befef1d8ff Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Tue, 19 Jun 2012 17:58:41 -0700 Subject: Make most of all the places use the paths join() function so that testing with non-real read/write paths is easier. 
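A rough sketch of the testing pattern this enables, assuming Paths can be constructed from a bare config dict as the helpers.py hunk suggests ('read_root' and 'write_root' are the keys referenced in that hunk; the scratch directory and example path are invented):

    import tempfile

    from cloudinit import helpers

    # With a write_root configured, every path handed to paths.join() is
    # re-rooted under the scratch directory, so the modules write there
    # instead of onto the live filesystem.
    scratch = tempfile.mkdtemp()
    paths = helpers.Paths({'read_root': scratch, 'write_root': scratch})

    target = paths.join(False, '/etc/ssh/sshd_config')
    print(target)   # -> <scratch>/etc/ssh/sshd_config

    # ssh_util.setup_user_keys() now receives the Paths object itself, e.g.:
    #   ssh_util.setup_user_keys(keys, 'ubuntu', None, paths)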
--- cloudinit/distros/rhel.py | 6 ++++- cloudinit/distros/ubuntu.py | 7 ++++-- cloudinit/helpers.py | 34 +++++++++++++++++++---------- cloudinit/ssh_util.py | 10 ++++++--- cloudinit/transforms/apt_pipelining.py | 15 +++++-------- cloudinit/transforms/apt_update_upgrade.py | 21 +++++++++++------- cloudinit/transforms/ca_certs.py | 23 ++++++++++--------- cloudinit/transforms/chef.py | 31 +++++++++++++++++--------- cloudinit/transforms/keys_to_console.py | 9 ++++---- cloudinit/transforms/landscape.py | 20 ++++++++++------- cloudinit/transforms/mcollective.py | 25 +++++++++++---------- cloudinit/transforms/mounts.py | 6 ++--- cloudinit/transforms/phone_home.py | 2 +- cloudinit/transforms/puppet.py | 34 +++++++++++++++++------------ cloudinit/transforms/resizefs.py | 8 +++---- cloudinit/transforms/rightscale_userdata.py | 1 + cloudinit/transforms/rsyslog.py | 3 ++- cloudinit/transforms/runcmd.py | 2 +- cloudinit/transforms/salt_minion.py | 2 ++ cloudinit/transforms/set_passwords.py | 4 ++-- cloudinit/transforms/ssh.py | 13 ++++++----- 21 files changed, 167 insertions(+), 109 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index e0ca5909..aef7f6f3 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -73,6 +73,7 @@ class Distro(distros.Distro): lines.insert(0, '# Created by cloud-init') contents = "\n".join(lines) net_fn = NETWORK_FN_TPL % (dev) + net_fn = self._paths.join(False, net_fn) util.write_file(net_fn, contents, 0644) def set_hostname(self, hostname): @@ -104,6 +105,7 @@ class Distro(distros.Distro): new_contents.append("# Added by cloud-init") new_contents.append("HOSTNAME=%s" % (hostname)) contents = "\n".join(new_contents) + out_fn = self._paths.join(False, out_fn) util.write_file(out_fn, contents, 0644) def update_hostname(self, hostname, prev_file): @@ -143,6 +145,7 @@ class Distro(distros.Distro): return default def _read_conf(self, filename): + filename = self._paths.join(True, filename) contents = util.load_file(filename, quiet=True) conf_lines = [] for line in contents.splitlines(): @@ -194,7 +197,8 @@ class Distro(distros.Distro): new_contents.append("# Added by cloud-init") new_contents.append('ZONE="%s"' % (tz)) tz_contents = "\n".join(new_contents) - util.write_file("/etc/sysconfig/clock", tz_contents) + tz_fn = self._paths.join(False, "/etc/sysconfig/clock") + util.write_file(tz_fn, tz_contents) # This ensures that the correct tz will be used for the system util.copy(tz_file, "/etc/localtime") diff --git a/cloudinit/distros/ubuntu.py b/cloudinit/distros/ubuntu.py index 24724d83..94565b14 100644 --- a/cloudinit/distros/ubuntu.py +++ b/cloudinit/distros/ubuntu.py @@ -46,7 +46,8 @@ class Distro(distros.Distro): self.package_command('install', pkglist) def _write_network(self, settings): - util.write_file("/etc/network/interfaces", settings) + n_fn = self._paths.join(False, "/etc/network/interfaces") + util.write_file(n_fn, settings) def set_hostname(self, hostname): self._write_hostname(hostname, "/etc/hostname") @@ -84,6 +85,7 @@ class Distro(distros.Distro): util.subp(['hostname', hostname]) def _read_hostname(self, filename, default=None): + filename = self._paths.join(True, filename) contents = util.load_file(filename, quiet=True) for line in contents.splitlines(): c_pos = line.find("#") @@ -105,7 +107,8 @@ class Distro(distros.Distro): raise Exception(("Invalid timezone %s," " no file found at %s") % (tz, tz_file)) tz_contents = "%s\n" % tz - util.write_file("/etc/timezone", 
tz_contents) + tz_fn = self._paths.join(False, "/etc/timezone") + util.write_file(tz_fn, tz_contents) util.copy(tz_file, "/etc/localtime") def package_command(self, command, args=None): diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 3938e7ee..9f55a984 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -208,17 +208,18 @@ class Paths(object): def __init__(self, path_cfgs, ds=None): self.cfgs = path_cfgs # Populate all the initial paths - self.cloud_dir = self.join_paths(False, - path_cfgs.get('cloud_dir', - '/var/lib/cloud')) + self.cloud_dir = self.join(False, + path_cfgs.get('cloud_dir', + '/var/lib/cloud')) self.instance_link = os.path.join(self.cloud_dir, 'instance') self.boot_finished = os.path.join(self.instance_link, "boot-finished") self.upstart_conf_d = path_cfgs.get('upstart_dir') - template_dir = self.join_paths(True, - path_cfgs.get('templates_dir', - '/etc/cloud/templates/')) - self.template_tpl = os.path.join(template_dir, '%s.tmpl') + if self.upstart_conf_d: + self.upstart_conf_d = self.join(False, self.upstart_conf_d) self.seed_dir = os.path.join(self.cloud_dir, 'seed') + # This one isn't joined, since it should just be read-only + template_dir = path_cfgs.get('templates_dir', '/etc/cloud/templates/') + self.template_tpl = os.path.join(template_dir, '%s.tmpl') self.lookups = { "handlers": "handlers", "scripts": "scripts", @@ -235,16 +236,25 @@ class Paths(object): # joins the paths but also appends a read # or write root if available - def join_paths(self, read_only, *paths): + def join(self, read_only, *paths): if read_only: - root = self.cfgs.get('read_root', '/') + root = self.cfgs.get('read_root') else: - root = self.cfgs.get('write_root', '/') + root = self.cfgs.get('write_root') if not paths: return root - joined = os.path.join(*paths) + if len(paths) > 1: + joined = os.path.join(*paths) + else: + joined = paths[0] if root: - joined = os.path.join(root, joined.lstrip("/")) + pre_joined = joined + # Need to remove any starting '/' since this + # will confuse os.path.join + joined = joined.lstrip("/") + joined = os.path.join(root, joined) + LOG.debug("Translated %s to adjusted path %s (%s)", + pre_joined, joined, read_only) return joined # get_ipath_cur: get the current instance path for an item diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 13adbb09..96143d32 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -210,9 +210,12 @@ def update_authorized_keys(fname, keys): return '\n'.join(lines) -def setup_user_keys(keys, user, key_prefix, sshd_config_fn=DEF_SSHD_CFG): +def setup_user_keys(keys, user, key_prefix, paths): + + # Make sure the users .ssh dir is setup accordingly pwent = pwd.getpwnam(user) ssh_dir = os.path.join(pwent.pw_dir, '.ssh') + ssh_dir = paths.join(False, ssh_dir) if not os.path.exists(ssh_dir): util.ensure_dir(ssh_dir, mode=0700) util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid) @@ -223,6 +226,7 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=DEF_SSHD_CFG): for k in keys: key_entries.append(parser.parse(str(k), def_opt=key_prefix)) + sshd_conf_fn = paths.join(True, DEF_SSHD_CFG) with util.SeLinuxGuard(ssh_dir, recursive=True): try: # AuthorizedKeysFile may contain tokens @@ -230,7 +234,7 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=DEF_SSHD_CFG): # The following tokens are defined: %% is replaced by a literal # '%', %h is replaced by the home directory of the user being # authenticated and %u is replaced by the username of that user. 
- ssh_cfg = parse_ssh_config(sshd_config_fn) + ssh_cfg = parse_ssh_config(sshd_conf_fn) akeys = ssh_cfg.get("authorizedkeysfile", '') akeys = akeys.strip() if not akeys: @@ -247,7 +251,7 @@ def setup_user_keys(keys, user, key_prefix, sshd_config_fn=DEF_SSHD_CFG): " in ssh config" " from %s, using 'AuthorizedKeysFile' file" " %s instead"), - sshd_config_fn, authorized_keys) + sshd_conf_fn, authorized_keys) content = update_authorized_keys(authorized_keys, key_entries) util.ensure_dir(os.path.dirname(authorized_keys), mode=0700) diff --git a/cloudinit/transforms/apt_pipelining.py b/cloudinit/transforms/apt_pipelining.py index d8e574b9..f460becb 100644 --- a/cloudinit/transforms/apt_pipelining.py +++ b/cloudinit/transforms/apt_pipelining.py @@ -23,7 +23,7 @@ frequency = PER_INSTANCE distros = ['ubuntu', 'debian'] -default_file = "/etc/apt/apt.conf.d/90cloud-init-pipelining" +DEFAULT_FILE = "/etc/apt/apt.conf.d/90cloud-init-pipelining" # Acquire::http::Pipeline-Depth can be a value # from 0 to 5 indicating how many outstanding requests APT should send. @@ -31,30 +31,27 @@ default_file = "/etc/apt/apt.conf.d/90cloud-init-pipelining" # on TCP connections - otherwise data corruption will occur. -def handle(_name, cfg, _cloud, log, _args): +def handle(_name, cfg, cloud, log, _args): apt_pipe_value = util.get_cfg_option_str(cfg, "apt_pipelining", False) apt_pipe_value_s = str(apt_pipe_value).lower().strip() if apt_pipe_value_s == "false": - write_apt_snippet("0", log) - + write_apt_snippet(cloud, "0", log, DEFAULT_FILE) elif apt_pipe_value_s in ("none", "unchanged", "os"): return - elif apt_pipe_value_s in [str(b) for b in xrange(0, 6)]: - write_apt_snippet(apt_pipe_value_s, log) - + write_apt_snippet(cloud, apt_pipe_value_s, log, DEFAULT_FILE) else: log.warn("Invalid option for apt_pipeling: %s", apt_pipe_value) -def write_apt_snippet(setting, log, f_name=default_file): +def write_apt_snippet(cloud, setting, log, f_name): """ Writes f_name with apt pipeline depth 'setting' """ file_contents = ("//Written by cloud-init per 'apt_pipelining'\n" 'Acquire::http::Pipeline-Depth "%s";\n') % (setting) - util.write_file(f_name, file_contents) + util.write_file(cloud.paths.join(False, f_name), file_contents) log.debug("Wrote %s with apt pipeline depth setting %s", f_name, setting) diff --git a/cloudinit/transforms/apt_update_upgrade.py b/cloudinit/transforms/apt_update_upgrade.py index d49d8bd2..29bbc1ae 100644 --- a/cloudinit/transforms/apt_update_upgrade.py +++ b/cloudinit/transforms/apt_update_upgrade.py @@ -26,6 +26,8 @@ from cloudinit import util distros = ['ubuntu', 'debian'] +PROXY_TPL = "Acquire::HTTP::Proxy \"%s\";\n" + def handle(_name, cfg, cloud, log, _args): update = util.get_cfg_option_bool(cfg, 'apt_update', False) @@ -44,22 +46,23 @@ def handle(_name, cfg, cloud, log, _args): "archive.ubuntu.com/ubuntu") rename_apt_lists(old_mir, mirror) - # set up proxy + # Set up any apt proxy proxy = cfg.get("apt_proxy", None) proxy_filename = "/etc/apt/apt.conf.d/95cloud-init-proxy" if proxy: try: - # See http://linux.die.net/man/5/apt.conf - contents = "Acquire::HTTP::Proxy \"%s\";\n" - util.write_file(proxy_filename, contents % (proxy)) + # See man 'apt.conf' + contents = PROXY_TPL % (proxy) + util.write_file(cloud.paths.join(False, proxy_filename), + contents) except Exception as e: util.logexc(log, "Failed to write proxy to %s", proxy_filename) elif os.path.isfile(proxy_filename): util.del_file(proxy_filename) - # process 'apt_sources' + # Process 'apt_sources' if 'apt_sources' in cfg: - errors = 
add_sources(cfg['apt_sources'], + errors = add_sources(cloud, cfg['apt_sources'], {'MIRROR': mirror, 'RELEASE': release}) for e in errors: log.warn("Source Error: %s", ':'.join(e)) @@ -138,7 +141,7 @@ def generate_sources_list(codename, mirror, cloud, log): log.warn("No template found, not rendering /etc/apt/sources.list") -def add_sources(srclist, template_params=None): +def add_sources(cloud, srclist, template_params=None): """ add entries in /etc/apt/sources.list.d for each abbreviated sources.list entry in 'srclist'. When rendering template, also @@ -187,7 +190,9 @@ def add_sources(srclist, template_params=None): errorlist.append([source, "failed add key"]) try: - util.write_file(ent['filename'], "%s\n" % (source), omode="ab") + contents = "%s\n" % (source) + util.write_file(cloud.paths.join(False, ent['filename']), + contents, omode="ab") except: errorlist.append([source, "failed write to file %s" % ent['filename']]) diff --git a/cloudinit/transforms/ca_certs.py b/cloudinit/transforms/ca_certs.py index e0802bfe..56c41561 100644 --- a/cloudinit/transforms/ca_certs.py +++ b/cloudinit/transforms/ca_certs.py @@ -33,7 +33,7 @@ def update_ca_certs(): util.subp(["update-ca-certificates"]) -def add_ca_certs(certs): +def add_ca_certs(cloud, certs): """ Adds certificates to the system. To actually apply the new certificates you must also call L{update_ca_certs}. @@ -41,26 +41,29 @@ def add_ca_certs(certs): @param certs: A list of certificate strings. """ if certs: - cert_file_contents = "\n".join(certs) + # First ensure they are strings... + cert_file_contents = "\n".join([str(c) for c in certs]) cert_file_fullpath = os.path.join(CA_CERT_PATH, CA_CERT_FILENAME) + cert_file_fullpath = cloud.paths.join(False, cert_file_fullpath) util.write_file(cert_file_fullpath, cert_file_contents, mode=0644) # Append cert filename to CA_CERT_CONFIG file. - util.write_file(CA_CERT_CONFIG, "\n%s" % CA_CERT_FILENAME, omode="ab") + util.write_file(cloud.paths.join(False, CA_CERT_CONFIG), + "\n%s" % CA_CERT_FILENAME, omode="ab") -def remove_default_ca_certs(): +def remove_default_ca_certs(cloud): """ Removes all default trusted CA certificates from the system. To actually apply the change you must also call L{update_ca_certs}. """ - util.delete_dir_contents(CA_CERT_PATH) - util.delete_dir_contents(CA_CERT_SYSTEM_PATH) - util.write_file(CA_CERT_CONFIG, "", mode=0644) + util.delete_dir_contents(cloud.paths.join(False, CA_CERT_PATH)) + util.delete_dir_contents(cloud.paths.join(False, CA_CERT_SYSTEM_PATH)) + util.write_file(cloud.paths.join(False, CA_CERT_CONFIG), "", mode=0644) debconf_sel = "ca-certificates ca-certificates/trust_new_crts select no" util.subp(('debconf-set-selections', '-'), debconf_sel) -def handle(name, cfg, _cloud, log, _args): +def handle(name, cfg, cloud, log, _args): """ Call to handle ca-cert sections in cloud-config file. @@ -82,14 +85,14 @@ def handle(name, cfg, _cloud, log, _args): # default trusted CA certs first. if ca_cert_cfg.get("remove-defaults", False): log.debug("Removing default certificates") - remove_default_ca_certs() + remove_default_ca_certs(cloud) # If we are given any new trusted CA certs to add, add them. if "trusted" in ca_cert_cfg: trusted_certs = util.get_cfg_option_list(ca_cert_cfg, "trusted") if trusted_certs: log.debug("Adding %d certificates" % len(trusted_certs)) - add_ca_certs(trusted_certs) + add_ca_certs(cloud, trusted_certs) # Update the system with the new cert configuration. 
log.debug("Updating certificates") diff --git a/cloudinit/transforms/chef.py b/cloudinit/transforms/chef.py index 31bfb85f..4e8ef346 100644 --- a/cloudinit/transforms/chef.py +++ b/cloudinit/transforms/chef.py @@ -36,30 +36,40 @@ def handle(name, cfg, cloud, log, _args): return chef_cfg = cfg['chef'] - # ensure the chef directories we use exist - util.ensure_dirs(['/etc/chef', '/var/log/chef', '/var/lib/chef', - '/var/cache/chef', '/var/backups/chef', '/var/run/chef']) + # Ensure the chef directories we use exist + c_dirs = [ + '/etc/chef', + '/var/log/chef', + '/var/lib/chef', + '/var/cache/chef', + '/var/backups/chef', + '/var/run/chef', + ] + for d in c_dirs: + util.ensure_dir(cloud.paths.join(False, d)) - # set the validation key based on the presence of either 'validation_key' + # Set the validation key based on the presence of either 'validation_key' # or 'validation_cert'. In the case where both exist, 'validation_key' # takes precedence for key in ('validation_key', 'validation_cert'): if key in chef_cfg and chef_cfg[key]: - util.write_file('/etc/chef/validation.pem', chef_cfg[key]) + v_fn = cloud.paths.join(False, '/etc/chef/validation.pem') + util.write_file(v_fn, chef_cfg[key]) break - # create the chef config from template + # Create the chef config from template template_fn = cloud.get_template_filename('chef_client.rb') if template_fn: + iid = str(cloud.datasource.get_instance_id()) params = { 'server_url': chef_cfg['server_url'], - 'node_name': util.get_cfg_option_str(chef_cfg, 'node_name', - cloud.datasource.get_instance_id()), + 'node_name': util.get_cfg_option_str(chef_cfg, 'node_name', iid), 'environment': util.get_cfg_option_str(chef_cfg, 'environment', '_default'), 'validation_name': chef_cfg['validation_name'] } - templater.render_to_file(template_fn, '/etc/chef/client.rb', params) + out_fn = cloud.paths.join(False, '/etc/chef/client.rb') + templater.render_to_file(template_fn, out_fn, params) else: log.warn("No template found, not rendering to /etc/chef/client.rb") @@ -71,7 +81,8 @@ def handle(name, cfg, cloud, log, _args): initial_attributes = chef_cfg['initial_attributes'] for k in list(initial_attributes.keys()): initial_json[k] = initial_attributes[k] - util.write_file('/etc/chef/firstboot.json', json.dumps(initial_json)) + firstboot_fn = cloud.paths.join(False, '/etc/chef/firstboot.json') + util.write_file(firstboot_fn, json.dumps(initial_json)) # If chef is not installed, we install chef based on 'install_type' if not os.path.isfile('/usr/bin/chef-client'): diff --git a/cloudinit/transforms/keys_to_console.py b/cloudinit/transforms/keys_to_console.py index e974375f..40758198 100644 --- a/cloudinit/transforms/keys_to_console.py +++ b/cloudinit/transforms/keys_to_console.py @@ -29,23 +29,24 @@ frequency = PER_INSTANCE helper_tool = '/usr/lib/cloud-init/write-ssh-key-fingerprints' -def handle(name, cfg, _cloud, log, _args): +def handle(name, cfg, cloud, log, _args): if not os.path.exists(helper_tool): log.warn(("Unable to activate transform %s," " helper tool not found at %s"), name, helper_tool) return fp_blacklist = util.get_cfg_option_list(cfg, - "ssh_fp_console_blacklist", []) + "ssh_fp_console_blacklist", []) key_blacklist = util.get_cfg_option_list(cfg, - "ssh_key_console_blacklist", ["ssh-dss"]) + "ssh_key_console_blacklist", + ["ssh-dss"]) try: cmd = [helper_tool] cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) - util.write_file('/dev/console', stdout) + util.write_file(cloud.paths.join(False, 
'/dev/console'), stdout) except: log.warn("Writing keys to /dev/console failed!") raise diff --git a/cloudinit/transforms/landscape.py b/cloudinit/transforms/landscape.py index 19948d0e..29ce41b9 100644 --- a/cloudinit/transforms/landscape.py +++ b/cloudinit/transforms/landscape.py @@ -33,12 +33,12 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -lsc_client_cfg_file = "/etc/landscape/client.conf" +LSC_CLIENT_CFG_FILE = "/etc/landscape/client.conf" distros = ['ubuntu'] # defaults taken from stock client.conf in landscape-client 11.07.1.1-0ubuntu2 -lsc_builtincfg = { +LSC_BUILTIN_CFG = { 'client': { 'log_level': "info", 'url': "https://landscape.canonical.com/message-system", @@ -48,7 +48,7 @@ lsc_builtincfg = { } -def handle(name, cfg, _cloud, log, _args): +def handle(name, cfg, cloud, log, _args): """ Basically turn a top level 'landscape' entry with a 'client' dict and render it to ConfigObj format under '[client]' section in @@ -66,15 +66,19 @@ def handle(name, cfg, _cloud, log, _args): " but not a dictionary type," " is a %s instead"), util.obj_name(ls_cloudcfg)) - merged = merge_together([lsc_builtincfg, lsc_client_cfg_file, ls_cloudcfg]) + lsc_client_fn = cloud.paths.join(True, LSC_CLIENT_CFG_FILE) + merged = merge_together([LSC_BUILTIN_CFG, lsc_client_fn, ls_cloudcfg]) - if not os.path.isdir(os.path.dirname(lsc_client_cfg_file)): - util.ensure_dir(os.path.dirname(lsc_client_cfg_file)) + lsc_dir = cloud.paths.join(False, os.path.dirname(lsc_client_fn)) + if not os.path.isdir(lsc_dir): + util.ensure_dir(lsc_dir) contents = StringIO() merged.write(contents) - util.write_file(lsc_client_cfg_file, contents.getvalue()) - log.debug("Wrote landscape config file to %s", lsc_client_cfg_file) + contents.flush() + + util.write_file(lsc_client_fn, contents.getvalue()) + log.debug("Wrote landscape config file to %s", lsc_client_fn) def merge_together(objs): diff --git a/cloudinit/transforms/mcollective.py b/cloudinit/transforms/mcollective.py index 5464fe8c..9754d6b8 100644 --- a/cloudinit/transforms/mcollective.py +++ b/cloudinit/transforms/mcollective.py @@ -24,8 +24,8 @@ from StringIO import StringIO from cloudinit import cfg as config from cloudinit import util -pubcert_file = "/etc/mcollective/ssl/server-public.pem" -pricert_file = "/etc/mcollective/ssl/server-private.pem" +PUBCERT_FILE = "/etc/mcollective/ssl/server-public.pem" +PRICERT_FILE = "/etc/mcollective/ssl/server-private.pem" def handle(name, cfg, cloud, log, _args): @@ -47,7 +47,8 @@ def handle(name, cfg, cloud, log, _args): mcollective_config = config.DefaultingConfigParser() # Read server.cfg values from original file in order to be able to mix # the rest up - old_contents = util.load_file('/etc/mcollective/server.cfg') + server_cfg_fn = cloud.paths.join(True, '/etc/mcollective/server.cfg') + old_contents = util.load_file(server_cfg_fn) # It doesn't contain any sections so just add one temporarily # Use a hash id based off the contents, # just incase of conflicts... (try to not have any...) 
@@ -61,17 +62,19 @@ def handle(name, cfg, cloud, log, _args): section_head = section_tpl % (attempts) sectioned_contents = "%s\n%s" % (section_head, old_contents) mcollective_config.readfp(StringIO(sectioned_contents), - filename='/etc/mcollective/server.cfg') + filename=server_cfg_fn) for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems(): if cfg_name == 'public-cert': - util.write_file(pubcert_file, cfg, mode=0644) + pubcert_fn = cloud.paths.join(True, PUBCERT_FILE) + util.write_file(pubcert_fn, cfg, mode=0644) mcollective_config.set(cfg_name, - 'plugin.ssl_server_public', pubcert_file) + 'plugin.ssl_server_public', pubcert_fn) mcollective_config.set(cfg_name, 'securityprovider', 'ssl') elif cfg_name == 'private-cert': - util.write_file(pricert_file, cfg, mode=0600) + pricert_fn = cloud.paths.join(True, PRICERT_FILE) + util.write_file(pricert_fn, cfg, mode=0600) mcollective_config.set(cfg_name, - 'plugin.ssl_server_private', pricert_file) + 'plugin.ssl_server_private', pricert_fn) mcollective_config.set(cfg_name, 'securityprovider', 'ssl') else: # Iterate throug the config items, we'll use ConfigParser.set @@ -80,15 +83,15 @@ def handle(name, cfg, cloud, log, _args): mcollective_config.set(cfg_name, o, v) # We got all our config as wanted we'll rename # the previous server.cfg and create our new one - util.rename('/etc/mcollective/server.cfg', - '/etc/mcollective/server.cfg.old') + old_fn = "%s.old" % (server_cfg_fn) + util.rename(server_cfg_fn, old_fn) # Now we got the whole file, write to disk except the section # we added so that config parser won't error out when trying to read. # Note below, that we've just used ConfigParser because it generally # works. Below, we remove the initial 'nullsection' header. contents = mcollective_config.stringify() contents = contents.replace("%s\n" % (section_head), "") - util.write_file('/etc/mcollective/server.cfg', contents, mode=0644) + util.write_file(server_cfg_fn, contents, mode=0644) # Start mcollective util.subp(['service', 'mcollective', 'start'], capture=False) diff --git a/cloudinit/transforms/mounts.py b/cloudinit/transforms/mounts.py index 44182b87..700fbc44 100644 --- a/cloudinit/transforms/mounts.py +++ b/cloudinit/transforms/mounts.py @@ -168,7 +168,7 @@ def handle(_name, cfg, cloud, log, _args): cc_lines.append('\t'.join(line)) fstab_lines = [] - fstab = util.load_file("/etc/fstab") + fstab = util.load_file(cloud.paths.join(True, "/etc/fstab")) for line in fstab.splitlines(): try: toks = ws.split(line) @@ -180,7 +180,7 @@ def handle(_name, cfg, cloud, log, _args): fstab_lines.extend(cc_lines) contents = "%s\n" % ('\n'.join(fstab_lines)) - util.write_file("/etc/fstab", contents) + util.write_file(cloud.paths.join(False, "/etc/fstab"), contents) if needswap: try: @@ -190,7 +190,7 @@ def handle(_name, cfg, cloud, log, _args): for d in dirs: try: - util.ensure_dir(d) + util.ensure_dir(cloud.paths.join(False, d)) except: util.logexc(log, "Failed to make '%s' config-mount", d) diff --git a/cloudinit/transforms/phone_home.py b/cloudinit/transforms/phone_home.py index 98ff2b85..a8752527 100644 --- a/cloudinit/transforms/phone_home.py +++ b/cloudinit/transforms/phone_home.py @@ -77,7 +77,7 @@ def handle(name, cfg, cloud, log, args): for (n, path) in pubkeys.iteritems(): try: - all_keys[n] = util.load_file(path) + all_keys[n] = util.load_file(cloud.paths.join(True, path)) except: util.logexc(log, ("%s: failed to open, can not" " phone home that data"), path) diff --git a/cloudinit/transforms/puppet.py b/cloudinit/transforms/puppet.py index 
76cc9732..d55118ea 100644 --- a/cloudinit/transforms/puppet.py +++ b/cloudinit/transforms/puppet.py @@ -43,7 +43,8 @@ def handle(name, cfg, cloud, log, _args): # ... and then update the puppet configuration if 'conf' in puppet_cfg: # Add all sections from the conf object to puppet.conf - contents = util.load_file('/etc/puppet/puppet.conf') + puppet_conf_fn = cloud.paths.join(False, '/etc/puppet/puppet.conf') + contents = util.load_file(puppet_conf_fn) # Create object for reading puppet.conf values puppet_config = config.DefaultingConfigParser() # Read puppet.conf values from original file in order to be able to @@ -51,21 +52,27 @@ def handle(name, cfg, cloud, log, _args): cleaned_lines = [i.lstrip() for i in contents.splitlines()] cleaned_contents = '\n'.join(cleaned_lines) puppet_config.readfp(StringIO(cleaned_contents), - filename='/etc/puppet/puppet.conf') + filename=puppet_conf_fn) for (cfg_name, cfg) in puppet_cfg['conf'].iteritems(): - # ca_cert configuration is a special case - # Dump the puppetmaster ca certificate in the correct place + # Cert configuration is a special case + # Dump the puppet master ca certificate in the correct place if cfg_name == 'ca_cert': # Puppet ssl sub-directory isn't created yet # Create it with the proper permissions and ownership - util.ensure_dir('/var/lib/puppet/ssl', 0771) - util.chownbyid('/var/lib/puppet/ssl', + pp_ssl_dir = cloud.paths.join(False, '/var/lib/puppet/ssl') + util.ensure_dir(pp_ssl_dir, 0771) + util.chownbyid(pp_ssl_dir, pwd.getpwnam('puppet').pw_uid, 0) - util.ensure_dir('/var/lib/puppet/ssl/certs/') - util.chownbyid('/var/lib/puppet/ssl/certs/', + pp_ssl_certs = cloud.paths.join(False, + '/var/lib/puppet/ssl/certs/') + util.ensure_dir(pp_ssl_certs) + util.chownbyid(pp_ssl_certs, pwd.getpwnam('puppet').pw_uid, 0) - util.write_file('/var/lib/puppet/ssl/certs/ca.pem', cfg) - util.chownbyid('/var/lib/puppet/ssl/certs/ca.pem', + pp_ssl_ca_certs = cloud.paths.join(False, + ('/var/lib/puppet/' + 'ssl/certs/ca.pem')) + util.write_file(pp_ssl_ca_certs, cfg) + util.chownbyid(pp_ssl_ca_certs, pwd.getpwnam('puppet').pw_uid, 0) else: # Iterate throug the config items, we'll use ConfigParser.set @@ -82,10 +89,9 @@ def handle(name, cfg, cloud, log, _args): puppet_config.set(cfg_name, o, v) # We got all our config as wanted we'll rename # the previous puppet.conf and create our new one - util.rename('/etc/puppet/puppet.conf', - '/etc/puppet/puppet.conf.old') - contents = puppet_config.stringify() - util.write_file('/etc/puppet/puppet.conf', contents) + puppet_conf_old_fn = "%s.old" % (puppet_conf_fn) + util.rename(puppet_conf_fn, puppet_conf_old_fn) + util.write_file(puppet_conf_fn, puppet_config.stringify()) # Set puppet to automatically start if os.path.exists('/etc/default/puppet'): diff --git a/cloudinit/transforms/resizefs.py b/cloudinit/transforms/resizefs.py index fe012417..fd2bb9e1 100644 --- a/cloudinit/transforms/resizefs.py +++ b/cloudinit/transforms/resizefs.py @@ -62,7 +62,7 @@ def get_fs_type(st_dev, path, log): raise -def handle(name, cfg, _cloud, log, args): +def handle(name, cfg, cloud, log, args): if len(args) != 0: resize_root = args[0] else: @@ -74,11 +74,11 @@ def handle(name, cfg, _cloud, log, args): # TODO is the directory ok to be used?? resize_root_d = util.get_cfg_option_str(cfg, "resize_rootfs_tmp", "/run") + resize_root_d = cloud.paths.join(False, resize_root_d) util.ensure_dir(resize_root_d) - # TODO: allow what is to be resized to - # be configurable?? 
- resize_what = "/" + # TODO: allow what is to be resized to be configurable?? + resize_what = cloud.paths.join(False) with util.SilentTemporaryFile(prefix="cloudinit.resizefs.", dir=resize_root_d, delete=True) as tfh: devpth = tfh.name diff --git a/cloudinit/transforms/rightscale_userdata.py b/cloudinit/transforms/rightscale_userdata.py index 8dfd845f..dc06f9ec 100644 --- a/cloudinit/transforms/rightscale_userdata.py +++ b/cloudinit/transforms/rightscale_userdata.py @@ -78,6 +78,7 @@ def handle(name, _cfg, cloud, log, _args): urls = mdict[my_hookname] for (i, url) in enumerate(urls): fname = os.path.join(scripts_d, "rightscale-%02i" % (i)) + fname = cloud.paths.join(False, fname) try: resp = uhelp.readurl(url) # Ensure its a valid http response (and something gotten) diff --git a/cloudinit/transforms/rsyslog.py b/cloudinit/transforms/rsyslog.py index 71b74711..f2c1de1e 100644 --- a/cloudinit/transforms/rsyslog.py +++ b/cloudinit/transforms/rsyslog.py @@ -71,7 +71,8 @@ def handle(name, cfg, cloud, log, _args): try: contents = "%s\n" % (content) - util.write_file(filename, contents, omode=omode) + util.write_file(cloud.paths.join(False, filename), + contents, omode=omode) except Exception: util.logexc(log, "Failed to write to %s", filename) diff --git a/cloudinit/transforms/runcmd.py b/cloudinit/transforms/runcmd.py index 31a254a5..f121484b 100644 --- a/cloudinit/transforms/runcmd.py +++ b/cloudinit/transforms/runcmd.py @@ -33,6 +33,6 @@ def handle(name, cfg, cloud, log, _args): cmd = cfg["runcmd"] try: content = util.shellify(cmd) - util.write_file(out_fn, content, 0700) + util.write_file(cloud.paths.join(False, out_fn), content, 0700) except: util.logexc(log, "Failed to shellify %s into file %s", cmd, out_fn) diff --git a/cloudinit/transforms/salt_minion.py b/cloudinit/transforms/salt_minion.py index d05d2a1e..16f5286d 100644 --- a/cloudinit/transforms/salt_minion.py +++ b/cloudinit/transforms/salt_minion.py @@ -35,6 +35,7 @@ def handle(name, cfg, cloud, log, _args): # Ensure we can configure files at the right dir config_dir = salt_cfg.get("config_dir", '/etc/salt') + config_dir = cloud.paths.join(False, config_dir) util.ensure_dir(config_dir) # ... and then update the salt configuration @@ -47,6 +48,7 @@ def handle(name, cfg, cloud, log, _args): # ... 
copy the key pair if specified if 'public_key' in salt_cfg and 'private_key' in salt_cfg: pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki') + pki_dir = cloud.paths.join(pki_dir) with util.umask(077): util.ensure_dir(pki_dir) pub_name = os.path.join(pki_dir, 'minion.pub') diff --git a/cloudinit/transforms/set_passwords.py b/cloudinit/transforms/set_passwords.py index c0cc4e84..e7049f22 100644 --- a/cloudinit/transforms/set_passwords.py +++ b/cloudinit/transforms/set_passwords.py @@ -130,8 +130,8 @@ def handle(_name, cfg, cloud, log, args): replaced_auth = True new_lines.append(replacement) - new_contents = "\n".join(new_lines) - util.write_file('/etc/ssh/sshd_config', new_contents) + util.write_file(cloud.paths.join(False, '/etc/ssh/sshd_config'), + "\n".join(new_lines)) try: cmd = ['service'] diff --git a/cloudinit/transforms/ssh.py b/cloudinit/transforms/ssh.py index b1f2ce89..33d4bb54 100644 --- a/cloudinit/transforms/ssh.py +++ b/cloudinit/transforms/ssh.py @@ -64,7 +64,8 @@ def handle(_name, cfg, cloud, log, _args): if key in key2file: tgt_fn = key2file[key][0] tgt_perms = key2file[key][1] - util.write_file(tgt_fn, val, tgt_perms) + util.write_file(cloud.paths.join(False, tgt_fn), + val, tgt_perms) for (priv, pub) in priv2pub.iteritems(): if pub in cfg['ssh_keys'] or not priv in cfg['ssh_keys']: @@ -86,6 +87,7 @@ def handle(_name, cfg, cloud, log, _args): generate_keys) for keytype in genkeys: keyfile = '/etc/ssh/ssh_host_%s_key' % (keytype) + keyfile = cloud.paths.join(False, keyfile) if not os.path.exists(keyfile): cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile] try: @@ -107,20 +109,21 @@ def handle(_name, cfg, cloud, log, _args): cfgkeys = cfg["ssh_authorized_keys"] keys.extend(cfgkeys) - apply_credentials(keys, user, disable_root, disable_root_opts) + apply_credentials(keys, user, cloud.paths, + disable_root, disable_root_opts) except: util.logexc(log, "Applying ssh credentials failed!") -def apply_credentials(keys, user, disable_root, disable_root_opts): +def apply_credentials(keys, user, paths, disable_root, disable_root_opts): keys = set(keys) if user: - ssh_util.setup_user_keys(keys, user, '') + ssh_util.setup_user_keys(keys, user, '', paths) if disable_root and user: key_prefix = disable_root_opts.replace('$USER', user) else: key_prefix = '' - ssh_util.setup_user_keys(keys, 'root', key_prefix) + ssh_util.setup_user_keys(keys, 'root', key_prefix, paths) -- cgit v1.2.3 From c431144283b6a348c761636e552c74091f4367f1 Mon Sep 17 00:00:00 2001 From: harlowja Date: Wed, 20 Jun 2012 00:29:51 -0700 Subject: Ensure any authorized key file found in authorized keys is path adjusted as well --- cloudinit/ssh_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 96143d32..fef3d40f 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -244,7 +244,7 @@ def setup_user_keys(keys, user, key_prefix, paths): akeys = akeys.replace("%%", '%') if not akeys.startswith('/'): akeys = os.path.join(pwent.pw_dir, akeys) - authorized_keys = akeys + authorized_keys = paths.join(False, akeys) except (IOError, OSError): authorized_keys = os.path.join(ssh_dir, 'authorized_keys') util.logexc(LOG, ("Failed extracting 'AuthorizedKeysFile'" -- cgit v1.2.3 From ec4bdc4fb8d8d3a8f8b4f498eb47eac740485ede Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Wed, 20 Jun 2012 17:13:55 -0700 Subject: Massive pylint + pep8 fixups! 
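The bulk of this change is mechanical: module-level names that are used as constants move to UPPER_CASE per PEP 8, trailing whitespace goes away, and over-long call sites are re-wrapped. The renames below are taken from the cc_chef.py and cc_keys_to_console.py hunks in this patch; the values themselves are unchanged.

# Before: lower-case module "constants"
ruby_version_default = "1.8"
helper_tool = '/usr/lib/cloud-init/write-ssh-key-fingerprints'

# After: the same values, named as constants
RUBY_VERSION_DEFAULT = "1.8"
HELPER_TOOL = '/usr/lib/cloud-init/write-ssh-key-fingerprints'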
--- bin/cloud-init | 20 +++++++-------- cloudinit/cloud.py | 3 ++- cloudinit/config/__init__.py | 2 +- cloudinit/config/cc_chef.py | 14 +++++------ cloudinit/config/cc_disable_ec2_metadata.py | 4 +-- cloudinit/config/cc_final_message.py | 8 +++--- cloudinit/config/cc_foo.py | 8 +++--- cloudinit/config/cc_keys_to_console.py | 8 +++--- cloudinit/config/cc_landscape.py | 2 +- cloudinit/config/cc_mcollective.py | 4 +-- cloudinit/config/cc_mounts.py | 13 +++++----- cloudinit/config/cc_phone_home.py | 11 +++++--- cloudinit/config/cc_puppet.py | 2 +- cloudinit/config/cc_resizefs.py | 10 ++++---- cloudinit/config/cc_salt_minion.py | 2 +- cloudinit/config/cc_scripts_per_boot.py | 6 ++--- cloudinit/config/cc_scripts_per_instance.py | 6 ++--- cloudinit/config/cc_scripts_per_once.py | 6 ++--- cloudinit/config/cc_scripts_user.py | 6 ++--- cloudinit/config/cc_set_passwords.py | 4 +-- cloudinit/config/cc_ssh.py | 39 +++++++++++++++-------------- cloudinit/distros/__init__.py | 1 - cloudinit/distros/rhel.py | 14 +++++------ cloudinit/distros/ubuntu.py | 6 ++--- cloudinit/handlers/__init__.py | 8 +++--- cloudinit/helpers.py | 6 ++--- cloudinit/log.py | 2 -- cloudinit/settings.py | 2 +- cloudinit/sources/DataSourceCloudStack.py | 2 +- cloudinit/sources/DataSourceConfigDrive.py | 2 +- cloudinit/sources/DataSourceEc2.py | 6 ++--- cloudinit/sources/DataSourceMAAS.py | 1 + cloudinit/sources/DataSourceNoCloud.py | 2 +- cloudinit/ssh_util.py | 5 ++-- cloudinit/stages.py | 6 ++--- cloudinit/url_helper.py | 14 +++++------ cloudinit/user_data.py | 28 +++++++++------------ cloudinit/util.py | 37 ++++++++++++++------------- 38 files changed, 159 insertions(+), 161 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/bin/cloud-init b/bin/cloud-init index 032d5f39..c1788ef4 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -75,6 +75,7 @@ def welcome(action): sys.stderr.flush() LOG.info(welcome_msg) + def extract_fns(args): # Files are already opened so lets just pass that along # since it would of broke if it couldn't have @@ -329,11 +330,11 @@ def main_single(name, args): def main(): parser = argparse.ArgumentParser() - + # Top level args - parser.add_argument('--version', '-v', action='version', + parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + (version.version_string())) - parser.add_argument('--file', '-f', action='append', + parser.add_argument('--file', '-f', action='append', dest='files', help=('additional yaml configuration' ' files to use'), @@ -345,18 +346,18 @@ def main(): subparsers = parser.add_subparsers() # Each action and its sub-options (if any) - parser_init = subparsers.add_parser('init', + parser_init = subparsers.add_parser('init', help=('initializes cloud-init and' ' performs initial modules')) parser_init.add_argument("--local", '-l', action='store_true', help="start in local mode (default: %(default)s)", default=False) - # This is used so that we can know which action is selected + + # This is used so that we can know which action is selected + # the functor to use to run this subcommand parser_init.set_defaults(action=('init', main_init)) # These settings are used for the 'config' and 'final' stages - parser_mod = subparsers.add_parser('modules', + parser_mod = subparsers.add_parser('modules', help=('activates modules ' 'using a given configuration key')) parser_mod.add_argument("--mode", '-m', action='store', @@ -368,7 +369,7 @@ def main(): # These settings are used when you want to query information # stored in the cloud-init data 
objects/directories/files - parser_query = subparsers.add_parser('query', + parser_query = subparsers.add_parser('query', help=('query information stored ' 'in cloud-init')) parser_query.add_argument("--name", '-n', action="store", @@ -378,7 +379,7 @@ def main(): parser_query.set_defaults(action=('query', main_query)) # This subcommand allows you to run a single module - parser_single = subparsers.add_parser('single', + parser_single = subparsers.add_parser('single', help=('run a single module ')) parser_single.set_defaults(action=('single', main_single)) parser_single.add_argument("--name", '-n', action="store", @@ -394,10 +395,10 @@ def main(): ' pass to this module')) parser_single.set_defaults(action=('single', main_single)) - args = parser.parse_args() # Setup basic logging to start (until reinitialized) + # iff in debug mode... if args.debug: logging.setupBasicLogging() @@ -407,4 +408,3 @@ def main(): if __name__ == '__main__': sys.exit(main()) - diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py index 90679202..6cdcb76a 100644 --- a/cloudinit/cloud.py +++ b/cloudinit/cloud.py @@ -38,6 +38,7 @@ LOG = logging.getLogger(__name__) # as providing a backwards compatible object that can be maintained # while the stages/other objects can be worked on independently... + class Cloud(object): def __init__(self, datasource, paths, cfg, distro, runners): self.datasource = datasource @@ -71,7 +72,7 @@ class Cloud(object): # The rest of thes are just useful proxies def get_userdata(self): return self.datasource.get_userdata() - + def get_instance_id(self): return self.datasource.get_instance_id() diff --git a/cloudinit/config/__init__.py b/cloudinit/config/__init__.py index 74e2f275..02e32462 100644 --- a/cloudinit/config/__init__.py +++ b/cloudinit/config/__init__.py @@ -25,7 +25,7 @@ from cloudinit import log as logging LOG = logging.getLogger(__name__) -# This prefix is used to make it less +# This prefix is used to make it less # of a change that when importing # we will not find something else with the same # name in the lookup path... 
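The bin/cloud-init hunks above are mostly re-wrapping, but they also show the sub-command pattern the script is built on: each subparser stores a (name, functor) tuple via set_defaults(), and main() later unpacks that tuple to dispatch. A self-contained sketch of the pattern; the unpacking of args.action at the bottom is an assumption about how the real script consumes it, and main_init here is a stub rather than the real init stage.

import sys
import argparse


def main_init(name, args):
    # Stand-in for the real 'init' entry point.
    print("running %s (local=%s)" % (name, args.local))
    return 0


def main():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    parser_init = subparsers.add_parser('init',
                                        help=('initializes cloud-init and'
                                              ' performs initial modules'))
    parser_init.add_argument("--local", '-l', action='store_true',
                             default=False)
    # Remember which action was selected and the functor that implements it.
    parser_init.set_defaults(action=('init', main_init))
    args = parser.parse_args()
    (name, functor) = args.action
    return functor(name, args)


if __name__ == '__main__':
    sys.exit(main())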
diff --git a/cloudinit/config/cc_chef.py b/cloudinit/config/cc_chef.py index 4e8ef346..74af2a7e 100644 --- a/cloudinit/config/cc_chef.py +++ b/cloudinit/config/cc_chef.py @@ -24,7 +24,7 @@ import os from cloudinit import templater from cloudinit import util -ruby_version_default = "1.8" +RUBY_VERSION_DEFAULT = "1.8" def handle(name, cfg, cloud, log, _args): @@ -38,11 +38,11 @@ def handle(name, cfg, cloud, log, _args): # Ensure the chef directories we use exist c_dirs = [ - '/etc/chef', - '/var/log/chef', - '/var/lib/chef', - '/var/cache/chef', - '/var/backups/chef', + '/etc/chef', + '/var/log/chef', + '/var/lib/chef', + '/var/cache/chef', + '/var/backups/chef', '/var/run/chef', ] for d in c_dirs: @@ -92,7 +92,7 @@ def handle(name, cfg, cloud, log, _args): # this will install and run the chef-client from gems chef_version = util.get_cfg_option_str(chef_cfg, 'version', None) ruby_version = util.get_cfg_option_str(chef_cfg, 'ruby_version', - ruby_version_default) + RUBY_VERSION_DEFAULT) install_chef_from_gems(cloud.distro, ruby_version, chef_version) # and finally, run chef-client log.debug('Running chef-client') diff --git a/cloudinit/config/cc_disable_ec2_metadata.py b/cloudinit/config/cc_disable_ec2_metadata.py index c7d26029..62cca7cc 100644 --- a/cloudinit/config/cc_disable_ec2_metadata.py +++ b/cloudinit/config/cc_disable_ec2_metadata.py @@ -24,13 +24,13 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -reject_cmd = ['route', 'add', '-host', '169.254.169.254', 'reject'] +REJECT_CMD = ['route', 'add', '-host', '169.254.169.254', 'reject'] def handle(name, cfg, _cloud, log, _args): disabled = util.get_cfg_option_bool(cfg, "disable_ec2_metadata", False) if disabled: - util.subp(reject_cmd) + util.subp(REJECT_CMD) else: log.debug(("Skipping transform named %s," " disabling the ec2 route not enabled"), name) diff --git a/cloudinit/config/cc_final_message.py b/cloudinit/config/cc_final_message.py index c257b6d0..fd59aa1e 100644 --- a/cloudinit/config/cc_final_message.py +++ b/cloudinit/config/cc_final_message.py @@ -28,7 +28,7 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -final_message_def = ("Cloud-init v. {{version}} finished at {{timestamp}}." +FINAL_MESSAGE_DEF = ("Cloud-init v. {{version}} finished at {{timestamp}}." " Up {{uptime}} seconds.") @@ -39,21 +39,21 @@ def handle(_name, cfg, cloud, log, args): msg_in = args[0] else: msg_in = util.get_cfg_option_str(cfg, "final_message") - + if not msg_in: template_fn = cloud.get_template_filename('final_message') if template_fn: msg_in = util.load_file(template_fn) if not msg_in: - msg_in = final_message_def + msg_in = FINAL_MESSAGE_DEF uptime = util.uptime() ts = util.time_rfc2822() cver = version.version_string() try: subs = { - 'uptime': uptime, + 'uptime': uptime, 'timestamp': ts, 'version': cver, } diff --git a/cloudinit/config/cc_foo.py b/cloudinit/config/cc_foo.py index 99135704..e81e7faa 100644 --- a/cloudinit/config/cc_foo.py +++ b/cloudinit/config/cc_foo.py @@ -30,19 +30,19 @@ from cloudinit.settings import PER_INSTANCE # as well as any datasource provided configuration # c) A cloud object that can be used to access various # datasource and paths for the given distro and data provided -# by the various datasource instance types. +# by the various datasource instance types. # d) A argument list that may or may not be empty to this module. 
# Typically those are from module configuration where the module # is defined with some extra configuration that will eventually # be translated from yaml into arguments to this module. # 2. A optional 'frequency' that defines how often this module should be ran. -# Typically one of PER_INSTANCE, PER_ALWAYS, PER_ONCE. If not -# provided PER_INSTANCE will be assumed. +# Typically one of PER_INSTANCE, PER_ALWAYS, PER_ONCE. If not +# provided PER_INSTANCE will be assumed. # See settings.py for these constants. # 3. A optional 'distros' array/set/tuple that defines the known distros # this module will work with (if not all of them). This is used to write # a warning out if a module is being ran on a untested distribution for -# informational purposes. If non existent all distros are assumed and +# informational purposes. If non existent all distros are assumed and # no warning occurs. frequency = PER_INSTANCE diff --git a/cloudinit/config/cc_keys_to_console.py b/cloudinit/config/cc_keys_to_console.py index 40758198..a8fb3ba7 100644 --- a/cloudinit/config/cc_keys_to_console.py +++ b/cloudinit/config/cc_keys_to_console.py @@ -26,13 +26,13 @@ from cloudinit import util frequency = PER_INSTANCE # This is a tool that cloud init provides -helper_tool = '/usr/lib/cloud-init/write-ssh-key-fingerprints' +HELPER_TOOL = '/usr/lib/cloud-init/write-ssh-key-fingerprints' def handle(name, cfg, cloud, log, _args): - if not os.path.exists(helper_tool): + if not os.path.exists(HELPER_TOOL): log.warn(("Unable to activate transform %s," - " helper tool not found at %s"), name, helper_tool) + " helper tool not found at %s"), name, HELPER_TOOL) return fp_blacklist = util.get_cfg_option_list(cfg, @@ -42,7 +42,7 @@ def handle(name, cfg, cloud, log, _args): ["ssh-dss"]) try: - cmd = [helper_tool] + cmd = [HELPER_TOOL] cmd.append(','.join(fp_blacklist)) cmd.append(','.join(key_blacklist)) (stdout, _stderr) = util.subp(cmd) diff --git a/cloudinit/config/cc_landscape.py b/cloudinit/config/cc_landscape.py index 29ce41b9..599276a7 100644 --- a/cloudinit/config/cc_landscape.py +++ b/cloudinit/config/cc_landscape.py @@ -62,7 +62,7 @@ def handle(name, cfg, cloud, log, _args): ls_cloudcfg = cfg.get("landscape", {}) if not isinstance(ls_cloudcfg, dict): - raise Exception(("'landscape' key existed in config," + raise Exception(("'landscape' key existed in config," " but not a dictionary type," " is a %s instead"), util.obj_name(ls_cloudcfg)) diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py index 4cec6494..ba5e13ca 100644 --- a/cloudinit/config/cc_mcollective.py +++ b/cloudinit/config/cc_mcollective.py @@ -52,7 +52,7 @@ def handle(name, cfg, cloud, log, _args): # It doesn't contain any sections so just add one temporarily # Use a hash id based off the contents, # just incase of conflicts... (try to not have any...) - # This is so that an error won't occur when reading (and no + # This is so that an error won't occur when reading (and no # sections exist in the file) section_tpl = "[nullsection_%s]" attempts = 0 @@ -85,7 +85,7 @@ def handle(name, cfg, cloud, log, _args): # the previous server.cfg and create our new one old_fn = "%s.old" % (server_cfg_fn) util.rename(server_cfg_fn, old_fn) - # Now we got the whole file, write to disk except the section + # Now we got the whole file, write to disk except the section # we added so that config parser won't error out when trying to read. # Note below, that we've just used ConfigParser because it generally # works. 
Below, we remove the initial 'nullsection' header. diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 700fbc44..ab097c2a 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -24,10 +24,10 @@ import re from cloudinit import util -# shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 -shortname_filter = r"^[x]{0,1}[shv]d[a-z][0-9]*$" -shortname = re.compile(shortname_filter) -ws = re.compile("[%s]+" % (whitespace)) +# Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1 +SHORTNAME_FILTER = r"^[x]{0,1}[shv]d[a-z][0-9]*$" +SHORTNAME = re.compile(SHORTNAME_FILTER) +WS = re.compile("[%s]+" % (whitespace)) def is_mdname(name): @@ -55,7 +55,6 @@ def handle(_name, cfg, cloud, log, _args): if "mounts" in cfg: cfgmnt = cfg["mounts"] - for i in range(len(cfgmnt)): # skip something that wasn't a list if not isinstance(cfgmnt[i], list): @@ -85,7 +84,7 @@ def handle(_name, cfg, cloud, log, _args): cfgmnt[i][0] = renamed log.debug("Mapped metadata name %s to %s", startname, renamed) else: - if shortname.match(startname): + if SHORTNAME.match(startname): renamed = "/dev/%s" % startname log.debug("Mapped shortname name %s to %s", startname, renamed) cfgmnt[i][0] = renamed @@ -171,7 +170,7 @@ def handle(_name, cfg, cloud, log, _args): fstab = util.load_file(cloud.paths.join(True, "/etc/fstab")) for line in fstab.splitlines(): try: - toks = ws.split(line) + toks = WS.split(line) if toks[3].find(comment) != -1: continue except: diff --git a/cloudinit/config/cc_phone_home.py b/cloudinit/config/cc_phone_home.py index a8752527..dcb07b66 100644 --- a/cloudinit/config/cc_phone_home.py +++ b/cloudinit/config/cc_phone_home.py @@ -26,8 +26,13 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -post_list_all = ['pub_key_dsa', 'pub_key_rsa', 'pub_key_ecdsa', - 'instance_id', 'hostname'] +POST_LIST_ALL = [ + 'pub_key_dsa', + 'pub_key_rsa', + 'pub_key_ecdsa', + 'instance_id', + 'hostname' +] # phone_home: @@ -63,7 +68,7 @@ def handle(name, cfg, cloud, log, args): " is not an integer, using %s instead"), tries) if post_list == "all": - post_list = post_list_all + post_list = POST_LIST_ALL all_keys = {} all_keys['instance_id'] = cloud.get_instance_id() diff --git a/cloudinit/config/cc_puppet.py b/cloudinit/config/cc_puppet.py index 5fb88bf2..5154efba 100644 --- a/cloudinit/config/cc_puppet.py +++ b/cloudinit/config/cc_puppet.py @@ -63,7 +63,7 @@ def handle(name, cfg, cloud, log, _args): util.ensure_dir(pp_ssl_dir, 0771) util.chownbyid(pp_ssl_dir, pwd.getpwnam('puppet').pw_uid, 0) - pp_ssl_certs = cloud.paths.join(False, + pp_ssl_certs = cloud.paths.join(False, '/var/lib/puppet/ssl/certs/') util.ensure_dir(pp_ssl_certs) util.chownbyid(pp_ssl_certs, diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index 1690094a..c019989e 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -27,7 +27,7 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -resize_fs_prefixes_cmds = [ +RESIZE_FS_PREFIXES_CMDS = [ ('ext', 'resize2fs'), ('xfs', 'xfs_growfs'), ] @@ -89,16 +89,16 @@ def handle(name, cfg, cloud, log, args): # occurs this temporary file will still benefit from # auto deletion tfh.unlink_now() - + st_dev = nodeify_path(devpth, resize_what, log) fs_type = get_fs_type(st_dev, devpth, log) if not fs_type: log.warn("Could not determine filesystem type of %s", resize_what) return - + resizer = None fstype_lc = fs_type.lower() - for (pfix, root_cmd) 
in resize_fs_prefixes_cmds: + for (pfix, root_cmd) in RESIZE_FS_PREFIXES_CMDS: if fstype_lc.startswith(pfix): resizer = root_cmd break @@ -112,7 +112,7 @@ def handle(name, cfg, cloud, log, args): resize_cmd = [resizer, devpth] if resize_root == "noblock": - # Fork to a child that will run + # Fork to a child that will run # the resize command util.fork_cb(do_resize, resize_cmd, log) # Don't delete the file now in the parent diff --git a/cloudinit/config/cc_salt_minion.py b/cloudinit/config/cc_salt_minion.py index 16f5286d..986e6db6 100644 --- a/cloudinit/config/cc_salt_minion.py +++ b/cloudinit/config/cc_salt_minion.py @@ -32,7 +32,7 @@ def handle(name, cfg, cloud, log, _args): # Start by installing the salt package ... cloud.distro.install_packages(["salt"]) - + # Ensure we can configure files at the right dir config_dir = salt_cfg.get("config_dir", '/etc/salt') config_dir = cloud.paths.join(False, config_dir) diff --git a/cloudinit/config/cc_scripts_per_boot.py b/cloudinit/config/cc_scripts_per_boot.py index 364e1d02..d3c47442 100644 --- a/cloudinit/config/cc_scripts_per_boot.py +++ b/cloudinit/config/cc_scripts_per_boot.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS -script_subdir = 'per-boot' +SCRIPT_SUBDIR = 'per-boot' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_per_instance.py b/cloudinit/config/cc_scripts_per_instance.py index d75ab47d..8e428ac2 100644 --- a/cloudinit/config/cc_scripts_per_instance.py +++ b/cloudinit/config/cc_scripts_per_instance.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -script_subdir = 'per-instance' +SCRIPT_SUBDIR = 'per-instance' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_per_once.py b/cloudinit/config/cc_scripts_per_once.py index 80f8c325..e7a29a44 100644 --- a/cloudinit/config/cc_scripts_per_once.py +++ b/cloudinit/config/cc_scripts_per_once.py @@ -26,16 +26,16 @@ from cloudinit.settings import PER_ONCE frequency = PER_ONCE -script_subdir = 'per-once' +SCRIPT_SUBDIR = 'per-once' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 - runparts_path = os.path.join(cloud.get_cpath(), 'scripts', script_subdir) + runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_scripts_user.py b/cloudinit/config/cc_scripts_user.py index f4fe3a2a..1ff05aae 100644 --- 
a/cloudinit/config/cc_scripts_user.py +++ b/cloudinit/config/cc_scripts_user.py @@ -26,17 +26,17 @@ from cloudinit.settings import PER_INSTANCE frequency = PER_INSTANCE -script_subdir = 'scripts' +SCRIPT_SUBDIR = 'scripts' def handle(name, _cfg, cloud, log, _args): # This is written to by the user data handlers # Ie, any custom shell scripts that come down # go here... - runparts_path = os.path.join(cloud.get_ipath_cur(), script_subdir) + runparts_path = os.path.join(cloud.get_ipath_cur(), SCRIPT_SUBDIR) try: util.runparts(runparts_path) except: log.warn("Failed to run transform %s (%s in %s)", - name, script_subdir, runparts_path) + name, SCRIPT_SUBDIR, runparts_path) raise diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index e7049f22..ce17f357 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -25,7 +25,7 @@ from cloudinit import util from string import letters, digits # pylint: disable=W0402 # We are removing certain 'painful' letters/numbers -pw_set = (letters.translate(None, 'loLOI') + +PW_SET = (letters.translate(None, 'loLOI') + digits.translate(None, '01')) @@ -148,4 +148,4 @@ def handle(_name, cfg, cloud, log, args): def rand_user_password(pwlen=9): - return util.rand_str(pwlen, select_from=pw_set) + return util.rand_str(pwlen, select_from=PW_SET) diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index e5e99560..4019ae90 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -24,11 +24,11 @@ import glob from cloudinit import util from cloudinit import ssh_util -DISABLE_ROOT_OPTS = ( "no-port-forwarding,no-agent-forwarding," -"no-X11-forwarding,command=\"echo \'Please login as the user \\\"$USER\\\" " +DISABLE_ROOT_OPTS = ("no-port-forwarding,no-agent-forwarding," +"no-X11-forwarding,command=\"echo \'Please login as the user \\\"$USER\\\" " "rather than the user \\\"root\\\".\';echo;sleep 10\"") -key2file = { +KEY_2_FILE = { "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600), "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644), "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600), @@ -37,15 +37,17 @@ key2file = { "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644), } -priv2pub = { - 'rsa_private': 'rsa_public', +PRIV_2_PUB = { + 'rsa_private': 'rsa_public', 'dsa_private': 'dsa_public', 'ecdsa_private': 'ecdsa_public', } -key_gen_tpl = 'o=$(ssh-keygen -yf "%s") && echo "$o" root@localhost > "%s"' +KEY_GEN_TPL = 'o=$(ssh-keygen -yf "%s") && echo "$o" root@localhost > "%s"' -generate_keys = ['rsa', 'dsa', 'ecdsa'] +GENERATE_KEY_NAMES = ['rsa', 'dsa', 'ecdsa'] + +KEY_FILE_TPL = '/etc/ssh/ssh_host_%s_key' def handle(_name, cfg, cloud, log, _args): @@ -58,21 +60,21 @@ def handle(_name, cfg, cloud, log, _args): util.del_file(f) except: util.logexc(log, "Failed deleting key file %s", f) - + if "ssh_keys" in cfg: # if there are keys in cloud-config, use them for (key, val) in cfg["ssh_keys"].iteritems(): - if key in key2file: - tgt_fn = key2file[key][0] - tgt_perms = key2file[key][1] + if key in KEY_2_FILE: + tgt_fn = KEY_2_FILE[key][0] + tgt_perms = KEY_2_FILE[key][1] util.write_file(cloud.paths.join(False, tgt_fn), val, tgt_perms) - for (priv, pub) in priv2pub.iteritems(): + for (priv, pub) in PRIV_2_PUB.iteritems(): if pub in cfg['ssh_keys'] or not priv in cfg['ssh_keys']: continue - pair = (key2file[priv][0], key2file[pub][0]) - cmd = ['sh', '-xc', key_gen_tpl % pair] + pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0]) + cmd = ['sh', '-xc', KEY_GEN_TPL % 
pair] try: # TODO: Is this guard needed? with util.SeLinuxGuard("/etc/ssh", recursive=True): @@ -84,12 +86,11 @@ def handle(_name, cfg, cloud, log, _args): else: # if not, generate them genkeys = util.get_cfg_option_list(cfg, - 'ssh_genkeytypes', - generate_keys) + 'ssh_genkeytypes', + GENERATE_KEY_NAMES) for keytype in genkeys: - keyfile = '/etc/ssh/ssh_host_%s_key' % (keytype) - keyfile = cloud.paths.join(False, keyfile) - util.ensure_dir(os.path.dirname(keyfile)) + keyfile = cloud.paths.join(False, KEY_FILE_TPL % (keytype)) + util.ensure_dir(os.path.dirname(keyfile)) if not os.path.exists(keyfile): cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile] try: diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 45dd85ec..25a60c52 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -157,4 +157,3 @@ def fetch(distro_name, mods=(__name__, )): % (distro_name)) distro_cls = getattr(mod, 'Distro') return distro_cls - diff --git a/cloudinit/distros/rhel.py b/cloudinit/distros/rhel.py index b67ae5b8..5cbefa6e 100644 --- a/cloudinit/distros/rhel.py +++ b/cloudinit/distros/rhel.py @@ -35,7 +35,7 @@ class Distro(distros.Distro): def __init__(self, name, cfg, paths): distros.Distro.__init__(self, name, cfg, paths) - + def install_packages(self, pkglist): self.package_command('install', pkglist) @@ -210,12 +210,12 @@ class Distro(distros.Distro): def package_command(self, command, args=None): cmd = ['yum'] # If enabled, then yum will be tolerant of errors on the command line - # with regard to packages. - # For example: if you request to install foo, bar and baz and baz is + # with regard to packages. + # For example: if you request to install foo, bar and baz and baz is # installed; yum won't error out complaining that baz is already - # installed. + # installed. cmd.append("-t") - # Determines whether or not yum prompts for confirmation + # Determines whether or not yum prompts for confirmation # of critical actions. We don't want to prompt... cmd.append("-y") cmd.append(command) @@ -223,8 +223,8 @@ class Distro(distros.Distro): cmd.extend(args) # Allow the output of this to flow outwards (ie not be captured) util.subp(cmd, capture=False) - - + + # This is a util function to translate a ubuntu /etc/network/interfaces 'blob' # to a rhel equiv. that can then be written to /etc/sysconfig/network-scripts/ # TODO remove when we have python-netcf active... diff --git a/cloudinit/distros/ubuntu.py b/cloudinit/distros/ubuntu.py index 5a1b572e..fd7b7b8d 100644 --- a/cloudinit/distros/ubuntu.py +++ b/cloudinit/distros/ubuntu.py @@ -36,11 +36,11 @@ class Distro(distros.Distro): def __init__(self, name, cfg, paths): distros.Distro.__init__(self, name, cfg, paths) - # This will be used to restrict certain + # This will be used to restrict certain # calls from repeatly happening (when they # should only happen say once per instance...) 
self._runner = helpers.Runners(paths) - + def install_packages(self, pkglist): self._update_package_sources() self.package_command('install', pkglist) @@ -131,4 +131,4 @@ class Distro(distros.Distro): def _update_package_sources(self): self._runner.run("update-sources", self.package_command, - ["update"], freq=PER_INSTANCE) \ No newline at end of file + ["update"], freq=PER_INSTANCE) diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py index c6f2119c..d52b1cba 100644 --- a/cloudinit/handlers/__init__.py +++ b/cloudinit/handlers/__init__.py @@ -104,7 +104,7 @@ def run_part(mod, data, ctype, filename, payload, frequency): except: mod_ver = 1 try: - LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", + LOG.debug("Calling handler %s (%s, %s, %s) with frequency %s", mod, ctype, filename, mod_ver, frequency) if mod_ver >= 2: # Treat as v. 2 which does get a frequency @@ -114,7 +114,7 @@ def run_part(mod, data, ctype, filename, payload, frequency): mod.handle_part(data, ctype, filename, payload) except: util.logexc(LOG, ("Failed calling handler %s (%s, %s, %s)" - " with frequency %s"), + " with frequency %s"), mod, ctype, filename, mod_ver, frequency) @@ -178,7 +178,7 @@ def walker_callback(pdata, ctype, filename, payload): payload, pdata['frequency']) -# Callback is a function that will be called with +# Callback is a function that will be called with # (data, content_type, filename, payload) def walk(msg, callback, data): partnum = 0 @@ -226,5 +226,3 @@ def type_from_starts_with(payload, default=None): if payload_lc.startswith(text): return INCLUSION_TYPES_MAP[text] return default - - diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 45633e0f..4447d1ee 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -84,7 +84,7 @@ class FileSemaphores(object): try: util.del_dir(self.sem_path) except (IOError, OSError): - util.logexc(LOG, "Failed deleting semaphore directory %s", + util.logexc(LOG, "Failed deleting semaphore directory %s", self.sem_path) def _acquire(self, name, freq): @@ -212,7 +212,7 @@ class Paths(object): self.cfgs = path_cfgs # Populate all the initial paths self.cloud_dir = self.join(False, - path_cfgs.get('cloud_dir', + path_cfgs.get('cloud_dir', '/var/lib/cloud')) self.instance_link = os.path.join(self.cloud_dir, 'instance') self.boot_finished = os.path.join(self.instance_link, "boot-finished") @@ -237,7 +237,7 @@ class Paths(object): # Set when a datasource becomes active self.datasource = ds - # joins the paths but also appends a read + # joins the paths but also appends a read # or write root if available def join(self, read_only, *paths): if read_only: diff --git a/cloudinit/log.py b/cloudinit/log.py index 478946f8..fc1428a2 100644 --- a/cloudinit/log.py +++ b/cloudinit/log.py @@ -20,7 +20,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . - import logging import logging.handlers import logging.config @@ -53,7 +52,6 @@ def setupBasicLogging(): root.setLevel(DEBUG) - def setupLogging(cfg=None): # See if the config provides any logging conf... 
if not cfg: diff --git a/cloudinit/settings.py b/cloudinit/settings.py index 8a1eaeb3..fac9b862 100644 --- a/cloudinit/settings.py +++ b/cloudinit/settings.py @@ -47,7 +47,7 @@ CFG_BUILTIN = { 'paths': { 'cloud_dir': '/var/lib/cloud', 'templates_dir': '/etc/cloud/templates/', - }, + }, 'distro': 'ubuntu', }, } diff --git a/cloudinit/sources/DataSourceCloudStack.py b/cloudinit/sources/DataSourceCloudStack.py index b1817654..83c577e6 100644 --- a/cloudinit/sources/DataSourceCloudStack.py +++ b/cloudinit/sources/DataSourceCloudStack.py @@ -121,7 +121,7 @@ class DataSourceCloudStack(sources.DataSource): None, self.metadata_address) self.metadata = boto_utils.get_instance_metadata(self.api_ver, self.metadata_address) - LOG.debug("Crawl of metadata service took %s seconds", + LOG.debug("Crawl of metadata service took %s seconds", int(time.time() - start_time)) return True except Exception: diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py index 5da1ffea..9905dad4 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -36,7 +36,7 @@ CFG_DRIVE_FILES = [ "meta.js", ] DEFAULT_METADATA = { - "instance-id": DEFAULT_IID, + "instance-id": DEFAULT_IID, "dsmode": DEFAULT_MODE, } CFG_DRIVE_DEV_ENV = 'CLOUD_INIT_CONFIG_DRIVE_DEVICE' diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 55447102..0598dfa2 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -38,7 +38,7 @@ DEF_MD_URL = "http://169.254.169.254" DEF_MD_VERSION = '2009-04-04' # Default metadata urls that will be used if none are provided -# They will be checked for 'resolveability' and some of the +# They will be checked for 'resolveability' and some of the # following may be discarded if they do not resolve DEF_MD_URLS = [DEF_MD_URL, "http://instance-data:8773"] @@ -69,7 +69,7 @@ class DataSourceEc2(sources.DataSource): None, self.metadata_address) self.metadata = boto_utils.get_instance_metadata(self.api_ver, self.metadata_address) - LOG.debug("Crawl of metadata service took %s seconds", + LOG.debug("Crawl of metadata service took %s seconds", int(time.time() - start_time)) return True except Exception: @@ -201,7 +201,7 @@ class DataSourceEc2(sources.DataSource): return None # Example: - # 'block-device-mapping': + # 'block-device-mapping': # {'ami': '/dev/sda1', # 'ephemeral0': '/dev/sdb', # 'root': '/dev/sda1'} diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index bb8fbac1..104e7a54 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -251,6 +251,7 @@ datasources = [ (DataSourceMAAS, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), ] + # Return a list of data sources that match this set of dependencies def get_datasource_list(depends): return sources.list_from_depends(depends, datasources) diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 2b016d1c..8499a97c 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -154,7 +154,7 @@ class DataSourceNoCloud(sources.DataSource): (self.dsmode in ("local", seeded_interfaces))): LOG.info("Updating network interfaces from %s", self) self.distro.apply_network(md['network-interfaces']) - + if md['dsmode'] == self.dsmode: self.seed = ",".join(found) self.metadata = md diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index fef3d40f..45dd5535 100644 
--- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -103,10 +103,10 @@ class AuthKeyLineParser(object): elif curc == '"': quoted = not quoted i = i + 1 - + options = ent[0:i] options_lst = [] - + # Now use a csv parser to pull the options # out of the above string that we just found an endpoint for. # @@ -211,7 +211,6 @@ def update_authorized_keys(fname, keys): def setup_user_keys(keys, user, key_prefix, paths): - # Make sure the users .ssh dir is setup accordingly pwent = pwd.getpwnam(user) ssh_dir = os.path.join(pwent.pw_dir, '.ssh') diff --git a/cloudinit/stages.py b/cloudinit/stages.py index ae6e2de5..84a965c2 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -287,7 +287,7 @@ class Init(object): def cloudify(self): # Form the needed options to cloudify our members - return cloud.Cloud(self.datasource, + return cloud.Cloud(self.datasource, self.paths, self.cfg, self.distro, helpers.Runners(self.paths)) @@ -318,7 +318,7 @@ class Init(object): def consume(self, frequency=PER_INSTANCE): cdir = self.paths.get_cpath("handlers") idir = self.paths.get_ipath("handlers") - + # Add the path to the plugins dir to the top of our list for import # instance dir should be read before cloud-dir if cdir and cdir not in sys.path: @@ -417,7 +417,7 @@ class Modules(object): except: util.logexc(LOG, ("Failed loading of datasource" " config object from %s"), self.datasource) - + if self.base_cfg: t_cfgs.append(self.base_cfg) diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 1c583eba..223278ce 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -47,11 +47,11 @@ class UrlResponse(object): @property def contents(self): return self._contents - + @property def headers(self): return self._headers - + def __str__(self): if not self.contents: return '' @@ -66,7 +66,7 @@ class UrlResponse(object): return True else: return False - + def readurl(url, data=None, timeout=None, retries=0, sec_between=1, headers=None): @@ -89,8 +89,8 @@ def readurl(url, data=None, timeout=None, excepts = [] LOG.info(("Attempting to open '%s' with %s attempts" - " (%s retries, timeout=%s) to be performed"), - url, attempts, retries, timeout) + " (%s retries, timeout=%s) to be performed"), + url, attempts, retries, timeout) open_args = {} if timeout is not None: open_args['timeout'] = int(timeout) @@ -112,7 +112,7 @@ def readurl(url, data=None, timeout=None, excepts.append(e) except urllib2.URLError as e: # This can be a message string or - # another exception instance + # another exception instance # (socket.error for remote URLs, OSError for local URLs). 
if (isinstance(e.reason, (OSError)) and e.reason.errno == errno.ENOENT): @@ -128,7 +128,7 @@ def readurl(url, data=None, timeout=None, # Didn't work out LOG.warn("Failed reading from %s after %s attempts", url, attempts) - + # It must of errored at least once for code # to get here so re-raise the last error LOG.debug("%s errors occured, re-raising the last one", len(excepts)) diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index b7902d44..4babb8e5 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -65,33 +65,33 @@ class UserDataProcessor(object): # multipart/* are just containers if part.get_content_maintype() == 'multipart': continue - + ctype = None ctype_orig = part.get_content_type() payload = part.get_payload(decode=True) - + if not ctype_orig: ctype_orig = UNDEF_TYPE - + if ctype_orig in TYPE_NEEDED: ctype = handlers.type_from_starts_with(payload) - + if ctype is None: ctype = ctype_orig - + if ctype in INCLUDE_TYPES: self._do_include(payload, append_msg) continue - + if ctype in ARCHIVE_TYPES: self._explode_archive(payload, append_msg) continue - + if 'Content-Type' in base_msg: base_msg.replace_header('Content-Type', ctype) else: base_msg['Content-Type'] = ctype - + self._attach_part(append_msg, part) def _get_include_once_filename(self, entry): @@ -108,8 +108,8 @@ class UserDataProcessor(object): lc_line = line.lower() if lc_line.startswith("#include-once"): line = line[len("#include-once"):].lstrip() - # Every following include will now - # not be refetched.... but will be + # Every following include will now + # not be refetched.... but will be # re-read from a local urlcache (if it worked) include_once_on = True elif lc_line.startswith("#include"): @@ -190,10 +190,10 @@ class UserDataProcessor(object): """ if ATTACHMENT_FIELD not in outer_msg: outer_msg[ATTACHMENT_FIELD] = '0' - + if new_count is not None: outer_msg.replace_header(ATTACHMENT_FIELD, str(new_count)) - + fetched_count = 0 try: fetched_count = int(outer_msg.get(ATTACHMENT_FIELD)) @@ -234,7 +234,3 @@ def convert_string(raw_data, headers=None): msg = MIMEBase(maintype, subtype, *headers) msg.set_payload(data) return msg - - - - diff --git a/cloudinit/util.py b/cloudinit/util.py index 91d20a76..56c01fab 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -35,7 +35,7 @@ import pwd import random import shutil import socket -import string # pylint: disable=W0402 +import string # pylint: disable=W0402 import subprocess import sys import tempfile @@ -153,13 +153,15 @@ def SilentTemporaryFile(**kwargs): # file to unlink has been unlinked elsewhere.. 
LOG.debug("Created temporary file %s", fh.name) fh.unlink = del_file - # Add a new method that will unlink + + # Add a new method that will unlink # right 'now' but still lets the exit # method attempt to remove it (which will # not throw due to our del file being quiet # about files that are not there) def unlink_now(): fh.unlink(fh.name) + setattr(fh, 'unlink_now', unlink_now) return fh @@ -199,7 +201,7 @@ def is_false_str(val, addons=None): def translate_bool(val, addons=None): if not val: - # This handles empty lists and false and + # This handles empty lists and false and # other things that python believes are false return False # If its already a boolean skip @@ -214,7 +216,6 @@ def rand_str(strlen=32, select_from=None): return "".join([random.choice(select_from) for _x in range(0, strlen)]) - def read_conf(fname): try: return load_yaml(load_file(fname), default={}) @@ -275,7 +276,7 @@ def is_ipv4(instr): def merge_base_cfg(cfgfile, cfg_builtin=None): syscfg = read_conf_with_confd(cfgfile) - + kern_contents = read_cc_from_cmdline() kerncfg = {} if kern_contents: @@ -575,7 +576,7 @@ def load_yaml(blob, default=None, allowed=(dict,)): try: blob = str(blob) LOG.debug(("Attempting to load yaml from string " - "of length %s with allowed root types %s"), + "of length %s with allowed root types %s"), len(blob), allowed) converted = yaml.load(blob) if not isinstance(converted, allowed): @@ -625,7 +626,7 @@ def read_conf_d(confd): # remove anything not ending in '.cfg' confs = [f for f in confs if f.endswith(".cfg")] - + # remove anything not a file confs = [f for f in confs if os.path.isfile(os.path.join(confd, f))] @@ -726,9 +727,9 @@ def get_fqdn_from_hosts(hostname, filename="/etc/hosts"): """ For each host a single line should be present with the following information: - - IP_address canonical_hostname [aliases...] - + + IP_address canonical_hostname [aliases...] + Fields of the entry are separated by any number of blanks and/or tab characters. Text from a "#" character until the end of the line is a comment, and is ignored. Host names may contain only alphanumeric @@ -747,7 +748,7 @@ def get_fqdn_from_hosts(hostname, filename="/etc/hosts"): if not line: continue - # If there there is less than 3 entries + # If there there is less than 3 entries # (IP_address, canonical_hostname, alias) # then ignore this line toks = line.split() @@ -829,7 +830,7 @@ def close_stdin(): os.dup2(fp.fileno(), sys.stdin.fileno()) -def find_devs_with(criteria=None, oformat='device', +def find_devs_with(criteria=None, oformat='device', tag=None, no_cache=False, path=None): """ find devices matching given criteria (via blkid) @@ -841,23 +842,23 @@ def find_devs_with(criteria=None, oformat='device', blk_id_cmd = ['blkid'] options = [] if criteria: - # Search for block devices with tokens named NAME that + # Search for block devices with tokens named NAME that # have the value 'value' and display any devices which are found. # Common values for NAME include TYPE, LABEL, and UUID. # If there are no devices specified on the command line, - # all block devices will be searched; otherwise, + # all block devices will be searched; otherwise, # only search the devices specified by the user. options.append("-t%s" % (criteria)) if tag: # For each (specified) device, show only the tags that match tag. options.append("-s%s" % (tag)) if no_cache: - # If you want to start with a clean cache - # (i.e. don't report devices previously scanned + # If you want to start with a clean cache + # (i.e. 
don't report devices previously scanned # but not necessarily available at this time), specify /dev/null. options.extend(["-c", "/dev/null"]) if oformat: - # Display blkid's output using the specified format. + # Display blkid's output using the specified format. # The format parameter may be: # full, value, list, device, udev, export options.append('-o%s' % (oformat)) @@ -1104,7 +1105,7 @@ def mounts(): (dev, mp, fstype, opts, _freq, _passno) = mpline.split() except: continue - # If the name of the mount point contains spaces these + # If the name of the mount point contains spaces these # can be escaped as '\040', so undo that.. mp = mp.replace("\\040", " ") mounted[dev] = { -- cgit v1.2.3 From 081d0cb81213f69a51cbcce9063d2e0792bb5e34 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Thu, 21 Jun 2012 12:30:32 -0700 Subject: Simplify sshd config loading to make it use one set of functions/objects/classes --- cloudinit/config/cc_set_passwords.py | 45 ++++++++++++++------------------- cloudinit/ssh_util.py | 49 +++++++++++++++++++++++++++++++----- 2 files changed, 62 insertions(+), 32 deletions(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py index eb68ddfe..5b72224b 100644 --- a/cloudinit/config/cc_set_passwords.py +++ b/cloudinit/config/cc_set_passwords.py @@ -20,6 +20,7 @@ import sys +from cloudinit import ssh_util from cloudinit import util from string import letters, digits # pylint: disable=W0402 @@ -101,39 +102,31 @@ def handle(_name, cfg, cloud, log, args): pw_auth = 'no' if change_pwauth: - new_lines = [] replaced_auth = False - replacement = "PasswordAuthentication %s" % (pw_auth) - - # See http://linux.die.net/man/5/sshd_config - conf_fn = cloud.paths.join(True, '/etc/ssh/sshd_config') - # Todo: use the common ssh_util function for this parsing... 
- old_lines = util.load_file(conf_fn).splitlines() - for i, line in enumerate(old_lines): - if not line.strip() or line.startswith("#"): - new_lines.append(line) - continue - splitup = line.split(None, 1) - if len(splitup) <= 1: - new_lines.append(line) - continue - (cmd, args) = splitup + + # See: man sshd_config + conf_fn = cloud.paths.join(True, ssh_util.DEF_SSHD_CFG) + old_lines = ssh_util.parse_ssh_config(conf_fn) + new_lines = [] + i = 0 + for (i, line) in enumerate(old_lines): # Keywords are case-insensitive and arguments are case-sensitive - cmd = cmd.lower().strip() - if cmd == 'passwordauthentication': - log.debug("Replacing auth line %s with %s", i + 1, replacement) + if line.key == 'passwordauthentication': + log.debug("Replacing auth line %s with %s", i + 1, pw_auth) replaced_auth = True - new_lines.append(replacement) - else: - new_lines.append(line) + line.value = pw_auth + new_lines.append(line) if not replaced_auth: - log.debug("Adding new auth line %s", replacement) + log.debug("Adding new auth line %s", i + 1) replaced_auth = True - new_lines.append(replacement) + new_lines.append(ssh_util.SshdConfigLine('', + 'PasswordAuthentication', + pw_auth)) - util.write_file(cloud.paths.join(False, '/etc/ssh/sshd_config'), - "\n".join(new_lines)) + lines = [str(e) for e in new_lines] + ssh_rw_fn = cloud.paths.join(False, ssh_util.DEF_SSHD_CFG) + util.write_file(ssh_rw_fn, "\n".join(lines)) try: cmd = ['service'] diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index 45dd5535..fc8b9b3d 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -29,6 +29,8 @@ from cloudinit import log as logging from cloudinit import util LOG = logging.getLogger(__name__) + +# See: man sshd_config DEF_SSHD_CFG = "/etc/ssh/sshd_config" @@ -233,7 +235,7 @@ def setup_user_keys(keys, user, key_prefix, paths): # The following tokens are defined: %% is replaced by a literal # '%', %h is replaced by the home directory of the user being # authenticated and %u is replaced by the username of that user. - ssh_cfg = parse_ssh_config(sshd_conf_fn) + ssh_cfg = parse_ssh_config_map(sshd_conf_fn) akeys = ssh_cfg.get("authorizedkeysfile", '') akeys = akeys.strip() if not akeys: @@ -258,19 +260,54 @@ def setup_user_keys(keys, user, key_prefix, paths): util.chownbyid(authorized_keys, pwent.pw_uid, pwent.pw_gid) +class SshdConfigLine(object): + def __init__(self, line, k=None, v=None): + self.line = line + self._key = k + self.value = v + + @property + def key(self): + if self._key is None: + return None + # Keywords are case-insensitive + return self._key.lower() + + def __str__(self): + if self._key is None: + return str(self.line) + else: + v = str(self._key) + if self.value: + v += " " + str(self.value) + return v + + def parse_ssh_config(fname): + # See: man sshd_config # The file contains keyword-argument pairs, one per line. # Lines starting with '#' and empty lines are interpreted as comments. 
# Note: key-words are case-insensitive and arguments are case-sensitive - ret = {} + lines = [] if not os.path.isfile(fname): - return ret + return lines for line in util.load_file(fname).splitlines(): line = line.strip() if not line or line.startswith("#"): + lines.append(SshdConfigLine(line)) continue (key, val) = line.split(None, 1) - key = key.strip().lower() - if key: - ret[key] = val + lines.append(SshdConfigLine(line, key, val)) + return lines + + +def parse_ssh_config_map(fname): + lines = parse_ssh_config(fname) + if not lines: + return {} + ret = {} + for line in lines: + if not line.key: + continue + ret[line.key] = line.value return ret -- cgit v1.2.3 From 25df1844285a48cd4ba0226c9c65e66973374845 Mon Sep 17 00:00:00 2001 From: Joshua Harlow Date: Fri, 29 Jun 2012 13:46:19 -0700 Subject: Don't remove the key if its not there (or was already removed) --- cloudinit/ssh_util.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'cloudinit/ssh_util.py') diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index fc8b9b3d..e0a2f0ca 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -197,7 +197,8 @@ def update_authorized_keys(fname, keys): # Replace it with our better one ent = k # Don't add it later - to_add.remove(k) + if k in to_add: + to_add.remove(k) entries[i] = ent # Now append any entries we did not match above -- cgit v1.2.3
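The "Simplify sshd config loading" commit above leaves ssh_util with a small line-oriented sshd_config API: parse_ssh_config() returns SshdConfigLine objects (comments and blank lines included), .key is the lower-cased keyword, str() renders a line back out, and parse_ssh_config_map() flattens the result into a dict. A rough usage sketch in the spirit of the cc_set_passwords.py change; it leaves out the cloud.paths read/write-root adjustment and the error handling the real transform does.

from cloudinit import ssh_util
from cloudinit import util


def set_password_auth(pw_auth="no", conf_fn=ssh_util.DEF_SSHD_CFG):
    lines = ssh_util.parse_ssh_config(conf_fn)
    replaced = False
    for line in lines:
        # Keywords are case-insensitive (SshdConfigLine lower-cases them);
        # arguments keep their case.
        if line.key == 'passwordauthentication':
            line.value = pw_auth
            replaced = True
    if not replaced:
        lines.append(ssh_util.SshdConfigLine('', 'PasswordAuthentication',
                                             pw_auth))
    util.write_file(conf_fn, "\n".join([str(line) for line in lines]))

Because comments and blank lines come back as key-less SshdConfigLine objects, writing every line back out with str() preserves the rest of the file untouched.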