author     Scott Moser <smoser@ubuntu.com>   2011-01-18 19:26:57 +0000
committer  Scott Moser <smoser@ubuntu.com>   2011-01-18 19:26:57 +0000
commit     e40a1c8143ee88e0eb9b979fabff587ec53b971d (patch)
tree       1872b28fa73e186900fac0926f46f52df9fcc670 /cloudinit
parent     1a35587abb5226c3b008f1a034c56145cb878489 (diff)
download   vyos-cloud-init-e40a1c8143ee88e0eb9b979fabff587ec53b971d.tar.gz
           vyos-cloud-init-e40a1c8143ee88e0eb9b979fabff587ec53b971d.zip
remove 'builtin' config, separate cloud.cfg
This set of changes makes '/etc/cloud/cloud.cfg' support "#include" and "#opt_include" directives. The idea is to ship a base configuration and let distro- or local-specific fragments be pulled in on top of it; "#include" must name an existing file, while "#opt_include" is silently skipped if the named file is absent.
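As a rough sketch of the new directives (temporary paths stand in for /etc/cloud/cloud.cfg and its fragments, Python 2 style to match this tree), a base file can pull in a required fragment with "#include" and an optional one with "#opt_include"; read_file_with_includes() from the util.py change below returns the expanded text that is later parsed as YAML:

import os
import tempfile
from cloudinit import util

d = tempfile.mkdtemp()
base = os.path.join(d, "cloud.cfg")
extra = os.path.join(d, "distro.cfg")

with open(extra, "w") as fp:
    fp.write("user: ubuntu\n")

with open(base, "w") as fp:
    fp.write("cloud_type: auto\n")
    fp.write("#include %s\n" % extra)                         # must exist
    fp.write("#opt_include %s\n" % os.path.join(d, "local"))  # may be absent

# both directives are expanded in place; the missing optional
# file simply contributes nothing
print util.read_file_with_includes(base)

Relative include paths are resolved against the directory of the including file, so fragments can include further fragments of their own.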
Diffstat (limited to 'cloudinit')
-rw-r--r--  cloudinit/__init__.py | 122
-rw-r--r--  cloudinit/util.py     |  57
2 files changed, 82 insertions(+), 97 deletions(-)
diff --git a/cloudinit/__init__.py b/cloudinit/__init__.py
index 6a59a23f..7b300a63 100644
--- a/cloudinit/__init__.py
+++ b/cloudinit/__init__.py
@@ -27,86 +27,17 @@ userdata = datadir + '/user-data.txt.i'
user_scripts_dir = datadir + "/scripts"
boothooks_dir = datadir + "/boothooks"
cloud_config = datadir + '/cloud-config.txt'
-#cloud_config = '/tmp/cloud-config.txt'
data_source_cache = cachedir + '/obj.pkl'
system_config = '/etc/cloud/cloud.cfg'
cfg_env_name = "CLOUD_CFG"
+def_log_file = '/var/log/cloud-init.log'
cfg_builtin = """
+log_cfgs: [ ]
cloud_type: auto
-user: ubuntu
-disable_root: 1
-
-cloud_config_modules:
- - mounts
- - ssh-import-id
- - ssh
- - grub-dpkg
- - apt-update-upgrade
- - puppet
- - updates-check
- - disable-ec2-metadata
- - runcmd
- - byobu
-
-log_cfg: built_in
"""
-
-def_log_file = '/var/log/cloud-init.log'
logger_name = "cloudinit"
-built_in_log_base = """
-[loggers]
-keys=root,cloudinit
-
-[handlers]
-keys=consoleHandler,cloudLogHandler
-
-[formatters]
-keys=simpleFormatter,arg0Formatter
-
-[logger_root]
-level=DEBUG
-handlers=consoleHandler,cloudLogHandler
-
-[logger_cloudinit]
-level=DEBUG
-qualname=cloudinit
-handlers=
-propagate=1
-
-[handler_consoleHandler]
-class=StreamHandler
-level=WARNING
-formatter=arg0Formatter
-args=(sys.stderr,)
-
-[formatter_arg0Formatter]
-format=%(asctime)s - %(filename)s[%(levelname)s]: %(message)s
-
-[formatter_simpleFormatter]
-format=[CLOUDINIT] %(asctime)s - %(filename)s[%(levelname)s]: %(message)s
-datefmt=
-
-"""
-
-built_in_log_clougLogHandlerLog="""
-[handler_cloudLogHandler]
-class=FileHandler
-level=DEBUG
-formatter=simpleFormatter
-args=('__CLOUDINIT_LOGGER_FILE__',)
-"""
-
-built_in_log_cloudLogHandlerSyslog= """
-[handler_cloudLogHandler]
-class=handlers.SysLogHandler
-level=DEBUG
-formatter=simpleFormatter
-args=("/dev/log", handlers.SysLogHandler.LOG_USER)
-"""
-
-
import os
from configobj import ConfigObj
@@ -132,32 +63,31 @@ def logging_set_from_cfg_file(cfg_file=system_config):
    logging_set_from_cfg(util.get_base_cfg(cfg_file,cfg_builtin))
def logging_set_from_cfg(cfg, logfile=None):
-    if logfile is None:
+    log_cfgs = []
+    logcfg=util.get_cfg_option_str(cfg, "log_cfg", False)
+    if logcfg:
+        # if there is a 'log_cfg' entry in the config, respect
+        # it; it is the old key name
+        log_cfgs = [ logcfg ]
+    elif "log_cfgs" in cfg:
+        for cfg in cfg['log_cfgs']:
+            if isinstance(cfg,list):
+                log_cfgs.append('\n'.join(cfg))
+            else:
+                log_cfgs.append(cfg)
+
+    if not len(log_cfgs):
+        sys.stderr.write("Warning, no logging configured\n")
+
+    for logcfg in log_cfgs:
        try:
-            open(def_log_file,"a").close()
-            logfile = def_log_file
-        except IOError as e:
-            if e.errno == errno.EACCES:
-                logfile = "/dev/null"
-            else: raise
-
-    logcfg=util.get_cfg_option_str(cfg, "log_cfg", "built_in")
-    failsafe = "%s\n%s" % (built_in_log_base, built_in_log_clougLogHandlerLog)
-    builtin = False
-    if logcfg.lower() == "built_in":
-        logcfg = "%s\n%s" % (built_in_log_base, built_in_log_cloudLogHandlerSyslog)
-        builtin = True
-
-    logcfg=logcfg.replace("__CLOUDINIT_LOGGER_FILE__",logfile)
-    try:
-        logging.config.fileConfig(StringIO.StringIO(logcfg))
-        return
-    except:
-        if not builtin:
-            sys.stderr.write("Warning, setting config.fileConfig failed\n")
+            logging.config.fileConfig(StringIO.StringIO(logcfg))
+            return
+        except:
+            pass
+
+    raise Exception("no valid logging found\n")
-    failsafe=failsafe.replace("__CLOUDINIT_LOGGER_FILE__",logfile)
-    logging.config.fileConfig(StringIO.StringIO(failsafe))
import DataSourceEc2
import DataSourceNoCloud
@@ -196,7 +126,7 @@ class CloudInit:
        if self.cfg:
            return(self.cfg)
-        conf = util.get_base_cfg(system_config,cfg_builtin)
+        conf = util.get_base_cfg(self.sysconfig,cfg_builtin)
        # support reading the old ConfigObj format file and merging
        # it into the yaml dictionary
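For reference, a hedged, self-contained sketch of how the new loop consumes one "log_cfgs" candidate (Python 2 style to match the tree; the logger, handler and formatter names are invented for the example, and the stock configuration now ships in the separated cloud.cfg rather than in this module):

import logging.config
import StringIO
import sys

# an entry in cloud.cfg's "log_cfgs" list may be a complete fileConfig
# document as one string, or a list of lines joined with "\n" before use
candidate = "\n".join([
    "[loggers]",
    "keys=root",
    "[handlers]",
    "keys=console",
    "[formatters]",
    "keys=simple",
    "[logger_root]",
    "level=DEBUG",
    "handlers=console",
    "[handler_console]",
    "class=StreamHandler",
    "level=WARNING",
    "formatter=simple",
    "args=(sys.stderr,)",
    "[formatter_simple]",
    "format=%(asctime)s - %(filename)s[%(levelname)s]: %(message)s",
])

# loosely mirrors the new fallback loop: apply the first candidate that
# logging.config.fileConfig() accepts, otherwise fall through with a warning
for logcfg in [candidate]:
    try:
        logging.config.fileConfig(StringIO.StringIO(logcfg))
        break
    except Exception:
        pass
else:
    sys.stderr.write("Warning, no logging configured\n")

The old built_in/failsafe pair is gone entirely; in the rewritten logging_set_from_cfg(), if no candidate can be applied (including when the list is empty) it now raises instead of silently falling back.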
diff --git a/cloudinit/util.py b/cloudinit/util.py
index c1b4fd2d..05462b9d 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -17,6 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import yaml
import os
+import os.path
import errno
import subprocess
from Cheetah.Template import Template
@@ -24,6 +25,7 @@ import cloudinit
import urllib2
import logging
import traceback
+import re
WARN = logging.WARN
DEBUG = logging.DEBUG
@@ -41,7 +43,8 @@ def read_conf(fname):
        raise
def get_base_cfg(cfgfile,cfg_builtin=""):
-    syscfg = read_conf(cfgfile)
+    contents = read_file_with_includes(cfgfile)
+    syscfg = yaml.load(contents)
    if cfg_builtin:
        builtin = yaml.load(cfg_builtin)
    else:
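The remainder of get_base_cfg() falls outside this hunk, so the following is only a hedged sketch of the intended relationship between the expanded cloud.cfg and the trimmed-down builtin defaults; deep_merge() here is a stand-in helper, not this tree's own merge routine, and the path is the one the callers above pass in:

import yaml
from cloudinit import util

# the builtin shrinks to bare defaults (see the __init__.py hunk above)
cfg_builtin = "log_cfgs: [ ]\ncloud_type: auto\n"

def deep_merge(base, override):
    # stand-in merge: values from the on-disk config win, the builtin
    # only fills in keys the config does not set (recursing into dicts)
    out = dict(base)
    for k, v in override.items():
        if k in out and isinstance(out[k], dict) and isinstance(v, dict):
            out[k] = deep_merge(out[k], v)
        else:
            out[k] = v
    return out

contents = util.read_file_with_includes("/etc/cloud/cloud.cfg")
syscfg = yaml.load(contents) or {}
merged = deep_merge(yaml.load(cfg_builtin), syscfg)
print merged.get("cloud_type")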
@@ -168,3 +171,55 @@ def read_seeded(base="", ext="", timeout=2):
def logexc(log,lvl=logging.DEBUG):
    log.log(lvl,traceback.format_exc())
+
+class RecursiveInclude(Exception):
+    pass
+
+def read_file_with_includes(fname, rel = ".", stack=[], patt = None):
+    if not fname.startswith("/"):
+        fname = os.sep.join((rel, fname))
+
+    fname = os.path.realpath(fname)
+
+    if fname in stack:
+        raise(RecursiveInclude("%s recursively included" % fname))
+    if len(stack) > 10:
+        raise(RecursiveInclude("%s included, stack size = %i" %
+            (fname, len(stack))))
+
+    if patt == None:
+        patt = re.compile("^#(opt_include|include)[ \t].*$",re.MULTILINE)
+
+    try:
+        fp = open(fname)
+        contents = fp.read()
+        fp.close()
+    except:
+        raise
+
+    rel = os.path.dirname(fname)
+    stack.append(fname)
+
+    cur = 0
+    clen = len(contents)
+    while True:
+        match = patt.search(contents[cur:])
+        if not match: break
+        loc = match.start() + cur
+        endl = match.end() + cur
+
+        (key, cur_fname) = contents[loc:endl].split(None,2)
+        cur_fname = cur_fname.strip()
+
+        try:
+            inc_contents = read_file_with_includes(cur_fname, rel, stack, patt)
+        except IOError, e:
+            if e.errno == errno.ENOENT and key == "#opt_include":
+                inc_contents = ""
+            else:
+                raise
+        contents = contents[0:loc] + inc_contents + contents[endl+1:]
+        cur = loc + len(inc_contents)
+    stack.pop()
+    return(contents)
+
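Finally, a small sketch of the helper's guard rails, again with throwaway temp paths and in the Python 2 style of the tree: a file that includes itself (or an include chain deeper than ten levels) is rejected with RecursiveInclude rather than recursing without bound.

import os
import tempfile
from cloudinit import util

d = tempfile.mkdtemp()
looping = os.path.join(d, "loop.cfg")
with open(looping, "w") as fp:
    fp.write("#include %s\n" % looping)   # the file includes itself

try:
    util.read_file_with_includes(looping)
except util.RecursiveInclude, e:
    print "rejected: %s" % e

One caveat worth noting: because the default stack argument is a shared list that is only popped on the success path, a call that raises can leave its filename behind for later calls in the same process.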