From b5574a9925b29417a1b351e7b38c54bc7d144dba Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 30 Jul 2015 18:06:01 -0400 Subject: tests pass --- cloudinit/sources/__init__.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index a21c08c2..c4848d5d 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -27,6 +27,7 @@ import six from cloudinit import importer from cloudinit import log as logging +from cloudinit import reporting from cloudinit import type_utils from cloudinit import user_data as ud from cloudinit import util @@ -246,17 +247,22 @@ def normalize_pubkey_data(pubkey_data): return keys -def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): +def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_list = list_sources(cfg_list, ds_deps, pkg_list) ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) for cls in ds_list: + myreporter = reporting.ReportStack( + "check-%s" % cls, "searching for %s" % cls, + parent=reporter, exc_result=reporting.status.WARN) + try: - LOG.debug("Seeing if we can get any data from %s", cls) - s = cls(sys_cfg, distro, paths) - if s.get_data(): - return (s, type_utils.obj_name(cls)) + with myreporter: + LOG.debug("Seeing if we can get any data from %s", cls) + s = cls(sys_cfg, distro, paths) + if s.get_data(): + return (s, type_utils.obj_name(cls)) except Exception: util.logexc(LOG, "Getting data from %s failed", cls) -- cgit v1.2.3 From b22302d8e2b539f61faede7efb3a163966bf170a Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 31 Jul 2015 14:38:09 +0000 Subject: fix issues found when testing --- bin/cloud-init | 4 ++-- cloudinit/reporting.py | 16 ++++++++++------ cloudinit/sources/__init__.py | 5 +++-- 3 files changed, 15 insertions(+), 10 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index 6a47e5e8..c808eda5 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -636,8 +636,8 @@ def main(): "running single module %s" % args.name) report_on = args.report - reporter = reporting.ReportStack(rname, rdesc, reporting=report_on) - with reporter: + args.reporter = reporting.ReportStack(rname, rdesc, reporting=report_on) + with args.reporter: return util.log_time( logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, get_uptime=True, func=functor, args=(name, args)) diff --git a/cloudinit/reporting.py b/cloudinit/reporting.py index c925f661..1bd7df0d 100644 --- a/cloudinit/reporting.py +++ b/cloudinit/reporting.py @@ -137,7 +137,6 @@ class ReportStack(object): def __init__(self, name, description, parent=None, reporting=None, exc_result=None): self.parent = parent - self.reporting = reporting self.name = name self.description = description @@ -145,18 +144,23 @@ class ReportStack(object): exc_result = status.FAIL self.exc_result = exc_result + # use parents reporting value if not provided if reporting is None: - # if reporting is specified respect it, otherwise use parent's value if parent: reporting = parent.reporting else: reporting = True + self.reporting = reporting + if parent: - self.fullname = '/'.join((name, parent.fullname,)) + self.fullname = '/'.join((parent.fullname, name,)) else: self.fullname = self.name self.children = {} + def __repr__(self): + return ("%s reporting=%s" % (self.fullname, self.reporting)) + def __enter__(self): self.exception = None 
if self.reporting: @@ -166,10 +170,10 @@ class ReportStack(object): return self def childrens_finish_info(self, result=None, description=None): - for result in (status.FAIL, status.WARN): + for cand_result in (status.FAIL, status.WARN): for name, (value, msg) in self.children.items(): - if value == result: - return (result, "[" + name + "]" + msg) + if value == cand_result: + return (value, "[" + name + "]" + msg) if result is None: result = status.SUCCESS if description is None: diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index c4848d5d..f585c3e4 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -252,9 +252,10 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) - for cls in ds_list: + for i, cls in enumerate(ds_list): + name=ds_names[i].replace("DataSource", "") myreporter = reporting.ReportStack( - "check-%s" % cls, "searching for %s" % cls, + "check-%s" % name, "searching for %s" % name, parent=reporter, exc_result=reporting.status.WARN) try: -- cgit v1.2.3 From cc923ca255f4ce8c23819e263066e34133f3dd31 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 31 Jul 2015 15:23:04 +0000 Subject: add nicer formating and messages for datasource searching --- cloudinit/sources/__init__.py | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index f585c3e4..c174a58f 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -247,22 +247,43 @@ def normalize_pubkey_data(pubkey_data): return keys +class SearchReportStack(reporting.ReportStack): + def __init__(self, source, ds_deps, parent): + self.source = source.replace("DataSource", "") + name = "check-%s" % self.source + self.found = False + self.mode = "network" if DEP_NETWORK in ds_deps else "local" + description = "searching for %s data from %s" % ( + self.mode, self.source) + super(SearchReportStack, self).__init__( + name=name, description=description, parent=parent, + exc_result=reporting.status.WARN) + + def finish_info(self, exc): + # return tuple of description, and value + if exc: + # by default, exceptions are fatal + return (self.exc_result, self.description) + if self.found: + description = "found %s data from %s" % (self.mode, self.source) + else: + description = "no %s data found from %s" % (self.mode, self.source) + return self.childrens_finish_info(description=description) + + def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_list = list_sources(cfg_list, ds_deps, pkg_list) ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) for i, cls in enumerate(ds_list): - name=ds_names[i].replace("DataSource", "") - myreporter = reporting.ReportStack( - "check-%s" % name, "searching for %s" % name, - parent=reporter, exc_result=reporting.status.WARN) - + srcname=ds_names[i] try: - with myreporter: + with SearchReportStack(srcname, ds_deps, reporter) as rep: LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): + rep.found = True return (s, type_utils.obj_name(cls)) except Exception: util.logexc(LOG, "Getting data from %s failed", cls) -- cgit v1.2.3 From f36706442b4c1913ea8f7953993b9e03f3adf623 Mon Sep 17 00:00:00 2001 From: Scott Moser 
Date: Fri, 31 Jul 2015 16:12:37 +0000 Subject: address Daniel's comments in review --- bin/cloud-init | 3 ++- cloudinit/reporting.py | 34 +++++++++++++++------------------- cloudinit/sources/__init__.py | 7 +++---- cloudinit/stages.py | 3 ++- 4 files changed, 22 insertions(+), 25 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index c808eda5..d0ac4c7f 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -636,7 +636,8 @@ def main(): "running single module %s" % args.name) report_on = args.report - args.reporter = reporting.ReportStack(rname, rdesc, reporting=report_on) + args.reporter = reporting.ReportStack( + rname, rdesc, reporting_enabled=report_on) with args.reporter: return util.log_time( logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, diff --git a/cloudinit/reporting.py b/cloudinit/reporting.py index 1bd7df0d..154f4e03 100644 --- a/cloudinit/reporting.py +++ b/cloudinit/reporting.py @@ -11,6 +11,7 @@ report events in a structured manner. import abc import logging +import sys from cloudinit.registry import DictRegistry @@ -83,9 +84,9 @@ class LogHandler(ReportingHandler): logger.info(event.as_string()) -class PrintHandler(ReportingHandler): +class StderrHandler(ReportingHandler): def publish_event(self, event): - print(event.as_string()) + sys.stderr.write(event.as_string() + "\n") def add_configuration(config): @@ -134,23 +135,20 @@ def report_start_event(event_name, event_description): class ReportStack(object): - def __init__(self, name, description, parent=None, reporting=None, - exc_result=None): + def __init__(self, name, description, parent=None, + reporting_enabled=None, result_on_exception=status.FAIL): self.parent = parent self.name = name self.description = description - - if exc_result is None: - exc_result = status.FAIL - self.exc_result = exc_result + self.result_on_exception = result_on_exception # use parents reporting value if not provided - if reporting is None: + if reporting_enabled is None: if parent: - reporting = parent.reporting + reporting_enabled = parent.reporting_enabled else: - reporting = True - self.reporting = reporting + reporting_enabled = True + self.reporting_enabled = reporting_enabled if parent: self.fullname = '/'.join((parent.fullname, name,)) @@ -159,11 +157,10 @@ class ReportStack(object): self.children = {} def __repr__(self): - return ("%s reporting=%s" % (self.fullname, self.reporting)) + return ("%s reporting=%s" % (self.fullname, self.reporting_enabled)) def __enter__(self): - self.exception = None - if self.reporting: + if self.reporting_enabled: report_start_event(self.fullname, self.description) if self.parent: self.parent.children[self.name] = (None, None) @@ -184,18 +181,17 @@ class ReportStack(object): # return tuple of description, and value if exc: # by default, exceptions are fatal - return (self.exc_result, self.description) + return (self.result_on_exception, self.description) return self.childrens_finish_info() def __exit__(self, exc_type, exc_value, traceback): - self.exception = exc_value (result, msg) = self.finish_info(exc_value) if self.parent: self.parent.children[self.name] = (result, msg) - if self.reporting: + if self.reporting_enabled: report_finish_event(self.fullname, msg, result) available_handlers.register_item('log', LogHandler) -available_handlers.register_item('print', PrintHandler) +available_handlers.register_item('print', StderrHandler) add_configuration(DEFAULT_CONFIG) diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 
c174a58f..0dc75f9e 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -257,7 +257,7 @@ class SearchReportStack(reporting.ReportStack): self.mode, self.source) super(SearchReportStack, self).__init__( name=name, description=description, parent=parent, - exc_result=reporting.status.WARN) + result_on_exception=reporting.status.WARN) def finish_info(self, exc): # return tuple of description, and value @@ -276,10 +276,9 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_names = [type_utils.obj_name(f) for f in ds_list] LOG.debug("Searching for data source in: %s", ds_names) - for i, cls in enumerate(ds_list): - srcname=ds_names[i] + for name, cls in zip(ds_names, ds_list): try: - with SearchReportStack(srcname, ds_deps, reporter) as rep: + with SearchReportStack(name, ds_deps, reporter) as rep: LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 2bf7a1c4..82197d02 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -68,7 +68,8 @@ class Init(object): if reporter is None: reporter = reporting.ReportStack( - name="init-reporter", description="init-desc", reporting=False) + name="init-reporter", description="init-desc", + reporting_enabled=False) self.reporter = reporter def _reset(self, reset_ds=False): -- cgit v1.2.3 From 4f4e6d1cf90928daa1ab339f687b3319454aefdd Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 31 Jul 2015 16:31:26 +0000 Subject: move 'mode' out of SearchReportStack --- cloudinit/sources/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 0dc75f9e..6f2d2276 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -248,13 +248,12 @@ def normalize_pubkey_data(pubkey_data): class SearchReportStack(reporting.ReportStack): - def __init__(self, source, ds_deps, parent): + def __init__(self, source, mode, parent): self.source = source.replace("DataSource", "") name = "check-%s" % self.source self.found = False - self.mode = "network" if DEP_NETWORK in ds_deps else "local" - description = "searching for %s data from %s" % ( - self.mode, self.source) + self.mode = mode + description = "searching for %s data from %s" % (mode, self.source) super(SearchReportStack, self).__init__( name=name, description=description, parent=parent, result_on_exception=reporting.status.WARN) @@ -274,11 +273,12 @@ class SearchReportStack(reporting.ReportStack): def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_list = list_sources(cfg_list, ds_deps, pkg_list) ds_names = [type_utils.obj_name(f) for f in ds_list] - LOG.debug("Searching for data source in: %s", ds_names) + mode = "network" if DEP_NETWORK in ds_deps else "local" + LOG.debug("Searching for %s data source in: %s", mode, ds_names) for name, cls in zip(ds_names, ds_list): try: - with SearchReportStack(name, ds_deps, reporter) as rep: + with SearchReportStack(name, mode, reporter) as rep: LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): -- cgit v1.2.3 From 07b452e166b5d2ff34d5558b1dbba42ab0f1f23c Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 31 Jul 2015 19:27:52 +0000 Subject: plumb the rest the reporting through --- bin/cloud-init | 9 +++++---- cloudinit/cloud.py | 8 +++++++- cloudinit/reporting.py | 32 
+++++++++++++++++++++----------- cloudinit/sources/__init__.py | 32 +++++++------------------------- cloudinit/stages.py | 29 ++++++++++++++++++++++++----- 5 files changed, 64 insertions(+), 46 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index de3b9fbf..d369a806 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -284,7 +284,7 @@ def main_init(name, args): return (init.datasource, ["Consuming user data failed!"]) # Stage 8 - re-read and apply relevant cloud-config to include user-data - mods = stages.Modules(init, extract_fns(args)) + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) # Stage 9 try: outfmt_orig = outfmt @@ -329,7 +329,7 @@ def main_modules(action_name, args): if not args.force: return [(msg)] # Stage 3 - mods = stages.Modules(init, extract_fns(args)) + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) # Stage 4 try: LOG.debug("Closing stdin") @@ -384,7 +384,7 @@ def main_single(name, args): if not args.force: return 1 # Stage 3 - mods = stages.Modules(init, extract_fns(args)) + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) mod_args = args.module_args if mod_args: LOG.debug("Using passed in arguments %s", mod_args) @@ -630,7 +630,8 @@ def main(): else: rname, rdesc = ("init-network", "searching for network datasources") elif name == "modules": - rname, rdesc = ("modules-%s" % args.mode, "running modules for %s") + rname, rdesc = ("modules-%s" % args.mode, + "running modules for %s" % args.mode) elif name == "single": rname, rdesc = ("single/%s" % args.name, "running single module %s" % args.name) diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py index 95e0cfb2..71eb80eb 100644 --- a/cloudinit/cloud.py +++ b/cloudinit/cloud.py @@ -40,12 +40,18 @@ LOG = logging.getLogger(__name__) class Cloud(object): - def __init__(self, datasource, paths, cfg, distro, runners): + def __init__(self, datasource, paths, cfg, distro, runners, reporter=None): self.datasource = datasource self.paths = paths self.distro = distro self._cfg = cfg self._runners = runners + if reporter is None: + reporter = reporting.ReportStack( + name="unnamed-cloud-reporter", + description="unnamed-cloud-reporter", + reporting_enabled=False) + self.reporter = reporter # If a 'user' manipulates logging or logging services # it is typically useful to cause the logging to be diff --git a/cloudinit/reporting.py b/cloudinit/reporting.py index 154f4e03..08014c70 100644 --- a/cloudinit/reporting.py +++ b/cloudinit/reporting.py @@ -86,7 +86,8 @@ class LogHandler(ReportingHandler): class StderrHandler(ReportingHandler): def publish_event(self, event): - sys.stderr.write(event.as_string() + "\n") + #sys.stderr.write(event.as_string() + "\n") + print(event.as_string()) def add_configuration(config): @@ -135,12 +136,14 @@ def report_start_event(event_name, event_description): class ReportStack(object): - def __init__(self, name, description, parent=None, + def __init__(self, name, description, message=None, parent=None, reporting_enabled=None, result_on_exception=status.FAIL): self.parent = parent self.name = name self.description = description + self.message = message self.result_on_exception = result_on_exception + self.result = None # use parents reporting value if not provided if reporting_enabled is None: @@ -160,28 +163,35 @@ class ReportStack(object): return ("%s reporting=%s" % (self.fullname, self.reporting_enabled)) def __enter__(self): + self.result = None if self.reporting_enabled: 
report_start_event(self.fullname, self.description) if self.parent: self.parent.children[self.name] = (None, None) return self - def childrens_finish_info(self, result=None, description=None): + def childrens_finish_info(self): for cand_result in (status.FAIL, status.WARN): for name, (value, msg) in self.children.items(): if value == cand_result: return (value, "[" + name + "]" + msg) - if result is None: - result = status.SUCCESS - if description is None: - description = self.description - return (result, description) - + return (self.result, self.message) + + @property + def message(self): + if self._message is not None: + return self._message + return self.description + + @message.setter + def message(self, value): + self._message = value + + def finish_info(self, exc): # return tuple of description, and value if exc: - # by default, exceptions are fatal - return (self.result_on_exception, self.description) + return (self.result_on_exception, self.message) return self.childrens_finish_info() def __exit__(self, exc_type, exc_value, traceback): diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 6f2d2276..3b48f173 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -247,29 +247,6 @@ def normalize_pubkey_data(pubkey_data): return keys -class SearchReportStack(reporting.ReportStack): - def __init__(self, source, mode, parent): - self.source = source.replace("DataSource", "") - name = "check-%s" % self.source - self.found = False - self.mode = mode - description = "searching for %s data from %s" % (mode, self.source) - super(SearchReportStack, self).__init__( - name=name, description=description, parent=parent, - result_on_exception=reporting.status.WARN) - - def finish_info(self, exc): - # return tuple of description, and value - if exc: - # by default, exceptions are fatal - return (self.exc_result, self.description) - if self.found: - description = "found %s data from %s" % (self.mode, self.source) - else: - description = "no %s data found from %s" % (self.mode, self.source) - return self.childrens_finish_info(description=description) - - def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): ds_list = list_sources(cfg_list, ds_deps, pkg_list) ds_names = [type_utils.obj_name(f) for f in ds_list] @@ -277,12 +254,17 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): LOG.debug("Searching for %s data source in: %s", mode, ds_names) for name, cls in zip(ds_names, ds_list): + myrep = reporting.ReportStack( + name="search-%s-%s" % (mode, name.replace("DataSource", "")), + description="searching for %s data from %s" % (mode, name), + message = "no %s data found from %s" % (mode, name), + parent=reporter) try: - with SearchReportStack(name, mode, reporter) as rep: + with myrep: LOG.debug("Seeing if we can get any data from %s", cls) s = cls(sys_cfg, distro, paths) if s.get_data(): - rep.found = True + myrep.message = "found %s data from %s" % (mode, name) return (s, type_utils.obj_name(cls)) except Exception: util.logexc(LOG, "Getting data from %s failed", cls) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 79d22538..8c79ae4e 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -341,7 +341,8 @@ class Init(object): # Form the needed options to cloudify our members return cloud.Cloud(self.datasource, self.paths, self.cfg, - self.distro, helpers.Runners(self.paths)) + self.distro, helpers.Runners(self.paths), + reporter=self.reporter) def update(self): if not 
self._write_to_cache(): @@ -507,8 +508,14 @@ class Init(object): def consume_data(self, frequency=PER_INSTANCE): # Consume the userdata first, because we need want to let the part # handlers run first (for merging stuff) - self._consume_userdata(frequency) - self._consume_vendordata(frequency) + with reporting.ReportStack( + "consume-user-data", "reading and applying user-data", + parent=self.reporter): + self._consume_userdata(frequency) + with reporting.ReportStack( + "consume-vendor-data", "reading and applying vendor-data", + parent=self.reporter): + self._consume_userdata(frequency) # Perform post-consumption adjustments so that # modules that run during the init stage reflect @@ -581,11 +588,12 @@ class Init(object): class Modules(object): - def __init__(self, init, cfg_files=None): + def __init__(self, init, cfg_files=None, reporter=None): self.init = init self.cfg_files = cfg_files # Created on first use self._cached_cfg = None + self.reporter = reporter @property def cfg(self): @@ -695,7 +703,18 @@ class Modules(object): which_ran.append(name) # This name will affect the semaphore name created run_name = "config-%s" % (name) - cc.run(run_name, mod.handle, func_args, freq=freq) + + desc="running %s with frequency %s" % (run_name, freq) + myrep = reporting.ReportStack( + name=run_name, description=desc, parent=self.reporter) + + with myrep: + ran, _r = cc.run(run_name, mod.handle, func_args, freq=freq) + if ran: + myrep.message = "%s ran successfully" % run_name + else: + myrep.message = "%s previously ran" % run_name + except Exception as e: util.logexc(LOG, "Running module %s (%s) failed", name, mod) failures.append((name, e)) -- cgit v1.2.3 From 89c564a6fd5ac89869f83541370557e3fa58495c Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Sun, 2 Aug 2015 17:51:40 -0400 Subject: fix tests from sync change ReportStack to ReportEventStack change default ReportEventStack to be status.SUCCESS instead of None --- bin/cloud-init | 3 +- cloudinit/cloud.py | 2 +- cloudinit/reporting/__init__.py | 91 +++++++++++++++++++++++++++++++++++---- cloudinit/reporting/handlers.py | 7 +++ cloudinit/sources/__init__.py | 2 +- cloudinit/stages.py | 10 ++--- tests/unittests/test_reporting.py | 19 ++++---- 7 files changed, 109 insertions(+), 25 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index d369a806..51253c42 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -637,7 +637,8 @@ def main(): "running single module %s" % args.name) report_on = args.report - args.reporter = reporting.ReportStack( + reporting.add_configuration({'print': {'type': 'print'}}) + args.reporter = reporting.ReportEventStack( rname, rdesc, reporting_enabled=report_on) with args.reporter: return util.log_time( diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py index 71eb80eb..a0fb42a3 100644 --- a/cloudinit/cloud.py +++ b/cloudinit/cloud.py @@ -47,7 +47,7 @@ class Cloud(object): self._cfg = cfg self._runners = runners if reporter is None: - reporter = reporting.ReportStack( + reporter = reporting.ReportEventStack( name="unnamed-cloud-reporter", description="unnamed-cloud-reporter", reporting_enabled=False) diff --git a/cloudinit/reporting/__init__.py b/cloudinit/reporting/__init__.py index b0364eec..78dde715 100644 --- a/cloudinit/reporting/__init__.py +++ b/cloudinit/reporting/__init__.py @@ -22,6 +22,15 @@ DEFAULT_CONFIG = { instantiated_handler_registry = DictRegistry() +class _nameset(set): + def __getattr__(self, name): + if name in self: + return name + raise AttributeError("%s not 
a valid value" % name) + + +status = _nameset(("SUCCESS", "WARN", "FAIL")) + class ReportingEvent(object): """Encapsulation of event formatting.""" @@ -39,17 +48,16 @@ class ReportingEvent(object): class FinishReportingEvent(ReportingEvent): - def __init__(self, name, description, successful=None): + def __init__(self, name, description, result=status.SUCCESS): super(FinishReportingEvent, self).__init__( FINISH_EVENT_TYPE, name, description) - self.successful = successful + self.result = result + if result not in status: + raise ValueError("Invalid result: %s" % result) def as_string(self): - if self.successful is None: - return super(FinishReportingEvent, self).as_string() - success_string = 'success' if self.successful else 'fail' return '{0}: {1}: {2}: {3}'.format( - self.event_type, self.name, success_string, self.description) + self.event_type, self.name, self.result, self.description) def add_configuration(config): @@ -74,12 +82,13 @@ def report_event(event): handler.publish_event(event) -def report_finish_event(event_name, event_description, successful=None): +def report_finish_event(event_name, event_description, + result=status.SUCCESS): """Report a "finish" event. See :py:func:`.report_event` for parameter details. """ - event = FinishReportingEvent(event_name, event_description, successful) + event = FinishReportingEvent(event_name, event_description, result) return report_event(event) @@ -97,4 +106,70 @@ def report_start_event(event_name, event_description): return report_event(event) +class ReportEventStack(object): + def __init__(self, name, description, message=None, parent=None, + reporting_enabled=None, result_on_exception=status.FAIL): + self.parent = parent + self.name = name + self.description = description + self.message = message + self.result_on_exception = result_on_exception + self.result = status.SUCCESS + + # use parents reporting value if not provided + if reporting_enabled is None: + if parent: + reporting_enabled = parent.reporting_enabled + else: + reporting_enabled = True + self.reporting_enabled = reporting_enabled + + if parent: + self.fullname = '/'.join((parent.fullname, name,)) + else: + self.fullname = self.name + self.children = {} + + def __repr__(self): + return ("%s reporting=%s" % (self.fullname, self.reporting_enabled)) + + def __enter__(self): + self.result = status.SUCCESS + if self.reporting_enabled: + report_start_event(self.fullname, self.description) + if self.parent: + self.parent.children[self.name] = (None, None) + return self + + def childrens_finish_info(self): + for cand_result in (status.FAIL, status.WARN): + for name, (value, msg) in self.children.items(): + if value == cand_result: + return (value, "[" + name + "]" + msg) + return (self.result, self.message) + + @property + def message(self): + if self._message is not None: + return self._message + return self.description + + @message.setter + def message(self, value): + self._message = value + + def finish_info(self, exc): + # return tuple of description, and value + if exc: + return (self.result_on_exception, self.message) + return self.childrens_finish_info() + + def __exit__(self, exc_type, exc_value, traceback): + (result, msg) = self.finish_info(exc_value) + if self.parent: + self.parent.children[self.name] = (result, msg) + if self.reporting_enabled: + report_finish_event(self.fullname, msg, result) + + add_configuration(DEFAULT_CONFIG) diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py index be323f53..1d5ca524 100644 --- 
a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -21,5 +21,12 @@ class LogHandler(ReportingHandler): logger.info(event.as_string()) +class StderrHandler(ReportingHandler): + def publish_event(self, event): + #sys.stderr.write(event.as_string() + "\n") + print(event.as_string()) + + available_handlers = DictRegistry() available_handlers.register_item('log', LogHandler) +available_handlers.register_item('print', StderrHandler) diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 3b48f173..d07cf1fa 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -254,7 +254,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): LOG.debug("Searching for %s data source in: %s", mode, ds_names) for name, cls in zip(ds_names, ds_list): - myrep = reporting.ReportStack( + myrep = reporting.ReportEventStack( name="search-%s-%s" % (mode, name.replace("DataSource", "")), description="searching for %s data from %s" % (mode, name), message = "no %s data found from %s" % (mode, name), diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 8c79ae4e..42989bb4 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -67,7 +67,7 @@ class Init(object): self.datasource = NULL_DATA_SOURCE if reporter is None: - reporter = reporting.ReportStack( + reporter = reporting.ReportEventStack( name="init-reporter", description="init-desc", reporting_enabled=False) self.reporter = reporter @@ -242,7 +242,7 @@ class Init(object): if self.datasource is not NULL_DATA_SOURCE: return self.datasource - with reporting.ReportStack( + with reporting.ReportEventStack( name="check-cache", description="attempting to read from cache", parent=self.reporter) as myrep: ds = self._restore_from_cache() @@ -508,11 +508,11 @@ class Init(object): def consume_data(self, frequency=PER_INSTANCE): # Consume the userdata first, because we need want to let the part # handlers run first (for merging stuff) - with reporting.ReportStack( + with reporting.ReportEventStack( "consume-user-data", "reading and applying user-data", parent=self.reporter): self._consume_userdata(frequency) - with reporting.ReportStack( + with reporting.ReportEventStack( "consume-vendor-data", "reading and applying vendor-data", parent=self.reporter): self._consume_userdata(frequency) @@ -705,7 +705,7 @@ class Modules(object): run_name = "config-%s" % (name) desc="running %s with frequency %s" % (run_name, freq) - myrep = reporting.ReportStack( + myrep = reporting.ReportEventStack( name=run_name, description=desc, parent=self.reporter) with myrep: diff --git a/tests/unittests/test_reporting.py b/tests/unittests/test_reporting.py index 5700118f..4f4cf3a4 100644 --- a/tests/unittests/test_reporting.py +++ b/tests/unittests/test_reporting.py @@ -32,7 +32,7 @@ class TestReportStartEvent(TestCase): class TestReportFinishEvent(TestCase): - def _report_finish_event(self, result=None): + def _report_finish_event(self, result=reporting.status.SUCCESS): event_name, event_description = 'my_test_event', 'my description' reporting.report_finish_event( event_name, event_description, result=result) @@ -95,31 +95,32 @@ class TestReportingHandler(TestCase): def test_no_default_publish_event_implementation(self): self.assertRaises(NotImplementedError, - reporting.ReportingHandler().publish_event, None) + reporting.handlers.ReportingHandler().publish_event, + None) class TestLogHandler(TestCase): - @mock.patch.object(reporting.logging, 'getLogger') + 
@mock.patch.object(reporting.handlers.logging, 'getLogger') def test_appropriate_logger_used(self, getLogger): event_type, event_name = 'test_type', 'test_name' event = reporting.ReportingEvent(event_type, event_name, 'description') - reporting.LogHandler().publish_event(event) + reporting.handlers.LogHandler().publish_event(event) self.assertEqual( [mock.call( 'cloudinit.reporting.{0}.{1}'.format(event_type, event_name))], getLogger.call_args_list) - @mock.patch.object(reporting.logging, 'getLogger') + @mock.patch.object(reporting.handlers.logging, 'getLogger') def test_single_log_message_at_info_published(self, getLogger): event = reporting.ReportingEvent('type', 'name', 'description') - reporting.LogHandler().publish_event(event) + reporting.handlers.LogHandler().publish_event(event) self.assertEqual(1, getLogger.return_value.info.call_count) - @mock.patch.object(reporting.logging, 'getLogger') + @mock.patch.object(reporting.handlers.logging, 'getLogger') def test_log_message_uses_event_as_string(self, getLogger): event = reporting.ReportingEvent('type', 'name', 'description') - reporting.LogHandler().publish_event(event) + reporting.handlers.LogHandler().publish_event(event) self.assertIn(event.as_string(), getLogger.return_value.info.call_args[0][0]) @@ -130,7 +131,7 @@ class TestDefaultRegisteredHandler(TestCase): registered_items = ( reporting.instantiated_handler_registry.registered_items) for _, item in registered_items.items(): - if isinstance(item, reporting.LogHandler): + if isinstance(item, reporting.handlers.LogHandler): break else: self.fail('No reporting LogHandler registered by default.') -- cgit v1.2.3 From e29c07adc1aa9d042ae790d1cb900a6a51a85952 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Sun, 2 Aug 2015 18:06:50 -0400 Subject: event name doesnt need mode as it is run through init-local or init-net --- cloudinit/sources/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index d07cf1fa..cf50c1fb 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -255,7 +255,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): for name, cls in zip(ds_names, ds_list): myrep = reporting.ReportEventStack( - name="search-%s-%s" % (mode, name.replace("DataSource", "")), + name="search-%s" % name.replace("DataSource", ""), description="searching for %s data from %s" % (mode, name), message = "no %s data found from %s" % (mode, name), parent=reporter) -- cgit v1.2.3 From 5585b397cfb4ba397e9cfba3d86e3d10af20eb71 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 4 Aug 2015 22:01:27 -0500 Subject: fix pep8 --- bin/cloud-init | 3 ++- cloudinit/reporting/handlers.py | 7 ------- cloudinit/sources/__init__.py | 2 +- cloudinit/stages.py | 10 ++++++---- tests/unittests/test_reporting.py | 6 ++++-- 5 files changed, 13 insertions(+), 15 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index 51253c42..40cdbb06 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -628,7 +628,8 @@ def main(): if args.local: rname, rdesc = ("init-local", "searching for local datasources") else: - rname, rdesc = ("init-network", "searching for network datasources") + rname, rdesc = ("init-network", + "searching for network datasources") elif name == "modules": rname, rdesc = ("modules-%s" % args.mode, "running modules for %s" % args.mode) diff --git a/cloudinit/reporting/handlers.py 
b/cloudinit/reporting/handlers.py index 1d5ca524..be323f53 100644 --- a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -21,12 +21,5 @@ class LogHandler(ReportingHandler): logger.info(event.as_string()) -class StderrHandler(ReportingHandler): - def publish_event(self, event): - #sys.stderr.write(event.as_string() + "\n") - print(event.as_string()) - - available_handlers = DictRegistry() available_handlers.register_item('log', LogHandler) -available_handlers.register_item('print', StderrHandler) diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index cf50c1fb..838cd198 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -257,7 +257,7 @@ def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): myrep = reporting.ReportEventStack( name="search-%s" % name.replace("DataSource", ""), description="searching for %s data from %s" % (mode, name), - message = "no %s data found from %s" % (mode, name), + message="no %s data found from %s" % (mode, name), parent=reporter) try: with myrep: diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 7b489b9f..d300709d 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -243,7 +243,8 @@ class Init(object): return self.datasource with reporting.ReportEventStack( - name="check-cache", description="attempting to read from cache", + name="check-cache", + description="attempting to read from cache", parent=self.reporter) as myrep: ds = self._restore_from_cache() if ds: @@ -708,17 +709,18 @@ class Modules(object): # This name will affect the semaphore name created run_name = "config-%s" % (name) - desc="running %s with frequency %s" % (run_name, freq) + desc = "running %s with frequency %s" % (run_name, freq) myrep = reporting.ReportEventStack( name=run_name, description=desc, parent=self.reporter) with myrep: - ran, _r = cc.run(run_name, mod.handle, func_args, freq=freq) + ran, _r = cc.run(run_name, mod.handle, func_args, + freq=freq) if ran: myrep.message = "%s ran successfully" % run_name else: myrep.message = "%s previously ran" % run_name - + except Exception as e: util.logexc(LOG, "Running module %s (%s) failed", name, mod) failures.append((name, e)) diff --git a/tests/unittests/test_reporting.py b/tests/unittests/test_reporting.py index 4f4cf3a4..ddfac541 100644 --- a/tests/unittests/test_reporting.py +++ b/tests/unittests/test_reporting.py @@ -51,7 +51,8 @@ class TestReportFinishEvent(TestCase): self, instantiated_handler_registry): event_name, event_description = self._report_finish_event() expected_string_representation = ': '.join( - ['finish', event_name, reporting.status.SUCCESS, event_description]) + ['finish', event_name, reporting.status.SUCCESS, + event_description]) self.assertHandlersPassedObjectWithAsString( instantiated_handler_registry.registered_items, expected_string_representation) @@ -63,7 +64,8 @@ class TestReportFinishEvent(TestCase): event_name, event_description = self._report_finish_event( result=reporting.status.SUCCESS) expected_string_representation = ': '.join( - ['finish', event_name, reporting.status.SUCCESS, event_description]) + ['finish', event_name, reporting.status.SUCCESS, + event_description]) self.assertHandlersPassedObjectWithAsString( instantiated_handler_registry.registered_items, expected_string_representation) -- cgit v1.2.3 From ebd393e56ba21f8a84571dff499e6d6fb6852042 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 6 Aug 2015 18:34:57 -0500 Subject: tests pass --- bin/cloud-init | 10 +++ 
cloudinit/reporting/handlers.py | 28 +++++++ cloudinit/sources/DataSourceMAAS.py | 88 +++++----------------- cloudinit/url_helper.py | 142 ++++++++++++++++++++++++++++++++++-- cloudinit/util.py | 3 +- 5 files changed, 196 insertions(+), 75 deletions(-) (limited to 'cloudinit/sources') diff --git a/bin/cloud-init b/bin/cloud-init index 40cdbb06..ad2e624a 100755 --- a/bin/cloud-init +++ b/bin/cloud-init @@ -137,6 +137,11 @@ def run_module_section(mods, action_name, section): return failures +def apply_reporting_cfg(cfg): + reporting.reset_configuration() + reporting.update_configuration(cfg.get('reporting'), {}) + + def main_init(name, args): deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] if args.local: @@ -191,6 +196,7 @@ def main_init(name, args): " longer be active shortly")) logging.resetLogging() logging.setupLogging(init.cfg) + apply_reporting_cfg(init.cfg) # Any log usage prior to setupLogging above did not have local user log # config applied. We send the welcome message now, as stderr/out have @@ -283,6 +289,8 @@ def main_init(name, args): util.logexc(LOG, "Consuming user data failed!") return (init.datasource, ["Consuming user data failed!"]) + apply_reporting_cfg(init.cfg) + # Stage 8 - re-read and apply relevant cloud-config to include user-data mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) # Stage 9 @@ -343,6 +351,7 @@ def main_modules(action_name, args): " longer be active shortly")) logging.resetLogging() logging.setupLogging(mods.cfg) + apply_reporting_cfg(init.cfg) # now that logging is setup and stdout redirected, send welcome welcome(name, msg=w_msg) @@ -405,6 +414,7 @@ def main_single(name, args): " longer be active shortly")) logging.resetLogging() logging.setupLogging(mods.cfg) + apply_reporting_cfg(init.cfg) # now that logging is setup and stdout redirected, send welcome welcome(name, msg=w_msg) diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py index 86cbe3c3..d8f69641 100644 --- a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -34,5 +34,33 @@ class LogHandler(ReportingHandler): logger.info(event.as_string()) +class WebHookHandler(ReportingHandler): + def __init__(self, endpoint, consumer_key=None, token_key=None, + token_secret=None, consumer_secret=None, timeout=None, + retries=None): + super(WebHookHandler, self).__init__() + + if any(consumer_key, token_key, token_secret, consumer_secret): + self.oauth_helper = url_helper.OauthHelper( + consumer_key=consumer_key, token_key=token_key, + token_secret=token_secret, consumer_secret=consumer_secret) + else: + self.oauth_helper = None + self.endpoint = endpoint + self.timeout = timeout + self.retries = retries + self.ssl_details = util.fetch_ssl_details() + + def publish_event(self, event): + if self.oauth_helper: + readurl = self.oauth_helper.readurl + else: + readurl = url_helper.readurl + return readurl( + self.endpoint, data=event.as_dict(), + timeout=self.timeout, + retries=self.retries, ssl_details=self.ssl_details) + + available_handlers = DictRegistry() available_handlers.register_item('log', LogHandler) diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index c1a0eb61..279da238 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -52,7 +52,20 @@ class DataSourceMAAS(sources.DataSource): sources.DataSource.__init__(self, sys_cfg, distro, paths) self.base_url = None self.seed_dir = os.path.join(paths.seed_dir, 'maas') - self.oauth_clockskew = None + 
self.oauth_helper = self._get_helper() + + def _get_helper(self): + mcfg = self.ds_cfg + # If we are missing token_key, token_secret or consumer_key + # then just do non-authed requests + for required in ('token_key', 'token_secret', 'consumer_key'): + if required not in mcfg: + return url_helper.OauthUrlHelper() + + return url_helper.OauthHelper( + consumer_key=mcfg['consumer_key'], token_key=mcfg['token_key'], + token_secret=mcfg['token_secret'], + consumer_secret=mcfg.get('consumer_secret')) def __str__(self): root = sources.DataSource.__str__(self) @@ -84,9 +97,9 @@ class DataSourceMAAS(sources.DataSource): self.base_url = url - (userdata, metadata) = read_maas_seed_url(self.base_url, - self._md_headers, - paths=self.paths) + (userdata, metadata) = read_maas_seed_url( + self.base_url, self.oauth_helper.md_headers, + paths=self.paths) self.userdata_raw = userdata self.metadata = metadata return True @@ -94,31 +107,8 @@ class DataSourceMAAS(sources.DataSource): util.logexc(LOG, "Failed fetching metadata from url %s", url) return False - def _md_headers(self, url): - mcfg = self.ds_cfg - - # If we are missing token_key, token_secret or consumer_key - # then just do non-authed requests - for required in ('token_key', 'token_secret', 'consumer_key'): - if required not in mcfg: - return {} - - consumer_secret = mcfg.get('consumer_secret', "") - - timestamp = None - if self.oauth_clockskew: - timestamp = int(time.time()) + self.oauth_clockskew - - return oauth_headers(url=url, - consumer_key=mcfg['consumer_key'], - token_key=mcfg['token_key'], - token_secret=mcfg['token_secret'], - consumer_secret=consumer_secret, - timestamp=timestamp) - def wait_for_metadata_service(self, url): mcfg = self.ds_cfg - max_wait = 120 try: max_wait = int(mcfg.get("max_wait", max_wait)) @@ -138,10 +128,8 @@ class DataSourceMAAS(sources.DataSource): starttime = time.time() check_url = "%s/%s/meta-data/instance-id" % (url, MD_VERSION) urls = [check_url] - url = url_helper.wait_for_url(urls=urls, max_wait=max_wait, - timeout=timeout, - exception_cb=self._except_cb, - headers_cb=self._md_headers) + url = self.oauth_helper.wait_for_url( + urls=urls, max_wait=max_wait, timeout=timeout) if url: LOG.debug("Using metadata source: '%s'", url) @@ -151,26 +139,6 @@ class DataSourceMAAS(sources.DataSource): return bool(url) - def _except_cb(self, msg, exception): - if not (isinstance(exception, url_helper.UrlError) and - (exception.code == 403 or exception.code == 401)): - return - - if 'date' not in exception.headers: - LOG.warn("Missing header 'date' in %s response", exception.code) - return - - date = exception.headers['date'] - try: - ret_time = time.mktime(parsedate(date)) - except Exception as e: - LOG.warn("Failed to convert datetime '%s': %s", date, e) - return - - self.oauth_clockskew = int(ret_time - time.time()) - LOG.warn("Setting oauth clockskew to %d", self.oauth_clockskew) - return - def read_maas_seed_dir(seed_d): """ @@ -280,24 +248,6 @@ def check_seed_contents(content, seed): return (userdata, md) -def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret, - timestamp=None): - if timestamp: - timestamp = str(timestamp) - else: - timestamp = None - - client = oauth1.Client( - consumer_key, - client_secret=consumer_secret, - resource_owner_key=token_key, - resource_owner_secret=token_secret, - signature_method=oauth1.SIGNATURE_PLAINTEXT, - timestamp=timestamp) - uri, signed_headers, body = client.sign(url) - return signed_headers - - class MAASSeedDirNone(Exception): pass diff --git 
a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 0e65f431..2141cdc5 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -25,6 +25,10 @@ import time import six import requests +import oauthlib.oauth1 as oauth1 +import os +import json +from functools import partial from requests import exceptions from six.moves.urllib.parse import ( @@ -147,13 +151,14 @@ class UrlResponse(object): class UrlError(IOError): - def __init__(self, cause, code=None, headers=None): + def __init__(self, cause, code=None, headers=None, url=None): IOError.__init__(self, str(cause)) self.cause = cause self.code = code self.headers = headers if self.headers is None: self.headers = {} + self.url = url def _get_ssl_args(url, ssl_details): @@ -247,9 +252,10 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, and hasattr(e, 'response') # This appeared in v 0.10.8 and hasattr(e.response, 'status_code')): excps.append(UrlError(e, code=e.response.status_code, - headers=e.response.headers)) + headers=e.response.headers, + url=url)) else: - excps.append(UrlError(e)) + excps.append(UrlError(e, url=url)) if SSL_ENABLED and isinstance(e, exceptions.SSLError): # ssl exceptions are not going to get fixed by waiting a # few seconds @@ -333,11 +339,11 @@ def wait_for_url(urls, max_wait=None, timeout=None, if not response.contents: reason = "empty response [%s]" % (response.code) url_exc = UrlError(ValueError(reason), code=response.code, - headers=response.headers) + headers=response.headers, url=url) elif not response.ok(): reason = "bad status code [%s]" % (response.code) url_exc = UrlError(ValueError(reason), code=response.code, - headers=response.headers) + headers=response.headers, url=url) else: return url except UrlError as e: @@ -368,3 +374,129 @@ def wait_for_url(urls, max_wait=None, timeout=None, time.sleep(sleep_time) return False + + +class OauthUrlHelper(object): + def __init__(self, consumer_key=None, token_key=None, + token_secret=None, consumer_secret=None, + skew_data_file="/run/oauth_skew.json"): + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret or "" + self.token_key = token_key + self.token_secret = token_secret + self.skew_data_file = skew_data_file + self.skew_data = {} + self._do_oauth = True + self.skew_change_limit = 5 + required = (self.token_key, self.token_secret, self.consumer_key) + if not any(required): + self._do_oauth = False + elif not all(required): + raise ValueError("all or none of token_key, token_secret, or " + "consumer_key can be set") + + self.skew_data = self.read_skew_file() + + def read_skew_file(self): + if self.skew_data_file and os.path.isfile(self.skew_data_file): + with open(self.skew_data_file, mode="r") as fp: + return json.load(fp.read()) + return None + + def update_skew_file(self, host, value): + # this is not atomic + cur = self.read_skew_file() + if cur is None or not self.skew_data_file: + return + cur[host] = value + with open(self.skew_data_file, mode="w") as fp: + fp.write(json.dumps(cur)) + + def exception_cb(self, msg, exception): + if not (isinstance(exception, UrlError) and + (exception.code == 403 or exception.code == 401)): + return + + if 'date' not in exception.headers: + LOG.warn("Missing header 'date' in %s response", exception.code) + return + + date = exception.headers['date'] + try: + ret_time = time.mktime(parsedate(date)) + except Exception as e: + LOG.warn("Failed to convert datetime '%s': %s", date, e) + return + + host = urlparse(exception.url).netloc + skew = int(ret_time - time.time()) 
+ old_skew = self.skew_data.get(host) + if abs(old_skew - skew) > self.skew_change_limit: + self.update_skew_file(host, skew) + LOG.warn("Setting oauth clockskew for %s to %d", + host, skew) + skew_data[host] = skew + + return + + def headers_cb(self, url): + if not self._do_oauth: + return {} + + timestamp = None + host = urlparse(url).netloc + if host in self.skew_data: + timestamp = int(time.time()) + self.skew_data[host] + + return oauth_headers( + url=url, consumer_key=self.consumer_key, + token_key=self.token_key, token_secret=self.token_secret, + consumer_secret=self.consumer_secret, timestamp=timestamp) + + def _wrapped(self, wrapped_func, args, kwargs): + kwargs['headers_cb'] = partial( + self._headers_cb, kwargs.get('headers_cb')) + kwargs['exception_cb'] = partial( + self._exception_cb, kwargs.get('exception_cb')) + return wrapped_func(*args, **kwargs) + + def wait_for_url(self, *args, **kwargs): + return self._wrapped(wait_for_url, args, kwargs) + + def readurl(self, *args, **kwargs): + return self._wrapped(readurl, args, kwargs) + + def _exception_cb(self, extra_exception_cb, url, msg, exception): + ret = None + try: + if extra_exception_cb: + ret = extra_exception_cb(msg, exception) + finally: + self.exception_cb(self, msg, exception) + return ret + + def _headers_cb(self, extra_headers_cb, url): + headers = {} + if extra_headers_cb: + headers = extra_headers_cb(url) + if headers: + headers.update(self.headers_cb(url)) + return headers + + +def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret, + timestamp=None): + if timestamp: + timestamp = str(timestamp) + else: + timestamp = None + + client = oauth1.Client( + consumer_key, + client_secret=consumer_secret, + resource_owner_key=token_key, + resource_owner_secret=token_secret, + signature_method=oauth1.SIGNATURE_PLAINTEXT, + timestamp=timestamp) + uri, signed_headers, body = client.sign(url) + return signed_headers diff --git a/cloudinit/util.py b/cloudinit/util.py index 02ba654a..09e583f5 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -782,7 +782,8 @@ def read_file_or_url(url, timeout=5, retries=10, code = e.errno if e.errno == errno.ENOENT: code = url_helper.NOT_FOUND - raise url_helper.UrlError(cause=e, code=code, headers=None) + raise url_helper.UrlError(cause=e, code=code, headers=None, + url=url) return url_helper.FileResponse(file_path, contents=contents) else: return url_helper.readurl(url, -- cgit v1.2.3 From 48cb8699efb5c6116dfa7b4d76d0a5fb6b3fbbbf Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 7 Aug 2015 00:22:49 -0500 Subject: hopefully fix DataSourceMAAS --- cloudinit/sources/DataSourceMAAS.py | 58 +++++++++++----------------- tests/unittests/test_datasource/test_maas.py | 2 +- 2 files changed, 24 insertions(+), 36 deletions(-) (limited to 'cloudinit/sources') diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py index 279da238..2f36bbe2 100644 --- a/cloudinit/sources/DataSourceMAAS.py +++ b/cloudinit/sources/DataSourceMAAS.py @@ -164,12 +164,12 @@ def read_maas_seed_dir(seed_d): return check_seed_contents(md, seed_d) -def read_maas_seed_url(seed_url, header_cb=None, timeout=None, +def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None, version=MD_VERSION, paths=None): """ Read the maas datasource at seed_url. 
- - header_cb is a method that should return a headers dictionary for - a given url + read_file_or_url is a method that should provide an interface + like util.read_file_or_url Expected format of seed_url is are the following files: * //meta-data/instance-id @@ -190,14 +190,12 @@ def read_maas_seed_url(seed_url, header_cb=None, timeout=None, 'user-data': "%s/%s" % (base_url, 'user-data'), } + if read_file_or_url is None: + read_file_or_url = util.read_file_or_url + md = {} for name in file_order: url = files.get(name) - if not header_cb: - def _cb(url): - return {} - header_cb = _cb - if name == 'user-data': retries = 0 else: @@ -205,10 +203,8 @@ def read_maas_seed_url(seed_url, header_cb=None, timeout=None, try: ssl_details = util.fetch_ssl_details(paths) - resp = util.read_file_or_url(url, retries=retries, - headers_cb=header_cb, - timeout=timeout, - ssl_details=ssl_details) + resp = read_file_or_url(url, retries=retries, + timeout=timeout, ssl_details=ssl_details) if resp.ok(): if name in BINARY_FIELDS: md[name] = resp.contents @@ -311,47 +307,39 @@ if __name__ == "__main__": if key in cfg and creds[key] is None: creds[key] = cfg[key] - def geturl(url, headers_cb): - req = Request(url, data=None, headers=headers_cb(url)) - return urlopen(req).read() + oauth_helper = url_helper.OauthUrlHelper(**creds) + + def geturl(url): + return oauth_helper.readurl(url).contents def printurl(url, headers_cb): - print("== %s ==\n%s\n" % (url, geturl(url, headers_cb))) + print("== %s ==\n%s\n" % (url, geturl(url))) - def crawl(url, headers_cb=None): + def crawl(url): if url.endswith("/"): - for line in geturl(url, headers_cb).splitlines(): + for line in geturl(url).splitlines(): if line.endswith("/"): - crawl("%s%s" % (url, line), headers_cb) + crawl("%s%s" % (url, line)) else: - printurl("%s%s" % (url, line), headers_cb) + printurl("%s%s" % (url, line)) else: - printurl(url, headers_cb) - - def my_headers(url): - headers = {} - if creds.get('consumer_key', None) is not None: - headers = oauth_headers(url, **creds) - return headers + printurl(url) if args.subcmd == "check-seed": - if args.url.startswith("http"): - (userdata, metadata) = read_maas_seed_url(args.url, - header_cb=my_headers, - version=args.apiver) - else: - (userdata, metadata) = read_maas_seed_url(args.url) + (userdata, metadata) = read_maas_seed_url( + args.url, read_file_or_url=oauth_helper.read_file_or_url, + version=args.apiver) print("=== userdata ===") print(userdata) print("=== metadata ===") pprint.pprint(metadata) elif args.subcmd == "get": - printurl(args.url, my_headers) + printurl(args.url) elif args.subcmd == "crawl": if not args.url.endswith("/"): args.url = "%s/" % args.url - crawl(args.url, my_headers) + crawl(args.url) main() diff --git a/tests/unittests/test_datasource/test_maas.py b/tests/unittests/test_datasource/test_maas.py index f109bb04..eb97b692 100644 --- a/tests/unittests/test_datasource/test_maas.py +++ b/tests/unittests/test_datasource/test_maas.py @@ -141,7 +141,7 @@ class TestMAASDataSource(TestCase): with mock.patch.object(url_helper, 'readurl', side_effect=side_effect()) as mockobj: userdata, metadata = DataSourceMAAS.read_maas_seed_url( - my_seed, header_cb=my_headers_cb, version=my_ver) + my_seed, version=my_ver) self.assertEqual(b"foodata", userdata) self.assertEqual(metadata['instance-id'], -- cgit v1.2.3
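
For reference, a minimal usage sketch of the ReportEventStack API that this patch series introduces, written against the cloudinit tree as of the final commit above. The datasource name, the event names, and the logging.basicConfig() call are illustrative assumptions, not taken from the patches.

    import logging

    from cloudinit import reporting

    # The default reporting configuration in these patches registers the 'log'
    # handler, so start/finish events are published through the standard
    # logging module; basicConfig() here just makes them visible on a console.
    logging.basicConfig(level=logging.INFO)

    # Top-level event stack for a boot stage. reporting_enabled controls
    # whether start/finish events are actually emitted.
    init_rep = reporting.ReportEventStack(
        "init-network", "searching for network datasources",
        reporting_enabled=True)

    with init_rep:
        # A child stack inherits reporting_enabled from its parent and reports
        # under a '/'-joined fullname, e.g. "init-network/search-NoCloud".
        search_rep = reporting.ReportEventStack(
            name="search-NoCloud",  # illustrative datasource name
            description="searching for network data from DataSourceNoCloud",
            message="no network data found from DataSourceNoCloud",
            parent=init_rep,
            result_on_exception=reporting.status.WARN)
        with search_rep:
            found = False  # stand-in for a DataSource.get_data() call
            if found:
                search_rep.message = (
                    "found network data from DataSourceNoCloud")
        # On exit, the child's (result, message) pair is recorded in
        # init_rep.children; childrens_finish_info() then propagates any FAIL
        # or WARN child result into the parent's own finish event, so a failed
        # datasource search surfaces in the init-network event without
        # aborting the parent stage.

This is the same pattern find_source() and Modules._run_modules() use in the patches: build a ReportEventStack per datasource or per config module, set .message on success, and let an exception map to result_on_exception.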