From 72d6adcb2e4cb5911f7809b89835965d4bf04476 Mon Sep 17 00:00:00 2001 From: Lars Kellogg-Stedman Date: Fri, 22 Jul 2016 15:09:24 -0400 Subject: Update build tools to work with git - Update HACKING.rst to include git instructions - update MANIFEST.in and .gitignore to ignore git-related things - replaced tarball generation scripts with git-based script - have the spec files correctly identify themselves as cheetah templates - make brpm work with git --- tools/make-dist-tarball | 21 ------------------- tools/make-tarball | 54 ++++++++++++++++++++++--------------------------- tools/read-dependencies | 22 +++++++++++++++----- 3 files changed, 41 insertions(+), 56 deletions(-) delete mode 100755 tools/make-dist-tarball (limited to 'tools') diff --git a/tools/make-dist-tarball b/tools/make-dist-tarball deleted file mode 100755 index 5b078515..00000000 --- a/tools/make-dist-tarball +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh - -Usage() { - cat <&2 ; exit 1; } - -out="${topdir}/cloud-init-${tag}.tar.gz" - -bzr export --format=tgz --root="cloud-init-$tag" \ - "--revision=tag:${tag}" "$out" "$topdir" && - echo "Wrote ${out}" diff --git a/tools/make-tarball b/tools/make-tarball index b7039150..4828a622 100755 --- a/tools/make-tarball +++ b/tools/make-tarball @@ -1,39 +1,33 @@ #!/bin/sh set -e -find_root() { - local topd - if [ -z "${CLOUD_INIT_TOP_D}" ]; then - topd=$(cd "$(dirname "${0}")" && cd .. && pwd) - else - topd=$(cd "${CLOUD_INIT_TOP_D}" && pwd) - fi - [ $? -eq 0 -a -f "${topd}/setup.py" ] || return - ROOT_DIR="$topd" -} +rev=${1:-HEAD} +revname=$(git describe $rev) -if ! find_root; then - echo "Unable to locate 'setup.py' file that should" \ - "exist in the cloud-init root directory." 1>&2 - exit 1; -fi - -REVNO=$(bzr revno "$ROOT_DIR") +# revname could be 0.7.5 or 0.7.5-NNN-gHASH +# turn that into 0.7.5 or 0.7.5+NNN.gHASH +case "$revname" in + *-*) revname=$(echo "$revname" | sed -e 's/-/+/' -e 's/-/./') +esac -if [ ! -z "$1" ]; then - ARCHIVE_FN="$1" -else - VERSION=$("$ROOT_DIR/tools/read-version") - ARCHIVE_FN="$PWD/cloud-init-$VERSION~bzr$REVNO.tar.gz" -fi +archive_base="cloud-init-$revname" -export_uncommitted="" -if [ "${UNCOMMITTED:-0}" != "0" ]; then - export_uncommitted="--uncommitted" +# when building an archiving from HEAD, ensure that there aren't any +# uncomitted changes in the working directory (because these would not +# end up in the archive). +if [ "$rev" = HEAD ] && ! git diff-index --quiet HEAD --; then + if [ -z "$SKIP_UNCOMITTED_CHANGES_CHECK" ]; then + echo "ERROR: There are uncommitted changes in your working directory." >&2 + exit 1 + else + echo "WARNING: There are uncommitted changes in your working directory." >&2 + echo " This changes will not be included in the archive." >&2 + fi fi -bzr export ${export_uncommitted} \ - --format=tgz --root="cloud-init-$VERSION~bzr$REVNO" \ - "--revision=${REVNO}" "${ARCHIVE_FN}" "$ROOT_DIR" +git archive \ + --format=tar.gz \ + --prefix="$archive_base/" "$rev" \ + "--output=$archive_base.tar.gz" -echo "$ARCHIVE_FN" +echo "${archive_base}.tar.gz" diff --git a/tools/read-dependencies b/tools/read-dependencies index 6a6f3e12..9fc503eb 100755 --- a/tools/read-dependencies +++ b/tools/read-dependencies @@ -1,8 +1,13 @@ #!/usr/bin/env python +# You might be tempted to rewrite this as a shell script, but you +# would be surprised to discover that things like 'egrep' or 'sed' may +# differ between Linux and *BSD. 
+ import os import re import sys +import subprocess if 'CLOUD_INIT_TOP_D' in os.environ: topd = os.path.realpath(os.environ.get('CLOUD_INIT_TOP_D')) @@ -16,14 +21,21 @@ for fname in ("setup.py", "requirements.txt"): sys.exit(1) if len(sys.argv) > 1: - reqfile = sys.argv[1] + reqfile = sys.argv[1] else: - reqfile = "requirements.txt" - + reqfile = "requirements.txt" + with open(os.path.join(topd, reqfile), "r") as fp: for line in fp: - if not line.strip() or line.startswith("#"): + line = line.strip() + if not line or line.startswith("#"): continue - sys.stdout.write(re.split("[>=.<]*", line)[0].strip() + "\n") + + # remove pip-style markers + dep = line.split(';')[0] + + # remove version requirements + dep = re.split("[>=.<]*", dep)[0].strip() + print(dep) sys.exit(0) -- cgit v1.2.3 From 10f82bd474c5bc91b330beccd883da06b0014a99 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 5 Aug 2016 11:56:48 -0400 Subject: adjust tools and version information. upstream snapshots are versioned in the format 'X.Y.Z+.g' where X.Y.Z are major, minor, and micro. Distance is number of commits since last annotated tag, and commit is the git commit. bddeb and brpm will now create and use the "upstream version" like above. Things changed here: - tools/make-tarball update cloudinit/version.py to contain the full version support --output support '--long' to always create the long format version string. - bddeb: - use quilt debian source format - use read-version and long version in changelog. - brpm: - change to use read-version and upstream long version in the spec. - flake8 changes - tools/read-version - read version from git or from cloudinit/version. - provide --json output with more nicely formed data. --- cloudinit/version.py | 15 +++-- packages/bddeb | 110 ++++++++++++++++--------------------- packages/brpm | 85 +++++++++++----------------- packages/debian/changelog.in | 2 +- packages/debian/source/format | 1 + packages/redhat/cloud-init.spec.in | 2 +- tools/make-tarball | 59 ++++++++++++++++---- tools/read-version | 81 ++++++++++++++++++++++----- 8 files changed, 205 insertions(+), 150 deletions(-) create mode 100644 packages/debian/source/format (limited to 'tools') diff --git a/cloudinit/version.py b/cloudinit/version.py index 3d1d1d23..01785eb8 100644 --- a/cloudinit/version.py +++ b/cloudinit/version.py @@ -16,12 +16,17 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from distutils import version as vr +__VERSION__ = "0.7.6" +__EXPORT_VERSION__ = "@@EXPORT_VERSION@@" -def version(): - return vr.StrictVersion("0.7.7") +def version_string(): + if not __EXPORT_VERSION__.startswith("@@"): + return __EXPORT_VERSION__ + return __VERSION__ -def version_string(): - return str(version()) +def full_version_string(): + if __EXPORT_VERSION__.startswith("@@"): + raise ValueError("No full version available") + return __EXPORT_VERSION__ diff --git a/packages/bddeb b/packages/bddeb index 46c07c88..94496c74 100755 --- a/packages/bddeb +++ b/packages/bddeb @@ -1,6 +1,7 @@ #!/usr/bin/env python3 -import glob +import argparse +import json import os import shutil import sys @@ -15,15 +16,13 @@ def find_root(): if os.path.isfile(os.path.join(top_dir, 'setup.py')): return os.path.abspath(top_dir) raise OSError(("Unable to determine where your cloud-init topdir is." 
- " set CLOUD_INIT_TOP_D?")) - -# Use the util functions from cloudinit -sys.path.insert(0, find_root()) + " set CLOUD_INIT_TOP_D?")) -from cloudinit import templater -from cloudinit import util - -import argparse +if "avoid-pep8-E402-import-not-top-of-file": + # Use the util functions from cloudinit + sys.path.insert(0, find_root()) + from cloudinit import templater + from cloudinit import util # Package names that will showup in requires to what we can actually # use in our debian 'control' file, this is a translation of the 'requires' @@ -58,27 +57,37 @@ NONSTD_NAMED_PACKAGES = { DEBUILD_ARGS = ["-S", "-d"] -def write_debian_folder(root, version, revno, pkgmap, - pyver="3", append_requires=[]): +def run_helper(helper, args=None, strip=True): + if args is None: + args = [] + cmd = [util.abs_join(find_root(), 'tools', helper)] + args + (stdout, _stderr) = util.subp(cmd) + if strip: + stdout = stdout.strip() + return stdout + + +def write_debian_folder(root, version_data, pkgmap, pyver="3", + append_requires=[]): deb_dir = util.abs_join(root, 'debian') - os.makedirs(deb_dir) + + # Just copy debian/ dir and then update files + pdeb_d = util.abs_join(find_root(), 'packages', 'debian') + util.subp(['cp', '-a', pdeb_d, deb_dir]) # Fill in the change log template templater.render_to_file(util.abs_join(find_root(), 'packages', 'debian', 'changelog.in'), util.abs_join(deb_dir, 'changelog'), - params={ - 'version': version, - 'revision': revno, - }) + params=version_data) # Write out the control file template - cmd = [util.abs_join(find_root(), 'tools', 'read-dependencies')] - (stdout, _stderr) = util.subp(cmd) - pypi_pkgs = [p.lower().strip() for p in stdout.splitlines()] + reqs = run_helper('read-dependencies').splitlines() + test_reqs = run_helper( + 'read-dependencies', ['test-requirements.txt']).splitlines() - (stdout, _stderr) = util.subp(cmd + ['test-requirements.txt']) - pypi_test_pkgs = [p.lower().strip() for p in stdout.splitlines()] + pypi_pkgs = [p.lower().strip() for p in reqs] + pypi_test_pkgs = [p.lower().strip() for p in test_reqs] # Map to known packages requires = append_requires @@ -109,11 +118,9 @@ def write_debian_folder(root, version, revno, pkgmap, util.abs_join(deb_dir, 'rules'), params={'python': python, 'pyver': pyver}) - # Just copy any other files directly (including .in) - pdeb_d = util.abs_join(find_root(), 'packages', 'debian') - for f in [os.path.join(pdeb_d, f) for f in os.listdir(pdeb_d)]: - if os.path.isfile(f): - shutil.copy(f, util.abs_join(deb_dir, os.path.basename(f))) + +def read_version(): + return json.loads(run_helper('read-version', ['--json'])) def main(): @@ -140,11 +147,10 @@ def main(): default=os.environ.get("INIT_SYSTEM", "upstart,systemd")) - for ent in DEBUILD_ARGS: parser.add_argument(ent, dest="debuild_args", action='append_const', - const=ent, help=("pass through '%s' to debuild" % ent), - default=[]) + const=ent, default=[], + help=("pass through '%s' to debuild" % ent)) parser.add_argument("--sign", default=False, action='store_true', help="sign result. 
do not pass -us -uc to debuild") @@ -181,54 +187,30 @@ def main(): with util.tempdir() as tdir: # output like 0.7.6-1022-g36e92d3 - cmd = ['git', 'describe', '--long'] - (sysout, _stderr) = util.subp(cmd) - version, extra = sysout.strip().split("-", 1) + ver_data = read_version() # This is really only a temporary archive # since we will extract it then add in the debian # folder, then re-archive it for debian happiness print("Creating a temporary tarball using the 'make-tarball' helper") - cmd = [util.abs_join(find_root(), 'tools', 'make-tarball')] - (sysout, _stderr) = util.subp(cmd) - arch_fn = sysout.strip() - tmp_arch_fn = util.abs_join(tdir, os.path.basename(arch_fn)) - shutil.move(arch_fn, tmp_arch_fn) - - print("Extracting temporary tarball %r" % (tmp_arch_fn)) - cmd = ['tar', '-xvzf', tmp_arch_fn, '-C', tdir] + tarball = "cloud-init_%s.orig.tar.gz" % ver_data['version_long'] + tarball_fp = util.abs_join(tdir, tarball) + run_helper('make-tarball', ['--long', '--output=' + tarball_fp]) + + print("Extracting temporary tarball %r" % (tarball)) + cmd = ['tar', '-xvzf', tarball_fp, '-C', tdir] util.subp(cmd, capture=capture) - extracted_name = tmp_arch_fn[:-len('.tar.gz')] - os.remove(tmp_arch_fn) - xdir = util.abs_join(tdir, 'cloud-init') - shutil.move(extracted_name, xdir) + xdir = util.abs_join(tdir, "cloud-init-%s" % ver_data['version_long']) print("Creating a debian/ folder in %r" % (xdir)) if args.cloud_utils: - append_requires=['cloud-utils | cloud-guest-utils'] + append_requires = ['cloud-utils | cloud-guest-utils'] else: - append_requires=[] - write_debian_folder(xdir, version, extra, pkgmap, + append_requires = [] + write_debian_folder(xdir, ver_data, pkgmap, pyver=pyver, append_requires=append_requires) - # The naming here seems to follow some debian standard - # so it will whine if it is changed... - tar_fn = "cloud-init_%s+%s~bddeb.orig.tar.gz" % (version, extra) - print("Archiving the adjusted source into %r" % - (util.abs_join(tdir, tar_fn))) - cmd = ['tar', '-czvf', - util.abs_join(tdir, tar_fn), - '-C', xdir] - cmd.extend(os.listdir(xdir)) - util.subp(cmd, capture=capture) - - # Copy it locally for reference - shutil.copy(util.abs_join(tdir, tar_fn), - util.abs_join(os.getcwd(), tar_fn)) - print("Copied that archive to %r for local usage (if desired)." % - (util.abs_join(os.getcwd(), tar_fn))) - print("Running 'debuild %s' in %r" % (' '.join(args.debuild_args), xdir)) with util.chdir(xdir): diff --git a/packages/brpm b/packages/brpm index 5d16eb71..14d75f2b 100755 --- a/packages/brpm +++ b/packages/brpm @@ -2,11 +2,11 @@ import argparse import glob +import json import os import shutil import sys import tempfile -import time def find_root(): @@ -21,11 +21,11 @@ def find_root(): " set CLOUD_INIT_TOP_D?")) -# Use the util functions from cloudinit -sys.path.insert(0, find_root()) - -from cloudinit import templater -from cloudinit import util +if "avoid-pep8-E402-import-not-top-of-file": + # Use the util functions from cloudinit + sys.path.insert(0, find_root()) + from cloudinit import templater + from cloudinit import util # Map python requirements to package names. If a match isn't found # here, we assume 'python-'. 
@@ -43,15 +43,24 @@ PACKAGE_MAP = { RPM_BUILD_SUBDIRS = ['BUILD', 'RPMS', 'SOURCES', 'SPECS', 'SRPMS'] +def run_helper(helper, args=None, strip=True): + if args is None: + args = [] + cmd = [util.abs_join(find_root(), 'tools', helper)] + args + (stdout, _stderr) = util.subp(cmd) + if strip: + stdout = stdout.strip() + return stdout + + def read_dependencies(): '''Returns the Python depedencies from requirements.txt. This explicitly removes 'argparse' from the list of requirements for python >= 2.7, because with 2.7 argparse became part of the standard library.''' - cmd = [util.abs_join(find_root(), 'tools', 'read-dependencies')] - (stdout, _stderr) = util.subp(cmd) + stdout = run_helper('read-dependencies') return [p.lower().strip() for p in stdout.splitlines() - if p != 'argparse' or (p == 'argparse' - and sys.version_info[0:2] < (2, 7))] + if p != 'argparse' or (p == 'argparse' and + sys.version_info[0:2] < (2, 7))] def translate_dependencies(deps, distro): @@ -64,53 +73,22 @@ def translate_dependencies(deps, distro): def read_version(): - '''Read version information. We parse the version itself from - the changelog, and then ask git for the commit id and distance - from the last tag.''' - # Figure out the version and revno - cmd = [util.abs_join(find_root(), 'tools', 'read-version')] - (stdout, _stderr) = util.subp(cmd) - version = stdout.strip() + return json.loads(run_helper('read-version', ['--json'])) - cmd = ['git', 'describe', '--tags'] - (stdout, _stderr) = util.subp(cmd) - git_version = stdout.strip() - try: - _version, distance, revno = git_version.split('-') - except ValueError: - distance = None - revno = None - - return (version, distance, revno) - - -def generate_spec_contents(args, tmpl_fn, top_dir, arc_fn): - - # This will get us something like ('0.7.6', None, None) for a - # tagged commit, and something like ('0.7.6', '1026', 'gd1d5796') - # for an untagged commited. - version, distance, revno = read_version() +def generate_spec_contents(args, version_data, tmpl_fn, top_dir, arc_fn): # Tmpl params subs = {} - subs['version'] = version - subs['revno'] = revno - subs['distance'] = distance - - if distance is not None: - now = time.strftime('%Y%m%d', time.localtime()) - release = '.%sgit%s' % (now, revno) - else: - release = '' if args.sub_release is not None: - subs['subrelease'] = release + "." 
+ str(args.sub_release) + subs['subrelease'] = str(args.sub_release) else: - subs['subrelease'] = release + subs['subrelease'] = "" subs['archive_name'] = arc_fn subs['source_name'] = os.path.basename(arc_fn).replace('.tar.gz', '') + subs.update(version_data) # Map to known packages python_deps = read_dependencies() @@ -176,20 +154,19 @@ def main(): for dir in RPM_BUILD_SUBDIRS] util.ensure_dirs(build_dirs) + version_data = read_version() + # Archive the code - cmd = [util.abs_join(find_root(), 'tools', 'make-tarball')] - (stdout, _stderr) = util.subp(cmd) - archive_fn = stdout.strip() - print "Archived source as %s" % archive_fn - real_archive_fn = os.path.join(topdir, 'SOURCES', - os.path.basename(archive_fn)) - shutil.move(archive_fn, real_archive_fn) + archive_fn = "cloud-init-%s.tar.gz" % version_data['version_long'] + real_archive_fn = os.path.join(topdir, 'SOURCES', archive_fn) + archive_fn = run_helper( + 'make-tarball', ['--long', '--output=' + real_archive_fn]) print("Archived the code in %r" % (real_archive_fn)) # Form the spec file to be used tmpl_fn = util.abs_join(find_root(), 'packages', args.distro, 'cloud-init.spec.in') - contents = generate_spec_contents(args, tmpl_fn, topdir, + contents = generate_spec_contents(args, version_data, tmpl_fn, topdir, os.path.basename(archive_fn)) spec_fn = util.abs_join(topdir, 'SPECS', 'cloud-init.spec') util.write_file(spec_fn, contents) diff --git a/packages/debian/changelog.in b/packages/debian/changelog.in index 544d23cf..f8e98258 100644 --- a/packages/debian/changelog.in +++ b/packages/debian/changelog.in @@ -1,5 +1,5 @@ ## template:basic -cloud-init (${version}+${revision}~bddeb-1) UNRELEASED; urgency=low +cloud-init (${version_long}-1~bddeb) UNRELEASED; urgency=low * build diff --git a/packages/debian/source/format b/packages/debian/source/format new file mode 100644 index 00000000..163aaf8d --- /dev/null +++ b/packages/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/packages/redhat/cloud-init.spec.in b/packages/redhat/cloud-init.spec.in index c30d33ca..2f02f017 100644 --- a/packages/redhat/cloud-init.spec.in +++ b/packages/redhat/cloud-init.spec.in @@ -6,7 +6,7 @@ # Or: http://www.rpm.org/max-rpm/ch-rpm-inside.html Name: cloud-init -Version: ${version} +Version: ${version_long} Release: 1${subrelease}%{?dist} Summary: Cloud instance init scripts diff --git a/tools/make-tarball b/tools/make-tarball index 4828a622..bd7399c1 100755 --- a/tools/make-tarball +++ b/tools/make-tarball @@ -1,16 +1,53 @@ #!/bin/sh set -e +TEMP_D="" +cleanup() { + [ -z "$TEMP_D" ] || rm -Rf "${TEMP_D}" +} +trap cleanup EXIT + +Usage() { + cat <&2; exit 1; } + +long_opt="" +while [ $# -ne 0 ]; do + cur=$1; next=$2 + case "$cur" in + -o|--output) output=$next; shift;; + --long) long_opt="--long";; + --) shift; break;; + esac + shift; +done + rev=${1:-HEAD} -revname=$(git describe $rev) +git_describe=$(git describe ${long_opt} $rev) -# revname could be 0.7.5 or 0.7.5-NNN-gHASH +# git_describe could be 0.7.5 or 0.7.5-NNN-gHASH # turn that into 0.7.5 or 0.7.5+NNN.gHASH -case "$revname" in - *-*) revname=$(echo "$revname" | sed -e 's/-/+/' -e 's/-/./') +case "$git_describe" in + *-*) version=$(echo "$git_describe" | sed -e 's/-/+/' -e 's/-/./');; + *) version=${git_describe};; esac -archive_base="cloud-init-$revname" +archive_base="cloud-init-$version" +if [ -z "$output" ]; then + output="$archive_base.tar.gz" +fi # when building an archiving from HEAD, ensure that there aren't any # uncomitted changes in the working directory (because these would not 
@@ -25,9 +62,11 @@ if [ "$rev" = HEAD ] && ! git diff-index --quiet HEAD --; then fi fi -git archive \ - --format=tar.gz \ - --prefix="$archive_base/" "$rev" \ - "--output=$archive_base.tar.gz" +git archive --format=tar --prefix="$archive_base/" "$rev" | + ( cd "$TEMP_D" && tar xpf - ) + +sed -i "s,@@EXPORT_VERSION@@,$version," "$archive_base/cloudinit/version.py" + +( cd "$TEMP_D" && tar cpzf - "$archive_base/" ) > "$output" -echo "${archive_base}.tar.gz" +echo "$output" diff --git a/tools/read-version b/tools/read-version index d02651e9..e585ab2e 100755 --- a/tools/read-version +++ b/tools/read-version @@ -1,26 +1,77 @@ #!/usr/bin/env python import os -import re +import json import sys -if 'CLOUD_INIT_TOP_D' in os.environ: - topd = os.path.realpath(os.environ.get('CLOUD_INIT_TOP_D')) -else: - topd = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +if "avoid-pep8-E402-import-not-top-of-file": + _tdir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) + sys.path.insert(0, _tdir) + from cloudinit import version as ci_version + from cloudinit import util + + +use_long = '--long' in sys.argv or os.environ.get('CI_RV_LONG') +use_tags = '--tags' in sys.argv or os.environ.get('CI_RV_TAGS') +output_json = '--json' in sys.argv + +src_version = ci_version.version_string() +version_long = None + +if os.path.isdir(os.path.join(_tdir, ".git")): + def fix_git_version(ver): + ver = ver.strip() + if "-" in ver: + # change X.Y.Z-1023-gHASH to X.Y.Z+1023.ghash + return "{0}+{1}.{2}".format(*ver.split("-")) + return ver + + flags = [] + if use_tags: + flags = ['--tags'] + cmd = ['git', 'describe'] + flags -for fname in ("setup.py", "ChangeLog"): - if not os.path.isfile(os.path.join(topd, fname)): - sys.stderr.write("Unable to locate '%s' file that should " - "exist in cloud-init root directory." % fname) + version = fix_git_version(util.subp(cmd)[0]) + + if not version.startswith(src_version): + sys.stderr.write("git describe version (%s) differs from " + "cloudinit.version (%s)\n" % (version, src_version)) sys.exit(1) -vermatch = re.compile(r"^[0-9]+[.][0-9]+[.][0-9]+:$") + version_long = fix_git_version(util.subp(cmd + ["--long"])[0]) +else: + version = src_version + try: + version_long = ci_version.full_version_string() + except ValueError: + pass + +# version is X.Y.Z[+xxx.gHASH] +# version_long is None or X.Y.Z+xxx.gHASH +release = version.partition("+")[0] +extra = None +commit = None +distance = None -with open(os.path.join(topd, "ChangeLog"), "r") as fp: - for line in fp: - if vermatch.match(line): - sys.stdout.write(line.strip()[:-1] + "\n") - break +if version_long: + info = version_long.partition("+")[2] + extra = "+" + info + distance, commit = info.split(".") + # remove the 'g' from gHASH + commit = commit[1:] + +data = { + 'release': release, + 'version': version, + 'version_long': version_long, + 'extra': extra, + 'commit': commit, + 'distance': distance, +} + +if output_json: + sys.stdout.write(json.dumps(data, indent=1) + "\n") +else: + sys.stdout.write(release + "\n") sys.exit(0) -- cgit v1.2.3 From 42bed116b411eb25ebd8368b2b4ed6c56ffd85e7 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Fri, 5 Aug 2016 15:51:34 -0400 Subject: drop modification of version during make-tarball, tools changes. Modification of the tarball became problematic, as it meant that any tool extracting source would find the orig source tarball different. I found this unusable when trying to use 'gbp buildpackage'. 
Other changes here are to better support using python3 or python2 for the build. Makefile will try to call the right python version and can be told which python to use. read-version: by adding 'tiny_p' and avoiding the import of cloudinit.util, we need less dependencies to run this. --- Makefile | 19 +++++++++++-------- cloudinit/version.py | 9 --------- packages/debian/rules.in | 2 +- setup.py | 4 ++-- tools/make-tarball | 8 +------- tools/read-version | 26 +++++++++++++++++++------- 6 files changed, 34 insertions(+), 34 deletions(-) (limited to 'tools') diff --git a/Makefile b/Makefile index 32c50aee..5d35dcc0 100644 --- a/Makefile +++ b/Makefile @@ -1,21 +1,20 @@ CWD=$(shell pwd) -PYVER ?= 3 +PYVER ?= $(shell for p in python3 python2; do \ + out=$(which $$p 2>&1) && echo $$p && exit; done; \ + exit 1) noseopts ?= -v YAML_FILES=$(shell find cloudinit bin tests tools -name "*.yaml" -type f ) YAML_FILES+=$(shell find doc/examples -name "cloud-config*.txt" -type f ) -CHANGELOG_VERSION=$(shell $(CWD)/tools/read-version) -CODE_VERSION=$(shell python -c "from cloudinit import version; print version.version_string()") - PIP_INSTALL := pip install -ifeq ($(PYVER),3) +ifeq ($(PYVER),python3) pyflakes = pyflakes3 unittests = unittest3 yaml = yaml else -ifeq ($(PYVER),2) +ifeq ($(PYVER),python2) pyflakes = pyflakes unittests = unittest else @@ -28,6 +27,10 @@ ifeq ($(distro),) distro = redhat endif +READ_VERSION=$(shell $(PYVER) $(CWD)/tools/read-version) +CODE_VERSION=$(shell $(PYVER) -c "from cloudinit import version; print(version.version_string())") + + all: check check: check_version pep8 $(pyflakes) test $(yaml) @@ -58,8 +61,8 @@ pip-test-requirements: test: $(unittests) check_version: - @if [ "$(CHANGELOG_VERSION)" != "$(CODE_VERSION)" ]; then \ - echo "Error: ChangeLog version $(CHANGELOG_VERSION)" \ + @if [ "$(READ_VERSION)" != "$(CODE_VERSION)" ]; then \ + echo "Error: read-version version $(READ_VERSION)" \ "not equal to code version $(CODE_VERSION)"; exit 2; \ else true; fi diff --git a/cloudinit/version.py b/cloudinit/version.py index 01785eb8..aa8ccd7e 100644 --- a/cloudinit/version.py +++ b/cloudinit/version.py @@ -17,16 +17,7 @@ # along with this program. If not, see . 
__VERSION__ = "0.7.6" -__EXPORT_VERSION__ = "@@EXPORT_VERSION@@" def version_string(): - if not __EXPORT_VERSION__.startswith("@@"): - return __EXPORT_VERSION__ return __VERSION__ - - -def full_version_string(): - if __EXPORT_VERSION__.startswith("@@"): - raise ValueError("No full version available") - return __EXPORT_VERSION__ diff --git a/packages/debian/rules.in b/packages/debian/rules.in index cf2dd405..9b004357 100755 --- a/packages/debian/rules.in +++ b/packages/debian/rules.in @@ -14,7 +14,7 @@ override_dh_install: override_dh_auto_test: ifeq (,$(findstring nocheck,$(DEB_BUILD_OPTIONS))) - http_proxy= make PYVER=${pyver} check + http_proxy= make PYVER=python${pyver} check else @echo check disabled by DEB_BUILD_OPTIONS=$(DEB_BUILD_OPTIONS) endif diff --git a/setup.py b/setup.py index 0af576a9..4abbb67e 100755 --- a/setup.py +++ b/setup.py @@ -116,13 +116,13 @@ def in_virtualenv(): def get_version(): - cmd = ['tools/read-version'] + cmd = [sys.executable, 'tools/read-version'] (ver, _e) = tiny_p(cmd) return str(ver).strip() def read_requires(): - cmd = ['tools/read-dependencies'] + cmd = [sys.executable, 'tools/read-dependencies'] (deps, _e) = tiny_p(cmd) return str(deps).splitlines() diff --git a/tools/make-tarball b/tools/make-tarball index bd7399c1..57358447 100755 --- a/tools/make-tarball +++ b/tools/make-tarball @@ -62,11 +62,5 @@ if [ "$rev" = HEAD ] && ! git diff-index --quiet HEAD --; then fi fi -git archive --format=tar --prefix="$archive_base/" "$rev" | - ( cd "$TEMP_D" && tar xpf - ) - -sed -i "s,@@EXPORT_VERSION@@,$version," "$archive_base/cloudinit/version.py" - -( cd "$TEMP_D" && tar cpzf - "$archive_base/" ) > "$output" - +git archive --format=tar.gz --prefix="$archive_base/" "$rev" > "$output" echo "$output" diff --git a/tools/read-version b/tools/read-version index e585ab2e..78e34157 100755 --- a/tools/read-version +++ b/tools/read-version @@ -2,13 +2,28 @@ import os import json +import subprocess import sys if "avoid-pep8-E402-import-not-top-of-file": _tdir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) sys.path.insert(0, _tdir) from cloudinit import version as ci_version - from cloudinit import util + + +def tiny_p(cmd, capture=True): + # python 2.6 doesn't have check_output + stdout = subprocess.PIPE + stderr = subprocess.PIPE + sp = subprocess.Popen(cmd, stdout=stdout, + stderr=stderr, stdin=None, + universal_newlines=True) + (out, err) = sp.communicate() + ret = sp.returncode + if ret not in [0]: + raise RuntimeError("Failed running %s [rc=%s] (%s, %s)" % + (cmd, ret, out, err)) + return out use_long = '--long' in sys.argv or os.environ.get('CI_RV_LONG') @@ -31,20 +46,17 @@ if os.path.isdir(os.path.join(_tdir, ".git")): flags = ['--tags'] cmd = ['git', 'describe'] + flags - version = fix_git_version(util.subp(cmd)[0]) + version = fix_git_version(tiny_p(cmd)) if not version.startswith(src_version): sys.stderr.write("git describe version (%s) differs from " "cloudinit.version (%s)\n" % (version, src_version)) sys.exit(1) - version_long = fix_git_version(util.subp(cmd + ["--long"])[0]) + version_long = fix_git_version(tiny_p(cmd + ["--long"])) else: version = src_version - try: - version_long = ci_version.full_version_string() - except ValueError: - pass + version_long = None # version is X.Y.Z[+xxx.gHASH] # version_long is None or X.Y.Z+xxx.gHASH -- cgit v1.2.3 From 48ec60ae19c749223cb58dcbdbf0ecb7343f2a31 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Mon, 8 Aug 2016 13:24:59 -0400 Subject: For upstream snapshot versions do not modify 
git-describe output. For upstream version directly use the output of git-describe (X.Y.Z-number.gHASH) rather than rather than changing it to (X.Y.Z+number.gHASH). The rpm version does not allow '-' in Version, so we create and use rpm_upstream_version in the rpm spec file. That is of format: X.Y.Z+number.gHASH --- packages/brpm | 9 +++++++++ packages/redhat/cloud-init.spec.in | 2 +- tools/make-tarball | 9 +-------- tools/read-version | 17 +++++------------ 4 files changed, 16 insertions(+), 21 deletions(-) (limited to 'tools') diff --git a/packages/brpm b/packages/brpm index 14d75f2b..89696ab8 100755 --- a/packages/brpm +++ b/packages/brpm @@ -90,6 +90,15 @@ def generate_spec_contents(args, version_data, tmpl_fn, top_dir, arc_fn): subs['source_name'] = os.path.basename(arc_fn).replace('.tar.gz', '') subs.update(version_data) + # rpm does not like '-' in the Version, so change + # X.Y.Z-N-gHASH to X.Y.Z+N.gHASH + if "-" in version_data.get('version'): + ver, commits, ghash = version_data['version'].split("-") + rpm_upstream_version = "%s+%s.%s" % (ver, commits, ghash) + else: + rpm_upstream_version = version_data['version'] + subs['rpm_upstream_version'] = rpm_upstream_version + # Map to known packages python_deps = read_dependencies() package_deps = translate_dependencies(python_deps, args.distro) diff --git a/packages/redhat/cloud-init.spec.in b/packages/redhat/cloud-init.spec.in index 2f02f017..d0ae048f 100644 --- a/packages/redhat/cloud-init.spec.in +++ b/packages/redhat/cloud-init.spec.in @@ -6,7 +6,7 @@ # Or: http://www.rpm.org/max-rpm/ch-rpm-inside.html Name: cloud-init -Version: ${version_long} +Version: ${rpm_upstream_version} Release: 1${subrelease}%{?dist} Summary: Cloud instance init scripts diff --git a/tools/make-tarball b/tools/make-tarball index 57358447..d8705896 100755 --- a/tools/make-tarball +++ b/tools/make-tarball @@ -35,14 +35,7 @@ while [ $# -ne 0 ]; do done rev=${1:-HEAD} -git_describe=$(git describe ${long_opt} $rev) - -# git_describe could be 0.7.5 or 0.7.5-NNN-gHASH -# turn that into 0.7.5 or 0.7.5+NNN.gHASH -case "$git_describe" in - *-*) version=$(echo "$git_describe" | sed -e 's/-/+/' -e 's/-/./');; - *) version=${git_describe};; -esac +version=$(git describe ${long_opt} $rev) archive_base="cloud-init-$version" if [ -z "$output" ]; then diff --git a/tools/read-version b/tools/read-version index 78e34157..85c62343 100755 --- a/tools/read-version +++ b/tools/read-version @@ -34,26 +34,19 @@ src_version = ci_version.version_string() version_long = None if os.path.isdir(os.path.join(_tdir, ".git")): - def fix_git_version(ver): - ver = ver.strip() - if "-" in ver: - # change X.Y.Z-1023-gHASH to X.Y.Z+1023.ghash - return "{0}+{1}.{2}".format(*ver.split("-")) - return ver - flags = [] if use_tags: flags = ['--tags'] cmd = ['git', 'describe'] + flags - version = fix_git_version(tiny_p(cmd)) + version = tiny_p(cmd).strip() if not version.startswith(src_version): sys.stderr.write("git describe version (%s) differs from " "cloudinit.version (%s)\n" % (version, src_version)) sys.exit(1) - version_long = fix_git_version(tiny_p(cmd + ["--long"])) + version_long = tiny_p(cmd + ["--long"]).strip() else: version = src_version version_long = None @@ -66,9 +59,9 @@ commit = None distance = None if version_long: - info = version_long.partition("+")[2] - extra = "+" + info - distance, commit = info.split(".") + info = version_long.partition("-")[2] + extra = "-" + info + distance, commit = info.split("-") # remove the 'g' from gHASH commit = commit[1:] -- cgit v1.2.3 From 
537477335449c7730633d321905c57f694441eb3 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Aug 2016 02:59:29 -0400 Subject: read-version: do not attempt git-describe if no git. Even if there is a .git directory, we can't use git if there is no git executable in the path. In that case just fall back to the cloud-init version. --- tools/read-version | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) (limited to 'tools') diff --git a/tools/read-version b/tools/read-version index 85c62343..5ecf7247 100755 --- a/tools/read-version +++ b/tools/read-version @@ -26,6 +26,25 @@ def tiny_p(cmd, capture=True): return out +def which(program): + # Return path of program for execution if found in path + def is_exe(fpath): + return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + + _fpath, _ = os.path.split(program) + if _fpath: + if is_exe(program): + return program + else: + for path in os.environ.get("PATH", "").split(os.pathsep): + path = path.strip('"') + exe_file = os.path.join(path, program) + if is_exe(exe_file): + return exe_file + + return None + + use_long = '--long' in sys.argv or os.environ.get('CI_RV_LONG') use_tags = '--tags' in sys.argv or os.environ.get('CI_RV_TAGS') output_json = '--json' in sys.argv @@ -33,7 +52,7 @@ output_json = '--json' in sys.argv src_version = ci_version.version_string() version_long = None -if os.path.isdir(os.path.join(_tdir, ".git")): +if os.path.isdir(os.path.join(_tdir, ".git")) and which("git"): flags = [] if use_tags: flags = ['--tags'] -- cgit v1.2.3 From 3973223593ab7bded806f02473164ac105f2896e Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Tue, 9 Aug 2016 23:02:28 -0600 Subject: make-tarball: older versions of git with --format=tar. Some older versions of git (Centos 6) do not have --format=tar.gz. To work around this, create a .tar file and then compress it. --- tools/make-tarball | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'tools') diff --git a/tools/make-tarball b/tools/make-tarball index d8705896..c150dd2f 100755 --- a/tools/make-tarball +++ b/tools/make-tarball @@ -55,5 +55,9 @@ if [ "$rev" = HEAD ] && ! git diff-index --quiet HEAD --; then fi fi -git archive --format=tar.gz --prefix="$archive_base/" "$rev" > "$output" +TEMP_D=$(mktemp -d) +tar=${output##*/} +tar="$TEMP_D/${tar%.gz}" +git archive --format=tar --prefix="$archive_base/" "$rev" > "$tar" +gzip -9 -c "$tar" > "$output" echo "$output" -- cgit v1.2.3 From 1e85ba042c786e56449642aec59874a9bb059262 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Wed, 10 Aug 2016 13:18:40 -0400 Subject: tools/read-version: update to address change in version commit 48ec60ae changed over several tools to use X.Y.Z-XXX-gHASH but missed tools/read-version. The end result was that check_version failed. 
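(As a point of reference, the long git-describe form these tools now share pulls apart as follows. This is only an illustrative sketch using the sample value quoted in bddeb's comments, not code lifted from tools/read-version:)

    # Illustrative only: split an X.Y.Z-distance-gHASH string into the
    # fields that read-version reports; the sample value is the one
    # mentioned in the bddeb comments above.
    version_long = "0.7.6-1022-g36e92d3"
    release, _, info = version_long.partition("-")   # "0.7.6", "1022-g36e92d3"
    distance, commit = info.split("-")                # "1022", "g36e92d3"
    commit = commit[1:]                               # drop the leading 'g'
    print(release, distance, commit)                  # 0.7.6 1022 36e92d3
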
--- tools/read-version | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/read-version b/tools/read-version index 5ecf7247..c10f9b46 100755 --- a/tools/read-version +++ b/tools/read-version @@ -71,8 +71,8 @@ else: version_long = None # version is X.Y.Z[+xxx.gHASH] -# version_long is None or X.Y.Z+xxx.gHASH -release = version.partition("+")[0] +# version_long is None or X.Y.Z-xxx-gHASH +release = version.partition("-")[0] extra = None commit = None distance = None -- cgit v1.2.3 From 648dbbf6b090c81e989f1ab70bf99f4de16a6a70 Mon Sep 17 00:00:00 2001 From: Brent Baude Date: Wed, 10 Aug 2016 16:36:49 -0600 Subject: Get Azure endpoint server from DHCP client It is more efficient and cross-distribution safe to use the hooks function from dhclient to obtain the Azure endpoint server (DHCP option 245). This is done by providing shell scritps that are called by the hooks infrastructure of both dhclient and NetworkManager. The hooks then invoke 'cloud-init dhclient-hook' that maintains json data with the dhclient options in /run/cloud-init/dhclient.hooks/.json . The azure helper then pulls the value from /run/cloud-init/dhclient.hooks/.json file(s). If that file does not exist or the value is not present, it will then fall back to the original method of scraping the dhcp client lease file. --- cloudinit/atomic_helper.py | 25 ++++++ cloudinit/cmd/main.py | 45 ++++++---- cloudinit/dhclient_hook.py | 50 +++++++++++ cloudinit/sources/DataSourceAzure.py | 15 ++-- cloudinit/sources/helpers/azure.py | 99 +++++++++++++++++++--- config/cloud.cfg | 6 ++ doc/sources/azure/README.rst | 32 ++++++- setup.py | 2 + .../unittests/test_datasource/test_azure_helper.py | 15 +++- tools/hook-dhclient | 9 ++ tools/hook-network-manager | 9 ++ tools/hook-rhel.sh | 12 +++ 12 files changed, 277 insertions(+), 42 deletions(-) create mode 100644 cloudinit/atomic_helper.py create mode 100644 cloudinit/dhclient_hook.py create mode 100755 tools/hook-dhclient create mode 100755 tools/hook-network-manager create mode 100755 tools/hook-rhel.sh (limited to 'tools') diff --git a/cloudinit/atomic_helper.py b/cloudinit/atomic_helper.py new file mode 100644 index 00000000..15319f71 --- /dev/null +++ b/cloudinit/atomic_helper.py @@ -0,0 +1,25 @@ +#!/usr/bin/python +# vi: ts=4 expandtab + +import json +import os +import tempfile + + +def atomic_write_file(path, content, mode='w'): + tf = None + try: + tf = tempfile.NamedTemporaryFile(dir=os.path.dirname(path), + delete=False, mode=mode) + tf.write(content) + tf.close() + os.rename(tf.name, path) + except Exception as e: + if tf is not None: + os.unlink(tf.name) + raise e + + +def atomic_write_json(path, data): + return atomic_write_file(path, json.dumps(data, indent=1, + sort_keys=True) + "\n") diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py index 63621c1d..ba22b168 100644 --- a/cloudinit/cmd/main.py +++ b/cloudinit/cmd/main.py @@ -25,7 +25,6 @@ import argparse import json import os import sys -import tempfile import time import traceback @@ -47,6 +46,10 @@ from cloudinit.reporting import events from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, CLOUD_CONFIG) +from cloudinit.atomic_helper import atomic_write_json + +from cloudinit.dhclient_hook import LogDhclient + # Pretty little cheetah formatted welcome message template WELCOME_MSG_TPL = ("Cloud-init v. 
${version} running '${action}' at " @@ -452,22 +455,10 @@ def main_single(name, args): return 0 -def atomic_write_file(path, content, mode='w'): - tf = None - try: - tf = tempfile.NamedTemporaryFile(dir=os.path.dirname(path), - delete=False, mode=mode) - tf.write(content) - tf.close() - os.rename(tf.name, path) - except Exception as e: - if tf is not None: - os.unlink(tf.name) - raise e - - -def atomic_write_json(path, data): - return atomic_write_file(path, json.dumps(data, indent=1) + "\n") +def dhclient_hook(name, args): + record = LogDhclient(args) + record.check_hooks_dir() + record.record() def status_wrapper(name, args, data_d=None, link_d=None): @@ -627,7 +618,6 @@ def main(sysv_args=None): # This subcommand allows you to run a single module parser_single = subparsers.add_parser('single', help=('run a single module ')) - parser_single.set_defaults(action=('single', main_single)) parser_single.add_argument("--name", '-n', action="store", help="module name to run", required=True) @@ -644,6 +634,16 @@ def main(sysv_args=None): ' pass to this module')) parser_single.set_defaults(action=('single', main_single)) + parser_dhclient = subparsers.add_parser('dhclient-hook', + help=('run the dhclient hook' + 'to record network info')) + parser_dhclient.add_argument("net_action", + help=('action taken on the interface')) + parser_dhclient.add_argument("net_interface", + help=('the network interface being acted' + ' upon')) + parser_dhclient.set_defaults(action=('dhclient_hook', dhclient_hook)) + args = parser.parse_args(args=sysv_args) try: @@ -677,9 +677,18 @@ def main(sysv_args=None): "running single module %s" % args.name) report_on = args.report + elif name == 'dhclient_hook': + rname, rdesc = ("dhclient-hook", + "running dhclient-hook module") + args.reporter = events.ReportEventStack( rname, rdesc, reporting_enabled=report_on) + with args.reporter: return util.log_time( logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, get_uptime=True, func=functor, args=(name, args)) + + +if __name__ == '__main__': + main(sys.argv) diff --git a/cloudinit/dhclient_hook.py b/cloudinit/dhclient_hook.py new file mode 100644 index 00000000..9dcbe39c --- /dev/null +++ b/cloudinit/dhclient_hook.py @@ -0,0 +1,50 @@ +#!/usr/bin/python +# vi: ts=4 expandtab + +import os + +from cloudinit.atomic_helper import atomic_write_json +from cloudinit import log as logging +from cloudinit import stages + +LOG = logging.getLogger(__name__) + + +class LogDhclient(object): + + def __init__(self, cli_args): + self.hooks_dir = self._get_hooks_dir() + self.net_interface = cli_args.net_interface + self.net_action = cli_args.net_action + self.hook_file = os.path.join(self.hooks_dir, + self.net_interface + ".json") + + @staticmethod + def _get_hooks_dir(): + i = stages.Init() + return os.path.join(i.paths.get_runpath(), 'dhclient.hooks') + + def check_hooks_dir(self): + if not os.path.exists(self.hooks_dir): + os.makedirs(self.hooks_dir) + else: + # If the action is down and the json file exists, we need to + # delete the file + if self.net_action is 'down' and os.path.exists(self.hook_file): + os.remove(self.hook_file) + + @staticmethod + def get_vals(info): + new_info = {} + for k, v in info.items(): + if k.startswith("DHCP4_") or k.startswith("new_"): + key = (k.replace('DHCP4_', '').replace('new_', '')).lower() + new_info[key] = v + return new_info + + def record(self): + envs = os.environ + if self.hook_file is None: + return + atomic_write_json(self.hook_file, self.get_vals(envs)) + LOG.debug("Wrote dhclient options in 
%s", self.hook_file) diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 8c7e8673..a251fe01 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -20,18 +20,17 @@ import base64 import contextlib import crypt import fnmatch +from functools import partial import os import os.path import time -import xml.etree.ElementTree as ET - from xml.dom import minidom - -from cloudinit.sources.helpers.azure import get_metadata_from_fabric +import xml.etree.ElementTree as ET from cloudinit import log as logging from cloudinit.settings import PER_ALWAYS from cloudinit import sources +from cloudinit.sources.helpers.azure import get_metadata_from_fabric from cloudinit import util LOG = logging.getLogger(__name__) @@ -107,6 +106,8 @@ def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'): class DataSourceAzureNet(sources.DataSource): + FALLBACK_LEASE = '/var/lib/dhcp/dhclient.eth0.leases' + def __init__(self, sys_cfg, distro, paths): sources.DataSource.__init__(self, sys_cfg, distro, paths) self.seed_dir = os.path.join(paths.seed_dir, 'azure') @@ -115,6 +116,8 @@ class DataSourceAzureNet(sources.DataSource): self.ds_cfg = util.mergemanydict([ util.get_cfg_by_path(sys_cfg, DS_CFG_PATH, {}), BUILTIN_DS_CONFIG]) + self.dhclient_lease_file = self.paths.cfgs.get('dhclient_lease', + self.FALLBACK_LEASE) def __str__(self): root = sources.DataSource.__str__(self) @@ -226,7 +229,9 @@ class DataSourceAzureNet(sources.DataSource): write_files(ddir, files, dirmode=0o700) if self.ds_cfg['agent_command'] == '__builtin__': - metadata_func = get_metadata_from_fabric + metadata_func = partial(get_metadata_from_fabric, + fallback_lease_file=self. + dhclient_lease_file) else: metadata_func = self.get_metadata_from_agent try: diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py index 63ccf10e..6e43440f 100644 --- a/cloudinit/sources/helpers/azure.py +++ b/cloudinit/sources/helpers/azure.py @@ -1,3 +1,4 @@ +import json import logging import os import re @@ -6,6 +7,7 @@ import struct import tempfile import time +from cloudinit import stages from contextlib import contextmanager from xml.etree import ElementTree @@ -187,19 +189,32 @@ class WALinuxAgentShim(object): ' ', '']) - def __init__(self): + def __init__(self, fallback_lease_file=None): LOG.debug('WALinuxAgentShim instantiated...') - self.endpoint = self.find_endpoint() + self.dhcpoptions = None + self._endpoint = None self.openssl_manager = None self.values = {} + self.lease_file = fallback_lease_file def clean_up(self): if self.openssl_manager is not None: self.openssl_manager.clean_up() @staticmethod - def get_ip_from_lease_value(lease_value): - unescaped_value = lease_value.replace('\\', '') + def _get_hooks_dir(): + _paths = stages.Init() + return os.path.join(_paths.paths.get_runpath(), "dhclient.hooks") + + @property + def endpoint(self): + if self._endpoint is None: + self._endpoint = self.find_endpoint(self.lease_file) + return self._endpoint + + @staticmethod + def get_ip_from_lease_value(fallback_lease_value): + unescaped_value = fallback_lease_value.replace('\\', '') if len(unescaped_value) > 4: hex_string = '' for hex_pair in unescaped_value.split(':'): @@ -213,15 +228,75 @@ class WALinuxAgentShim(object): return socket.inet_ntoa(packed_bytes) @staticmethod - def find_endpoint(): - LOG.debug('Finding Azure endpoint...') - content = util.load_file('/var/lib/dhcp/dhclient.eth0.leases') - value = None + def 
_get_value_from_leases_file(fallback_lease_file): + leases = [] + content = util.load_file(fallback_lease_file) + LOG.debug("content is {}".format(content)) for line in content.splitlines(): if 'unknown-245' in line: - value = line.strip(' ').split(' ', 2)[-1].strip(';\n"') + # Example line from Ubuntu + # option unknown-245 a8:3f:81:10; + leases.append(line.strip(' ').split(' ', 2)[-1].strip(';\n"')) + # Return the "most recent" one in the list + if len(leases) < 1: + return None + else: + return leases[-1] + + @staticmethod + def _load_dhclient_json(): + dhcp_options = {} + hooks_dir = WALinuxAgentShim._get_hooks_dir() + if not os.path.exists(hooks_dir): + LOG.debug("%s not found.", hooks_dir) + return None + hook_files = [os.path.join(hooks_dir, x) + for x in os.listdir(hooks_dir)] + for hook_file in hook_files: + try: + name = os.path.basename(hook_file).replace('.json', '') + dhcp_options[name] = json.loads(util.load_file((hook_file))) + except ValueError: + raise ValueError("%s is not valid JSON data", hook_file) + return dhcp_options + + @staticmethod + def _get_value_from_dhcpoptions(dhcp_options): + if dhcp_options is None: + return None + # the MS endpoint server is given to us as DHPC option 245 + _value = None + for interface in dhcp_options: + _value = dhcp_options[interface].get('unknown_245', None) + if _value is not None: + LOG.debug("Endpoint server found in dhclient options") + break + return _value + + @staticmethod + def find_endpoint(fallback_lease_file=None): + LOG.debug('Finding Azure endpoint...') + value = None + # Option-245 stored in /run/cloud-init/dhclient.hooks/.json + # a dhclient exit hook that calls cloud-init-dhclient-hook + dhcp_options = WALinuxAgentShim._load_dhclient_json() + value = WALinuxAgentShim._get_value_from_dhcpoptions(dhcp_options) if value is None: - raise ValueError('No endpoint found in DHCP config.') + # Fallback and check the leases file if unsuccessful + LOG.debug("Unable to find endpoint in dhclient logs. 
" + " Falling back to check lease files") + if fallback_lease_file is None: + LOG.warn("No fallback lease file was specified.") + value = None + else: + LOG.debug("Looking for endpoint in lease file %s", + fallback_lease_file) + value = WALinuxAgentShim._get_value_from_leases_file( + fallback_lease_file) + + if value is None: + raise ValueError('No endpoint found.') + endpoint_ip_address = WALinuxAgentShim.get_ip_from_lease_value(value) LOG.debug('Azure endpoint found at %s', endpoint_ip_address) return endpoint_ip_address @@ -271,8 +346,8 @@ class WALinuxAgentShim(object): LOG.info('Reported ready to Azure fabric.') -def get_metadata_from_fabric(): - shim = WALinuxAgentShim() +def get_metadata_from_fabric(fallback_lease_file=None): + shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file) try: return shim.register_with_azure_and_fetch_data() finally: diff --git a/config/cloud.cfg b/config/cloud.cfg index 2d7fb473..93ef3423 100644 --- a/config/cloud.cfg +++ b/config/cloud.cfg @@ -98,6 +98,7 @@ system_info: cloud_dir: /var/lib/cloud/ templates_dir: /etc/cloud/templates/ upstart_dir: /etc/init/ + dhclient_lease: package_mirrors: - arches: [i386, amd64] failsafe: @@ -114,3 +115,8 @@ system_info: primary: http://ports.ubuntu.com/ubuntu-ports security: http://ports.ubuntu.com/ubuntu-ports ssh_svcname: ssh +datasource: + Azure: + set_hostname: False + agent_command: __builtin__ + diff --git a/doc/sources/azure/README.rst b/doc/sources/azure/README.rst index 8239d1fa..48f3cc7a 100644 --- a/doc/sources/azure/README.rst +++ b/doc/sources/azure/README.rst @@ -9,10 +9,34 @@ Azure Platform The azure cloud-platform provides initial data to an instance via an attached CD formated in UDF. That CD contains a 'ovf-env.xml' file that provides some information. Additional information is obtained via interaction with the -"endpoint". The ip address of the endpoint is advertised to the instance -inside of dhcp option 245. On ubuntu, that can be seen in -/var/lib/dhcp/dhclient.eth0.leases as a colon delimited hex value (example: -``option unknown-245 64:41:60:82;`` is 100.65.96.130) +"endpoint". + +To find the endpoint, we now leverage the dhcp client's ability to log its +known values on exit. The endpoint server is special DHCP option 245. +Depending on your networking stack, this can be done +by calling a script in /etc/dhcp/dhclient-exit-hooks or a file in +/etc/NetworkManager/dispatcher.d. Both of these call a sub-command +'dhclient_hook' of cloud-init itself. This sub-command will write the client +information in json format to /run/cloud-init/dhclient.hook/.json. + +In order for cloud-init to leverage this method to find the endpoint, the +cloud.cfg file must contain: + +datasource: + Azure: + set_hostname: False + agent_command: __builtin__ + +If those files are not available, the fallback is to check the leases file +for the endpoint server (again option 245). + +You can define the path to the lease file with the 'dhclient_lease' configuration +value under system_info: and paths:. For example: + + dhclient_lease: /var/lib/dhcp/dhclient.eth0.leases + +If no configuration value is provided, the dhclient_lease value will fallback to +/var/lib/dhcp/dhclient.eth0.leases. 
walinuxagent ------------ diff --git a/setup.py b/setup.py index 4abbb67e..bbadd7bf 100755 --- a/setup.py +++ b/setup.py @@ -176,6 +176,8 @@ else: (ETC + '/cloud', glob('config/*.cfg')), (ETC + '/cloud/cloud.cfg.d', glob('config/cloud.cfg.d/*')), (ETC + '/cloud/templates', glob('templates/*')), + (ETC + '/NetworkManager/dispatcher.d/', ['tools/hook-network-manager']), + (ETC + '/dhcp/dhclient-exit-hooks.d/', ['tools/hook-dhclient']), (USR_LIB_EXEC + '/cloud-init', ['tools/uncloud-init', 'tools/write-ssh-key-fingerprints']), (USR + '/share/doc/cloud-init', [f for f in glob('doc/*') if is_f(f)]), diff --git a/tests/unittests/test_datasource/test_azure_helper.py b/tests/unittests/test_datasource/test_azure_helper.py index 65202ff0..64523e16 100644 --- a/tests/unittests/test_datasource/test_azure_helper.py +++ b/tests/unittests/test_datasource/test_azure_helper.py @@ -54,13 +54,17 @@ class TestFindEndpoint(TestCase): self.load_file = patches.enter_context( mock.patch.object(azure_helper.util, 'load_file')) + self.dhcp_options = patches.enter_context( + mock.patch.object(azure_helper.WALinuxAgentShim, + '_load_dhclient_json')) + def test_missing_file(self): - self.load_file.side_effect = IOError - self.assertRaises(IOError, + self.assertRaises(ValueError, azure_helper.WALinuxAgentShim.find_endpoint) def test_missing_special_azure_line(self): self.load_file.return_value = '' + self.dhcp_options.return_value = {'eth0': {'key': 'value'}} self.assertRaises(ValueError, azure_helper.WALinuxAgentShim.find_endpoint) @@ -72,13 +76,18 @@ class TestFindEndpoint(TestCase): ' option unknown-245 {0};'.format(encoded_address), '}']) + def test_from_dhcp_client(self): + self.dhcp_options.return_value = {"eth0": {"unknown_245": "5:4:3:2"}} + self.assertEqual('5.4.3.2', + azure_helper.WALinuxAgentShim.find_endpoint(None)) + def test_latest_lease_used(self): encoded_addresses = ['5:4:3:2', '4:3:2:1'] file_content = '\n'.join([self._build_lease_content(encoded_address) for encoded_address in encoded_addresses]) self.load_file.return_value = file_content self.assertEqual(encoded_addresses[-1].replace(':', '.'), - azure_helper.WALinuxAgentShim.find_endpoint()) + azure_helper.WALinuxAgentShim.find_endpoint("foobar")) class TestExtractIpAddressFromLeaseValue(TestCase): diff --git a/tools/hook-dhclient b/tools/hook-dhclient new file mode 100755 index 00000000..d099979a --- /dev/null +++ b/tools/hook-dhclient @@ -0,0 +1,9 @@ +#!/bin/sh +# This script writes DHCP lease information into the cloud-init run directory +# It is sourced, not executed. For more information see dhclient-script(8). 
+ +case "$reason" in + BOUND) cloud-init dhclient-hook up "$interface";; + DOWN|RELEASE|REBOOT|STOP|EXPIRE) + cloud-init dhclient-hook down "$interface";; +esac diff --git a/tools/hook-network-manager b/tools/hook-network-manager new file mode 100755 index 00000000..447b134e --- /dev/null +++ b/tools/hook-network-manager @@ -0,0 +1,9 @@ +#!/bin/sh +# This script hooks into NetworkManager(8) via its scripts +# arguments are 'interface-name' and 'action' +# + +case "$1:$2" in + *:up) exec cloud-init dhclient-hook up "$1";; + *:down) exec cloud-init dhclient-hook down "$1";; +esac diff --git a/tools/hook-rhel.sh b/tools/hook-rhel.sh new file mode 100755 index 00000000..5e963a89 --- /dev/null +++ b/tools/hook-rhel.sh @@ -0,0 +1,12 @@ +#!/bin/sh +# Current versions of RHEL and CentOS do not honor the directory +# /etc/dhcp/dhclient-exit-hooks.d so this file can be placed in +# /etc/dhcp/dhclient.d instead + +hook-rhel_config(){ + cloud-init dhclient-hook up "$interface" +} + +hook-rhel_restore(){ + cloud-init dhclient-hook down "$interface" +} -- cgit v1.2.3 From 64522efe710faf6fa1615dbb60a2fc4cc8a7c278 Mon Sep 17 00:00:00 2001 From: Scott Moser Date: Thu, 18 Aug 2016 12:25:29 -0400 Subject: azure dhclient-hook cleanups This adds some function to the generator to maintain the presense of a flag file '/run/cloud-init/enabled' indicating that cloud-init is enabled. Then, only run the dhclient hooks if on Azure and cloud-init is enabled. The test for is_azure currently only checks to see that the board vendor is Microsoft, not actually that we are on azure. Running should not be harmful anywhere, other than slowing down dhclient. The value of this additional code is that then dhclient having run does not task the system with the load of cloud-init. Additionally, some changes to config are done here. * rename 'dhclient_leases' to 'dhclient_lease_file' * move that to the datasource config (datasource/Azure/dhclient_lease_file) Also, it removes the config in config/cloud.cfg that set agent_command to __builtin__. This means that by default cloud-init still needs the agent installed. The suggested follow-on improvement is to use __builtin__ if there is no walinux-agent installed. 
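(The dhclient hook machinery described above leaves per-interface JSON under /run/cloud-init/dhclient.hooks/. A minimal, hypothetical consumer of those files, not code from the tree, would look roughly like this, using the same 'unknown_245' key the Azure helper checks:)

    # Rough sketch: scan the JSON files written by 'cloud-init dhclient-hook'
    # and pick out DHCP option 245 if present. Error handling is minimal
    # on purpose; paths follow the layout described in the commits above.
    import json
    import os

    hooks_dir = "/run/cloud-init/dhclient.hooks"
    endpoint = None
    if os.path.isdir(hooks_dir):
        for fname in os.listdir(hooks_dir):
            with open(os.path.join(hooks_dir, fname)) as fp:
                opts = json.load(fp)
            endpoint = opts.get("unknown_245") or endpoint
    print(endpoint)
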
--- cloudinit/sources/DataSourceAzure.py | 13 +++++++------ cloudinit/sources/helpers/azure.py | 3 ++- config/cloud.cfg | 6 ------ doc/sources/azure/README.rst | 9 +++------ systemd/cloud-init-generator | 5 +++++ tools/hook-dhclient | 25 ++++++++++++++++++++----- tools/hook-network-manager | 23 +++++++++++++++++++---- tools/hook-rhel.sh | 15 +++++++++++++++ 8 files changed, 71 insertions(+), 28 deletions(-) (limited to 'tools') diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index a251fe01..dbc2bb68 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -54,6 +54,7 @@ BUILTIN_DS_CONFIG = { 'hostname_command': 'hostname', }, 'disk_aliases': {'ephemeral0': '/dev/sdb'}, + 'dhclient_lease_file': '/var/lib/dhcp/dhclient.eth0.leases', } BUILTIN_CLOUD_CONFIG = { @@ -106,8 +107,6 @@ def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'): class DataSourceAzureNet(sources.DataSource): - FALLBACK_LEASE = '/var/lib/dhcp/dhclient.eth0.leases' - def __init__(self, sys_cfg, distro, paths): sources.DataSource.__init__(self, sys_cfg, distro, paths) self.seed_dir = os.path.join(paths.seed_dir, 'azure') @@ -116,8 +115,7 @@ class DataSourceAzureNet(sources.DataSource): self.ds_cfg = util.mergemanydict([ util.get_cfg_by_path(sys_cfg, DS_CFG_PATH, {}), BUILTIN_DS_CONFIG]) - self.dhclient_lease_file = self.paths.cfgs.get('dhclient_lease', - self.FALLBACK_LEASE) + self.dhclient_lease_file = self.ds_cfg.get('dhclient_lease_file') def __str__(self): root = sources.DataSource.__str__(self) @@ -126,6 +124,9 @@ class DataSourceAzureNet(sources.DataSource): def get_metadata_from_agent(self): temp_hostname = self.metadata.get('local-hostname') hostname_command = self.ds_cfg['hostname_bounce']['hostname_command'] + agent_cmd = self.ds_cfg['agent_command'] + LOG.debug("Getting metadata via agent. 
hostname=%s cmd=%s", + temp_hostname, agent_cmd) with temporary_hostname(temp_hostname, self.ds_cfg, hostname_command=hostname_command) \ as previous_hostname: @@ -141,7 +142,7 @@ class DataSourceAzureNet(sources.DataSource): util.logexc(LOG, "handling set_hostname failed") try: - invoke_agent(self.ds_cfg['agent_command']) + invoke_agent(agent_cmd) except util.ProcessExecutionError: # claim the datasource even if the command failed util.logexc(LOG, "agent command '%s' failed.", @@ -234,13 +235,13 @@ class DataSourceAzureNet(sources.DataSource): dhclient_lease_file) else: metadata_func = self.get_metadata_from_agent + try: fabric_data = metadata_func() except Exception as exc: LOG.info("Error communicating with Azure fabric; assume we aren't" " on Azure.", exc_info=True) return False - self.metadata['instance-id'] = util.read_dmi_data('system-uuid') self.metadata.update(fabric_data) diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py index 6e43440f..689ed4cc 100644 --- a/cloudinit/sources/helpers/azure.py +++ b/cloudinit/sources/helpers/azure.py @@ -190,7 +190,8 @@ class WALinuxAgentShim(object): '']) def __init__(self, fallback_lease_file=None): - LOG.debug('WALinuxAgentShim instantiated...') + LOG.debug('WALinuxAgentShim instantiated, fallback_lease_file=%s', + fallback_lease_file) self.dhcpoptions = None self._endpoint = None self.openssl_manager = None diff --git a/config/cloud.cfg b/config/cloud.cfg index 93ef3423..2d7fb473 100644 --- a/config/cloud.cfg +++ b/config/cloud.cfg @@ -98,7 +98,6 @@ system_info: cloud_dir: /var/lib/cloud/ templates_dir: /etc/cloud/templates/ upstart_dir: /etc/init/ - dhclient_lease: package_mirrors: - arches: [i386, amd64] failsafe: @@ -115,8 +114,3 @@ system_info: primary: http://ports.ubuntu.com/ubuntu-ports security: http://ports.ubuntu.com/ubuntu-ports ssh_svcname: ssh -datasource: - Azure: - set_hostname: False - agent_command: __builtin__ - diff --git a/doc/sources/azure/README.rst b/doc/sources/azure/README.rst index 48f3cc7a..ec7d9e84 100644 --- a/doc/sources/azure/README.rst +++ b/doc/sources/azure/README.rst @@ -30,13 +30,10 @@ datasource: If those files are not available, the fallback is to check the leases file for the endpoint server (again option 245). -You can define the path to the lease file with the 'dhclient_lease' configuration -value under system_info: and paths:. For example: +You can define the path to the lease file with the 'dhclient_lease_file' +configuration. The default value is /var/lib/dhcp/dhclient.eth0.leases. - dhclient_lease: /var/lib/dhcp/dhclient.eth0.leases - -If no configuration value is provided, the dhclient_lease value will fallback to -/var/lib/dhcp/dhclient.eth0.leases. 
+ dhclient_lease_file: /var/lib/dhcp/dhclient.eth0.leases walinuxagent ------------ diff --git a/systemd/cloud-init-generator b/systemd/cloud-init-generator index 2d319695..fedb6309 100755 --- a/systemd/cloud-init-generator +++ b/systemd/cloud-init-generator @@ -6,6 +6,7 @@ DEBUG_LEVEL=1 LOG_D="/run/cloud-init" ENABLE="enabled" DISABLE="disabled" +RUN_ENABLED_FILE="$LOG_D/$ENABLE" CLOUD_SYSTEM_TARGET="/lib/systemd/system/cloud-init.target" CLOUD_TARGET_NAME="cloud-init.target" # lxc sets 'container', but lets make that explicitly a global @@ -107,6 +108,7 @@ main() { "ln $CLOUD_SYSTEM_TARGET $link_path" fi fi + : > "$RUN_ENABLED_FILE" elif [ "$result" = "$DISABLE" ]; then if [ -f "$link_path" ]; then if rm -f "$link_path"; then @@ -118,6 +120,9 @@ main() { else debug 1 "already disabled: no change needed [no $link_path]" fi + if [ -e "$RUN_ENABLED_FILE" ]; then + rm -f "$RUN_ENABLED_FILE" + fi else debug 0 "unexpected result '$result'" ret=3 diff --git a/tools/hook-dhclient b/tools/hook-dhclient index d099979a..6a4626c6 100755 --- a/tools/hook-dhclient +++ b/tools/hook-dhclient @@ -1,9 +1,24 @@ #!/bin/sh # This script writes DHCP lease information into the cloud-init run directory # It is sourced, not executed. For more information see dhclient-script(8). +is_azure() { + local dmi_path="/sys/class/dmi/id/board_vendor" vendor="" + if [ -e "$dmi_path" ] && read vendor < "$dmi_path"; then + [ "$vendor" = "Microsoft Corporation" ] && return 0 + fi + return 1 +} -case "$reason" in - BOUND) cloud-init dhclient-hook up "$interface";; - DOWN|RELEASE|REBOOT|STOP|EXPIRE) - cloud-init dhclient-hook down "$interface";; -esac +is_enabled() { + # only execute hooks if cloud-init is enabled and on azure + [ -e /run/cloud-init/enabled ] || return 1 + is_azure +} + +if is_enabled; then + case "$reason" in + BOUND) cloud-init dhclient-hook up "$interface";; + DOWN|RELEASE|REBOOT|STOP|EXPIRE) + cloud-init dhclient-hook down "$interface";; + esac +fi diff --git a/tools/hook-network-manager b/tools/hook-network-manager index 447b134e..98a36c8a 100755 --- a/tools/hook-network-manager +++ b/tools/hook-network-manager @@ -2,8 +2,23 @@ # This script hooks into NetworkManager(8) via its scripts # arguments are 'interface-name' and 'action' # +is_azure() { + local dmi_path="/sys/class/dmi/id/board_vendor" vendor="" + if [ -e "$dmi_path" ] && read vendor < "$dmi_path"; then + [ "$vendor" = "Microsoft Corporation" ] && return 0 + fi + return 1 +} -case "$1:$2" in - *:up) exec cloud-init dhclient-hook up "$1";; - *:down) exec cloud-init dhclient-hook down "$1";; -esac +is_enabled() { + # only execute hooks if cloud-init is enabled and on azure + [ -e /run/cloud-init/enabled ] || return 1 + is_azure +} + +if is_enabled; then + case "$1:$2" in + *:up) exec cloud-init dhclient-hook up "$1";; + *:down) exec cloud-init dhclient-hook down "$1";; + esac +fi diff --git a/tools/hook-rhel.sh b/tools/hook-rhel.sh index 5e963a89..8232414c 100755 --- a/tools/hook-rhel.sh +++ b/tools/hook-rhel.sh @@ -2,11 +2,26 @@ # Current versions of RHEL and CentOS do not honor the directory # /etc/dhcp/dhclient-exit-hooks.d so this file can be placed in # /etc/dhcp/dhclient.d instead +is_azure() { + local dmi_path="/sys/class/dmi/id/board_vendor" vendor="" + if [ -e "$dmi_path" ] && read vendor < "$dmi_path"; then + [ "$vendor" = "Microsoft Corporation" ] && return 0 + fi + return 1 +} + +is_enabled() { + # only execute hooks if cloud-init is enabled and on azure + [ -e /run/cloud-init/enabled ] || return 1 + is_azure +} hook-rhel_config(){ + 
is_enabled || return 0 cloud-init dhclient-hook up "$interface" } hook-rhel_restore(){ + is_enabled || return 0 cloud-init dhclient-hook down "$interface" } -- cgit v1.2.3
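
To tie the tool changes together: both bddeb and brpm now obtain their version data by running tools/read-version with --json (via run_helper) and feed the resulting fields into the changelog and spec templates. A minimal sketch of such a consumer, assuming it is run from the top of the source tree, is:

    # Hypothetical consumer of 'tools/read-version --json'; the keys
    # ('release', 'version', 'version_long', ...) are the ones built in
    # read-version above. version_long is None outside a git checkout.
    import json
    import subprocess
    import sys

    out = subprocess.check_output(
        [sys.executable, "tools/read-version", "--json"],
        universal_newlines=True)
    ver = json.loads(out)
    print(ver["release"], ver["version_long"])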