summary refs log tree commit diff
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/bddeb              33
-rwxr-xr-x  tools/hacking.py        175
-rwxr-xr-x  tools/make-dist-tarball  25
-rwxr-xr-x  tools/mock-meta.py      444
-rwxr-xr-x  tools/read-dependencies  45
-rwxr-xr-x  tools/read-version       70
-rwxr-xr-x  tools/run-pep8           35
-rwxr-xr-x  tools/run-pylint         13
8 files changed, 770 insertions, 70 deletions
diff --git a/tools/bddeb b/tools/bddeb
deleted file mode 100755
index 598f71bb..00000000
--- a/tools/bddeb
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-
-TEMP_D=$(mktemp -d "${TMPDIR:-/tmp}/${0##*/}.XXXXXXX")
-#TEMP_D=/tmp/my.d
-start=${PWD}
-rm -Rf "${TEMP_D}"; mkdir "${TEMP_D}"
-set -e
-trap "rm -Rf '${TEMP_D}'" exit
-files=$(bzr ls --versioned)
-revno=$(bzr revno)
-version=$(awk \
- -F= '$1 ~ /version$/ { gsub("[^0-9.]","",$2); print $2; }' setup.py)
-mkdir "${TEMP_D}/cloud-init"
-otar="$TEMP_D/cloud-init_$version~bzr${revno}.orig.tar.gz"
-tar -czf - ${files} > "$otar"
-tar -C "${TEMP_D}/cloud-init" -xzf - <"$otar"
-
-if [ ! -d "${TEMP_D}/cloud-init/debian" ]; then
- rsync -a debian.trunk/ "${TEMP_D}/cloud-init/debian"
-fi
-sed -i -e "s,VERSION,$version," -e "s,REVNO,bzr$revno," \
- "$TEMP_D/cloud-init/debian/changelog"
-cd "${TEMP_D}/cloud-init"
-debuild "$@"
-#for x in ../*.deb; do
-# echo wrote ${x##*/}
-#done
-debname="cloud-init_${version}~bzr${revno}-0_all.deb"
-mv "../$debname" "$start"
-link="$start/cloud-init_all.deb"
-echo "wrote $debname"
-[ ! -e "$link" -o -L "$link" ]
- { ln -sf "$debname" "$link" && echo "linked ${link##*/}"; }
diff --git a/tools/hacking.py b/tools/hacking.py
new file mode 100755
index 00000000..d0c27d25
--- /dev/null
+++ b/tools/hacking.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright (c) 2012, Cloudscaling
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""cloudinit HACKING file compliance testing (based off of nova hacking.py)
+
+built on top of pep8.py
+"""
+
+import inspect
+import logging
+import os
+import re
+import sys
+import tokenize
+import warnings
+
+import pep8
+
+# Don't need this for testing
+logging.disable('LOG')
+
+# N1xx comments
+# N2xx except
+# N3xx imports
+# N4xx docstrings
+# N[5-9]XX (future use)
+
+DOCSTRING_TRIPLE = ['"""', "'''"]
+VERBOSE_MISSING_IMPORT = False
+_missingImport = set([])
+
+
+def import_normalize(line):
+ # convert "from x import y" to "import x.y"
+ # handle "from x import y as z" to "import x.y as z"
+ split_line = line.split()
+ if (line.startswith("from ") and "," not in line and
+ split_line[2] == "import" and split_line[3] != "*" and
+ split_line[1] != "__future__" and
+ (len(split_line) == 4 or
+ (len(split_line) == 6 and split_line[4] == "as"))):
+ return "import %s.%s" % (split_line[1], split_line[3])
+ else:
+ return line
+
+
+def cloud_import_alphabetical(physical_line, line_number, lines):
+ """Check for imports in alphabetical order.
+
+ HACKING guide recommendation for imports:
+ imports in human alphabetical order
+ N306
+ """
+ # handle import x
+ # use .lower since capitalization shouldn't dictate order
+ split_line = import_normalize(physical_line.strip()).lower().split()
+ split_previous = import_normalize(lines[line_number - 2]
+ ).strip().lower().split()
+ # with or without "as y"
+ length = [2, 4]
+ if (len(split_line) in length and len(split_previous) in length and
+ split_line[0] == "import" and split_previous[0] == "import"):
+ if split_line[1] < split_previous[1]:
+ return (0, "N306: imports not in alphabetical order (%s, %s)"
+ % (split_previous[1], split_line[1]))
+
+
+def cloud_docstring_start_space(physical_line):
+ """Check for docstring not start with space.
+
+ HACKING guide recommendation for docstring:
+ Docstring should not start with space
+ N401
+ """
+ pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
+ if (pos != -1 and len(physical_line) > pos + 1):
+ if (physical_line[pos + 3] == ' '):
+ return (pos, "N401: one line docstring should not start with"
+ " a space")
+
+
+def cloud_todo_format(physical_line):
+ """Check for 'TODO()'.
+
+ HACKING guide recommendation for TODO:
+ Include your name with TODOs as in "#TODO(termie)"
+ N101
+ """
+ pos = physical_line.find('TODO')
+ pos1 = physical_line.find('TODO(')
+ pos2 = physical_line.find('#') # make sure it's a comment
+ if (pos != pos1 and pos2 >= 0 and pos2 < pos):
+ return pos, "N101: Use TODO(NAME)"
+
+
+def cloud_docstring_one_line(physical_line):
+ """Check one line docstring end.
+
+ HACKING guide recommendation for one line docstring:
+ A one line docstring looks like this and ends in a period.
+ N402
+ """
+ pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
+ end = max([physical_line[-4:-1] == i for i in DOCSTRING_TRIPLE]) # end
+ if (pos != -1 and end and len(physical_line) > pos + 4):
+ if (physical_line[-5] != '.'):
+ return pos, "N402: one line docstring needs a period"
+
+
+def cloud_docstring_multiline_end(physical_line):
+ """Check multi line docstring end.
+
+ HACKING guide recommendation for docstring:
+ Docstring should end on a new line
+ N403
+ """
+ pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
+ if (pos != -1 and len(physical_line) == pos):
+ print physical_line
+ if (physical_line[pos + 3] == ' '):
+ return (pos, "N403: multi line docstring end on new line")
+
+
+
+current_file = ""
+
+
+def readlines(filename):
+ """Record the current file being tested."""
+ pep8.current_file = filename
+ return open(filename).readlines()
+
+
+def add_cloud():
+ """Monkey patch pep8 for cloud-init guidelines.
+
+ Look for functions that start with cloud_
+ and add them to pep8 module.
+
+ Assumes you know how to write pep8.py checks
+ """
+ for name, function in globals().items():
+ if not inspect.isfunction(function):
+ continue
+ if name.startswith("cloud_"):
+ exec("pep8.%s = %s" % (name, name))
+
+if __name__ == "__main__":
+ # NOVA based 'hacking.py' error codes start with an N
+ pep8.ERRORCODE_REGEX = re.compile(r'[EWN]\d{3}')
+ add_cloud()
+ pep8.current_file = current_file
+ pep8.readlines = readlines
+ try:
+ pep8._main()
+ finally:
+ if len(_missingImport) > 0:
+ print >> sys.stderr, ("%i imports missing in this test environment"
+ % len(_missingImport))
+
diff --git a/tools/make-dist-tarball b/tools/make-dist-tarball
deleted file mode 100755
index d6d53aa7..00000000
--- a/tools/make-dist-tarball
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-
-Usage() {
- cat <<EOF
-Usage: ${0##*/} version
- make a tarball of 'version'
- must be in a bzr directory, and 'version' must be a tag
-
-EOF
-}
-
-topdir=$PWD
-tag=${1}
-
-[ -n "$tag" ] || { Usage 1>&2 ; exit 1; }
-
-tmpd=$(mktemp -d );
-trap "rm -Rf '${tmpd}'" 0
-
-out=${topdir}/cloud-init-${tag}.tar.gz
-
-cd ${tmpd} &&
- bzr branch -r "tag:${tag}" "${topdir}" ./cloud-init-${tag} &&
- tar czf "${out}" cloud-init-${tag}/ --exclude cloud-init-${tag}/.bzr &&
- echo "wrote ${out}"
diff --git a/tools/mock-meta.py b/tools/mock-meta.py
new file mode 100755
index 00000000..4548e4ae
--- /dev/null
+++ b/tools/mock-meta.py
@@ -0,0 +1,444 @@
+#!/usr/bin/python
+
+# Provides a somewhat random, somewhat compat, somewhat useful mock version of
+#
+# http://docs.amazonwebservices.com/AWSEC2/2007-08-29/DeveloperGuide/AESDG-chapter-instancedata.html
+
+"""
+To use this to mimic the EC2 metadata service entirely, run it like:
+ # Where 'eth0' is *some* interface.
+ sudo ifconfig eth0:0 169.254.169.254 netmask 255.255.255.255
+
+ sudo ./mock-meta -a 169.254.169.254 -p 80
+
+Then:
+ wget -q http://169.254.169.254/latest/meta-data/instance-id -O -; echo
+ curl --silent http://169.254.169.254/latest/meta-data/instance-id ; echo
+ ec2metadata --instance-id
+"""
+
+import functools
+import httplib
+import json
+import logging
+import os
+import random
+import string
+import sys
+import yaml
+
+from optparse import OptionParser
+
+from BaseHTTPServer import (HTTPServer, BaseHTTPRequestHandler)
+
+log = logging.getLogger('meta-server')
+
+EC2_VERSIONS = [
+ '1.0',
+ '2007-01-19',
+ '2007-03-01',
+ '2007-08-29',
+ '2007-10-10',
+ '2007-12-15',
+ '2008-02-01',
+ '2008-09-01',
+ '2009-04-04',
+]
+
+BLOCK_DEVS = [
+ 'ami',
+ 'ephemeral0',
+ 'root',
+]
+
+DEV_PREFIX = 'v' # This seems to vary alot depending on images...
+DEV_MAPPINGS = {
+ 'ephemeral0': '%sda2' % (DEV_PREFIX),
+ 'root': '/dev/%sda1' % (DEV_PREFIX),
+ 'ami': '%sda1' % (DEV_PREFIX),
+ 'swap': '%sda3' % (DEV_PREFIX),
+}
+
+META_CAPABILITIES = [
+ 'aki-id',
+ 'ami-id',
+ 'ami-launch-index',
+ 'ami-manifest-path',
+ 'ari-id',
+ 'block-device-mapping/',
+ 'hostname',
+ 'instance-action',
+ 'instance-id',
+ 'instance-type',
+ 'local-hostname',
+ 'local-ipv4',
+ 'placement/',
+ 'product-codes',
+ 'public-hostname',
+ 'public-ipv4',
+ 'public-keys/',
+ 'reservation-id',
+ 'security-groups'
+]
+
+PUB_KEYS = {
+ 'brickies': [
+ ('ssh-rsa '
+ 'AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZdQueUq5ozemNSj8T'
+ '7enqKHOEaFoU2VoPgGEWC9RyzSQVeyD6s7APMcE82EtmW4skVEgEGSbDc1pvxzxtchBj78'
+ 'hJP6Cf5TCMFSXw+Fz5rF1dR23QDbN1mkHs7adr8GW4kSWqU7Q7NDwfIrJJtO7Hi42GyXtv'
+ 'EONHbiRPOe8stqUly7MvUoN+5kfjBM8Qqpfl2+FNhTYWpMfYdPUnE7u536WqzFmsaqJctz'
+ '3gBxH9Ex7dFtrxR4qiqEr9Qtlu3xGn7Bw07/+i1D+ey3ONkZLN+LQ714cgj8fRS4Hj29SC'
+ 'mXp5Kt5/82cD/VN3NtHw== brickies'),
+ '',
+ ],
+}
+
+INSTANCE_TYPES = [
+ 'm1.large',
+ 'm1.medium',
+ 'm1.small',
+ 'm1.xlarge',
+]
+
+AVAILABILITY_ZONES = [
+ "us-east-1a",
+ "us-east-1b",
+ "us-east-1c",
+ "us-east-1d",
+ 'eu-west-1a',
+ 'eu-west-1b',
+ 'us-west-1',
+]
+
+PLACEMENT_CAPABILITIES = {
+ 'availability-zone': AVAILABILITY_ZONES,
+}
+
+NOT_IMPL_RESPONSE = json.dumps({})
+
+
+class WebException(Exception):
+ def __init__(self, code, msg):
+ Exception.__init__(self, msg)
+ self.code = code
+
+
+def yamlify(data):
+ formatted = yaml.dump(data,
+ line_break="\n",
+ indent=4,
+ explicit_start=True,
+ explicit_end=True,
+ default_flow_style=False)
+ return formatted
+
+
+def format_text(text):
+ if not len(text):
+ return "<<"
+ lines = text.splitlines()
+ nlines = []
+ for line in lines:
+ nlines.append("<< %s" % line)
+ return "\n".join(nlines)
+
+
+def traverse(keys, mp):
+ result = dict(mp)
+ for k in keys:
+ try:
+ result = result.get(k)
+ except (AttributeError, TypeError):
+ result = None
+ break
+ return result
+
+
+ID_CHARS = [c for c in (string.ascii_uppercase + string.digits)]
+def id_generator(size=6, lower=False):
+ txt = ''.join(random.choice(ID_CHARS) for x in range(size))
+ if lower:
+ return txt.lower()
+ else:
+ return txt
+
+
+def get_ssh_keys():
+ keys = {}
+ keys.update(PUB_KEYS)
+
+ # Nice helper to add in the 'running' users key (if they have one)
+ key_pth = os.path.expanduser('~/.ssh/id_rsa.pub')
+ if not os.path.isfile(key_pth):
+ key_pth = os.path.expanduser('~/.ssh/id_dsa.pub')
+
+ if os.path.isfile(key_pth):
+ with open(key_pth, 'rb') as fh:
+ contents = fh.read()
+ keys[os.getlogin()] = [contents, '']
+
+ return keys
+
+
+class MetaDataHandler(object):
+
+ def __init__(self, opts):
+ self.opts = opts
+ self.instances = {}
+
+ def get_data(self, params, who, **kwargs):
+ if not params:
+ # Show the root level capabilities when
+ # no params are passed...
+ caps = sorted(META_CAPABILITIES)
+ return "\n".join(caps)
+ action = params[0]
+ action = action.lower()
+ if action == 'instance-id':
+ return 'i-%s' % (id_generator(lower=True))
+ elif action == 'ami-launch-index':
+ return "%s" % random.choice([0, 1, 2, 3])
+ elif action == 'aki-id':
+ return 'aki-%s' % (id_generator(lower=True))
+ elif action == 'ami-id':
+ return 'ami-%s' % (id_generator(lower=True))
+ elif action == 'ari-id':
+ return 'ari-%s' % (id_generator(lower=True))
+ elif action == 'block-device-mapping':
+ nparams = params[1:]
+ if not nparams:
+ return "\n".join(BLOCK_DEVS)
+ else:
+ subvalue = traverse(nparams, DEV_MAPPINGS)
+ if not subvalue:
+ return "\n".join(sorted(list(DEV_MAPPINGS.keys())))
+ else:
+ return str(subvalue)
+ elif action in ['hostname', 'local-hostname', 'public-hostname']:
+ # Just echo back there own hostname that they called in on..
+ return "%s" % (who)
+ elif action == 'instance-type':
+ return random.choice(INSTANCE_TYPES)
+ elif action == 'ami-manifest-path':
+ return 'my-amis/spamd-image.manifest.xml'
+ elif action == 'security-groups':
+ return 'default'
+ elif action in ['local-ipv4', 'public-ipv4']:
+ # Just echo back there own ip that they called in on...
+ return "%s" % (kwargs.get('client_ip', '10.0.0.1'))
+ elif action == 'reservation-id':
+ return "r-%s" % (id_generator(lower=True))
+ elif action == 'product-codes':
+ return "%s" % (id_generator(size=8))
+ elif action == 'public-keys':
+ nparams = params[1:]
+ # This is a weird kludge, why amazon why!!!
+ # public-keys is messed up, a list of /latest/meta-data/public-keys/
+ # shows something like: '0=brickies'
+ # but a GET to /latest/meta-data/public-keys/0=brickies will fail
+ # you have to know to get '/latest/meta-data/public-keys/0', then
+ # from there you get a 'openssh-key', which you can get.
+ # this hunk of code just re-works the object for that.
+ avail_keys = get_ssh_keys()
+ key_ids = sorted(list(avail_keys.keys()))
+ if nparams:
+ mybe_key = nparams[0]
+ try:
+ key_id = int(mybe_key)
+ key_name = key_ids[key_id]
+ except:
+ raise WebException(httplib.BAD_REQUEST, "Unknown key id %r" % mybe_key)
+ # Extract the possible sub-params
+ result = traverse(nparams[1:], {
+ "openssh-key": "\n".join(avail_keys[key_name]),
+ })
+ if isinstance(result, (dict)):
+ # TODO: This might not be right??
+ result = "\n".join(sorted(result.keys()))
+ if not result:
+ result = ''
+ return result
+ else:
+ contents = []
+ for (i, key_id) in enumerate(key_ids):
+ contents.append("%s=%s" % (i, key_id))
+ return "\n".join(contents)
+ elif action == 'placement':
+ nparams = params[1:]
+ if not nparams:
+ pcaps = sorted(PLACEMENT_CAPABILITIES.keys())
+ return "\n".join(pcaps)
+ else:
+ pentry = nparams[0].strip().lower()
+ if pentry == 'availability-zone':
+ zones = PLACEMENT_CAPABILITIES[pentry]
+ return "%s" % random.choice(zones)
+ else:
+ return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, ''))
+ else:
+ log.warn(("Did not implement action %s, "
+ "returning empty response: %r"),
+ action, NOT_IMPL_RESPONSE)
+ return NOT_IMPL_RESPONSE
+
+
+class UserDataHandler(object):
+
+ def __init__(self, opts):
+ self.opts = opts
+
+ def _get_user_blob(self, **kwargs):
+ blob = None
+ if self.opts['user_data_file'] is not None:
+ blob = self.opts['user_data_file']
+ if not blob:
+ blob_mp = {
+ 'hostname': kwargs.get('who', 'localhost'),
+ }
+ lines = [
+ "#cloud-config",
+ yamlify(blob_mp),
+ ]
+ blob = "\n".join(lines)
+ return blob.strip()
+
+ def get_data(self, params, who, **kwargs):
+ if not params:
+ return self._get_user_blob(who=who)
+ return NOT_IMPL_RESPONSE
+
+
+# Seem to need to use globals since can't pass
+# data into the request handlers instances...
+# Puke!
+meta_fetcher = None
+user_fetcher = None
+
+
+class Ec2Handler(BaseHTTPRequestHandler):
+
+ def _get_versions(self):
+ versions = ['latest'] + EC2_VERSIONS
+ versions = sorted(versions)
+ return "\n".join(versions)
+
+ def log_message(self, format, *args):
+ msg = "%s - %s" % (self.address_string(), format % (args))
+ log.info(msg)
+
+ def _find_method(self, path):
+ # Puke! (globals)
+ global meta_fetcher
+ global user_fetcher
+ func_mapping = {
+ 'user-data': user_fetcher.get_data,
+ 'meta-data': meta_fetcher.get_data,
+ }
+ segments = [piece for piece in path.split('/') if len(piece)]
+ log.info("Received segments %s", segments)
+ if not segments:
+ return self._get_versions
+ date = segments[0].strip().lower()
+ if date not in self._get_versions():
+ raise WebException(httplib.BAD_REQUEST, "Unknown version format %r" % date)
+ if len(segments) < 2:
+ raise WebException(httplib.BAD_REQUEST, "No action provided")
+ look_name = segments[1].lower()
+ if look_name not in func_mapping:
+ raise WebException(httplib.BAD_REQUEST, "Unknown requested data %r" % look_name)
+ base_func = func_mapping[look_name]
+ who = self.address_string()
+ ip_from = self.client_address[0]
+ if who == ip_from:
+ # Nothing resolved, so just use 'localhost'
+ who = 'localhost'
+ kwargs = {
+ 'params': list(segments[2:]),
+ 'who': who,
+ 'client_ip': ip_from,
+ }
+ return functools.partial(base_func, **kwargs)
+
+ def _do_response(self):
+ who = self.client_address
+ log.info("Got a call from %s for path %s", who, self.path)
+ try:
+ func = self._find_method(self.path)
+ data = func()
+ if not data:
+ data = ''
+ self.send_response(httplib.OK)
+ self.send_header("Content-Type", "binary/octet-stream")
+ self.send_header("Content-Length", len(data))
+ log.info("Sending data (len=%s):\n%s", len(data), format_text(data))
+ self.end_headers()
+ self.wfile.write(data)
+ except RuntimeError as e:
+ log.exception("Error somewhere in the server.")
+ self.send_error(httplib.INTERNAL_SERVER_ERROR, message=str(e))
+ except WebException as e:
+ code = e.code
+ log.exception(str(e))
+ self.send_error(code, message=str(e))
+
+ def do_GET(self):
+ self._do_response()
+
+ def do_POST(self):
+ self._do_response()
+
+
+def setup_logging(log_level, format='%(levelname)s: @%(name)s : %(message)s'):
+ root_logger = logging.getLogger()
+ console_logger = logging.StreamHandler(sys.stdout)
+ console_logger.setFormatter(logging.Formatter(format))
+ root_logger.addHandler(console_logger)
+ root_logger.setLevel(log_level)
+
+
+def extract_opts():
+ parser = OptionParser()
+ parser.add_option("-p", "--port", dest="port", action="store", type=int, default=80,
+ help="port from which to serve traffic (default: %default)", metavar="PORT")
+ parser.add_option("-a", "--addr", dest="address", action="store", type=str, default='0.0.0.0',
+ help="address from which to serve traffic (default: %default)", metavar="ADDRESS")
+ parser.add_option("-f", '--user-data-file', dest='user_data_file', action='store',
+ help="user data filename to serve back to incoming requests", metavar='FILE')
+ (options, args) = parser.parse_args()
+ out = dict()
+ out['extra'] = args
+ out['port'] = options.port
+ out['user_data_file'] = None
+ out['address'] = options.address
+ if options.user_data_file:
+ if not os.path.isfile(options.user_data_file):
+ parser.error("Option -f specified a non-existent file")
+ with open(options.user_data_file, 'rb') as fh:
+ out['user_data_file'] = fh.read()
+ return out
+
+
+def setup_fetchers(opts):
+ global meta_fetcher
+ global user_fetcher
+ meta_fetcher = MetaDataHandler(opts)
+ user_fetcher = UserDataHandler(opts)
+
+
+def run_server():
+ # Using global here since it doesn't seem like we
+ # can pass opts into a request handler constructor...
+ opts = extract_opts()
+ setup_logging(logging.DEBUG)
+ setup_fetchers(opts)
+ log.info("CLI opts: %s", opts)
+ server_address = (opts['address'], opts['port'])
+ server = HTTPServer(server_address, Ec2Handler)
+ sa = server.socket.getsockname()
+ log.info("Serving ec2 metadata on %s using port %s ...", sa[0], sa[1])
+ server.serve_forever()
+
+
+if __name__ == '__main__':
+ run_server()
diff --git a/tools/read-dependencies b/tools/read-dependencies
new file mode 100755
index 00000000..72e1e095
--- /dev/null
+++ b/tools/read-dependencies
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+# vi: ts=4 expandtab
+
+import os
+import sys
+import re
+
+
+def parse_requires(fn):
+ requires = []
+ with open(fn, 'r') as fh:
+ lines = fh.read().splitlines()
+ for line in lines:
+ line = line.strip()
+ if not line or line[0] == '#':
+ continue
+ else:
+ requires.append(line)
+ return requires
+
+
+def find_requires(args):
+ p_files = []
+ if args:
+ p_files.append(args[0])
+ p_files.append(os.path.join(os.pardir, "Requires"))
+ p_files.append(os.path.join(os.getcwd(), 'Requires'))
+ found = None
+ for fn in p_files:
+ if os.path.isfile(fn):
+ found = fn
+ break
+ return found
+
+
+if __name__ == '__main__':
+ run_args = sys.argv[1:]
+ fn = find_requires(run_args)
+ if not fn:
+ sys.stderr.write("'Requires' file not found!\n")
+ sys.exit(1)
+ else:
+ deps = parse_requires(fn)
+ for entry in deps:
+ print entry
diff --git a/tools/read-version b/tools/read-version
new file mode 100755
index 00000000..e6167a2c
--- /dev/null
+++ b/tools/read-version
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+# vi: ts=4 expandtab
+
+import os
+import sys
+import re
+
+from distutils import version as ver
+
+possible_topdir = os.path.normpath(os.path.join(os.path.abspath(
+ sys.argv[0]), os.pardir, os.pardir))
+if os.path.exists(os.path.join(possible_topdir, "cloudinit", "__init__.py")):
+ sys.path.insert(0, possible_topdir)
+
+from cloudinit import version as cver
+
+def parse_versions(fn):
+ with open(fn, 'r') as fh:
+ lines = fh.read().splitlines()
+ versions = []
+ for line in lines:
+ line = line.strip()
+ if line.startswith("-") or not line:
+ continue
+ if not re.match(r"[\d]", line):
+ continue
+ line = line.strip(":")
+ if (re.match(r"^[\d+]\.[\d+]\.[\d+]$", line) or
+ re.match(r"^[\d+]\.[\d+]$", line)):
+ versions.append(line)
+ return versions
+
+def find_changelog(args):
+ p_files = []
+ if args:
+ p_files.append(args[0])
+ p_files.append(os.path.join(os.pardir, "ChangeLog"))
+ p_files.append(os.path.join(os.getcwd(), 'ChangeLog'))
+ found = None
+ for fn in p_files:
+ if os.path.isfile(fn):
+ found = fn
+ break
+ return found
+
+
+if __name__ == '__main__':
+ run_args = sys.argv[1:]
+ fn = find_changelog(run_args)
+ if not fn:
+ sys.stderr.write("'ChangeLog' file not found!\n")
+ sys.exit(1)
+ else:
+ versions = parse_versions(fn)
+ if not versions:
+ sys.stderr.write("No versions found in %s!\n" % (fn))
+ sys.exit(1)
+ else:
+ # Check that the code version is the same
+ # as the version we found!
+ ch_ver = versions[0].strip()
+ code_ver = cver.version()
+ ch_ver_obj = ver.StrictVersion(ch_ver)
+ if ch_ver_obj != code_ver:
+ sys.stderr.write(("Code version %s does not match"
+ " changelog version %s\n") %
+ (code_ver, ch_ver_obj))
+ sys.exit(1)
+ sys.stdout.write(ch_ver)
+ sys.exit(0)
diff --git a/tools/run-pep8 b/tools/run-pep8
new file mode 100755
index 00000000..ea46c117
--- /dev/null
+++ b/tools/run-pep8
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+ci_files='cloud*.py cloudinit/*.py cloudinit/config/*.py'
+test_files=$(find tests -name "*.py")
+def_files="$ci_files $test_files"
+
+if [ $# -eq 0 ]; then
+ files=( )
+ for f in $def_files; do
+ [ -f "$f" ] || { echo "failed, $f not a file" 1>&2; exit 1; }
+ files[${#files[@]}]=${f}
+ done
+else
+ files=( "$@" );
+fi
+
+if [ -f 'hacking.py' ]
+then
+ base=`pwd`
+else
+ base=`pwd`/tools/
+fi
+
+cmd=(
+ ${base}/hacking.py
+
+ --ignore=E501 # Line too long (these are caught by pylint)
+
+ "${files[@]}"
+)
+
+echo -e "\nRunning 'cloudinit' pep8:"
+echo "${cmd[@]}"
+"${cmd[@]}"
+
diff --git a/tools/run-pylint b/tools/run-pylint
index 46748ffb..dd6369aa 100755
--- a/tools/run-pylint
+++ b/tools/run-pylint
@@ -1,6 +1,6 @@
#!/bin/bash
-ci_files='cloud*.py cloudinit/*.py cloudinit/CloudConfig/*.py'
+ci_files='cloud*.py cloudinit/*.py cloudinit/config/*.py'
test_files=$(find tests -name "*.py")
def_files="$ci_files $test_files"
@@ -38,14 +38,3 @@ echo -e "\nRunning pylint:"
echo "${cmd[@]}"
"${cmd[@]}"
-cmd=(
- pep8
-
- --ignore=E501 # Line too long (these are caught by pylint above)
-
- "${files[@]}"
-)
-
-echo -e "\nRunning pep8:"
-echo "${cmd[@]}"
-"${cmd[@]}"