author     Chad Smith <chad.smith@canonical.com>  2018-02-02 11:11:36 -0700
committer  Chad Smith <chad.smith@canonical.com>  2018-02-02 11:11:36 -0700
commit     78013bc65030421699b5feb66bc8b7a205abfbc0
tree       2ebf7111129f4aaf8a833ba6d226d4513ed59388 /tools
parent     192261fe38a32edbd1f605ba25bbb6f4822a0720
parent     f7deaf15acf382d62554e2b1d70daa9a9109d542
merge from master at 17.2-30-gf7deaf15
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/ds-identify   116
-rwxr-xr-x  tools/hacking.py    172
-rwxr-xr-x  tools/make-mime.py    2
-rwxr-xr-x  tools/mock-meta.py   45
-rwxr-xr-x  tools/read-version   15
5 files changed, 117 insertions, 233 deletions
diff --git a/tools/ds-identify b/tools/ds-identify
index ee5e05a4..cd268242 100755
--- a/tools/ds-identify
+++ b/tools/ds-identify
@@ -83,6 +83,7 @@ _DI_LOGGED=""
# set DI_MAIN='noop' in environment to source this file with no main called.
DI_MAIN=${DI_MAIN:-main}
+DI_BLKID_OUTPUT=""
DI_DEFAULT_POLICY="search,found=all,maybe=all,notfound=${DI_DISABLED}"
DI_DEFAULT_POLICY_NO_DMI="search,found=all,maybe=all,notfound=${DI_ENABLED}"
DI_DMI_CHASSIS_ASSET_TAG=""
@@ -91,6 +92,7 @@ DI_DMI_SYS_VENDOR=""
DI_DMI_PRODUCT_SERIAL=""
DI_DMI_PRODUCT_UUID=""
DI_FS_LABELS=""
+DI_ISO9660_DEVS=""
DI_KERNEL_CMDLINE=""
DI_VIRT=""
DI_PID_1_PRODUCT_NAME=""
@@ -181,32 +183,43 @@ block_dev_with_label() {
return 0
}
-read_fs_labels() {
- cached "${DI_FS_LABELS}" && return 0
+read_fs_info() {
+ cached "${DI_BLKID_OUTPUT}" && return 0
# do not rely on links in /dev/disk which might not be present yet.
# note that older blkid versions do not report DEVNAME in 'export' output.
- local out="" ret=0 oifs="$IFS" line="" delim=","
- local labels=""
if is_container; then
# blkid will in a container, or at least currently in lxd
# not provide useful information.
DI_FS_LABELS="$UNAVAILABLE:container"
- else
- out=$(blkid -c /dev/null -o export) || {
- ret=$?
- error "failed running [$ret]: blkid -c /dev/null -o export"
- return $ret
- }
- IFS="$CR"
- set -- $out
- IFS="$oifs"
- for line in "$@"; do
- case "${line}" in
- LABEL=*) labels="${labels}${line#LABEL=}${delim}";;
- esac
- done
- DI_FS_LABELS="${labels%${delim}}"
+ DI_ISO9660_DEVS="$UNAVAILABLE:container"
+ return
fi
+ local oifs="$IFS" line="" delim=","
+ local ret=0 out="" labels="" dev="" label="" ftype="" isodevs=""
+ out=$(blkid -c /dev/null -o export) || {
+ ret=$?
+ error "failed running [$ret]: blkid -c /dev/null -o export"
+ DI_FS_LABELS="$UNAVAILABLE:error"
+ DI_ISO9660_DEVS="$UNAVAILABLE:error"
+ return $ret
+ }
+ IFS="$CR"
+ set -- $out
+ IFS="$oifs"
+ for line in "$@" ""; do
+ case "${line}" in
+ DEVNAME=*) dev=${line#DEVNAME=};;
+ LABEL=*) label="${line#LABEL=}";
+ labels="${labels}${line#LABEL=}${delim}";;
+ TYPE=*) ftype=${line#TYPE=};;
+ "") if [ "$ftype" = "iso9660" ]; then
+ isodevs="${isodevs} ${dev}=$label"
+ fi
+ ftype=""; devname=""; label="";
+ esac
+ done
+ DI_FS_LABELS="${labels%${delim}}"
+ DI_ISO9660_DEVS="${isodevs# }"
}
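
For readers unfamiliar with blkid's export output, the new read_fs_info() logic can be sketched in Python roughly as follows (a minimal illustration, not part of the commit; the container short-circuit and error handling are omitted):

import subprocess

def read_fs_info():
    # `blkid -c /dev/null -o export` prints one KEY=value record per device,
    # records separated by blank lines, e.g.:
    #   DEVNAME=/dev/sr0
    #   LABEL=OVF-TRANSPORT
    #   TYPE=iso9660
    out = subprocess.check_output(
        ["blkid", "-c", "/dev/null", "-o", "export"], text=True)
    labels, isodevs = [], []
    dev = label = ftype = ""
    for line in out.splitlines() + [""]:      # trailing "" flushes the last record
        if line.startswith("DEVNAME="):
            dev = line[len("DEVNAME="):]
        elif line.startswith("LABEL="):
            label = line[len("LABEL="):]
            labels.append(label)
        elif line.startswith("TYPE="):
            ftype = line[len("TYPE="):]
        elif line == "":
            if ftype == "iso9660":
                isodevs.append("%s=%s" % (dev, label))   # e.g. /dev/sr0=OVF-TRANSPORT
            dev = label = ftype = ""
    # Mirrors DI_FS_LABELS (comma separated) and DI_ISO9660_DEVS (space separated).
    return ",".join(labels), " ".join(isodevs)
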
cached() {
@@ -214,10 +227,6 @@ cached() {
}
-has_cdrom() {
- [ -e "${PATH_ROOT}/dev/cdrom" ]
-}
-
detect_virt() {
local virt="${UNAVAILABLE}" r="" out=""
if [ -d /run/systemd ]; then
@@ -570,6 +579,8 @@ dscheck_NoCloud() {
check_configdrive_v2() {
if has_fs_with_label "config-2"; then
return ${DS_FOUND}
+ elif has_fs_with_label "CONFIG-2"; then
+ return ${DS_FOUND}
fi
# look in /config-drive <vlc>/seed/config_drive for a directory
# openstack/YYYY-MM-DD format with a file meta_data.json
@@ -621,14 +632,13 @@ ovf_vmware_guest_customization() {
[ "${DI_VIRT}" = "vmware" ] || return 1
# we have to have the plugin to do vmware customization
- local found="" pkg="" pre="/usr/lib"
+ local found="" pkg="" pre="${PATH_ROOT}/usr/lib"
for pkg in vmware-tools open-vm-tools; do
if [ -f "$pre/$pkg/plugins/vmsvc/libdeployPkgPlugin.so" ]; then
found="$pkg"; break;
fi
done
[ -n "$found" ] || return 1
-
# vmware customization is disabled by default
# (disable_vmware_customization=true). If it is set to false, then
# user has requested customization.
@@ -644,20 +654,57 @@ ovf_vmware_guest_customization() {
return 1
}
+is_cdrom_ovf() {
+ local dev="$1" label="$2"
+ # skip devices that don't look like cdrom paths.
+ case "$dev" in
+ /dev/sr[0-9]|/dev/hd[a-z]) :;;
+ *) debug 1 "skipping iso dev $dev"
+ return 1;;
+ esac
+
+ # fast path known 'OVF' labels
+ case "$label" in
+ OVF-TRANSPORT|ovf-transport|OVFENV|ovfenv) return 0;;
+ esac
+
+ # explicitly skip known labels of other types. rd_rdfe is azure.
+ case "$label" in
+ config-2|CONFIG-2|rd_rdfe_stable*|cidata) return 1;;
+ esac
+
+ local idstr="http://schemas.dmtf.org/ovf/environment/1"
+ grep --quiet --ignore-case "$idstr" "${PATH_ROOT}$dev"
+}
+
dscheck_OVF() {
- local p=""
check_seed_dir ovf ovf-env.xml && return "${DS_FOUND}"
+ [ "${DI_VIRT}" = "none" ] && return ${DS_NOT_FOUND}
+
+ # Azure provides ovf. Skip false positive by dis-allowing.
+ is_azure_chassis && return $DS_NOT_FOUND
+
+ local isodevs="${DI_ISO9660_DEVS}"
+ case "$isodevs" in
+ ""|$UNAVAILABLE:*) return ${DS_NOT_FOUND};;
+ esac
+
+ # DI_ISO9660_DEVS is <device>=label, like /dev/sr0=OVF-TRANSPORT
+ for tok in $isodevs; do
+ is_cdrom_ovf "${tok%%=*}" "${tok#*=}" && return $DS_FOUND
+ done
+
if ovf_vmware_guest_customization; then
return ${DS_FOUND}
fi
- has_cdrom || return ${DS_NOT_FOUND}
+ return ${DS_NOT_FOUND}
+}
- # FIXME: currently just return maybe if there is a cdrom
- # ovf iso9660 transport does not specify an fs label.
- # better would be to check if
- return ${DS_MAYBE}
+is_azure_chassis() {
+ local azure_chassis="7783-7084-3265-9085-8269-3286-77"
+ dmi_chassis_asset_tag_matches "${azure_chassis}"
}
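
The Azure short-circuit added to dscheck_OVF() keys off the well-known Azure chassis asset tag shown above. Roughly what is_azure_chassis() amounts to, sketched in Python (illustrative only; ds-identify reads DMI values through its own helpers, and the sysfs path below is an assumption about a typical Linux system):

AZURE_CHASSIS_ASSET_TAG = "7783-7084-3265-9085-8269-3286-77"

def is_azure_chassis(dmi_dir="/sys/class/dmi/id"):
    # DMI data may be absent (containers, some non-x86 VMs); treat that as "not Azure".
    try:
        with open(dmi_dir + "/chassis_asset_tag") as fp:
            return fp.read().strip() == AZURE_CHASSIS_ASSET_TAG
    except OSError:
        return False
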
dscheck_Azure() {
@@ -667,8 +714,7 @@ dscheck_Azure() {
# UUID="112D211272645f72" LABEL="rd_rdfe_stable.161212-1209"
# TYPE="udf">/dev/sr0</device>
#
- local azure_chassis="7783-7084-3265-9085-8269-3286-77"
- dmi_chassis_asset_tag_matches "${azure_chassis}" && return $DS_FOUND
+ is_azure_chassis && return $DS_FOUND
check_seed_dir azure ovf-env.xml && return ${DS_FOUND}
[ "${DI_VIRT}" = "microsoft" ] || return ${DS_NOT_FOUND}
@@ -930,7 +976,7 @@ collect_info() {
read_dmi_product_name
read_dmi_product_serial
read_dmi_product_uuid
- read_fs_labels
+ read_fs_info
}
print_info() {
@@ -942,7 +988,7 @@ _print_info() {
local n="" v="" vars=""
vars="DMI_PRODUCT_NAME DMI_SYS_VENDOR DMI_PRODUCT_SERIAL"
vars="$vars DMI_PRODUCT_UUID PID_1_PRODUCT_NAME DMI_CHASSIS_ASSET_TAG"
- vars="$vars FS_LABELS KERNEL_CMDLINE VIRT"
+ vars="$vars FS_LABELS ISO9660_DEVS KERNEL_CMDLINE VIRT"
vars="$vars UNAME_KERNEL_NAME UNAME_KERNEL_RELEASE UNAME_KERNEL_VERSION"
vars="$vars UNAME_MACHINE UNAME_NODENAME UNAME_OPERATING_SYSTEM"
vars="$vars DSNAME DSLIST"
diff --git a/tools/hacking.py b/tools/hacking.py
deleted file mode 100755
index e6a05136..00000000
--- a/tools/hacking.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012, Cloudscaling
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""cloudinit HACKING file compliance testing (based off of nova hacking.py)
-
-built on top of pep8.py
-"""
-
-import inspect
-import logging
-import re
-import sys
-
-import pep8
-
-# Don't need this for testing
-logging.disable('LOG')
-
-# N1xx comments
-# N2xx except
-# N3xx imports
-# N4xx docstrings
-# N[5-9]XX (future use)
-
-DOCSTRING_TRIPLE = ['"""', "'''"]
-VERBOSE_MISSING_IMPORT = False
-_missingImport = set([])
-
-
-def import_normalize(line):
- # convert "from x import y" to "import x.y"
- # handle "from x import y as z" to "import x.y as z"
- split_line = line.split()
- if (line.startswith("from ") and "," not in line and
- split_line[2] == "import" and split_line[3] != "*" and
- split_line[1] != "__future__" and
- (len(split_line) == 4 or (len(split_line) == 6 and
- split_line[4] == "as"))):
- return "import %s.%s" % (split_line[1], split_line[3])
- else:
- return line
-
-
-def cloud_import_alphabetical(physical_line, line_number, lines):
- """Check for imports in alphabetical order.
-
- HACKING guide recommendation for imports:
- imports in human alphabetical order
- N306
- """
- # handle import x
- # use .lower since capitalization shouldn't dictate order
- split_line = import_normalize(physical_line.strip()).lower().split()
- split_previous = import_normalize(lines[line_number - 2])
- split_previous = split_previous.strip().lower().split()
- # with or without "as y"
- length = [2, 4]
- if (len(split_line) in length and len(split_previous) in length and
- split_line[0] == "import" and split_previous[0] == "import"):
- if split_line[1] < split_previous[1]:
- return (0, "N306: imports not in alphabetical order (%s, %s)"
- % (split_previous[1], split_line[1]))
-
-
-def cloud_docstring_start_space(physical_line):
- """Check for docstring not start with space.
-
- HACKING guide recommendation for docstring:
- Docstring should not start with space
- N401
- """
- pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
- if (pos != -1 and len(physical_line) > pos + 1):
- if (physical_line[pos + 3] == ' '):
- return (pos,
- "N401: one line docstring should not start with a space")
-
-
-def cloud_todo_format(physical_line):
- """Check for 'TODO()'.
-
- HACKING guide recommendation for TODO:
- Include your name with TODOs as in "#TODO(termie)"
- N101
- """
- pos = physical_line.find('TODO')
- pos1 = physical_line.find('TODO(')
- pos2 = physical_line.find('#') # make sure it's a comment
- if (pos != pos1 and pos2 >= 0 and pos2 < pos):
- return pos, "N101: Use TODO(NAME)"
-
-
-def cloud_docstring_one_line(physical_line):
- """Check one line docstring end.
-
- HACKING guide recommendation for one line docstring:
- A one line docstring looks like this and ends in a period.
- N402
- """
- pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
- end = max([physical_line[-4:-1] == i for i in DOCSTRING_TRIPLE]) # end
- if (pos != -1 and end and len(physical_line) > pos + 4):
- if (physical_line[-5] != '.'):
- return pos, "N402: one line docstring needs a period"
-
-
-def cloud_docstring_multiline_end(physical_line):
- """Check multi line docstring end.
-
- HACKING guide recommendation for docstring:
- Docstring should end on a new line
- N403
- """
- pos = max([physical_line.find(i) for i in DOCSTRING_TRIPLE]) # start
- if (pos != -1 and len(physical_line) == pos):
- print(physical_line)
- if (physical_line[pos + 3] == ' '):
- return (pos, "N403: multi line docstring end on new line")
-
-
-current_file = ""
-
-
-def readlines(filename):
- """Record the current file being tested."""
- pep8.current_file = filename
- return open(filename).readlines()
-
-
-def add_cloud():
- """Monkey patch pep8 for cloud-init guidelines.
-
- Look for functions that start with cloud_
- and add them to pep8 module.
-
- Assumes you know how to write pep8.py checks
- """
- for name, function in globals().items():
- if not inspect.isfunction(function):
- continue
- if name.startswith("cloud_"):
- exec("pep8.%s = %s" % (name, name))
-
-
-if __name__ == "__main__":
- # NOVA based 'hacking.py' error codes start with an N
- pep8.ERRORCODE_REGEX = re.compile(r'[EWN]\d{3}')
- add_cloud()
- pep8.current_file = current_file
- pep8.readlines = readlines
- try:
- pep8._main()
- finally:
- if len(_missingImport) > 0:
- sys.stderr.write(
- "%i imports missing in this test environment\n" %
- len(_missingImport))
-
-# vi: ts=4 expandtab
diff --git a/tools/make-mime.py b/tools/make-mime.py
index f6a72044..d321479b 100755
--- a/tools/make-mime.py
+++ b/tools/make-mime.py
@@ -23,7 +23,7 @@ def file_content_type(text):
filename, content_type = text.split(":", 1)
return (open(filename, 'r'), filename, content_type.strip())
except ValueError:
- raise argparse.ArgumentError("Invalid value for %r" % (text))
+ raise argparse.ArgumentError(text, "Invalid value for %r" % (text))
def main():
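
For reference, argparse.ArgumentError takes the offending argument (an argparse Action, or None) followed by a message; callables passed as type= more conventionally raise argparse.ArgumentTypeError, which the parser turns into a clean usage error. A minimal sketch of that pattern (illustrative only, not part of this change; the example argument value is made up):

import argparse

def file_content_type(text):
    # Expects "filename:content-type", e.g. "cloud.cfg:text/cloud-config"
    try:
        filename, content_type = text.split(":", 1)
        return (open(filename, "r"), filename, content_type.strip())
    except ValueError:
        raise argparse.ArgumentTypeError("Invalid value for %r" % (text,))
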
diff --git a/tools/mock-meta.py b/tools/mock-meta.py
index a5d14ab7..724f7fc4 100755
--- a/tools/mock-meta.py
+++ b/tools/mock-meta.py
@@ -17,6 +17,7 @@ Then:
ec2metadata --instance-id
"""
+import argparse
import functools
import json
import logging
@@ -27,8 +28,6 @@ import string
import sys
import yaml
-from optparse import OptionParser
-
try:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import httplib as hclient
@@ -415,29 +414,27 @@ def setup_logging(log_level, fmt='%(levelname)s: @%(name)s : %(message)s'):
def extract_opts():
- parser = OptionParser()
- parser.add_option("-p", "--port", dest="port", action="store", type=int,
- default=80, metavar="PORT",
- help=("port from which to serve traffic"
- " (default: %default)"))
- parser.add_option("-a", "--addr", dest="address", action="store", type=str,
- default='::', metavar="ADDRESS",
- help=("address from which to serve traffic"
- " (default: %default)"))
- parser.add_option("-f", '--user-data-file', dest='user_data_file',
- action='store', metavar='FILE',
- help=("user data filename to serve back to"
- "incoming requests"))
- (options, args) = parser.parse_args()
- out = dict()
- out['extra'] = args
- out['port'] = options.port
- out['user_data_file'] = None
- out['address'] = options.address
- if options.user_data_file:
- if not os.path.isfile(options.user_data_file):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-p", "--port", dest="port", action="store", type=int,
+ default=80, metavar="PORT",
+ help=("port from which to serve traffic"
+ " (default: %default)"))
+ parser.add_argument("-a", "--addr", dest="address", action="store",
+ type=str, default='::', metavar="ADDRESS",
+ help=("address from which to serve traffic"
+ " (default: %default)"))
+ parser.add_argument("-f", '--user-data-file', dest='user_data_file',
+ action='store', metavar='FILE',
+ help=("user data filename to serve back to"
+ "incoming requests"))
+ parser.add_argument('extra', nargs='*')
+ args = parser.parse_args()
+ out = {'port': args.port, 'address': args.address, 'extra': args.extra,
+ 'user_data_file': None}
+ if args.user_data_file:
+ if not os.path.isfile(args.user_data_file):
parser.error("Option -f specified a non-existent file")
- with open(options.user_data_file, 'rb') as fh:
+ with open(args.user_data_file, 'rb') as fh:
out['user_data_file'] = fh.read()
return out
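
One argparse detail worth keeping in mind when reading the converted help strings: argparse interpolates defaults with the "%(default)s" placeholder, whereas optparse used "%default". A minimal sketch, not part of this change:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-p", "--port", type=int, default=80, metavar="PORT",
                    help="port from which to serve traffic (default: %(default)s)")
parser.add_argument("-a", "--addr", dest="address", default="::", metavar="ADDRESS",
                    help="address from which to serve traffic (default: %(default)s)")
print(parser.parse_args(["-p", "8080"]))   # Namespace(address='::', port=8080)
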
diff --git a/tools/read-version b/tools/read-version
index d9ed30da..3ea9e66e 100755
--- a/tools/read-version
+++ b/tools/read-version
@@ -45,6 +45,19 @@ def which(program):
return None
+def is_gitdir(path):
+ # Return boolean indicating if path is a git tree.
+ git_meta = os.path.join(path, '.git')
+ if os.path.isdir(git_meta):
+ return True
+ if os.path.exists(git_meta):
+ # in a git worktree, .git is a file with 'gitdir: x'
+ with open(git_meta, "rb") as fp:
+ if b'gitdir:' in fp.read():
+ return True
+ return False
+
+
use_long = '--long' in sys.argv or os.environ.get('CI_RV_LONG')
use_tags = '--tags' in sys.argv or os.environ.get('CI_RV_TAGS')
output_json = '--json' in sys.argv
@@ -52,7 +65,7 @@ output_json = '--json' in sys.argv
src_version = ci_version.version_string()
version_long = None
-if os.path.isdir(os.path.join(_tdir, ".git")) and which("git"):
+if is_gitdir(_tdir) and which("git"):
flags = []
if use_tags:
flags = ['--tags']
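
A short usage sketch of the is_gitdir() helper added above; the paths and worktree name are hypothetical, shown only to illustrate the two on-disk layouts it accepts:

print(is_gitdir("/srv/cloud-init"))    # True: ordinary clone, .git is a directory
print(is_gitdir("/srv/ci-worktree"))   # True: linked worktree, .git is a file whose
                                       # single line reads e.g.
                                       # "gitdir: /srv/cloud-init/.git/worktrees/ci-worktree"
print(is_gitdir("/tmp"))               # False (assuming /tmp is not itself a checkout)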