Diffstat (limited to 'tools')
-rw-r--r--  tools/.github-cla-signers                           |  59
-rw-r--r--  tools/.lp-to-git-user                               |   2
-rwxr-xr-x  tools/build-on-netbsd                               |  25
-rwxr-xr-x  tools/ds-identify                                   | 153
-rwxr-xr-x  tools/hook-hotplug                                  |  22
-rwxr-xr-x  tools/mock-meta.py                                  | 301
-rwxr-xr-x  tools/read-dependencies                             |  16
-rwxr-xr-x  tools/render-cloudcfg                               |  78
-rwxr-xr-x  tools/run-container                                 |   9
-rwxr-xr-x  tools/run-flake8 (renamed from tools/run-pyflakes)  |   4
-rwxr-xr-x  tools/run-pep8                                      |  21
-rwxr-xr-x  tools/validate-yaml.py                              |   4
-rwxr-xr-x  tools/write-ssh-key-fingerprints                    |  58
13 files changed, 508 insertions, 244 deletions
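The summary above can be regenerated from a cloud-init checkout; the sketch below is illustrative only, and the OLD..NEW commit range is a placeholder since this page does not name the revisions being compared.

    # Regenerate the diffstat shown above, restricted to tools/
    # (OLD and NEW are hypothetical commit identifiers).
    git diff --stat OLD..NEW -- tools/

    # Per-file added/removed line counts, closest to the table format used here.
    git diff --numstat OLD..NEW -- tools/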
diff --git a/tools/.github-cla-signers b/tools/.github-cla-signers index 1e0c3ea4..ac157a2f 100644 --- a/tools/.github-cla-signers +++ b/tools/.github-cla-signers @@ -1,33 +1,92 @@ ader1990 +ajmyyra +akutz AlexBaranowski Aman306 +andgein +andrewbogott +andrewlukoshko +antonyc aswinrajamannar +beantaxi beezly bipinbachhao BirknerAlex +bmhughes candlerb +cawamata +cclauss +chrislalos +ciprianbadescu +citrus-it +cjp256 +dankenigsberg +ddymko dermotbradley dhensby eandersson +eb3095 emmanuelthome +eslerm +esposem +GabrielNagy +giggsoff +hamalq +holmanb +impl +irishgordo izzyleung +j5awry +Jille +JohnKepplers johnsonshi +jordimassaguerpla jqueuniet jsf9k +jshen28 +klausenbusk +KsenijaS landon912 lucasmoura +lucendio lungj +mal +mamercad manuelisimo marlluslustosa matthewruffell +maxnet +megian +mitechie +nazunalika +nicolasbock nishigori +olivierlemasle omBratteng onitake +qubidt +renanrodrigo +rhansen riedel +sarahwzadara +slingamn slyon smoser sshedi +stappersg +steverweber +t-8ch TheRealFalcon +taoyama +timothegenzmer +tnt-dev tomponline tsanghan +tSU-RooT +vorlonofportland +vteratipally +Vultaire WebSpider +xiachen-rh +xnox +zhuzaifangxuele diff --git a/tools/.lp-to-git-user b/tools/.lp-to-git-user index 21171ac6..9b09d568 100644 --- a/tools/.lp-to-git-user +++ b/tools/.lp-to-git-user @@ -4,6 +4,7 @@ "ahosmanmsft": "AOhassan", "andreipoltavchenko": "pa-yourserveradmin-com", "askon": "ask0n", + "b1sandmann": "B1Sandmann", "bitfehler": "bitfehler", "chad.smith": "blackboxsw", "chcheng": "chengcheng-chcheng", @@ -29,6 +30,7 @@ "rjschwei": "rjschwei", "tribaal": "chrisglass", "trstringer": "trstringer", + "vlastimil-holer": "vholer", "vtqanh": "anhvoms", "xiaofengw": "xiaofengw-vmware" } diff --git a/tools/build-on-netbsd b/tools/build-on-netbsd index d2a7067d..0d4eb58b 100755 --- a/tools/build-on-netbsd +++ b/tools/build-on-netbsd @@ -2,17 +2,26 @@ fail() { echo "FAILED:" "$@" 1>&2; exit 1; } +PYTHON="${PYTHON:-python3}" +if [ ! $(which ${PYTHON}) ]; then + echo "Please install python first." + exit 1 +fi +py_prefix=$(${PYTHON} -c 'import sys; print("py%d%d" % (sys.version_info.major, sys.version_info.minor))') + # Check dependencies: depschecked=/tmp/c-i.dependencieschecked pkgs=" bash dmidecode - py37-configobj - py37-jinja2 - py37-oauthlib - py37-requests - py37-setuptools - py37-yaml + ${py_prefix}-configobj + ${py_prefix}-jinja2 + ${py_prefix}-oauthlib + ${py_prefix}-requests + ${py_prefix}-setuptools + ${py_prefix}-netifaces + ${py_prefix}-yaml + ${py_prefix}-jsonschema sudo " [ -f "$depschecked" ] || pkg_add ${pkgs} || fail "install packages" @@ -20,8 +29,8 @@ pkgs=" touch $depschecked # Build the code and install in /usr/pkg/: -python3.7 setup.py build -python3.7 setup.py install -O1 --distro netbsd --skip-build --init-system sysvinit_netbsd +${PYTHON} setup.py build +${PYTHON} setup.py install -O1 --distro netbsd --skip-build --init-system sysvinit_netbsd mv -v /usr/local/etc/rc.d/cloud* /etc/rc.d # Enable cloud-init in /etc/rc.conf: diff --git a/tools/ds-identify b/tools/ds-identify index 496dbb8a..794a96f4 100755 --- a/tools/ds-identify +++ b/tools/ds-identify @@ -1,5 +1,5 @@ #!/bin/sh -# shellcheck disable=2015,2039,2162,2166 +# shellcheck disable=2015,2039,2162,2166,3043 # # ds-identify is configured via /etc/cloud/ds-identify.cfg # or on the kernel command line. It takes the following inputs: @@ -124,8 +124,9 @@ DI_DSNAME="" # this has to match the builtin list in cloud-init, it is what will # be searched if there is no setting found in config. 
DI_DSLIST_DEFAULT="MAAS ConfigDrive NoCloud AltCloud Azure Bigstep \ -CloudSigma CloudStack DigitalOcean AliYun Ec2 GCE OpenNebula OpenStack \ -OVF SmartOS Scaleway Hetzner IBMCloud Oracle Exoscale RbxCloud" +CloudSigma CloudStack DigitalOcean Vultr AliYun Ec2 GCE OpenNebula OpenStack \ +OVF SmartOS Scaleway Hetzner IBMCloud Oracle Exoscale RbxCloud UpCloud VMware \ +LXD" DI_DSLIST="" DI_MODE="" DI_ON_FOUND="" @@ -141,6 +142,7 @@ error() { debug 0 "$@" stderr "$@" } + warn() { set -- "WARN:" "$@" debug 0 "$@" @@ -344,7 +346,6 @@ geom_label_status_as() { return $ret } - read_fs_info_freebsd() { local oifs="$IFS" line="" delim="," local ret=0 labels="" dev="" label="" ftype="" isodevs="" @@ -404,7 +405,6 @@ cached() { [ -n "$1" ] && _RET="$1" && return || return 1 } - detect_virt() { local virt="${UNAVAILABLE}" r="" out="" if [ -d /run/systemd ]; then @@ -450,7 +450,7 @@ detect_virt() { read_virt() { cached "$DI_VIRT" && return 0 detect_virt - DI_VIRT=${_RET} + DI_VIRT="${_RET}" } is_container() { @@ -616,6 +616,7 @@ read_pid1_product_name() { dmi_chassis_asset_tag_matches() { is_container && return 1 + # shellcheck disable=2254 case "${DI_DMI_CHASSIS_ASSET_TAG}" in $1) return 0;; esac @@ -624,6 +625,7 @@ dmi_chassis_asset_tag_matches() { dmi_product_name_matches() { is_container && return 1 + # shellcheck disable=2254 case "${DI_DMI_PRODUCT_NAME}" in $1) return 0;; esac @@ -632,6 +634,7 @@ dmi_product_name_matches() { dmi_product_serial_matches() { is_container && return 1 + # shellcheck disable=2254 case "${DI_DMI_PRODUCT_SERIAL}" in $1) return 0;; esac @@ -765,7 +768,7 @@ check_config() { while read line; do line=${line%%#*} case "$line" in - $key:\ *|$key:) + $key:\ *|"${key}":) ret=${line#*:}; ret=${ret# }; found=$((found+1)) @@ -800,6 +803,12 @@ dscheck_MAAS() { return ${DS_NOT_FOUND} } +# LXD datasource requires active /dev/lxd/sock +# https://linuxcontainers.org/lxd/docs/master/dev-lxd +dscheck_LXD() { + [ -S /dev/lxd/sock ] && return ${DS_FOUND} || return ${DS_NOT_FOUND} +} + dscheck_NoCloud() { local fslabel="cidata CIDATA" d="" case " ${DI_KERNEL_CMDLINE} " in @@ -813,6 +822,7 @@ dscheck_NoCloud() { check_seed_dir "$d" meta-data user-data && return ${DS_FOUND} check_writable_seed_dir "$d" meta-data user-data && return ${DS_FOUND} done + # shellcheck disable=2086 if has_fs_with_label $fslabel; then return ${DS_FOUND} fi @@ -883,6 +893,11 @@ dscheck_RbxCloud() { return ${DS_NOT_FOUND} } +dscheck_UpCloud() { + dmi_sys_vendor_is UpCloud && return ${DS_FOUND} + return ${DS_NOT_FOUND} +} + ovf_vmware_guest_customization() { # vmware guest customization @@ -891,11 +906,16 @@ ovf_vmware_guest_customization() { # we have to have the plugin to do vmware customization local found="" pkg="" pre="${PATH_ROOT}/usr/lib" + local x86="x86_64-linux-gnu" aarch="aarch64-linux-gnu" local ppath="plugins/vmsvc/libdeployPkgPlugin.so" for pkg in vmware-tools open-vm-tools; do if [ -f "$pre/$pkg/$ppath" -o -f "${pre}64/$pkg/$ppath" ]; then found="$pkg"; break; fi + # search in multiarch dir + if [ -f "$pre/$x86/$pkg/$ppath" -o -f "$pre/$aarch/$pkg/$ppath" ]; then + found="$pkg"; break; + fi done [ -n "$found" ] || return 1 # vmware customization is disabled by default @@ -1235,11 +1255,11 @@ dscheck_AltCloud() { ctype="${DI_DMI_PRODUCT_NAME}" fi case "$ctype" in - ${match_rhev}) + "${match_rhev}") probe_floppy || return ${DS_NOT_FOUND} dev="/dev/floppy" ;; - ${match_vsphere}) + "${match_vsphere}") block_dev_with_label CDROM || return ${DS_NOT_FOUND} dev="$_RET" ;; @@ -1305,6 +1325,7 @@ 
is_ibm_provisioning() { msg="config '$pcfg' exists." is_prov=true if [ -f "$logf" ]; then + # shellcheck disable=3013 if [ "$logf" -nt "$PATH_PROC_1_ENVIRON" ]; then msg="$msg log '$logf' from current boot." else @@ -1320,7 +1341,7 @@ is_ibm_provisioning() { } is_ibm_cloud() { - cached "${_IS_IBM_CLOUD}" && return ${_IS_IBM_CLOUD} + cached "${_IS_IBM_CLOUD}" && return "${_IS_IBM_CLOUD}" local ret=1 if [ "$DI_VIRT" = "xen" ]; then if is_ibm_provisioning; then @@ -1345,6 +1366,98 @@ dscheck_IBMCloud() { return ${DS_NOT_FOUND} } +dscheck_Vultr() { + dmi_sys_vendor_is Vultr && return $DS_FOUND + + case " $DI_KERNEL_CMDLINE " in + *\ vultr\ *) return $DS_FOUND ;; + esac + + if [ -f "${PATH_ROOT}/etc/vultr" ]; then + return $DS_FOUND + fi + + return $DS_NOT_FOUND +} + +vmware_has_envvar_vmx_guestinfo() { + [ -n "${VMX_GUESTINFO:-}" ] +} + +vmware_has_envvar_vmx_guestinfo_metadata() { + [ -n "${VMX_GUESTINFO_METADATA:-}" ] +} + +vmware_has_envvar_vmx_guestinfo_userdata() { + [ -n "${VMX_GUESTINFO_USERDATA:-}" ] +} + +vmware_has_envvar_vmx_guestinfo_vendordata() { + [ -n "${VMX_GUESTINFO_VENDORDATA:-}" ] +} + +vmware_has_rpctool() { + command -v vmware-rpctool >/dev/null 2>&1 +} + +vmware_rpctool_guestinfo() { + vmware-rpctool "info-get guestinfo.${1}" 2>/dev/null | grep "[[:alnum:]]" +} + +vmware_rpctool_guestinfo_metadata() { + vmware_rpctool_guestinfo "metadata" +} + +vmware_rpctool_guestinfo_userdata() { + vmware_rpctool_guestinfo "userdata" +} + +vmware_rpctool_guestinfo_vendordata() { + vmware_rpctool_guestinfo "vendordata" +} + +dscheck_VMware() { + # Checks to see if there is valid data for the VMware datasource. + # The data transports are checked in the following order: + # + # * envvars + # * guestinfo + # + # Please note when updating this function with support for new data + # transports, the order should match the order in the _get_data + # function from the file DataSourceVMware.py. + + # Check to see if running in a container and the VMware + # datasource is configured via environment variables. + if vmware_has_envvar_vmx_guestinfo; then + if vmware_has_envvar_vmx_guestinfo_metadata || \ + vmware_has_envvar_vmx_guestinfo_userdata || \ + vmware_has_envvar_vmx_guestinfo_vendordata; then + return "${DS_FOUND}" + fi + fi + + # Do not proceed unless the detected platform is VMware. + if [ ! "${DI_VIRT}" = "vmware" ]; then + return "${DS_NOT_FOUND}" + fi + + # Do not proceed if the vmware-rpctool command is not present. + if ! vmware_has_rpctool; then + return "${DS_NOT_FOUND}" + fi + + # Activate the VMware datasource only if any of the fields used + # by the datasource are present in the guestinfo table. + if { vmware_rpctool_guestinfo_metadata || \ + vmware_rpctool_guestinfo_userdata || \ + vmware_rpctool_guestinfo_vendordata; } >/dev/null 2>&1; then + return "${DS_FOUND}" + fi + + return "${DS_NOT_FOUND}" +} + collect_info() { read_uname_info read_virt @@ -1544,10 +1657,10 @@ parse_policy() { for tok in "$@"; do val=${tok#*=} case "$tok" in - $DI_ENABLED|$DI_DISABLED|search|report) mode=$tok;; + "${DI_ENABLED}"|"${DI_DISABLED}"|search|report) mode=$tok;; found=all|found=first) found=$val;; maybe=all|maybe=none) maybe=$val;; - notfound=$DI_ENABLED|notfound=$DI_DISABLED) notfound=$val;; + notfound="${DI_ENABLED}"|notfound="${DI_DISABLED}") notfound=$val;; found=*) parse_warn found "$val" "${_def_found}" found=${_def_found};; @@ -1628,11 +1741,11 @@ _main() { fi case "$DI_MODE" in - $DI_DISABLED) + "${DI_DISABLED}") debug 1 "mode=$DI_DISABLED. 
returning $ret_dis" return $ret_dis ;; - $DI_ENABLED) + "${DI_ENABLED}") debug 1 "mode=$DI_ENABLED. returning $ret_en" return $ret_en;; search|report) :;; @@ -1672,11 +1785,11 @@ _main() { $dscheck_fn ret="$?" case "$ret" in - $DS_FOUND) + "${DS_FOUND}") debug 1 "check for '$ds' returned found"; exfound_cfg="${exfound_cfg:+${exfound_cfg}${CR}}${_RET_excfg}" found="${found} $ds";; - $DS_MAYBE) + "${DS_MAYBE}") debug 1 "check for '$ds' returned maybe"; exmaybe_cfg="${exmaybe_cfg:+${exmaybe_cfg}${CR}}${_RET_excfg}" maybe="${maybe} $ds";; @@ -1715,16 +1828,16 @@ _main() { local basemsg="No ds found [mode=$DI_MODE, notfound=$DI_ON_NOTFOUND]." local msg="" ret=3 case "$DI_MODE:$DI_ON_NOTFOUND" in - report:$DI_DISABLED) + report:"${DI_DISABLED}") msg="$basemsg Would disable cloud-init [$ret_dis]" ret=$ret_en;; - report:$DI_ENABLED) + report:"${DI_ENABLED}") msg="$basemsg Would enable cloud-init [$ret_en]" ret=$ret_en;; - search:$DI_DISABLED) + search:"${DI_DISABLED}") msg="$basemsg Disabled cloud-init [$ret_dis]" ret=$ret_dis;; - search:$DI_ENABLED) + search:"${DI_ENABLED}") msg="$basemsg Enabled cloud-init [$ret_en]" ret=$ret_en;; *) error "Unexpected result";; diff --git a/tools/hook-hotplug b/tools/hook-hotplug new file mode 100755 index 00000000..35bd3da2 --- /dev/null +++ b/tools/hook-hotplug @@ -0,0 +1,22 @@ +#!/bin/bash +# This file is part of cloud-init. See LICENSE file for license information. + +# This script checks if cloud-init has hotplug hooked and if +# cloud-init has finished; if so invoke cloud-init hotplug-hook + +is_finished() { + [ -e /run/cloud-init/result.json ] +} + +if is_finished; then + # open cloud-init's hotplug-hook fifo rw + exec 3<>/run/cloud-init/hook-hotplug-cmd + env_params=( + --subsystem="${SUBSYSTEM}" + handle + --devpath="${DEVPATH}" + --udevaction="${ACTION}" + ) + # write params to cloud-init's hotplug-hook fifo + echo "${env_params[@]}" >&3 +fi diff --git a/tools/mock-meta.py b/tools/mock-meta.py index 9dd067b9..4ac1ea4f 100755 --- a/tools/mock-meta.py +++ b/tools/mock-meta.py @@ -36,74 +36,74 @@ except ImportError: from http import client as hclient -log = logging.getLogger('meta-server') +log = logging.getLogger("meta-server") EC2_VERSIONS = [ - '1.0', - '2007-01-19', - '2007-03-01', - '2007-08-29', - '2007-10-10', - '2007-12-15', - '2008-02-01', - '2008-09-01', - '2009-04-04', + "1.0", + "2007-01-19", + "2007-03-01", + "2007-08-29", + "2007-10-10", + "2007-12-15", + "2008-02-01", + "2008-09-01", + "2009-04-04", ] BLOCK_DEVS = [ - 'ami', - 'ephemeral0', - 'root', + "ami", + "ephemeral0", + "root", ] -DEV_PREFIX = 'v' # This seems to vary alot depending on images... +DEV_PREFIX = "v" # This seems to vary alot depending on images... 
DEV_MAPPINGS = { - 'ephemeral0': '%sda2' % (DEV_PREFIX), - 'root': '/dev/%sda1' % (DEV_PREFIX), - 'ami': '%sda1' % (DEV_PREFIX), - 'swap': '%sda3' % (DEV_PREFIX), + "ephemeral0": "%sda2" % (DEV_PREFIX), + "root": "/dev/%sda1" % (DEV_PREFIX), + "ami": "%sda1" % (DEV_PREFIX), + "swap": "%sda3" % (DEV_PREFIX), } META_CAPABILITIES = [ - 'aki-id', - 'ami-id', - 'ami-launch-index', - 'ami-manifest-path', - 'ari-id', - 'block-device-mapping/', - 'hostname', - 'instance-action', - 'instance-id', - 'instance-type', - 'local-hostname', - 'local-ipv4', - 'placement/', - 'product-codes', - 'public-hostname', - 'public-ipv4', - 'public-keys/', - 'reservation-id', - 'security-groups' + "aki-id", + "ami-id", + "ami-launch-index", + "ami-manifest-path", + "ari-id", + "block-device-mapping/", + "hostname", + "instance-action", + "instance-id", + "instance-type", + "local-hostname", + "local-ipv4", + "placement/", + "product-codes", + "public-hostname", + "public-ipv4", + "public-keys/", + "reservation-id", + "security-groups", ] PUB_KEYS = { - 'brickies': [ - ('ssh-rsa ' - 'AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZdQueUq5ozemN' - 'Sj8T7enqKHOEaFoU2VoPgGEWC9RyzSQVeyD6s7APMcE82EtmW4skVEgEGSbDc1pvxz' - 'xtchBj78hJP6Cf5TCMFSXw+Fz5rF1dR23QDbN1mkHs7adr8GW4kSWqU7Q7NDwfIrJJ' - 'tO7Hi42GyXtvEONHbiRPOe8stqUly7MvUoN+5kfjBM8Qqpfl2+FNhTYWpMfYdPUnE7' - 'u536WqzFmsaqJctz3gBxH9Ex7dFtrxR4qiqEr9Qtlu3xGn7Bw07/+i1D+ey3ONkZLN' - '+LQ714cgj8fRS4Hj29SCmXp5Kt5/82cD/VN3NtHw== brickies'), - '', + "brickies": [ + "ssh-rsa " + "AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZdQueUq5ozemN" + "Sj8T7enqKHOEaFoU2VoPgGEWC9RyzSQVeyD6s7APMcE82EtmW4skVEgEGSbDc1pvxz" + "xtchBj78hJP6Cf5TCMFSXw+Fz5rF1dR23QDbN1mkHs7adr8GW4kSWqU7Q7NDwfIrJJ" + "tO7Hi42GyXtvEONHbiRPOe8stqUly7MvUoN+5kfjBM8Qqpfl2+FNhTYWpMfYdPUnE7" + "u536WqzFmsaqJctz3gBxH9Ex7dFtrxR4qiqEr9Qtlu3xGn7Bw07/+i1D+ey3ONkZLN" + "+LQ714cgj8fRS4Hj29SCmXp5Kt5/82cD/VN3NtHw== brickies", + "", ], } INSTANCE_TYPES = [ - 'm1.large', - 'm1.medium', - 'm1.small', - 'm1.xlarge', + "m1.large", + "m1.medium", + "m1.small", + "m1.xlarge", ] AVAILABILITY_ZONES = [ @@ -111,13 +111,13 @@ AVAILABILITY_ZONES = [ "us-east-1b", "us-east-1c", "us-east-1d", - 'eu-west-1a', - 'eu-west-1b', - 'us-west-1', + "eu-west-1a", + "eu-west-1b", + "us-west-1", ] PLACEMENT_CAPABILITIES = { - 'availability-zone': AVAILABILITY_ZONES, + "availability-zone": AVAILABILITY_ZONES, } NOT_IMPL_RESPONSE = json.dumps({}) @@ -130,12 +130,14 @@ class WebException(Exception): def yamlify(data): - formatted = yaml.dump(data, - line_break="\n", - indent=4, - explicit_start=True, - explicit_end=True, - default_flow_style=False) + formatted = yaml.dump( + data, + line_break="\n", + indent=4, + explicit_start=True, + explicit_end=True, + default_flow_style=False, + ) return formatted @@ -164,7 +166,7 @@ ID_CHARS = [c for c in (string.ascii_uppercase + string.digits)] def id_generator(size=6, lower=False): - txt = ''.join(random.choice(ID_CHARS) for x in range(size)) + txt = "".join(random.choice(ID_CHARS) for x in range(size)) if lower: return txt.lower() else: @@ -176,14 +178,14 @@ def get_ssh_keys(): keys.update(PUB_KEYS) # Nice helper to add in the 'running' users key (if they have one) - key_pth = os.path.expanduser('~/.ssh/id_rsa.pub') + key_pth = os.path.expanduser("~/.ssh/id_rsa.pub") if not os.path.isfile(key_pth): - key_pth = os.path.expanduser('~/.ssh/id_dsa.pub') + key_pth = os.path.expanduser("~/.ssh/id_dsa.pub") if os.path.isfile(key_pth): - with open(key_pth, 'rb') as fh: + with open(key_pth, "rb") as fh: 
contents = fh.read() - keys[os.getlogin()] = [contents, ''] + keys[os.getlogin()] = [contents, ""] return keys @@ -193,7 +195,6 @@ class HTTPServerV6(HTTPServer): class MetaDataHandler(object): - def __init__(self, opts): self.opts = opts self.instances = {} @@ -206,17 +207,17 @@ class MetaDataHandler(object): return "\n".join(caps) action = params[0] action = action.lower() - if action == 'instance-id': - return 'i-%s' % (id_generator(lower=True)) - elif action == 'ami-launch-index': + if action == "instance-id": + return "i-%s" % (id_generator(lower=True)) + elif action == "ami-launch-index": return "%s" % random.choice([0, 1, 2, 3]) - elif action == 'aki-id': - return 'aki-%s' % (id_generator(lower=True)) - elif action == 'ami-id': - return 'ami-%s' % (id_generator(lower=True)) - elif action == 'ari-id': - return 'ari-%s' % (id_generator(lower=True)) - elif action == 'block-device-mapping': + elif action == "aki-id": + return "aki-%s" % (id_generator(lower=True)) + elif action == "ami-id": + return "ami-%s" % (id_generator(lower=True)) + elif action == "ari-id": + return "ari-%s" % (id_generator(lower=True)) + elif action == "block-device-mapping": nparams = params[1:] if not nparams: return "\n".join(BLOCK_DEVS) @@ -226,23 +227,23 @@ class MetaDataHandler(object): return "\n".join(sorted(list(DEV_MAPPINGS.keys()))) else: return str(subvalue) - elif action in ['hostname', 'local-hostname', 'public-hostname']: + elif action in ["hostname", "local-hostname", "public-hostname"]: # Just echo back there own hostname that they called in on.. return "%s" % (who) - elif action == 'instance-type': + elif action == "instance-type": return random.choice(INSTANCE_TYPES) - elif action == 'ami-manifest-path': - return 'my-amis/spamd-image.manifest.xml' - elif action == 'security-groups': - return 'default' - elif action in ['local-ipv4', 'public-ipv4']: + elif action == "ami-manifest-path": + return "my-amis/spamd-image.manifest.xml" + elif action == "security-groups": + return "default" + elif action in ["local-ipv4", "public-ipv4"]: # Just echo back there own ip that they called in on... - return "%s" % (kwargs.get('client_ip', '10.0.0.1')) - elif action == 'reservation-id': + return "%s" % (kwargs.get("client_ip", "10.0.0.1")) + elif action == "reservation-id": return "r-%s" % (id_generator(lower=True)) - elif action == 'product-codes': + elif action == "product-codes": return "%s" % (id_generator(size=8)) - elif action == 'public-keys': + elif action == "public-keys": nparams = params[1:] # This is a weird kludge, why amazon why!!! # public-keys is messed up, list of /latest/meta-data/public-keys/ @@ -267,51 +268,55 @@ class MetaDataHandler(object): hclient.NOT_FOUND, "Unknown key id %r" % mybe_key ) from e # Extract the possible sub-params - result = traverse(nparams[1:], { - "openssh-key": "\n".join(avail_keys[key_name]), - }) + result = traverse( + nparams[1:], + { + "openssh-key": "\n".join(avail_keys[key_name]), + }, + ) if isinstance(result, (dict)): # TODO(harlowja): This might not be right?? 
result = "\n".join(sorted(result.keys())) if not result: - result = '' + result = "" return result else: contents = [] for (i, key_id) in enumerate(key_ids): contents.append("%s=%s" % (i, key_id)) return "\n".join(contents) - elif action == 'placement': + elif action == "placement": nparams = params[1:] if not nparams: pcaps = sorted(PLACEMENT_CAPABILITIES.keys()) return "\n".join(pcaps) else: pentry = nparams[0].strip().lower() - if pentry == 'availability-zone': + if pentry == "availability-zone": zones = PLACEMENT_CAPABILITIES[pentry] return "%s" % random.choice(zones) else: - return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, '')) + return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, "")) else: - log.warning(("Did not implement action %s, " - "returning empty response: %r"), - action, NOT_IMPL_RESPONSE) + log.warning( + "Did not implement action %s, returning empty response: %r", + action, + NOT_IMPL_RESPONSE, + ) return NOT_IMPL_RESPONSE class UserDataHandler(object): - def __init__(self, opts): self.opts = opts def _get_user_blob(self, **kwargs): blob = None - if self.opts['user_data_file'] is not None: - blob = self.opts['user_data_file'] + if self.opts["user_data_file"] is not None: + blob = self.opts["user_data_file"] if not blob: blob_mp = { - 'hostname': kwargs.get('who', 'localhost'), + "hostname": kwargs.get("who", "localhost"), } lines = [ "#cloud-config", @@ -334,9 +339,8 @@ user_fetcher = None class Ec2Handler(BaseHTTPRequestHandler): - def _get_versions(self): - versions = ['latest'] + EC2_VERSIONS + versions = ["latest"] + EC2_VERSIONS versions = sorted(versions) return "\n".join(versions) @@ -347,33 +351,35 @@ class Ec2Handler(BaseHTTPRequestHandler): def _find_method(self, path): # Puke! (globals) func_mapping = { - 'user-data': user_fetcher.get_data, - 'meta-data': meta_fetcher.get_data, + "user-data": user_fetcher.get_data, + "meta-data": meta_fetcher.get_data, } - segments = [piece for piece in path.split('/') if len(piece)] + segments = [piece for piece in path.split("/") if len(piece)] log.info("Received segments %s", segments) if not segments: return self._get_versions date = segments[0].strip().lower() if date not in self._get_versions(): - raise WebException(hclient.BAD_REQUEST, - "Unknown version format %r" % date) + raise WebException( + hclient.BAD_REQUEST, "Unknown version format %r" % date + ) if len(segments) < 2: raise WebException(hclient.BAD_REQUEST, "No action provided") look_name = segments[1].lower() if look_name not in func_mapping: - raise WebException(hclient.BAD_REQUEST, - "Unknown requested data %r" % look_name) + raise WebException( + hclient.BAD_REQUEST, "Unknown requested data %r" % look_name + ) base_func = func_mapping[look_name] who = self.address_string() ip_from = self.client_address[0] if who == ip_from: # Nothing resolved, so just use 'localhost' - who = 'localhost' + who = "localhost" kwargs = { - 'params': list(segments[2:]), - 'who': who, - 'client_ip': ip_from, + "params": list(segments[2:]), + "who": who, + "client_ip": ip_from, } return functools.partial(base_func, **kwargs) @@ -384,12 +390,13 @@ class Ec2Handler(BaseHTTPRequestHandler): func = self._find_method(self.path) data = func() if not data: - data = '' + data = "" self.send_response(hclient.OK) self.send_header("Content-Type", "binary/octet-stream") self.send_header("Content-Length", len(data)) - log.info("Sending data (len=%s):\n%s", len(data), - format_text(data)) + log.info( + "Sending data (len=%s):\n%s", len(data), format_text(data) + ) self.end_headers() 
self.wfile.write(data.encode()) except RuntimeError as e: @@ -407,7 +414,7 @@ class Ec2Handler(BaseHTTPRequestHandler): self._do_response() -def setup_logging(log_level, fmt='%(levelname)s: @%(name)s : %(message)s'): +def setup_logging(log_level, fmt="%(levelname)s: @%(name)s : %(message)s"): root_logger = logging.getLogger() console_logger = logging.StreamHandler(sys.stdout) console_logger.setFormatter(logging.Formatter(fmt)) @@ -417,27 +424,47 @@ def setup_logging(log_level, fmt='%(levelname)s: @%(name)s : %(message)s'): def extract_opts(): parser = argparse.ArgumentParser() - parser.add_argument("-p", "--port", dest="port", action="store", type=int, - default=80, metavar="PORT", - help=("port from which to serve traffic" - " (default: %default)")) - parser.add_argument("-a", "--addr", dest="address", action="store", - type=str, default='::', metavar="ADDRESS", - help=("address from which to serve traffic" - " (default: %default)")) - parser.add_argument("-f", '--user-data-file', dest='user_data_file', - action='store', metavar='FILE', - help=("user data filename to serve back to" - "incoming requests")) - parser.add_argument('extra', nargs='*') + parser.add_argument( + "-p", + "--port", + dest="port", + action="store", + type=int, + default=80, + metavar="PORT", + help="port from which to serve traffic (default: %default)", + ) + parser.add_argument( + "-a", + "--addr", + dest="address", + action="store", + type=str, + default="::", + metavar="ADDRESS", + help="address from which to serve traffic (default: %default)", + ) + parser.add_argument( + "-f", + "--user-data-file", + dest="user_data_file", + action="store", + metavar="FILE", + help="user data filename to serve back toincoming requests", + ) + parser.add_argument("extra", nargs="*") args = parser.parse_args() - out = {'port': args.port, 'address': args.address, 'extra': args.extra, - 'user_data_file': None} + out = { + "port": args.port, + "address": args.address, + "extra": args.extra, + "user_data_file": None, + } if args.user_data_file: if not os.path.isfile(args.user_data_file): parser.error("Option -f specified a non-existent file") - with open(args.user_data_file, 'rb') as fh: - out['user_data_file'] = fh.read() + with open(args.user_data_file, "rb") as fh: + out["user_data_file"] = fh.read() return out @@ -455,14 +482,14 @@ def run_server(): setup_logging(logging.DEBUG) setup_fetchers(opts) log.info("CLI opts: %s", opts) - server_address = (opts['address'], opts['port']) + server_address = (opts["address"], opts["port"]) server = HTTPServerV6(server_address, Ec2Handler) sa = server.socket.getsockname() log.info("Serving ec2 metadata on %s using port %s ...", sa[0], sa[1]) server.serve_forever() -if __name__ == '__main__': +if __name__ == "__main__": run_server() # vi: ts=4 expandtab diff --git a/tools/read-dependencies b/tools/read-dependencies index 6ad5f701..efa5879c 100755 --- a/tools/read-dependencies +++ b/tools/read-dependencies @@ -23,6 +23,9 @@ DEFAULT_REQUIREMENTS = 'requirements.txt' # Map the appropriate package dir needed for each distro choice DISTRO_PKG_TYPE_MAP = { 'centos': 'redhat', + 'eurolinux': 'redhat', + 'miraclelinux': 'redhat', + 'rocky': 'redhat', 'redhat': 'redhat', 'debian': 'debian', 'ubuntu': 'debian', @@ -39,6 +42,7 @@ MAYBE_RELIABLE_YUM_INSTALL = [ error ":: http proxy in use => forcing the use of fixed URLs in /etc/yum.repos.d/*.repo" sed -i --regexp-extended '/^#baseurl=/s/#// ; /^(mirrorlist|metalink)=/s/^/#/' /etc/yum.repos.d/*.repo sed -i 
's/download\.fedoraproject\.org/dl.fedoraproject.org/g' /etc/yum.repos.d/*.repo + sed -i 's/download\.example/dl.fedoraproject.org/g' /etc/yum.repos.d/*.repo } configure_repos_for_proxy_use n=0; max=10; @@ -64,11 +68,17 @@ ZYPPER_INSTALL = [ '--auto-agree-with-licenses'] DRY_DISTRO_INSTALL_PKG_CMD = { + 'rocky': ['yum', 'install', '--assumeyes'], 'centos': ['yum', 'install', '--assumeyes'], + 'eurolinux': ['yum', 'install', '--assumeyes'], + 'miraclelinux': ['yum', 'install', '--assumeyes'], 'redhat': ['yum', 'install', '--assumeyes'], } DISTRO_INSTALL_PKG_CMD = { + 'rocky': MAYBE_RELIABLE_YUM_INSTALL, + 'eurolinux': MAYBE_RELIABLE_YUM_INSTALL, + 'miraclelinux': MAYBE_RELIABLE_YUM_INSTALL, 'centos': MAYBE_RELIABLE_YUM_INSTALL, 'redhat': MAYBE_RELIABLE_YUM_INSTALL, 'debian': ['apt', 'install', '-y'], @@ -81,6 +91,8 @@ DISTRO_INSTALL_PKG_CMD = { # List of base system packages required to enable ci automation CI_SYSTEM_BASE_PKGS = { 'common': ['make', 'sudo', 'tar'], + 'eurolinux': ['python3-tox'], + 'miraclelinux': ['python3-tox'], 'redhat': ['python3-tox'], 'centos': ['python3-tox'], 'ubuntu': ['devscripts', 'python3-dev', 'libssl-dev', 'tox', 'sbuild'], @@ -273,10 +285,10 @@ def pkg_install(pkg_list, distro, test_distro=False, dry_run=False): cmd = DRY_DISTRO_INSTALL_PKG_CMD[distro] install_cmd.extend(cmd) - if distro in ['centos', 'redhat']: + if distro in ['centos', 'redhat', 'rocky', 'eurolinux']: # CentOS and Redhat need epel-release to access oauthlib and jsonschema subprocess.check_call(install_cmd + ['epel-release']) - if distro in ['suse', 'opensuse', 'redhat', 'centos']: + if distro in ['suse', 'opensuse', 'redhat', 'rocky', 'centos', 'eurolinux']: pkg_list.append('rpm-build') subprocess.check_call(install_cmd + pkg_list) diff --git a/tools/render-cloudcfg b/tools/render-cloudcfg index ed454840..176df36b 100755 --- a/tools/render-cloudcfg +++ b/tools/render-cloudcfg @@ -1,47 +1,65 @@ #!/usr/bin/env python3 -import argparse import os import sys +import argparse -VARIANTS = ["alpine", "amazon", "arch", "centos", "debian", "fedora", - "freebsd", "netbsd", "openbsd", "rhel", "suse", "ubuntu", - "unknown"] - - -if "avoid-pep8-E402-import-not-top-of-file": +def main(): _tdir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) sys.path.insert(0, _tdir) - from cloudinit import templater - from cloudinit import util - from cloudinit.atomic_helper import write_file - + from cloudinit import templater, util # pylint: disable=E0401 -def main(): + VARIANTS = [ + "almalinux", + "alpine", + "amazon", + "arch", + "centos", + "cloudlinux", + "debian", + "eurolinux", + "fedora", + "freebsd", + "gentoo", + "miraclelinux", + "netbsd", + "openbsd", + "openEuler", + "photon", + "rhel", + "suse", + "rocky", + "ubuntu", + "unknown", + "virtuozzo", + ] parser = argparse.ArgumentParser() platform = util.system_info() parser.add_argument( - "--variant", default=platform['variant'], action="store", - help="define the variant.", choices=VARIANTS) + "--variant", + default=platform["variant"], + action="store", + help="define the variant.", + choices=VARIANTS, + ) parser.add_argument( - "template", nargs="?", action="store", - default='./config/cloud.cfg.tmpl', - help="Path to the cloud.cfg template") + "template", + nargs="?", + action="store", + default="./config/cloud.cfg.tmpl", + help="Path to the cloud.cfg template", + ) parser.add_argument( - "output", nargs="?", action="store", default="-", - help="Output file. 
Use '-' to write to stdout") + "output", + nargs="?", + action="store", + default="-", + help="Output file. Use '-' to write to stdout", + ) - args = parser.parse_args() + args = parser.parse_args(sys.argv[1:]) + templater.render_cloudcfg(args.variant, args.template, args.output) - with open(args.template, 'r') as fh: - contents = fh.read() - tpl_params = {'variant': args.variant} - contents = (templater.render_string(contents, tpl_params)).rstrip() + "\n" - util.load_yaml(contents) - if args.output == "-": - sys.stdout.write(contents) - else: - write_file(args.output, contents, omode="w") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tools/run-container b/tools/run-container index 15948e77..e049dfdc 100755 --- a/tools/run-container +++ b/tools/run-container @@ -191,7 +191,7 @@ os_info() { get_os_info() { # run inside container, set OS_NAME, OS_VERSION - # example OS_NAME are centos, debian, opensuse + # example OS_NAME are centos, debian, opensuse, rockylinux [ -n "${OS_NAME:-}" -a -n "${OS_VERSION:-}" ] && return 0 if [ -f /etc/os-release ]; then OS_NAME=$(sh -c '. /etc/os-release; echo $ID') @@ -247,7 +247,7 @@ apt_install() { install_packages() { get_os_info || return case "$OS_NAME" in - centos) yum_install "$@";; + centos|rocky*) yum_install "$@";; opensuse) zypper_install "$@";; debian|ubuntu) apt_install "$@";; *) error "Do not know how to install packages on ${OS_NAME}"; @@ -353,6 +353,7 @@ wait_for_boot() { inside "$name" sh -c "echo proxy=$http_proxy >> /etc/yum.conf" inside "$name" sh -c "sed -i --regexp-extended '/^#baseurl=/s/#// ; /^(mirrorlist|metalink)=/s/^/#/' /etc/yum.repos.d/*.repo" inside "$name" sh -c "sed -i 's/download\.fedoraproject\.org/dl.fedoraproject.org/g' /etc/yum.repos.d/*.repo" + inside "$name" sh -c "sed -i 's/download\.example/dl.fedoraproject.org/g' /etc/yum.repos.d/*.repo" else debug 1 "do not know how to configure proxy on $OS_NAME" fi @@ -485,7 +486,7 @@ main() { local build_pkg="" build_srcpkg="" pkg_ext="" distflag="" case "$OS_NAME" in - centos) distflag="--distro=redhat";; + centos|rocky) distflag="--distro=redhat";; opensuse) distflag="--distro=suse";; esac @@ -494,7 +495,7 @@ main() { build_pkg="./packages/bddeb -d" build_srcpkg="./packages/bddeb -S -d" pkg_ext=".deb";; - centos|opensuse) + centos|opensuse|rocky) build_pkg="./packages/brpm $distflag" build_srcpkg="./packages/brpm $distflag --srpm" pkg_ext=".rpm";; diff --git a/tools/run-pyflakes b/tools/run-flake8 index 179afebe..0021cdb9 100755 --- a/tools/run-pyflakes +++ b/tools/run-flake8 @@ -2,7 +2,7 @@ CR=" " -pycheck_dirs=( "cloudinit/" "tests/" "tools/" ) +pycheck_dirs=( "cloudinit/" "tests/" "tools/" "setup.py" ) set -f if [ $# -eq 0 ]; then @@ -11,7 +11,7 @@ else files=( "$@" ) fi -cmd=( "python3" -m "pyflakes" "${files[@]}" ) +cmd=( "python3" -m "flake8" "${files[@]}" ) echo "Running: " "${cmd[@]}" 1>&2 exec "${cmd[@]}" diff --git a/tools/run-pep8 b/tools/run-pep8 deleted file mode 100755 index 4bd0bbfb..00000000 --- a/tools/run-pep8 +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -pycheck_dirs=( "cloudinit/" "tests/" "tools/" ) - -CR=" -" -[ "$1" = "-v" ] && { verbose="$1"; shift; } || verbose="" - -set -f -if [ $# -eq 0 ]; then unset IFS - IFS="$CR" - files=( "${bin_files[@]}" "${pycheck_dirs[@]}" ) - unset IFS -else - files=( "$@" ) -fi - -myname=${0##*/} -cmd=( "${myname#run-}" $verbose "${files[@]}" ) -echo "Running: " "${cmd[@]}" 1>&2 -exec "${cmd[@]}" diff --git a/tools/validate-yaml.py b/tools/validate-yaml.py index d8bbcfcb..b5d77a97 100755 --- 
a/tools/validate-yaml.py +++ b/tools/validate-yaml.py @@ -12,8 +12,8 @@ if __name__ == "__main__": for fn in sys.argv[1:]: sys.stdout.write("%s" % (fn)) try: - fh = open(fn, 'rb') - yaml.safe_load(fh.read().decode('utf-8')) + fh = open(fn, "rb") + yaml.safe_load(fh.read().decode("utf-8")) fh.close() sys.stdout.write(" - ok\n") except Exception as e: diff --git a/tools/write-ssh-key-fingerprints b/tools/write-ssh-key-fingerprints index 2a3dca7c..9409257d 100755 --- a/tools/write-ssh-key-fingerprints +++ b/tools/write-ssh-key-fingerprints @@ -1,39 +1,61 @@ #!/bin/sh # This file is part of cloud-init. See LICENSE file for license information. -logger_opts="-p user.info -t ec2" -# rhels' version of logger_opts does not support long -# for of -s (--stderr), so use short form. -logger_opts="$logger_opts -s" +do_syslog() { + log_message=$1 + + # rhels' version of logger_opts does not support long + # form of -s (--stderr), so use short form. + logger_opts="-s" + + # Need to end the options list with "--" to ensure that any minus symbols + # in the text passed to logger are not interpreted as logger options. + logger_opts="$logger_opts -p user.info -t cloud-init --" + + # shellcheck disable=SC2086 # logger give error if $logger_opts quoted + logger $logger_opts "$log_message" +} + # Redirect stderr to stdout exec 2>&1 fp_blist=",${1}," key_blist=",${2}," -{ -echo -echo "#############################################################" -echo "-----BEGIN SSH HOST KEY FINGERPRINTS-----" + +fingerprint_header_shown=0 for f in /etc/ssh/ssh_host_*key.pub; do [ -f "$f" ] || continue - read ktype line < "$f" + # shellcheck disable=SC2034 # Unused "line" required for word splitting + read -r ktype line < "$f" # skip the key if its type is in the blacklist [ "${fp_blist#*,$ktype,}" = "${fp_blist}" ] || continue - ssh-keygen -l -f "$f" + if [ $fingerprint_header_shown -eq 0 ]; then + do_syslog "#############################################################" + do_syslog "-----BEGIN SSH HOST KEY FINGERPRINTS-----" + fingerprint_header_shown=1 + fi + do_syslog "$(ssh-keygen -l -f "$f")" done -echo "-----END SSH HOST KEY FINGERPRINTS-----" -echo "#############################################################" - -} | logger $logger_opts +if [ $fingerprint_header_shown -eq 1 ]; then + do_syslog "-----END SSH HOST KEY FINGERPRINTS-----" + do_syslog "#############################################################" +fi -echo "-----BEGIN SSH HOST KEY KEYS-----" +key_header_shown=0 for f in /etc/ssh/ssh_host_*key.pub; do [ -f "$f" ] || continue - read ktype line < "$f" + # shellcheck disable=SC2034 # Unused "line" required for word splitting + read -r ktype line < "$f" # skip the key if its type is in the blacklist [ "${key_blist#*,$ktype,}" = "${key_blist}" ] || continue - cat $f + if [ $key_header_shown -eq 0 ]; then + echo "-----BEGIN SSH HOST KEY KEYS-----" + key_header_shown=1 + fi + cat "$f" done -echo "-----END SSH HOST KEY KEYS-----" +if [ $key_header_shown -eq 1 ]; then + echo "-----END SSH HOST KEY KEYS-----" +fi |
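One behavioural change in the diff above worth illustrating: tools/build-on-netbsd no longer hard-codes python3.7, it honours a PYTHON environment variable and derives the pkgsrc package prefix (py37, py39, ...) from that interpreter's version. A minimal usage sketch follows; the python3.9 path is an assumption and depends on what is installed via pkgsrc on the build host.

    # Default: uses python3 found on PATH.
    ./tools/build-on-netbsd

    # Assumption: a pkgsrc python3.9 is installed; pkg_add will then pull in
    # py39-* dependency packages (configobj, jinja2, yaml, ...) before building.
    PYTHON=python3.9 ./tools/build-on-netbsd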