author     Scott Moser <smoser@brickies.net>   2016-11-04 10:38:15 -0400
committer  Scott Moser <smoser@brickies.net>   2016-11-04 10:38:15 -0400
commit     223e50f51e281e716b2b289c2c9b772c3eaeb563 (patch)
tree       0020c78b04aee768cdc6cb25438e7a48c7d1887e
parent     81c821916d18efeee4339ed326c6d9a5a3d136e9 (diff)
parent     a1cdebdea65ccd827060c823146992bba9debe19 (diff)
download   vyos-cloud-init-223e50f51e281e716b2b289c2c9b772c3eaeb563.tar.gz
           vyos-cloud-init-223e50f51e281e716b2b289c2c9b772c3eaeb563.zip
merge from master at 0.7.8-34-ga1cdebd
-rw-r--r--  .gitignore                                        1
-rw-r--r--  HACKING.rst                                      85
-rw-r--r--  cloudinit/net/cmdline.py                          7
-rw-r--r--  cloudinit/sources/DataSourceAliYun.py            49
-rw-r--r--  cloudinit/sources/DataSourceEc2.py               18
-rw-r--r--  cloudinit/util.py                                 3
-rwxr-xr-x  packages/bddeb                                    1
-rw-r--r--  test-requirements.txt                             1
-rw-r--r--  tests/unittests/test_datasource/test_aliyun.py  148
-rw-r--r--  tests/unittests/test_util.py                      2
-rw-r--r--  tox.ini                                           3
11 files changed, 274 insertions, 44 deletions
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@ dist
 *.pyc
 __pycache__
 .tox
+.coverage
diff --git a/HACKING.rst b/HACKING.rst
index 63a5bde0..4072d0fd 100644
--- a/HACKING.rst
+++ b/HACKING.rst
@@ -3,28 +3,35 @@ Hacking on cloud-init
 =====================
 
 This document describes how to contribute changes to cloud-init.
+It assumes you have a `Launchpad`_ account, and refers to your launchpad user
+as ``LP_USER`` throughout.
 
 Do these things once
 --------------------
 
-* If you have not already, be sure to sign the CCA:
+* To contribute, you must sign the Canonical `contributor license agreement`_
 
-  - `Canonical Contributor Agreement`_
+  If you have already signed it as an individual, your Launchpad user will be
+  listed in the `contributor-agreement-canonical`_ group.  Unfortunately
+  there is no easy way to check if an organization or company you are doing
+  work for has signed.  If you are unsure or have questions, email
+  `Scott Moser <mailto:scott.moser@canonical.com>`_ or ping smoser in
+  ``#cloud-init`` channel via freenode.
 
-* Clone the `LaunchPad`_ repository:
+* Clone the upstream `repository`_ on Launchpad::
 
-    git clone YOUR_USERNAME@git.launchpad.net:cloud-init
+    git clone https://git.launchpad.net/cloud-init
     cd cloud-init
 
-  If you would prefer a bzr style `git clone lp:cloud-init`, see
-  the `Instructions on LaunchPad`_ for more information.
+  There is more information on Launchpad as a git hosting site in
+  `Launchpad git documentation`_.
 
-* Create a new remote pointing to your personal LaunchPad
-  repository::
+* Create a new remote pointing to your personal Launchpad repository
 
-    git remote add YOUR_USERNAME YOUR_USERNAME@git.launchpad.net:~YOUR_USERNAME/cloud-init
+  This is equivalent to 'fork' on github::
 
-.. _Canonical Contributor Agreement: http://www.canonical.com/contributors
+    git remote add LP_USER git+ssh://LP_USER@git.launchpad.net/~LP_USER/cloud-init
+    git push LP_USER master
+
+.. _repository: https://git.launchpad.net/cloud-init
+.. _contributor license agreement: http://www.canonical.com/contributors
+.. _contributor-agreement-canonical: https://launchpad.net/%7Econtributor-agreement-canonical/+members
+.. _Launchpad git documentation: https://help.launchpad.net/Code/Git
 
 Do these things for each feature or bug
 ---------------------------------------
@@ -33,40 +40,60 @@ Do these things for each feature or bug
 
     git checkout -b my-topic-branch
 
-.. _Instructions on launchpad: https://help.launchpad.net/Code/Git
-
 * Make and commit your changes (note, you can make multiple commits,
   fixes, more commits.)::
 
    git commit
 
-* Check pep8 and test, and address any issues::
+* Run unit tests and lint/formatting checks with `tox`_::
 
-    make test pep8
+    tox
 
-* Push your changes to your personal LaunchPad repository::
+* Push your changes to your personal Launchpad repository::
 
-    git push -u YOUR_USERNAME my-topic-branch
+    git push -u LP_USER my-topic-branch
 
 * Use your browser to create a merge request:
 
-  - Open the branch on `LaunchPad`_
+  - Open the branch on Launchpad.
+
+    - You can see a web view of your repository and navigate to the branch at:
+
+      ``https://code.launchpad.net/~LP_USER/cloud-init/``
+
+    - It will typically be at:
+
+      ``https://code.launchpad.net/~LP_USER/cloud-init/+git/cloud-init/+ref/BRANCHNAME``
+
+      for example, here is larsks move-to-git branch: https://code.launchpad.net/~larsks/cloud-init/+git/cloud-init/+ref/feature/move-to-git
+
+  - Click 'Propose for merging'
+  - Select 'lp:cloud-init' as the target repository
+  - Type '``master``' as the Target reference path
+  - Click 'Propose Merge'
+  - On the next page, hit 'Set commit message' and type a git combined git style commit message like::
+
+      Activate the frobnicator.
+
+      The frobnicator was previously inactive and now runs by default.
+      This may save the world some day.  Then, list the bugs you fixed
+      as footers with syntax as shown here.
+
+      The commit message should be one summary line of less than
+      74 characters followed by a blank line, and then one or more
+      paragraphs describing the change and why it was needed.
 
-    - It will typically be at
-      ``https://code.launchpad.net/~YOUR_USERNAME/cloud-init/+git/cloud-init/+ref/BRANCHNAME``
-      for example
-      https://code.launchpad.net/~larsks/cloud-init/+git/cloud-init/+ref/feature/move-to-git
+      This is the message that will be used on the commit when it
+      is sqaushed and merged into trunk.
 
-  - Click 'Propose for merging`
-  - Select ``cloud-init`` as the target repository
-  - Select ``master`` as the target reference path
+      LP: #1
 
-Then, someone on cloud-init-dev (currently `Scott Moser`_ and `Joshua
-Harlow`_) will review your changes and follow up in the merge request.
+Then, someone in the `cloud-init-dev`_ group will review your changes and
+follow up in the merge request.
 
-Feel free to ping and/or join ``#cloud-init`` on freenode (irc) if you
+Feel free to ping and/or join ``#cloud-init`` on freenode irc if you
 have any questions.
 
+.. _tox: https://tox.readthedocs.io/en/latest/
 .. _Launchpad: https://launchpad.net
-.. _Scott Moser: https://launchpad.net/~smoser
-.. _Joshua Harlow: https://launchpad.net/~harlowja
+.. _cloud-init-dev: https://launchpad.net/~cloud-init-dev/+members#active
diff --git a/cloudinit/net/cmdline.py b/cloudinit/net/cmdline.py
index 933317d5..4075a279 100644
--- a/cloudinit/net/cmdline.py
+++ b/cloudinit/net/cmdline.py
@@ -76,12 +76,13 @@ def _klibc_to_config_entry(content, mac_addrs=None):
     data = _load_shell_content(content)
     try:
-        name = data['DEVICE']
+        name = data['DEVICE'] if 'DEVICE' in data else data['DEVICE6']
     except KeyError:
-        raise ValueError("no 'DEVICE' entry in data")
+        raise ValueError("no 'DEVICE' or 'DEVICE6' entry in data")
 
     # ipconfig on precise does not write PROTO
-    proto = data.get('PROTO')
+    # IPv6 config gives us IPV6PROTO, not PROTO.
+    proto = data.get('PROTO', data.get('IPV6PROTO'))
     if not proto:
         if data.get('filename'):
             proto = 'dhcp'
diff --git a/cloudinit/sources/DataSourceAliYun.py b/cloudinit/sources/DataSourceAliYun.py
new file mode 100644
index 00000000..19957212
--- /dev/null
+++ b/cloudinit/sources/DataSourceAliYun.py
@@ -0,0 +1,49 @@
+# vi: ts=4 expandtab
+
+import os
+
+from cloudinit import sources
+from cloudinit.sources import DataSourceEc2 as EC2
+
+DEF_MD_VERSION = "2016-01-01"
+
+
+class DataSourceAliYun(EC2.DataSourceEc2):
+    metadata_urls = ["http://100.100.100.200"]
+
+    def __init__(self, sys_cfg, distro, paths):
+        super(DataSourceAliYun, self).__init__(sys_cfg, distro, paths)
+        self.seed_dir = os.path.join(paths.seed_dir, "AliYun")
+        self.api_ver = DEF_MD_VERSION
+
+    def get_hostname(self, fqdn=False, _resolve_ip=False):
+        return self.metadata.get('hostname', 'localhost.localdomain')
+
+    def get_public_ssh_keys(self):
+        return parse_public_keys(self.metadata.get('public-keys', {}))
+
+
+def parse_public_keys(public_keys):
+    keys = []
+    for key_id, key_body in public_keys.items():
+        if isinstance(key_body, str):
+            keys.append(key_body.strip())
+        elif isinstance(key_body, list):
+            keys.extend(key_body)
+        elif isinstance(key_body, dict):
+            key = key_body.get('openssh-key', [])
+            if isinstance(key, str):
+                keys.append(key.strip())
+            elif isinstance(key, list):
+                keys.extend(key)
+    return keys
+
+# Used to match classes to dependencies
+datasources = [
+    (DataSourceAliYun, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
+]
+
+
+# Return a list of data sources that match this set of dependencies
+def get_datasource_list(depends):
+    return sources.list_from_depends(depends, datasources)
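The three metadata shapes that parse_public_keys() in the new DataSourceAliYun.py accepts are easy to miss when reading the diff. The snippet below is an illustrative sketch only, not part of the commit; it assumes a tree containing this patch is importable and simply exercises each shape:

    # Illustrative sketch, not part of this commit.  Assumes the patched
    # cloud-init (with DataSourceAliYun.py above) is on sys.path.
    from cloudinit.sources.DataSourceAliYun import parse_public_keys

    # a plain key string
    print(parse_public_keys({'key-pair-0': 'ssh-rsa AAAAB3... user@host'}))
    # a list of key strings
    print(parse_public_keys({'key-pair-0': ['ssh-key-0', 'ssh-key-1']}))
    # an EC2-style dict with an 'openssh-key' entry, as AliYun metadata provides
    print(parse_public_keys({'key-pair-0': {'openssh-key': 'ssh-rsa AAAAB3...'}}))

Each call returns a flat list of key strings, which is what get_public_ssh_keys() hands back to the rest of cloud-init.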
diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
index 6fe2a0bb..bc84ef5d 100644
--- a/cloudinit/sources/DataSourceEc2.py
+++ b/cloudinit/sources/DataSourceEc2.py
@@ -31,21 +31,19 @@ from cloudinit import util
 
 LOG = logging.getLogger(__name__)
 
-DEF_MD_URL = "http://169.254.169.254"
-
 # Which version we are requesting of the ec2 metadata apis
 DEF_MD_VERSION = '2009-04-04'
 
-# Default metadata urls that will be used if none are provided
-# They will be checked for 'resolveability' and some of the
-# following may be discarded if they do not resolve
-DEF_MD_URLS = [DEF_MD_URL, "http://instance-data.:8773"]
-
 
 class DataSourceEc2(sources.DataSource):
+    # Default metadata urls that will be used if none are provided
+    # They will be checked for 'resolveability' and some of the
+    # following may be discarded if they do not resolve
+    metadata_urls = ["http://169.254.169.254", "http://instance-data.:8773"]
+
     def __init__(self, sys_cfg, distro, paths):
         sources.DataSource.__init__(self, sys_cfg, distro, paths)
-        self.metadata_address = DEF_MD_URL
+        self.metadata_address = None
         self.seed_dir = os.path.join(paths.seed_dir, "ec2")
         self.api_ver = DEF_MD_VERSION
 
@@ -106,7 +104,7 @@ class DataSourceEc2(sources.DataSource):
             return False
 
         # Remove addresses from the list that wont resolve.
-        mdurls = mcfg.get("metadata_urls", DEF_MD_URLS)
+        mdurls = mcfg.get("metadata_urls", self.metadata_urls)
         filtered = [x for x in mdurls if util.is_resolvable_url(x)]
 
         if set(filtered) != set(mdurls):
@@ -117,7 +115,7 @@
             mdurls = filtered
         else:
             LOG.warn("Empty metadata url list! using default list")
-            mdurls = DEF_MD_URLS
+            mdurls = self.metadata_urls
 
         urls = []
         url2base = {}
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 4b3fd0cb..9a3d3cd7 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -2342,7 +2342,8 @@ def read_dmi_data(key):
     # running dmidecode can be problematic on some arches (LP: #1243287)
     uname_arch = os.uname()[4]
     if not (uname_arch == "x86_64" or
-            (uname_arch.startswith("i") and uname_arch[2:] == "86")):
+            (uname_arch.startswith("i") and uname_arch[2:] == "86") or
+            uname_arch == 'aarch64'):
         LOG.debug("dmidata is not supported on %s", uname_arch)
         return None
 
diff --git a/packages/bddeb b/packages/bddeb
index abb7b607..79ac9768 100755
--- a/packages/bddeb
+++ b/packages/bddeb
@@ -29,6 +29,7 @@ if "avoid-pep8-E402-import-not-top-of-file":
 # file pypi package name to a debian/ubuntu package name.
 STD_NAMED_PACKAGES = [
     'configobj',
+    'coverage',
     'jinja2',
     'jsonpatch',
     'oauthlib',
diff --git a/test-requirements.txt b/test-requirements.txt
index 6bf38940..0e7fc8fb 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,6 +3,7 @@ httpretty>=0.7.1
 mock
 nose
 unittest2
+coverage
 
 # Only needed if you want to know the test times
 # nose-timer
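The DataSourceEc2.py change above moves the default metadata URLs from module-level constants onto the class, so a subclass such as DataSourceAliYun can override just the URL list while reusing the selection logic. The sketch below is a minimal, stand-alone illustration of that pattern; the class and method names are hypothetical, not cloud-init's own:

    # Minimal sketch of the pattern; FakeEc2Source and FakeAliYunSource are
    # hypothetical names, not cloud-init classes.
    class FakeEc2Source(object):
        # class-level default, analogous to DataSourceEc2.metadata_urls
        metadata_urls = ["http://169.254.169.254", "http://instance-data.:8773"]

        def pick_urls(self, ds_cfg):
            # analogous to: mdurls = mcfg.get("metadata_urls", self.metadata_urls)
            return ds_cfg.get("metadata_urls", self.metadata_urls)


    class FakeAliYunSource(FakeEc2Source):
        # the subclass only swaps the default URL list
        metadata_urls = ["http://100.100.100.200"]


    print(FakeEc2Source().pick_urls({}))       # EC2 defaults
    print(FakeAliYunSource().pick_urls({}))    # AliYun default
    print(FakeAliYunSource().pick_urls(
        {"metadata_urls": ["http://example.invalid"]}))  # explicit config still wins

Because the lookup reads self.metadata_urls, a datasource entry in cloud.cfg can still override the list per instance without any subclass changes.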
diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py
new file mode 100644
index 00000000..6f1de072
--- /dev/null
+++ b/tests/unittests/test_datasource/test_aliyun.py
@@ -0,0 +1,148 @@
+import functools
+import httpretty
+import os
+
+from .. import helpers as test_helpers
+from cloudinit import helpers
+from cloudinit.sources import DataSourceAliYun as ay
+
+DEFAULT_METADATA = {
+    'instance-id': 'aliyun-test-vm-00',
+    'eipv4': '10.0.0.1',
+    'hostname': 'test-hostname',
+    'image-id': 'm-test',
+    'launch-index': '0',
+    'mac': '00:16:3e:00:00:00',
+    'network-type': 'vpc',
+    'private-ipv4': '192.168.0.1',
+    'serial-number': 'test-string',
+    'vpc-cidr-block': '192.168.0.0/16',
+    'vpc-id': 'test-vpc',
+    'vswitch-id': 'test-vpc',
+    'vswitch-cidr-block': '192.168.0.0/16',
+    'zone-id': 'test-zone-1',
+    'ntp-conf': {'ntp_servers': [
+        'ntp1.aliyun.com',
+        'ntp2.aliyun.com',
+        'ntp3.aliyun.com']},
+    'source-address': ['http://mirrors.aliyun.com',
+                       'http://mirrors.aliyuncs.com'],
+    'public-keys': {'key-pair-1': {'openssh-key': 'ssh-rsa AAAAB3...'},
+                    'key-pair-2': {'openssh-key': 'ssh-rsa AAAAB3...'}}
+}
+
+DEFAULT_USERDATA = """\
+#cloud-config
+
+hostname: localhost"""
+
+
+def register_mock_metaserver(base_url, data):
+    def register_helper(register, base_url, body):
+        if isinstance(body, str):
+            register(base_url, body)
+        elif isinstance(body, list):
+            register(base_url.rstrip('/'), '\n'.join(body) + '\n')
+        elif isinstance(body, dict):
+            vals = []
+            for k, v in body.items():
+                if isinstance(v, (str, list)):
+                    suffix = k.rstrip('/')
+                else:
+                    suffix = k.rstrip('/') + '/'
+                vals.append(suffix)
+                url = base_url.rstrip('/') + '/' + suffix
+                register_helper(register, url, v)
+            register(base_url, '\n'.join(vals) + '\n')
+
+    register = functools.partial(httpretty.register_uri, httpretty.GET)
+    register_helper(register, base_url, data)
+
+
+class TestAliYunDatasource(test_helpers.HttprettyTestCase):
+    def setUp(self):
+        super(TestAliYunDatasource, self).setUp()
+        cfg = {'datasource': {'AliYun': {'timeout': '1', 'max_wait': '1'}}}
+        distro = {}
+        paths = helpers.Paths({})
+        self.ds = ay.DataSourceAliYun(cfg, distro, paths)
+        self.metadata_address = self.ds.metadata_urls[0]
+        self.api_ver = self.ds.api_ver
+
+    @property
+    def default_metadata(self):
+        return DEFAULT_METADATA
+
+    @property
+    def default_userdata(self):
+        return DEFAULT_USERDATA
+
+    @property
+    def metadata_url(self):
+        return os.path.join(self.metadata_address,
+                            self.api_ver, 'meta-data') + '/'
+
+    @property
+    def userdata_url(self):
+        return os.path.join(self.metadata_address,
+                            self.api_ver, 'user-data')
+
+    def regist_default_server(self):
+        register_mock_metaserver(self.metadata_url, self.default_metadata)
+        register_mock_metaserver(self.userdata_url, self.default_userdata)
+
+    def _test_get_data(self):
+        self.assertEqual(self.ds.metadata, self.default_metadata)
+        self.assertEqual(self.ds.userdata_raw,
+                         self.default_userdata.encode('utf8'))
+
+    def _test_get_sshkey(self):
+        pub_keys = [v['openssh-key'] for (_, v) in
+                    self.default_metadata['public-keys'].items()]
+        self.assertEqual(self.ds.get_public_ssh_keys(), pub_keys)
+
+    def _test_get_iid(self):
+        self.assertEqual(self.default_metadata['instance-id'],
+                         self.ds.get_instance_id())
+
+    def _test_host_name(self):
+        self.assertEqual(self.default_metadata['hostname'],
+                         self.ds.get_hostname())
+
+    @httpretty.activate
+    def test_with_mock_server(self):
+        self.regist_default_server()
+        self.ds.get_data()
+        self._test_get_data()
+        self._test_get_sshkey()
+        self._test_get_iid()
+        self._test_host_name()
+
+    def test_parse_public_keys(self):
+        public_keys = {}
+        self.assertEqual(ay.parse_public_keys(public_keys), [])
+
+        public_keys = {'key-pair-0': 'ssh-key-0'}
+        self.assertEqual(ay.parse_public_keys(public_keys),
+                         [public_keys['key-pair-0']])
+
+        public_keys = {'key-pair-0': 'ssh-key-0', 'key-pair-1': 'ssh-key-1'}
+        self.assertEqual(set(ay.parse_public_keys(public_keys)),
+                         set([public_keys['key-pair-0'],
+                              public_keys['key-pair-1']]))
+
+        public_keys = {'key-pair-0': ['ssh-key-0', 'ssh-key-1']}
+        self.assertEqual(ay.parse_public_keys(public_keys),
+                         public_keys['key-pair-0'])
+
+        public_keys = {'key-pair-0': {'openssh-key': []}}
+        self.assertEqual(ay.parse_public_keys(public_keys), [])
+
+        public_keys = {'key-pair-0': {'openssh-key': 'ssh-key-0'}}
+        self.assertEqual(ay.parse_public_keys(public_keys),
+                         [public_keys['key-pair-0']['openssh-key']])
+
+        public_keys = {'key-pair-0': {'openssh-key': ['ssh-key-0',
+                                                      'ssh-key-1']}}
+        self.assertEqual(ay.parse_public_keys(public_keys),
+                         public_keys['key-pair-0']['openssh-key'])
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 881509aa..f6a8ab75 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -386,7 +386,7 @@ class TestReadDMIData(helpers.FilesystemMockingTestCase):
         dmi_name = 'use-dmidecode'
         self._configure_dmidecode_return(dmi_name, dmi_val)
 
-        expected = {'armel': None, 'aarch64': None, 'x86_64': dmi_val}
+        expected = {'armel': None, 'aarch64': dmi_val, 'x86_64': dmi_val}
         found = {}
         # we do not run the 'dmi-decode' binary on some arches
         # verify that anything requested that is not in the sysfs dir
diff --git a/tox.ini b/tox.ini
--- a/tox.ini
+++ b/tox.ini
@@ -19,6 +19,9 @@ setenv =
 
 [testenv:py3]
 basepython = python3
+commands = {envpython} -m nose \
+    {posargs:--with-coverage --cover-erase \
+        --cover-branches --cover-package=cloudinit --cover-inclusive}
 
 [testenv:py26]
 commands = nosetests {posargs:tests}
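register_mock_metaserver() in test_aliyun.py above registers one httpretty URL per metadata node: dict keys become index entries (with a trailing slash when the value is itself a dict), while strings and lists become leaf bodies. The function below is a hypothetical, dependency-free rendering of that mapping, useful for seeing which URLs the mocked server would answer; it is not part of the commit or of cloud-init:

    # Hypothetical helper mirroring the URL layout produced by
    # register_mock_metaserver() in test_aliyun.py above.
    def metadata_url_tree(base_url, data):
        urls = {}
        if isinstance(data, str):
            urls[base_url] = data
        elif isinstance(data, list):
            urls[base_url.rstrip('/')] = '\n'.join(data) + '\n'
        elif isinstance(data, dict):
            index = []
            for key, value in data.items():
                suffix = key if isinstance(value, (str, list)) else key + '/'
                index.append(suffix)
                urls.update(metadata_url_tree(
                    base_url.rstrip('/') + '/' + suffix, value))
            # the directory URL itself returns the index listing
            urls[base_url] = '\n'.join(index) + '\n'
        return urls


    tree = metadata_url_tree(
        'http://100.100.100.200/2016-01-01/meta-data/',
        {'instance-id': 'aliyun-test-vm-00',
         'public-keys': {'key-pair-1': {'openssh-key': 'ssh-rsa AAAAB3...'}}})
    for url in sorted(tree):
        print(url)

Walking a nested dict this way shows why test_with_mock_server() can exercise get_data(), ssh keys, instance-id, and hostname against a single DEFAULT_METADATA structure.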