import functools
import logging
import os
from os import path
import yaml
from textwrap import dedent
from jinja2 import Environment, PackageLoader

import log_utils
import utils

LOGGER = logging.getLogger(__name__)
LogTask = functools.partial(log_utils.LogTask, logger=LOGGER)


class LagoCloudInits(object):
    """
    Orchestrates the creation of cloud-init ISO images for a group of VMs.

    Args:
        vms(list of tuple): pairs of (vm, free_dev), where 'free_dev' is
            the name of the disk device the generated ISO will be
            attached to.
        iso_dir(str): directory under which per-VM ISO images are created.
        ssh_public_key(str): public SSH key to inject into every VM.
    """

    def __init__(self, vms, iso_dir, ssh_public_key):
        self._vms = vms
        self._iso_dir = iso_dir
        self._ssh_public_key = ssh_public_key

    def generate(self, collect_only=False, with_threads=False):
        """
        Generate a cloud-init ISO image for every VM.

        Args:
            collect_only(bool): if True, only dump the merged specs,
                do not create ISO images.
            with_threads(bool): if True, create the images in parallel.

        Returns:
            dict: mapping of vm-name -> iso disk spec
        """
        self._create_iso_dir()
        jinja_env = Environment(loader=PackageLoader('lago', 'templates'))

        with LogTask('Creating cloud-init iso images'):
            handlers = [
                LagoCloudInit(
                    vm, self._iso_dir, dev, self._ssh_public_key, jinja_env
                ) for vm, dev in self._vms
            ]

            if with_threads:
                # Bind 'collect_only' explicitly: invoking the bare
                # handlers would silently drop the flag and always run
                # with its default value.
                iso_specs = utils.invoke_different_funcs_in_parallel(
                    *[
                        functools.partial(handler, collect_only)
                        for handler in handlers
                    ]
                )
            else:
                iso_specs = [handler(collect_only) for handler in handlers]

        return dict(iso_specs)

    def _create_iso_dir(self):
        # makedirs (not mkdir) so a missing parent directory does not
        # abort generation.
        if not path.isdir(self._iso_dir):
            os.makedirs(self._iso_dir)
class LagoCloudInit(object):
    """
    Builds a single VM's cloud-init ISO image (NoCloud style: a
    'user-data' and a 'meta-data' file packed into an ISO volume).

    Args:
        vm: the VM object this handler generates the image for.
        iso_dir(str): parent directory; a per-VM subdirectory is created
            under it.
        free_dev(str): disk device name the ISO should be attached to.
        ssh_public_key(str): public SSH key to inject into the VM.
        jinja_env(jinja2.Environment): environment used to render the
            default spec templates from lago/templates.
    """

    def __init__(self, vm, iso_dir, free_dev, ssh_public_key, jinja_env):
        self._vm = vm
        # .get: a VM without a 'cloud-init' section should behave like
        # one with an empty section, not raise KeyError.
        self._cloud_spec = self._vm.spec.get('cloud-init') or {}
        self._iso_dir = path.join(iso_dir, self._vm.name())
        self._iso_path = path.join(
            self._iso_dir, '{}.iso'.format(self._vm.name())
        )
        self._free_dev = free_dev
        self._ssh_public_key = ssh_public_key
        self._jinja_env = jinja_env
        self._mapping = None
        self._create_iso_dir()
        self._set_mapping()

    def _create_iso_dir(self):
        # makedirs (not mkdir) so a missing parent does not abort.
        if not path.isdir(self._iso_dir):
            os.makedirs(self._iso_dir)

    def _set_mapping(self):
        # Per spec-type template variables used when rendering the
        # default jinja2 templates.
        self._mapping = {
            'user-data':
                {
                    'root_password': self._vm.root_password(),
                    'public_key': self._ssh_public_key
                },
            'meta-data': {
                'hostname': self._vm.name()
            },
        }

    def generate(self, collect_only=False):
        """
        Merge the default and user given specs and pack them into an
        ISO image.

        Args:
            collect_only(bool): if True, only dump the merged spec to
                stdout, do not write any file.

        Returns:
            tuple: (vm name, iso disk spec as returned by _gen_iso_spec)
        """
        with LogTask('Creating cloud-init iso for {}'.format(self._vm.name())):
            normalized_spec = self._normalize_spec()
            if collect_only:
                print(yaml.safe_dump(normalized_spec))
            else:
                write_to_iso = []
                # user-data needs a '#cloud-config' header line, so it
                # is written by a dedicated helper.
                user_data = normalized_spec.pop('user-data')
                if user_data:
                    user_data_path = path.join(self._iso_dir, 'user-data')
                    self._write_user_data_to_file(user_data, user_data_path)
                    write_to_iso.append(user_data_path)

                for spec_type, spec in normalized_spec.items():
                    out_path = path.join(self._iso_dir, spec_type)
                    self._write_yaml_to_file(spec, out_path)
                    write_to_iso.append(out_path)

                if write_to_iso:
                    self.gen_iso_image(self._iso_path, write_to_iso)
                else:
                    # was: LOGGER.debug('{}...', format(...)) - a stray
                    # comma that passed 'format(name)' as a logging arg
                    # and never filled the placeholder
                    LOGGER.debug(
                        '{}: no specs were found'.format(self._vm.name())
                    )

            iso_spec = self._vm.name(), self._gen_iso_spec()
            LOGGER.debug(iso_spec)

            return iso_spec

    def _normalize_spec(self):
        """
        For every spec type in 'self._mapping', load the default and the
        user given spec and merge them.

        Returns:
            dict: the merged default and user spec, keyed by spec type
        """
        normalized_spec = {}

        for spec_type, mapping in self._mapping.items():
            normalized_spec[spec_type] = utils.deep_update(
                self._load_default_spec(spec_type, **mapping),
                self._load_given_spec(
                    self._cloud_spec.get(spec_type, {}), spec_type
                )
            )

        return normalized_spec

    def _load_given_spec(self, given_spec, spec_type):
        """
        Load the spec given by the user for 'spec_type'.
        If 'path' is in the spec, the spec is loaded from that file,
        otherwise it is returned unchanged.

        Args:
            given_spec(dict or list): the user given spec
            spec_type(str): the type of the spec ('user-data', ...)

        Returns:
            dict or list: the loaded spec
        """
        # was: 'if not spec_type' - checked the type name instead of
        # the spec itself, so an empty user spec was never detected
        if not given_spec:
            LOGGER.debug('{} spec is empty'.format(spec_type))
            return given_spec

        if 'path' in given_spec:
            LOGGER.debug(
                'loading {} spec from {}'.
                format(spec_type, given_spec['path'])
            )
            given_spec = self._load_spec_from_file(given_spec['path'])

        return given_spec

    def _load_default_spec(self, spec_type, **kwargs):
        """
        Load the default spec_type template from lago.templates and
        render it with jinja2. A distro specific template is preferred;
        the generic '-base' template is the fallback.

        Args:
            spec_type(str): the type of the spec ('user-data', ...)
            kwargs(dict): variables for jinja2 rendering

        Returns:
            dict or list: the rendered default spec
        """
        template_name = 'cloud-init-{}-{}.j2'.format(
            spec_type, self._vm.distro()
        )

        base_template_name = 'cloud-init-{}-base.j2'.format(spec_type)

        template = self._jinja_env.select_template(
            [template_name, base_template_name]
        )

        default_spec = template.render(**kwargs)
        LOGGER.debug(
            'default spec for {}:\n{}'.format(spec_type, default_spec)
        )

        return yaml.safe_load(default_spec)

    def _gen_iso_spec(self):
        # Disk spec describing the generated ISO, in the same shape as
        # the entries of the VM's 'disks' spec list.
        return {
            'type': 'file',
            'path': self._iso_path,
            'dev': self._free_dev,
            'format': 'iso',
            'name': '{}-cloud-init'.format(self._vm.name())
        }

    def __call__(self, *args, **kwargs):
        # Allows handlers to be scheduled directly as callables.
        return self.generate(*args, **kwargs)

    @staticmethod
    def _load_spec_from_file(path_to_file):
        """Load a YAML spec from 'path_to_file'.

        Raises:
            LagoCloudInitParseError: if the file is not valid YAML
        """
        try:
            with open(path_to_file, mode='rt') as f:
                return yaml.safe_load(f)
        except yaml.YAMLError:
            raise LagoCloudInitParseError(path_to_file)

    @staticmethod
    def _write_user_data_to_file(user_data, out_path):
        # cloud-init requires '#cloud-config' to be the first line of
        # the user-data file.
        with open(out_path, mode='wt') as f:
            f.write('#cloud-config')
            f.write('\n')
            yaml.safe_dump(user_data, f)

    @staticmethod
    def _write_yaml_to_file(spec, out_path):
        with open(out_path, mode='wt') as f:
            yaml.safe_dump(spec, f)

    @staticmethod
    def gen_iso_image(out_file_name, files):
        """Pack 'files' into an ISO image named 'out_file_name'.

        The volume id 'cidata' is what cloud-init's NoCloud datasource
        looks for.
        """
        cmd = [
            'genisoimage',
            '-output',
            out_file_name,
            '-volid',
            'cidata',
            '-joliet',
            '-rock',
        ]

        cmd.extend(files)

        utils.run_command_with_validation(cmd)


class LagoCloudInitException(utils.LagoException):
    pass


class LagoCloudInitParseError(LagoCloudInitException):
    def __init__(self, file_path):
        super(LagoCloudInitParseError, self).__init__(
            dedent(
                """
                Failed to parse yaml file {}.
                """.format(file_path)
            )
        )
if do_build: self.build(conf['domains']) + if do_cloud_init: + self.cloud_init(self._virt_env.get_vms()) + self.save() rollback.clear() @@ -1238,6 +1244,22 @@ def build(self, conf): utils.invoke_in_parallel(build.Build.build, builders) + def cloud_init(self, vms): + + vm_obj = [vm for vm in vms.values() if 'cloud-init' in vm._spec] + free_dev = map(lambda vm: utils.allocate_dev(vm.disks).next(), vm_obj) + with open(self.paths.ssh_id_rsa_pub(), mode='rt') as f: + ssh_public_key = f.read() + + iso_specs = lago_cloud_init.LagoCloudInits( + vms=zip(vm_obj, free_dev), + iso_dir=self.paths.cloud_init(), + ssh_public_key=ssh_public_key + ).generate(collect_only=False) + + for vm_name, iso_spec in iso_specs.viewitems(): + vms[vm_name]._spec['disks'].append(iso_spec) + @sdk_utils.expose def export_vms( self, diff --git a/lago/templates/cloud-init-meta-data-base.j2 b/lago/templates/cloud-init-meta-data-base.j2 new file mode 100644 index 00000000..f11c9538 --- /dev/null +++ b/lago/templates/cloud-init-meta-data-base.j2 @@ -0,0 +1,2 @@ +instance-id: {{ hostname }}-001 +local-hostname: {{ hostname }} \ No newline at end of file diff --git a/lago/templates/cloud-init-user-data-base.j2 b/lago/templates/cloud-init-user-data-base.j2 new file mode 100644 index 00000000..11699ce5 --- /dev/null +++ b/lago/templates/cloud-init-user-data-base.j2 @@ -0,0 +1,9 @@ +#cloud-config +users: + - name: root + ssh-authorized-keys: + - {{ public_key }} +chpasswd: + list: + - root:{{ root_password }} + expire: False \ No newline at end of file diff --git a/lago/utils.py b/lago/utils.py index b02701f2..9c62afa8 100644 --- a/lago/utils.py +++ b/lago/utils.py @@ -792,6 +792,68 @@ def ver_cmp(ver1, ver2): ) +def allocate_dev(disks_spec, dev_type='sd'): + """ + Get free devices of type 'dev_type' + + Args: + disks_spec(dict): list of disks + dev_type(str): version string + + Returns: + generator which yields the next free device + """ + taken_devs = set() + for disk in disks_spec: + current_dev = 
def allocate_dev(disks_spec, dev_type='sd'):
    """
    Yield free device names with the 'dev_type' prefix.

    Scans 'disks_spec' for devices already allocated with that prefix
    and yields the unallocated names in alphabetical order
    (e.g. 'sda', 'sdb', ...).

    Args:
        disks_spec(list of dict): disk specs; an allocated device name,
            if any, is held under each spec's 'dev' key.
        dev_type(str): device name prefix, e.g. 'sd' or 'vd'.

    Returns:
        generator which yields the next free device name
    """
    prefix_len = len(dev_type)
    taken_devs = set()
    for disk in disks_spec:
        current_dev = disk.get('dev')
        if current_dev and current_dev.startswith(dev_type):
            try:
                # was hard-coded 'current_dev[2]', which picks the wrong
                # character for any prefix whose length is not 2
                # (e.g. 'xvd')
                taken_devs.add(current_dev[prefix_len])
            except IndexError:
                # entry is exactly the prefix with no trailing letter
                pass

    for code in range(ord('a'), ord('z') + 1):
        letter = chr(code)
        if letter not in taken_devs:
            yield dev_type + letter


def deep_update(a, b):
    """
    Recursively merge dict 'b' into dict 'a' (in place) and return it.

    Lists under the same key are concatenated, nested mappings are
    merged recursively, and for values of any other (or mismatching)
    type the value from 'b' wins.

    Args:
        a(dict): mapping to update
        b(dict): mapping whose values take precedence

    Returns:
        dict: 'a' after merging 'b' into it ('a' unchanged if either
        argument is not a mapping)
    """
    # compat shim: 'Mapping' moved to collections.abc in Python 3
    try:
        from collections.abc import Mapping
    except ImportError:
        from collections import Mapping

    if not (isinstance(a, Mapping) and isinstance(b, Mapping)):
        return a

    for k, v in b.items():
        # was 'type(a[k] == type(v))' - the misplaced parenthesis made
        # the check always truthy (type(False) is bool), so mismatching
        # types (e.g. dict in 'a', list in 'b') raised TypeError on the
        # list concatenation below instead of taking 'b's value
        if k in a and type(a[k]) == type(v):
            if isinstance(v, list):
                a[k] = a[k] + v
            elif isinstance(v, Mapping):
                a[k] = deep_update(a[k], v)
            else:
                a[k] = v
        else:
            a[k] = v

    return a
class TestDeepUpdate(object):
    # Each case is (a, b, expected): merging 'b' into 'a' must yield
    # 'expected'.
    CASES = [
        # lists under the same key are concatenated
        (
            {'run_cmd': [1, 2]},
            {'run_cmd': [3, 4]},
            {'run_cmd': [1, 2, 3, 4]},
        ),
        # keys absent from 'b' are left untouched
        (
            {'run_cmd_1': [1, 2], 'run_cmd_2': ['a,', 'b']},
            {'run_cmd_1': [3, 4]},
            {'run_cmd_1': [1, 2, 3, 4], 'run_cmd_2': ['a,', 'b']},
        ),
        # nested mappings are merged recursively; 'b' wins on scalar
        # conflicts and on type mismatches
        (
            {
                'run_cmd_1': {'aa': [1, 2], 'bb': 100},
                'run_cmd_2': {'a': 1, 'b': 2},
            },
            {
                'run_cmd_1': {'aa': [3, 4], 'bb': 'hi'},
                'run_cmd_2': {'a': 10, 'c': 3},
            },
            {
                'run_cmd_1': {'aa': [1, 2, 3, 4], 'bb': 'hi'},
                'run_cmd_2': {'a': 10, 'b': 2, 'c': 3},
            },
        ),
        # a non-mapping first argument is returned unchanged
        ('a', {}, 'a'),
    ]

    @pytest.mark.parametrize('a, b, expected', CASES)
    def test_deep_update(self, a, b, expected):
        merged = utils.deep_update(a, b)
        assert deep_compare(merged, expected)