diff --git a/CHANGES.md b/CHANGES.md
index b2e3bf7..bc46660 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## 0.9.0
+New Features:
+
+- *Hooks* can now be defined as artifact metadata and are executed at the specified lifecycle step.
+  Currently supported hooks are: `pre_create`, `pre_update`, `post_create`, `post_update`
+- Stack *Outputs* are now written to a YAML file under `outputs`, if enabled via `options.StoreOutputs`
+- Removed deprecated support for storing parameters, as these can be reconstructed at any time from the existing, tracked configs
+- Various code cleanups and minor changes to CLI output
+
 ## 0.8.4
 - New Feature: `create-docs` command
   Renders a markdown documentation next to the rendered stack template by parsing parameters and other relevant metadata
diff --git a/cloudbender/__init__.py b/cloudbender/__init__.py
index 708aae7..1b79bfb 100644
--- a/cloudbender/__init__.py
+++ b/cloudbender/__init__.py
@@ -2,7 +2,7 @@ import logging
 
 __author__ = "Stefan Reimer"
 __email__ = "stefan@zero-downtimet.net"
-__version__ = "0.8.5"
+__version__ = "0.9.0"
 
 
 # Set up logging to ``/dev/null`` like a library is supposed to.
diff --git a/cloudbender/cli.py b/cloudbender/cli.py
index 80c67a4..085af01 100644
--- a/cloudbender/cli.py
+++ b/cloudbender/cli.py
@@ -2,6 +2,7 @@ import os
 import sys
 import click
 import functools
+import re
 
 from concurrent.futures import ThreadPoolExecutor, as_completed
 
@@ -91,7 +92,14 @@ def outputs(cb, stack_names, multi, include, values):
     stacks = _find_stacks(cb, stack_names, multi)
 
     for s in stacks:
-        s.get_outputs(include, values)
+        s.get_outputs()
+
+        for output in s.outputs.keys():
+            if re.search(include, output):
+                if values:
+                    print("{}".format(s.outputs[output]))
+                else:
+                    print("{}={}".format(output, s.outputs[output]))
 
 
 @click.command()
diff --git a/cloudbender/core.py b/cloudbender/core.py
index 6d3f962..dde0337 100644
--- a/cloudbender/core.py
+++ b/cloudbender/core.py
@@ -19,6 +19,7 @@ class CloudBender(object):
             "template_path": self.root.joinpath("cloudformation"),
             "hooks_path": self.root.joinpath("hooks"),
             "docs_path": self.root.joinpath("docs"),
+            "outputs_path": self.root.joinpath("outputs"),
             "artifact_paths": [self.root.joinpath("artifacts")]
         }
 
@@ -35,7 +36,7 @@ class CloudBender(object):
 
         # Make sure all paths are abs
         for k, v in self.ctx.items():
-            if k in ['config_path', 'template_path', 'hooks_path', 'docs_path', 'artifact_paths']:
+            if k in ['config_path', 'template_path', 'hooks_path', 'docs_path', 'artifact_paths', 'outputs_path']:
                 if isinstance(v, list):
                     new_list = []
                     for p in v:
diff --git a/cloudbender/exceptions.py b/cloudbender/exceptions.py
index 2604c51..fe64b1b 100644
--- a/cloudbender/exceptions.py
+++ b/cloudbender/exceptions.py
@@ -8,3 +8,7 @@ class ParameterIllegalValue(Exception):
 
 class InvalidProjectDir(Exception):
     """My documentation"""
+
+
+class InvalidHook(Exception):
+    """My documentation"""
diff --git a/cloudbender/hooks.py b/cloudbender/hooks.py
new file mode 100644
index 0000000..1324bc3
--- /dev/null
+++ b/cloudbender/hooks.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+from functools import wraps
+
+from .exceptions import InvalidHook
+
+import logging
+logger = logging.getLogger(__name__)
+
+
+def execute_hooks(hooks, stack):
+    for hook in hooks:
+        tokens = hook.split()
+        if tokens[0] in dir(sys.modules[__name__]):
+            logger.info("Executing hook: {}".format(hook))
+            globals()[tokens[0]](arguments=tokens[1:], stack=stack)
+        else:
+            logger.warning("Unknown hook: {}".format(hook))
+
+
+def exec_hooks(func):
+    @wraps(func)
+    def decorated(self, *args, **kwargs):
+        execute_hooks(self.hooks.get("pre_" + func.__name__, []), self)
+        response = func(self, *args, **kwargs)
+        execute_hooks(self.hooks.get("post_" + func.__name__, []), self)
+        return response
+
+    return decorated
+
+
+# Various hooks
+
+def cmd(stack, arguments):
+    """
+    Generic command via subprocess
+    """
+
+    try:
+        hook = subprocess.run(arguments, stdout=subprocess.PIPE)
+        logger.info(hook.stdout.decode("utf-8"))
+    except TypeError:
+        raise InvalidHook('Invalid argument {}'.format(arguments))
+
+
+def export_outputs_kubezero(stack, arguments):
+    """ Write outputs in yaml for kubezero helm chart """
+
+    logger.info(stack.outputs)
diff --git a/cloudbender/stack.py b/cloudbender/stack.py
index 4568c65..a4b328d 100644
--- a/cloudbender/stack.py
+++ b/cloudbender/stack.py
@@ -16,6 +16,7 @@ from .connection import BotoConnection
 from .jinja import JinjaEnv, read_config_file
 from . import __version__
 from .exceptions import ParameterNotFound, ParameterIllegalValue
+from .hooks import exec_hooks
 
 import cfnlint.core
 
@@ -49,6 +50,7 @@ class Stack(object):
 
         self.tags = {}
         self.parameters = {}
+        self.outputs = {}
         self.options = {'Legacy': False}
         self.region = 'global'
         self.profile = ''
@@ -56,6 +58,7 @@ class Stack(object):
         self.notfication_sns = []
 
         self.id = (self.profile, self.region, self.stackname)
+        self.aws_stackid = None
 
         self.md5 = None
         self.mode = 'CloudBender'
@@ -65,7 +68,9 @@ class Stack(object):
         self.cfn_data = None
         self.connection_manager = BotoConnection(self.profile, self.region)
         self.status = None
+        self.store_outputs = False
         self.dependencies = set()
+        self.hooks = {'post_create': [], 'post_update': [], 'pre_create': [], 'pre_update': []}
         self.default_lock = None
         self.multi_delete = True
 
@@ -111,6 +116,9 @@ class Stack(object):
         if 'Mode' in self.options:
             self.mode = self.options['Mode']
 
+        if 'StoreOutputs' in self.options:
+            self.store_outputs = True
+
         if 'dependencies' in _config:
             for dep in _config['dependencies']:
                 self.dependencies.add(dep)
@@ -226,6 +234,17 @@ class Stack(object):
             else:
                 self.dependencies.add(ref.split('DoT')[0])
 
+        # Extract hooks
+        try:
+            for hook, func in self.cfn_data['Metadata']['Hooks'].items():
+                if hook in ['post_update', 'post_create', 'pre_create', 'pre_update']:
+                    if isinstance(func, list):
+                        self.hooks[hook].extend(func)
+                    else:
+                        self.hooks[hook].append(func)
+        except KeyError:
+            pass
+
     def write_template_file(self):
         if self.cfn_template:
             yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
@@ -298,7 +317,7 @@ class Stack(object):
         logger.info("Passed.")
 
     def get_outputs(self, include='.*', values=False):
-        """ Returns outputs of the stack as key=value """
+        """ gets outputs of the stack """
 
         try:
             stacks = self.connection_manager.call(
@@ -308,19 +327,26 @@ class Stack(object):
                 profile=self.profile, region=self.region)['Stacks']
 
             try:
-                logger.debug("Stack outputs for {} in {}:".format(self.stackname, self.region))
                 for output in stacks[0]['Outputs']:
-                    if re.search(include, output['OutputKey']):
-                        if values:
-                            print("{}".format(output['OutputValue']))
-                        else:
-                            print("{}={}".format(output['OutputKey'], output['OutputValue']))
+                    self.outputs[output['OutputKey']] = output['OutputValue']
+                logger.debug("Stack outputs for {} in {}: {}".format(self.stackname, self.region, self.outputs))
             except KeyError:
                 pass
 
         except ClientError as e:
             raise e
 
+        logger.info('{} {} Outputs:\n{}'.format(self.region, self.stackname, pprint.pformat(self.outputs, indent=2)))
+
+    def write_outputs_file(self):
+        output_file = os.path.join(self.ctx['outputs_path'], self.rel_path, self.stackname + ".yaml")
+        ensure_dir(os.path.join(self.ctx['outputs_path'], self.rel_path))
+
+        # Render outputs as yaml under top level key "Outputs"
+        with open(output_file, 'w') as output_contents:
+            output_contents.write(yaml.dump({'Outputs': self.outputs}))
+        logger.info('Wrote outputs for %s to %s', self.stackname, output_file)
+
     def create_docs(self, template=False):
         """ Read rendered template, parse documentation fragments, eg. parameter description
             and create a markdown doc file for the stack
@@ -369,6 +395,7 @@ class Stack(object):
 
         if 'Parameters' in self.cfn_data:
             _errors = []
+            _found = {}
             self.cfn_parameters = []
             for p in self.cfn_data['Parameters']:
                 # In Piped mode we try to resolve all Parameters first via stack_outputs
@@ -392,7 +419,7 @@ class Stack(object):
                     if 'NoEcho' in self.cfn_data['Parameters'][p] and self.cfn_data['Parameters'][p]['NoEcho']:
                         value = '****'
 
-                    logger.info('{} {} Parameter {}={}'.format(self.region, self.stackname, p, value))
+                    _found[p] = value
 
                 else:
                     # If we have a Default defined in the CFN skip, as AWS will use it
                     if 'Default' not in self.cfn_data['Parameters'][p]:
@@ -401,6 +428,9 @@ class Stack(object):
             if _errors:
                 raise ParameterNotFound('Cannot find value for parameters: {0}'.format(_errors))
 
+            logger.info('{} {} Parameters:\n{}'.format(self.region, self.stackname, pprint.pformat(_found, indent=2)))
+
+    @exec_hooks
     def create(self):
         """Creates a stack """
 
@@ -410,7 +440,7 @@ class Stack(object):
         self.read_template_file()
 
         logger.info('Creating {0} {1}'.format(self.region, self.stackname))
-        self.connection_manager.call(
+        self.aws_stackid = self.connection_manager.call(
             'cloudformation', 'create_stack',
             {'StackName': self.stackname,
              'TemplateBody': self.cfn_template,
@@ -421,8 +451,15 @@ class Stack(object):
              'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
             profile=self.profile, region=self.region)
 
-        return self._wait_for_completion()
+        status = self._wait_for_completion()
+        self.get_outputs()
 
+        if self.store_outputs:
+            self.write_outputs_file()
+
+        return status
+
+    @exec_hooks
     def update(self):
         """Updates an existing stack """
 
@@ -433,7 +470,7 @@ class Stack(object):
         logger.info('Updating {0} {1}'.format(self.region, self.stackname))
 
         try:
-            self.connection_manager.call(
+            self.aws_stackid = self.connection_manager.call(
                 'cloudformation', 'update_stack',
                 {'StackName': self.stackname,
                  'TemplateBody': self.cfn_template,
@@ -450,17 +487,25 @@ class Stack(object):
             else:
                 raise e
 
-        return self._wait_for_completion()
+        status = self._wait_for_completion()
+        self.get_outputs()
 
+        if self.store_outputs:
+            self.write_outputs_file()
+
+        return status
+
+    @exec_hooks
     def delete(self):
         """Deletes a stack """
 
         logger.info('Deleting {0} {1}'.format(self.region, self.stackname))
-        self.connection_manager.call(
+        self.aws_stackid = self.connection_manager.call(
            'cloudformation', 'delete_stack',
            {'StackName': self.stackname},
            profile=self.profile, region=self.region)
 
-        return self._wait_for_completion()
+        status = self._wait_for_completion()
+        return status
 
     def create_change_set(self, change_set_name):
         """ Creates a Change Set with the name ``change_set_name``. """
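
As a usage illustration (not part of the diff itself), the sketch below shows how the new hooks and output storing might be wired up. The hook line `cmd echo stack-created`, the `echo` command, and the exact config layout are assumptions derived from the parsing code above, not snippets from this repository.

```yaml
# Hypothetical template snippet: hooks are read from the rendered template's
# top-level Metadata.Hooks, keyed by lifecycle step. The first token of each
# hook string must name a function defined in cloudbender/hooks.py (e.g. cmd);
# the remaining tokens are passed to it as `arguments`.
Metadata:
  Hooks:
    post_create:
      - cmd echo stack-created           # cmd runs ['echo', 'stack-created'] via subprocess
    post_update: cmd echo stack-updated  # a single string is accepted as well as a list
---
# Hypothetical stack config snippet: per this diff, the presence of the
# StoreOutputs key under options enables writing outputs to
# <outputs_path>/<rel_path>/<stackname>.yaml under the top-level key "Outputs".
options:
  StoreOutputs: True
```

With this in place, `create` and `update` execute the matching `pre_*`/`post_*` hooks around the CloudFormation call and, once the stack operation completes, fetch the stack outputs and (if enabled) write them to the outputs file.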