OS environment variable support in config files, new jinja functions, cleanup
parent ce14bd212f
commit 68f9ca68d1
8  CHANGES.md  Normal file
@@ -0,0 +1,8 @@
+# Changelog
+
+## 0.4.0
+- support for environment variables in any config file
+  Example: `profile: {{ env.AWS_DEFAULT_PROFILE }}`
+- support for jinja `{% do %}` extension
+- support for inline yaml style complex data definitions, via custom jinja filter `yaml`
+- missing variables now cause warnings, but rendering continues with ''
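The features above can be illustrated with a small, self-contained sketch using plain jinja2 and PyYAML (not code from this commit: the template text, tag names and the default profile value are invented, and the stock `jinja2.Undefined` stands in for the `SilentUndefined` class added below, which additionally logs a warning):

```python
# Illustration only: template contents and values are made up for this example.
import os
import yaml
import jinja2

os.environ.setdefault("AWS_DEFAULT_PROFILE", "dev")   # example value

jenv = jinja2.Environment(
    trim_blocks=True,
    lstrip_blocks=True,
    undefined=jinja2.Undefined,        # undefined variables render as '' (SilentUndefined also warns)
    extensions=["jinja2.ext.do"],      # enables {% do %}
)
jenv.filters["yaml"] = yaml.safe_load  # inline YAML-style data via the `yaml` filter

template = jenv.from_string(
    "{% set tags = '{Env: dev, Team: ops}' | yaml %}\n"
    "{% do tags.update({'Owner': 'me'}) %}\n"
    "profile: {{ env.AWS_DEFAULT_PROFILE }}\n"
    "tags: {{ tags }}\n"
    "missing: '{{ not_defined }}'\n"
)

print(template.render(env=os.environ))
# profile: dev
# tags: {'Env': 'dev', 'Team': 'ops', 'Owner': 'me'}
# missing: ''
```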
@@ -2,7 +2,7 @@ import logging

__author__ = 'Stefan Reimer'
__email__ = 'stefan@zero-downtimet.net'
-__version__ = '0.3.3'
+__version__ = '0.4.0'


# Set up logging to ``/dev/null`` like a library is supposed to.
@@ -187,3 +187,6 @@ cli.add_command(provision)
cli.add_command(delete)
cli.add_command(clean)
cli.add_command(create_change_set)

if __name__ == '__main__':
    cli(obj={})
@@ -1,8 +1,9 @@
import os
import logging

-from .utils import read_yaml_file, ensure_dir
+from .utils import ensure_dir
from .stackgroup import StackGroup
+from .jinja import read_config_file

logger = logging.getLogger(__name__)

@@ -20,7 +21,7 @@ class CloudBender(object):
            "artifact_paths": [os.path.join(self.root, "artifacts")]
        }
        self.default_settings = {
-            'vars': {'Mode': 'FortyTwo'}
+            'vars': {'Mode': 'CloudBender'}
        }

        if not os.path.isdir(self.root):
@@ -30,7 +31,7 @@ class CloudBender(object):
        """Load the <path>/config.yaml, <path>/*.yaml as stacks, sub-folders are child groups """

        # Read top level config.yaml and extract CloudBender CTX
-        _config = read_yaml_file(os.path.join(self.ctx['config_path'], 'config.yaml'))
+        _config = read_config_file(os.path.join(self.ctx['config_path'], 'config.yaml'))
        if _config and _config.get('CloudBender'):
            self.ctx.update(_config.get('CloudBender'))

@@ -1,8 +1,13 @@
import os
import io
import gzip
-import jinja2
import re
import base64
+import yaml
+
+import jinja2
+from jinja2.utils import missing, object_type_repr
+from jinja2._compat import string_types
+
import pyminifier.token_utils
import pyminifier.minification
@@ -26,6 +31,9 @@ def cloudbender_ctx(context, cb_ctx={}, reset=False, command=None, args={}):
    if 'dependencies' not in cb_ctx:
        cb_ctx['dependencies'] = set()

+    if 'mandatory_parameters' not in cb_ctx:
+        cb_ctx['mandatory_parameters'] = set()
+
    if command == 'get_dependencies':
        _deps = sorted(list(cb_ctx['dependencies']))
        if _deps:
@@ -179,11 +187,40 @@ def pyminify(source, obfuscate=False, minify=True):
    return minified_source


+def parse_yaml(block):
+    return yaml.safe_load(block)
+
+
+class SilentUndefined(jinja2.Undefined):
+    '''
+    Log a warning for undefined variables but continue rendering
+    '''
+    def _fail_with_undefined_error(self, *args, **kwargs):
+        if self._undefined_hint is None:
+            if self._undefined_obj is missing:
+                hint = '%r is undefined' % self._undefined_name
+            elif not isinstance(self._undefined_name, string_types):
+                hint = '%s has no element %r' % (
+                    object_type_repr(self._undefined_obj),
+                    self._undefined_name
+                )
+            else:
+                hint = '%r has no attribute %r' % (
+                    object_type_repr(self._undefined_obj),
+                    self._undefined_name
+                )
+        else:
+            hint = self._undefined_hint
+
+        logger.warning("Undefined variable: {}".format(hint))
+        return ''
+
+
def JinjaEnv(template_locations=[]):
    jenv = jinja2.Environment(trim_blocks=True,
                              lstrip_blocks=True,
-                              undefined=jinja2.Undefined,
-                              extensions=['jinja2.ext.loopcontrols'])
+                              undefined=SilentUndefined,
+                              extensions=['jinja2.ext.loopcontrols', 'jinja2.ext.do'])

    jinja_loaders = []
    for _dir in template_locations:
@@ -198,9 +235,34 @@ def JinjaEnv(template_locations=[]):

    jenv.filters['regex_replace'] = regex_replace
    jenv.filters['pyminify'] = pyminify
+    jenv.filters['yaml'] = parse_yaml

    jenv.tests['match'] = match
    jenv.tests['regex'] = regex
    jenv.tests['search'] = search

    return jenv
+
+
+def read_config_file(path, jinja_args=None):
+    """ reads a yaml config file, passes it through jinja and returns the data structure """
+
+    if os.path.exists(path):
+        logger.debug("Reading config file: {}".format(path))
+        try:
+            jenv = jinja2.Environment(
+                loader=jinja2.FileSystemLoader(os.path.dirname(path)),
+                undefined=jinja2.StrictUndefined,
+                extensions=['jinja2.ext.loopcontrols'])
+            template = jenv.get_template(os.path.basename(path))
+            rendered_template = template.render(
+                env=os.environ
+            )
+            data = yaml.safe_load(rendered_template)
+            if data:
+                return data
+
+        except Exception as e:
+            logger.exception("Error reading config file: {} ({})".format(path, e))
+
+    return {}
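For orientation, a hypothetical call of the new `read_config_file` helper defined above (the config path, its contents and the environment variable value are invented for this sketch; only the function itself comes from this commit):

```python
# Hypothetical usage sketch -- path, file contents and env value are examples only.
import os
import tempfile

os.environ["AWS_DEFAULT_PROFILE"] = "dev"            # example value

cfg_dir = tempfile.mkdtemp()
cfg_path = os.path.join(cfg_dir, "config.yaml")
with open(cfg_path, "w") as f:
    f.write("profile: {{ env.AWS_DEFAULT_PROFILE }}\n")

print(read_config_file(cfg_path))                    # {'profile': 'dev'}
```

Note that `read_config_file` itself uses `jinja2.StrictUndefined`, so an undefined variable in a config file raises inside the `try` block, is logged, and the function falls through to `return {}`.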
@@ -1,6 +1,5 @@
import os
import re
import semver
import hashlib
import oyaml as yaml
import json
@@ -10,12 +9,11 @@ import subprocess
from datetime import datetime, timedelta
from dateutil.tz import tzutc

import botocore
from botocore.exceptions import ClientError

-from .utils import read_yaml_file, dict_merge
+from .utils import dict_merge
from .connection import BotoConnection
-from .jinja import JinjaEnv
+from .jinja import JinjaEnv, read_config_file
from . import __version__

import cfnlint.core
@@ -61,7 +59,7 @@ class Stack(object):
        logger.debug("<Stack {}: {}>".format(self.id, vars(self)))

    def read_config(self):
-        _config = read_yaml_file(self.path)
+        _config = read_config_file(self.path)
        for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides"]:
            if p in _config:
                setattr(self, p, _config[p])
@@ -82,27 +80,6 @@ class Stack(object):

        logger.debug("Stack {} added.".format(self.id))

-    def check_fortytwo(self, template):
-        # Fail early if 42 is enabled but not available
-        if self.cfn['Mode'] == "FortyTwo" and self.template != 'FortyTwo':
-            try:
-                response = self.connection_manager.call(
-                    'lambda', 'get_function', {'FunctionName': 'FortyTwo'},
-                    profile=self.profile, region=self.region)
-
-                # Also verify version in case specified in the template's metadata
-                try:
-                    req_ver = template['Metadata']['FortyTwo']['RequiredVersion']
-                    if 'Release' not in response['Tags']:
-                        raise("Lambda FortyTwo has no Release Tag! Required: {}".format(req_ver))
-                    elif semver.compare(req_ver, re.sub("-.*$", '', response['Tags']['Release'])) > 0:
-                        raise("Lambda FortyTwo version is not recent enough! Required: {} vs. Found: {}".format(req_ver, response['Tags']['Release']))
-                except KeyError:
-                    pass
-
-            except botocore.exceptions.ClientError:
-                raise("No Lambda FortyTwo found in your account")
-
    def render(self):
        """Renders the cfn jinja template for this stack"""

@@ -266,7 +243,7 @@ class Stack(object):
            try:
                value = str(self.parameters[p])
                self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})
-                logger.info('Got {} = {}'.format(p, value))
+                logger.info('{} {} Parameter {}={}'.format(self.region, self.stackname, p, value))
            except KeyError:
                # If we have a Default defined in the CFN skip, as AWS will use it
                if 'Default' in self.data['Parameters'][p]:
@@ -311,7 +288,7 @@ class Stack(object):
                'TemplateBody': self.cfn_template,
                'Parameters': self.cfn_parameters,
                'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
-                'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
+                'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
            profile=self.profile, region=self.region)

        return self._wait_for_completion()
@@ -332,7 +309,7 @@ class Stack(object):
                'TemplateBody': self.cfn_template,
                'Parameters': self.cfn_parameters,
                'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
-                'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
+                'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
            profile=self.profile, region=self.region)

        except ClientError as e:
@@ -2,7 +2,8 @@ import os
import glob
import logging

-from .utils import read_yaml_file, dict_merge
+from .utils import dict_merge
+from .jinja import read_config_file
from .stack import Stack

logger = logging.getLogger(__name__)
@@ -36,7 +37,7 @@ class StackGroup(object):
            return None

        # First read config.yaml if present
-        _config = read_yaml_file(os.path.join(self.path, 'config.yaml'))
+        _config = read_config_file(os.path.join(self.path, 'config.yaml'))

        # Stack Group name if not explicit via config is derived from subfolder, or in case of root object the parent folder
        if "stackgroupname" in _config:
@@ -119,40 +120,3 @@ class StackGroup(object):
                return s

        return None
-
-    # TODO: Integrate properly into stackgroup class, broken for now
-    # stackoutput inspection
-    def BROKEN_inspect_stacks(self, conglomerate):
-        # Get all stacks of the conglomertate
-        response = self.connection_manager.call('cloudformation', 'decribe_stacks')
-
-        stacks = []
-        for stack in response['Stacks']:
-            for tag in stack['Tags']:
-                if tag['Key'] == 'Conglomerate' and tag['Value'] == conglomerate:
-                    stacks.append(stack)
-                    break
-
-        # Gather stack outputs, use Tag['Artifact'] as name space: Artifact.OutputName, same as FortyTwo
-        stack_outputs = {}
-        for stack in stacks:
-            # If stack has an Artifact Tag put resources into the namespace Artifact.Resource
-            artifact = None
-            for tag in stack['Tags']:
-                if tag['Key'] == 'Artifact':
-                    artifact = tag['Value']
-
-            if artifact:
-                key_prefix = "{}.".format(artifact)
-            else:
-                key_prefix = ""
-
-            try:
-                for output in stack['Outputs']:
-                    # Gather all outputs of the stack into one dimensional key=value structure
-                    stack_outputs[key_prefix + output['OutputKey']] = output['OutputValue']
-            except KeyError:
-                pass
-
-        # Add outputs from stacks into the data for jinja under StackOutput
-        return stack_outputs
@@ -1,5 +1,4 @@
import os
import yaml
import copy
import logging
import boto3
@@ -7,21 +6,6 @@ import boto3
logger = logging.getLogger(__name__)


-def read_yaml_file(path):
-    data = {}
-    if os.path.exists(path):
-        with open(path, 'r') as config_file_contents:
-            logger.debug("Reading config file: {}".format(path))
-            try:
-                _data = yaml.load(config_file_contents.read())
-                if _data:
-                    data.update(_data)
-            except Exception as e:
-                logger.warning("Error reading config file: {} ({})".format(path, e))
-
-    return data
-
-
def dict_merge(a, b):
    """ Deep merge to allow proper inheritance for config files"""
    if not a: