Release 0.5.0

parent 55b67909e4
commit badb7b02c8
@@ -1,5 +1,13 @@
# Changelog

## 0.5.0

- new custom Jinja filter `sub`, works the same as `re.sub` (usage sketch below)
- added custom Jinja filter `inline_yaml` to set data from an inline YAML block
- disabled SilentUndefined
- added the Jinja2 extensions `do` and `loopcontrols`
- new custom Jinja function `option` to access options at render time, incl. default support for nested objects
- removed the custom Jinja functions around the old remote Ref handling

## 0.4.2

- silence warnings from latest PyYAML 5.1
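A minimal sketch of the new template-side features; the module path `cloudbender.jinja` and all option values below are illustrative assumptions, not part of this commit:

    from cloudbender.jinja import JinjaEnv  # module path assumed

    jenv = JinjaEnv()
    jenv.globals['_config'] = {'options': {'Mode': 'CloudBender', 'Network': {'AzCount': 3}}}

    tmpl = jenv.from_string(
        "Mode: {{ option('Mode', 'CloudBender') }}\n"         # option() with a default
        "AZs: {{ option('Network.AzCount', 2) }}\n"           # nested lookup via dotted path
        "Missing: {{ option('Network.NoSuchKey', 2) }}\n"     # falls back to the default
        "Name: {{ 'my.stack' | sub('[.]', '-') }}\n"          # re.sub-style filter
        "{% set net = 'Cidr: 10.0.0.0/16' | inline_yaml %}"   # parse inline YAML into data
        "Cidr: {{ net.Cidr }}\n")

    print(tmpl.render())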
Makefile
@@ -4,7 +4,7 @@ test:
	tox

clean:
	rm -rf .tox .cache dist
	rm -rf .tox .cache .coverage .eggs cloudbender.egg-info .pytest_cache dist

dist:
	python setup.py bdist_wheel --universal
@@ -2,7 +2,7 @@ import logging

__author__ = 'Stefan Reimer'
__email__ = 'stefan@zero-downtimet.net'
__version__ = '0.4.2'
__version__ = '0.5.0'


# Set up logging to ``/dev/null`` like a library is supposed to.
@@ -20,9 +20,6 @@ class CloudBender(object):
            "parameter_path": os.path.join(self.root, "parameters"),
            "artifact_paths": [os.path.join(self.root, "artifacts")]
        }
        self.default_settings = {
            'vars': {'Mode': 'CloudBender'}
        }

        if not os.path.isdir(self.root):
            raise "Check '{0}' exists and is a valid project folder.".format(root_path)
@@ -55,7 +52,7 @@ class CloudBender(object):
            ensure_dir(self.ctx[k])

        self.sg = StackGroup(self.ctx['config_path'], self.ctx)
        self.sg.read_config(self.default_settings)
        self.sg.read_config()

        self.all_stacks = self.sg.get_stacks()
@@ -8,6 +8,8 @@ import yaml

import jinja2
from jinja2.utils import missing, object_type_repr
from jinja2._compat import string_types
from jinja2.filters import make_attrgetter
from jinja2.runtime import Undefined

import pyminifier.token_utils
import pyminifier.minification
@@ -15,76 +17,31 @@ import pyminifier.compression
import pyminifier.obfuscate
import types


import logging

logger = logging.getLogger(__name__)


@jinja2.contextfunction
def cloudbender_ctx(context, cb_ctx={}, reset=False, command=None, args={}):
def option(context, attribute, default_value=u'', source='options'):
    """ Get attribute from options data structure, default_value otherwise """
    environment = context.environment
    options = environment.globals['_config'][source]

    # Reset state
    if reset:
        cb_ctx.clear()
        return
    if not attribute:
        return default_value

    if 'dependencies' not in cb_ctx:
        cb_ctx['dependencies'] = set()
    try:
        getter = make_attrgetter(environment, attribute)
        value = getter(options)

    if 'mandatory_parameters' not in cb_ctx:
        cb_ctx['mandatory_parameters'] = set()
        if isinstance(value, Undefined):
            return default_value

    if command == 'get_dependencies':
        _deps = sorted(list(cb_ctx['dependencies']))
        if _deps:
            logger.debug("Stack depencies: {}".format(','.join(_deps)))
        return _deps
        return value

    elif command == 'add_dependency':
        try:
            cb_ctx['dependencies'].add(args['dep'])
            logger.debug("Adding stack depency to {}".format(args['dep']))
        except KeyError:
            pass

    else:
        raise("Unknown command")


@jinja2.contextfunction
def get_custom_att(context, att=None, ResourceName="FortyTwo", attributes={}, reset=False, dump=False):
    """ Returns the rendered required fragement and also collects all foreign
        attributes for the specified CustomResource to include them later in
        the actual CustomResource include property """

    # Reset state
    if reset:
        attributes.clear()
        return

    # return all registered attributes
    if dump:
        return attributes

    # If called with an attribute, return fragement and register dependency
    if att:
        config = context.get_all()['_config']

        if ResourceName not in attributes:
            attributes[ResourceName] = set()

        attributes[ResourceName].add(att)
        if ResourceName == 'FortyTwo':
            cloudbender_ctx(context, command='add_dependency', args={'dep': att.split('.')[0]})

        if config['cfn']['Mode'] == "FortyTwo":
            return('{{ "Fn::GetAtt": ["{0}", "{1}"] }}'.format(ResourceName, att))
        elif config['cfn']['Mode'] == "AWSImport" and ResourceName == "FortyTwo":
            # AWS only allows - and :, so replace '.' with ":"
            return('{{ "Fn::ImportValue": {{ "Fn::Sub": "${{Conglomerate}}:{0}" }} }}'.format(att.replace('.', ':')))
        else:
            # We need to replace . with some PureAlphaNumeric thx AWS ...
            return('{{ Ref: {0} }}'.format(att.replace('.', 'DoT')))
    except (jinja2.exceptions.UndefinedError):
        return default_value


@jinja2.contextfunction
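For reference, the fragments the removed get_custom_att helper used to emit, reproduced as a small illustrative snippet (the attribute name is made up; the format strings are the ones from the removed code above):

    att = 'vpc.VpcId'
    # Mode == "FortyTwo": resolved later by the FortyTwo custom resource
    print('{{ "Fn::GetAtt": ["{0}", "{1}"] }}'.format('FortyTwo', att))
    # Mode == "AWSImport": cross-stack export; '.' is not allowed, so it becomes ':'
    print('{{ "Fn::ImportValue": {{ "Fn::Sub": "${{Conglomerate}}:{0}" }} }}'.format(att.replace('.', ':')))
    # any other Mode: plain Ref, '.' replaced with 'DoT' to stay alphanumeric
    print('{{ Ref: {0} }}'.format(att.replace('.', 'DoT')))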
@@ -107,21 +64,6 @@ def include_raw_gz(context, files=None, gz=True):
    return base64.b64encode(buf.getvalue()).decode('utf-8')


@jinja2.contextfunction
def render_once(context, name=None, resources=set(), reset=False):
    """ Utility function returning True only once per name """

    if reset:
        resources.clear()
        return

    if name and name not in resources:
        resources.add(name)
        return True

    return False


@jinja2.contextfunction
def raise_helper(context, msg):
    raise Exception(msg)
@@ -154,7 +96,7 @@ def search(value, pattern='', ignorecase=False):


# Custom filters
def regex_replace(value='', pattern='', replace='', ignorecase=False):
def sub(value='', pattern='', replace='', ignorecase=False):
    if ignorecase:
        flags = re.I
    else:
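A quick illustrative check of what the renamed filter corresponds to (per the changelog it behaves like re.sub; the sample strings are made up):

    import re

    assert re.sub('[.]', '-', 'my.stack') == 'my-stack'                      # ignorecase=False
    assert re.sub('^prod', 'dev', 'PROD-stack', flags=re.I) == 'dev-stack'   # ignorecase=True uses re.I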
@@ -183,11 +125,11 @@ def pyminify(source, obfuscate=False, minify=True):
        source = pyminifier.token_utils.untokenize(tokens)
        # logger.info(source)
        minified_source = pyminifier.compression.gz_pack(source)
        logger.info("Compressed python code to {}".format(len(minified_source)))
        logger.info("Compressed python code from {} to {}".format(len(source), len(minified_source)))
    return minified_source


def parse_yaml(block):
def inline_yaml(block):
    return yaml.safe_load(block)
@@ -219,8 +161,8 @@ class SilentUndefined(jinja2.Undefined):
def JinjaEnv(template_locations=[]):
    jenv = jinja2.Environment(trim_blocks=True,
                              lstrip_blocks=True,
                              undefined=SilentUndefined,
                              extensions=['jinja2.ext.loopcontrols', 'jinja2.ext.do'])
                              # undefined=SilentUndefined,

    jinja_loaders = []
    for _dir in template_locations:
@@ -228,14 +170,12 @@ def JinjaEnv(template_locations=[]):
    jenv.loader = jinja2.ChoiceLoader(jinja_loaders)

    jenv.globals['include_raw'] = include_raw_gz
    jenv.globals['get_custom_att'] = get_custom_att
    jenv.globals['cloudbender_ctx'] = cloudbender_ctx
    jenv.globals['render_once'] = render_once
    jenv.globals['raise'] = raise_helper
    jenv.globals['option'] = option

    jenv.filters['regex_replace'] = regex_replace
    jenv.filters['sub'] = sub
    jenv.filters['pyminify'] = pyminify
    jenv.filters['yaml'] = parse_yaml
    jenv.filters['inline_yaml'] = inline_yaml

    jenv.tests['match'] = match
    jenv.tests['regex'] = regex
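With SilentUndefined no longer passed to the environment, references to undefined data now surface at render time instead of rendering silently. A small illustrative check, assuming the module path shown:

    import jinja2
    from cloudbender.jinja import JinjaEnv  # module path assumed

    jenv = JinjaEnv()
    try:
        jenv.from_string("{{ no_such_var.Value }}").render()
    except jinja2.exceptions.UndefinedError as exc:
        print("undefined data now fails the render:", exc)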
@@ -4,7 +4,6 @@ import hashlib
import oyaml as yaml
import json
import time
import subprocess

from datetime import datetime, timedelta
from dateutil.tz import tzutc
@@ -33,17 +32,19 @@ class StackStatus(object):


class Stack(object):
    def __init__(self, name, path, rel_path, tags=None, parameters=None, template_vars=None, region='global', profile=None, template=None, ctx={}):
    def __init__(self, name, path, rel_path, tags=None, parameters=None, options=None, region='global', profile=None, template=None, ctx={}):
        self.id = (profile, region, name)
        self.stackname = name
        self.path = path
        self.rel_path = rel_path
        self.tags = tags
        self.parameters = parameters
        self.template_vars = template_vars
        self.options = options
        self.region = region
        self.profile = profile
        self.template = template
        self.md5 = None
        self.mode = 'CloudBender'
        self.provides = template
        self.cfn_template = None
        self.cfn_parameters = []
@@ -68,11 +69,20 @@ class Stack(object):
            if p in _config:
                setattr(self, p, dict_merge(getattr(self, p), _config[p]))

        # Inject Artifact for now hard coded
        self.tags['Artifact'] = self.provides
        # Inject Artifact if not explicitly set
        if 'Artifact' not in self.tags:
            self.tags['Artifact'] = self.provides

        # backwards comp
        if 'vars' in _config:
            self.template_vars = dict_merge(self.template_vars, _config['vars'])
            self.options = dict_merge(self.options, _config['vars'])
            if 'Mode' in self.options:
                self.mode = self.options['Mode']

        if 'options' in _config:
            self.options = dict_merge(self.options, _config['options'])
            if 'Mode' in self.options:
                self.mode = self.options['Mode']

        if 'dependencies' in _config:
            for dep in _config['dependencies']:
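A minimal sketch of what this backwards compatibility means for a stack config: both forms below end up merged into self.options and select the same mode (the dict shapes mirror the YAML configs; any keys beyond vars/options/Mode would be illustrative):

    legacy_config = {
        'vars': {'Mode': 'Piped'},       # pre-0.5.0 key, still merged into options
    }
    current_config = {
        'options': {'Mode': 'Piped'},    # 0.5.0 key
    }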
@@ -83,62 +93,73 @@ class Stack(object):
    def render(self):
        """Renders the cfn jinja template for this stack"""

        jenv = JinjaEnv(self.ctx['artifact_paths'])

        template = jenv.get_template('{0}{1}'.format(self.template, '.yaml.jinja'))

        template_metadata = {
            'Template.Name': self.template,
            'Template.Hash': 'tbd',
            'Template.Hash': "__HASH__",
            'CloudBender.Version': __version__
        }

        cb = False
        if self.template_vars['Mode'] == "CloudBender":
            cb = True

        _config = {'cb': cb, 'cfn': self.template_vars, 'Metadata': template_metadata}
        # cfn is provided for old configs
        _config = {'mode': self.mode, 'options': self.options, 'metadata': template_metadata, 'cfn': self.options}

        jenv = JinjaEnv(self.ctx['artifact_paths'])
        jenv.globals['_config'] = _config

        # First render pass to calculate a md5 checksum
        template_metadata['Template.Hash'] = hashlib.md5(template.render(_config).encode('utf-8')).hexdigest()

        # Reset and set Metadata for final render pass
        jenv.globals['get_custom_att'](context={'_config': self.template_vars}, reset=True)
        jenv.globals['render_once'](context={'_config': self.template_vars}, reset=True)
        jenv.globals['cloudbender_ctx'](context={'_config': self.template_vars}, reset=True)

        # Try to add latest tag/commit for the template source, skip if not in git tree
        try:
            _comment = subprocess.check_output('git log -1 --pretty=%B {}'.format(template.filename).split(' ')).decode('utf-8').strip().replace('"', '').replace('#', '').replace('\n', '').replace(':', ' ')
            if _comment:
                template_metadata['Template.LastGitComment'] = _comment

        except subprocess.CalledProcessError:
            pass
        template = jenv.get_template('{0}{1}'.format(self.template, '.yaml.jinja'))

        logger.info('Rendering %s', template.filename)
        rendered = template.render(_config)

        try:
            self.data = yaml.safe_load(rendered)
            self.cfn_template = template.render(_config)
            self.cfn_data = yaml.safe_load(self.cfn_template)
        except Exception as e:
            # In case we rendered invalid yaml this helps to debug
            logger.error(rendered)
            if self.cfn_template:
                logger.error(self.cfn_template)
            raise e

        # Some sanity checks and final cosmetics
        # Check for empty top level Parameters, Outputs and Conditions and remove
        for key in ['Parameters', 'Outputs', 'Conditions']:
            if key in self.data and self.data[key] is None:
            if key in self.cfn_data and self.cfn_data[key] is None:
                # Delete from data structure which also takes care of json
                del self.data[key]
                # but also remove from rendered for the yaml file
                rendered = rendered.replace('\n' + key + ":", '')
                del self.cfn_data[key]

        # Condense multiple empty lines to one
        self.cfn_template = re.sub(r'\n\s*\n', '\n\n', rendered)
                # but also remove from rendered for the yaml file
                self.cfn_template = self.cfn_template.replace('\n' + key + ":", '')

        if not re.search('CloudBender::', self.cfn_template):
            logger.info("CloudBender not required -> removing Transform")
            del self.cfn_data['Transform']
            self.cfn_template = self.cfn_template.replace('Transform: [CloudBender]', '')

        # Remove and condense multiple empty lines
        self.cfn_template = re.sub(r'\n\s*\n', '\n\n', self.cfn_template)
        self.cfn_template = re.sub(r'^\s*', '', self.cfn_template)
        self.cfn_template = re.sub(r'\s*$', '', self.cfn_template)

        # set md5 last
        self.md5 = hashlib.md5(self.cfn_template.encode('utf-8')).hexdigest()
        self.cfn_data['Metadata']['Hash'] = self.md5

        # Add Legacy FortyTwo if needed to prevent AWS from replacing existing resources for NO reason ;-(
        include = []
        search_attributes(self.cfn_data, include)
        if len(include):
            _res = """
  FortyTwo:
    Type: Custom::FortyTwo
    Properties:
      ServiceToken:
        Fn::Sub: "arn:aws:lambda:${{AWS::Region}}:${{AWS::AccountId}}:function:FortyTwo"
      UpdateToken: {}
      Include: {}""".format(self.md5, sorted(set(include)))
            self.cfn_data['Resources'].update(yaml.load(_res))

            self.cfn_template = re.sub(r'Resources:', r'Resources:' + _res + '\n', self.cfn_template)
            logger.info("Legacy Mode -> added Custom::FortyTwo")

        self.cfn_template = self.cfn_template.replace('__HASH__', self.md5)

        # Update internal data structures
        self._parse_metadata()
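The hash handling above follows a simple placeholder pattern: the metadata carries "__HASH__" while rendering, the md5 is taken over the finished template text, and then substituted in afterwards. A reduced sketch of just that idea (not the actual method):

    import hashlib

    cfn_template = "Metadata:\n  Template.Hash: __HASH__\nResources: {}\n"
    md5 = hashlib.md5(cfn_template.encode('utf-8')).hexdigest()
    cfn_template = cfn_template.replace('__HASH__', md5)
    print(cfn_template)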
@@ -146,7 +167,7 @@ class Stack(object):
    def _parse_metadata(self):
        # Extract dependencies if present
        try:
            for dep in self.data['Metadata']['CloudBender']['Dependencies']:
            for dep in self.cfn_data['Metadata']['CloudBender']['Dependencies']:
                self.dependencies.add(dep)
        except KeyError:
            pass
@@ -178,7 +199,7 @@ class Stack(object):
            self.cfn_template = yaml_contents.read()
            logger.debug('Read cfn template %s.', yaml_file)

            self.data = yaml.safe_load(self.cfn_template)
            self.cfn_data = yaml.safe_load(self.cfn_template)
            self._parse_metadata()

        else:
@@ -189,14 +210,18 @@ class Stack(object):
        self.read_template_file()

        try:
            ignore_checks = self.data['Metadata']['cfnlint_ignore']
            ignore_checks = self.cfn_data['Metadata']['cfnlint_ignore']
        except KeyError:
            ignore_checks = []

        # Ignore some more checks around injected parameters as we generate these
        if self.template_vars['Mode'] == "Piped":
        if self.mode == "Piped":
            ignore_checks = ignore_checks + ['W2505', 'W2509', 'W2507']

        # Ignore checks regarding overloaded properties
        if self.mode == "CloudBender":
            ignore_checks = ignore_checks + ['E3035', 'E3002', 'E3012', 'W2001', 'E3001']

        filename = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
        logger.info('Validating {0}'.format(filename))
@@ -223,18 +248,18 @@ class Stack(object):

        # Inspect all outputs of the running Conglomerate members
        # if we run in Piped Mode
        # if self.template_vars['Mode'] == "Piped":
        # if self.mode == "Piped":
        #     try:
        #         stack_outputs = inspect_stacks(config['tags']['Conglomerate'])
        #         logger.info(pprint.pformat(stack_outputs))
        #     except KeyError:
        #         pass

        if 'Parameters' in self.data:
        if 'Parameters' in self.cfn_data:
            self.cfn_parameters = []
            for p in self.data['Parameters']:
            for p in self.cfn_data['Parameters']:
                # In Piped mode we try to resolve all Paramters first via stack_outputs
                # if config['cfn']['Mode'] == "Piped":
                # if self.mode == "Piped":
                #     try:
                #         # first reverse the rename due to AWS alphanumeric restriction for parameter names
                #         _p = p.replace('DoT','.')
@@ -252,7 +277,7 @@ class Stack(object):
                    logger.info('{} {} Parameter {}={}'.format(self.region, self.stackname, p, value))
                except KeyError:
                    # If we have a Default defined in the CFN skip, as AWS will use it
                    if 'Default' in self.data['Parameters'][p]:
                    if 'Default' in self.cfn_data['Parameters'][p]:
                        continue
                    else:
                        logger.error('Cannot find value for parameter {0}'.format(p))
@@ -470,3 +495,23 @@ class Stack(object):
        # Ensure output dirs exist
        if not os.path.exists(os.path.join(self.ctx[path], self.rel_path)):
            os.makedirs(os.path.join(self.ctx[path], self.rel_path))


def search_attributes(template, attributes):
    """ Traverses a template and searches for all Fn::GetAtt calls to FortyTwo
        adding them to the passed in attributes set
    """
    if isinstance(template, dict):
        for k, v in template.items():
            # Look for Fn::GetAtt
            if k == "Fn::GetAtt" and isinstance(v, list):
                if v[0] == "FortyTwo":
                    attributes.append(v[1])

            if isinstance(v, dict) or isinstance(v, list):
                search_attributes(v, attributes)

    elif isinstance(template, list):
        for k in template:
            if isinstance(k, dict) or isinstance(k, list):
                search_attributes(k, attributes)
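An illustrative call showing what search_attributes collects; the resource and attribute names below are made up:

    template = {
        'Resources': {
            'Instance': {
                'Properties': {
                    'SubnetId': {'Fn::GetAtt': ['FortyTwo', 'vpc.SubnetPublicA']},
                    'SecurityGroupIds': [{'Fn::GetAtt': ['InstanceSG', 'GroupId']}],
                }
            }
        }
    }

    include = []
    search_attributes(template, include)
    print(include)  # ['vpc.SubnetPublicA'] -- only Fn::GetAtt calls to FortyTwo are collected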
@@ -50,7 +50,7 @@ class StackGroup(object):

        tags = _config.get('tags', {})
        parameters = _config.get('parameters', {})
        template_vars = _config.get('vars', {})
        options = _config.get('options', {})
        region = _config.get('region', 'global')
        profile = _config.get('profile', '')
        stackname_prefix = _config.get('stacknameprefix', '')
@@ -67,7 +67,7 @@ class StackGroup(object):

        new_stack = Stack(
            name=stackname, template=template, path=stack_path, rel_path=str(self.rel_path),
            tags=dict(tags), parameters=dict(parameters), template_vars=dict(template_vars),
            tags=dict(tags), parameters=dict(parameters), options=dict(options),
            region=str(region), profile=str(profile), ctx=self.ctx)
        new_stack.read_config()
        self.stacks.append(new_stack)