Code clean up PEP8, some fixes along the way

Stefan Reimer 2019-02-07 15:36:16 +00:00
parent 3587451170
commit 47b492a65e
7 changed files with 119 additions and 172 deletions

View File

@@ -11,13 +11,14 @@ from .utils import setup_logging
 import logging
 logger = logging.getLogger(__name__)
 @click.group()
 @click.version_option(version=__version__, prog_name="CloudBender")
 @click.option("--debug", is_flag=True, help="Turn on debug logging.")
 @click.option("--dir", "directory", help="Specify cloudbender project directory.")
 @click.pass_context
 def cli(ctx, debug, directory):
-    logger = setup_logging(debug)
+    setup_logging(debug)
     # Read global config
     cb = CloudBender(directory if directory else os.getcwd())
@@ -86,7 +87,7 @@ def provision(ctx, stack_names, multi):
             futures.append(group.submit(stack.update))
         for future in as_completed(futures):
-            result = future.result()
+            future.result()
 @click.command()
@@ -109,7 +110,7 @@ def delete(ctx, stack_names, multi):
             futures.append(group.submit(stack.delete))
         for future in as_completed(futures):
-            result = future.result()
+            future.result()
 @click.command()
@@ -140,13 +141,14 @@ def sort_stacks(ctx, stacks):
         data[s.id] = set(deps)
         logger.debug("Stack {} depends on {}".format(s.id, deps))
+    # Ignore self dependencies
     for k, v in data.items():
-        v.discard(k) # Ignore self dependencies
+        v.discard(k)
     extra_items_in_deps = functools.reduce(set.union, data.values()) - set(data.keys())
-    data.update({item:set() for item in extra_items_in_deps})
+    data.update({item: set() for item in extra_items_in_deps})
     while True:
-        ordered = set(item for item,dep in data.items() if not dep)
+        ordered = set(item for item, dep in data.items() if not dep)
         if not ordered:
             break
@@ -154,10 +156,11 @@ def sort_stacks(ctx, stacks):
         result = []
         for o in ordered:
             for s in stacks:
-                if s.id == o: result.append(s)
+                if s.id == o:
+                    result.append(s)
         yield result
-        data = {item: (dep - ordered) for item,dep in data.items()
+        data = {item: (dep - ordered) for item, dep in data.items()
                 if item not in ordered}
     assert not data, "A cyclic dependency exists amongst %r" % data
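The sort_stacks hunks above amount to a Kahn-style topological sort: stacks whose dependency set is empty are yielded as one batch, then removed from the remaining items' dependency sets. A standalone sketch of the same loop, with hypothetical stack ids:

import functools

def sort_by_deps(data):
    # data: {item: set(dependencies)}; yields batches that can run in parallel
    for k, v in data.items():
        v.discard(k)  # ignore self dependencies
    extra_items_in_deps = functools.reduce(set.union, data.values()) - set(data.keys())
    data.update({item: set() for item in extra_items_in_deps})
    while True:
        ordered = set(item for item, dep in data.items() if not dep)
        if not ordered:
            break
        yield ordered
        data = {item: (dep - ordered) for item, dep in data.items()
                if item not in ordered}
    assert not data, "A cyclic dependency exists amongst %r" % data

print(list(sort_by_deps({'vpc': set(), 'db': {'vpc'}, 'app': {'db', 'vpc'}})))
# -> [{'vpc'}, {'db'}, {'app'}]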
@@ -167,7 +170,7 @@ def _find_stacks(ctx, stack_names, multi=False):
     stacks = []
     for s in stack_names:
-        stacks = stacks+cb.resolve_stacks(s)
+        stacks = stacks + cb.resolve_stacks(s)
     if not multi and len(stacks) > 1:
         logger.error('Found more than one stack matching name ({}). Please set --multi if that is what you want.'.format(', '.join(stack_names)))

View File

@@ -9,48 +9,46 @@ import logging
 logger = logging.getLogger(__name__)
 class BotoConnection():
-    _sessions= {}
+    _sessions = {}
     _clients = {}
     def __init__(self, profile=None, region=None):
         self.region = region
         self.profile = profile
     def _get_session(self, profile=None, region=None):
-        if self._sessions.get((profile,region)):
-            return self._sessions[(profile,region)]
+        if self._sessions.get((profile, region)):
+            return self._sessions[(profile, region)]
         # Construct botocore session with cache
         # Setup boto to cache STS tokens for MFA
         # Change the cache path from the default of ~/.aws/boto/cache to the one used by awscli
         session_vars = {}
         if profile:
-            session_vars['profile'] = (None,None,profile,None)
+            session_vars['profile'] = (None, None, profile, None)
         if region and region != 'global':
-            session_vars['region'] = (None,None,region,None)
+            session_vars['region'] = (None, None, region, None)
         session = botocore.session.Session(session_vars=session_vars)
-        cli_cache = os.path.join(os.path.expanduser('~'),'.aws/cli/cache')
+        cli_cache = os.path.join(os.path.expanduser('~'), '.aws/cli/cache')
         session.get_component('credential_provider').get_provider('assume-role').cache = credentials.JSONFileCache(cli_cache)
-        self._sessions[(profile,region)] = session
+        self._sessions[(profile, region)] = session
         return session
     def _get_client(self, service, profile=None, region=None):
-        if self._clients.get((profile,region,service)):
-            return self._clients[(profile,region,service)]
+        if self._clients.get((profile, region, service)):
+            return self._clients[(profile, region, service)]
-        session = self._get_session(profile,region)
+        session = self._get_session(profile, region)
         client = boto3.Session(botocore_session=session).client(service)
-        self._clients[(profile,region,service)] = client
+        self._clients[(profile, region, service)] = client
         return client
     def call(self, service, command, kwargs={}, profile=None, region=None):
         while True:
             try:
@@ -64,4 +62,3 @@ class BotoConnection():
                 pass
             else:
                 raise e
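The comments in _get_session explain the point of the cache wiring: botocore's assume-role provider is pointed at the awscli token cache, so MFA-backed STS credentials are shared with the CLI and prompted for only once. A minimal sketch of just that wiring, using the same botocore calls as the diff:

import os
import botocore.session
from botocore import credentials

def cached_session(profile=None, region=None):
    session_vars = {}
    if profile:
        session_vars['profile'] = (None, None, profile, None)
    if region and region != 'global':
        session_vars['region'] = (None, None, region, None)
    session = botocore.session.Session(session_vars=session_vars)
    # Reuse the awscli cache (~/.aws/cli/cache) instead of ~/.aws/boto/cache
    cli_cache = os.path.join(os.path.expanduser('~'), '.aws/cli/cache')
    session.get_component('credential_provider').get_provider(
        'assume-role').cache = credentials.JSONFileCache(cli_cache)
    return session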

View File

@@ -1,14 +1,12 @@
 import os
-import glob
 import logging
 from .utils import read_yaml_file, ensure_dir
-from .stack import Stack
 from .stackgroup import StackGroup
-from .connection import BotoConnection
 logger = logging.getLogger(__name__)
 class CloudBender(object):
     """ Config Class to handle recursive conf/* config tree """
     def __init__(self, root_path):
@@ -22,13 +20,12 @@ class CloudBender(object):
             "artifact_paths": [os.path.join(self.root, "artifacts")]
         }
         self.default_settings = {
-            'vars': { 'Mode': 'FortyTwo' }
+            'vars': {'Mode': 'FortyTwo'}
         }
         if not os.path.isdir(self.root):
             raise "Check '{0}' exists and is a valid project folder.".format(root_path)
     def read_config(self):
         """Load the <path>/config.yaml, <path>/*.yaml as stacks, sub-folders are child groups """
@@ -39,7 +36,7 @@ class CloudBender(object):
         # Make sure all paths are abs
         for k, v in self.ctx.items():
-            if k in ['config_path','template_path','parameter_path','artifact_paths']:
+            if k in ['config_path', 'template_path', 'parameter_path', 'artifact_paths']:
                 if isinstance(v, list):
                     new_list = []
                     for path in v:
@@ -51,9 +48,9 @@ class CloudBender(object):
                 elif isinstance(v, str):
                     if not os.path.isabs(v):
-                        self.ctx[k]=os.path.normpath(os.path.join(self.root, v))
+                        self.ctx[k] = os.path.normpath(os.path.join(self.root, v))
-            if k in ['template_path','parameter_path']:
+            if k in ['template_path', 'parameter_path']:
                 ensure_dir(self.ctx[k])
         self.sg = StackGroup(self.ctx['config_path'], self.ctx)
@@ -66,18 +63,15 @@ class CloudBender(object):
         # _config = { "vars": { 'Azs': {'TestAZ': 'Next'}, 'Segments': {'Testnet': 'internet'}, "Mode": "Piped" } }
         # self.vars.update(_config.get('vars'))
     def dump_config(self):
         logger.debug("<CloudBender: {}>".format(vars(self)))
         self.sg.dump_config()
     def clean(self):
         for s in self.all_stacks:
             s.delete_template_file()
             s.delete_parameter_file()
     def resolve_stacks(self, token):
         stacks = []
@@ -100,7 +94,6 @@ class CloudBender(object):
         return stacks
     def filter_stacks(self, filter_by, stacks=None):
         # filter_by is a dict { property, value }
@@ -112,7 +105,7 @@ class CloudBender(object):
         for s in stacks:
             match = True
-            for p,v in filter_by.items():
+            for p, v in filter_by.items():
                 if not (hasattr(s, p) and getattr(s, p) == v):
                     match = False
                     break
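filter_stacks matches on arbitrary stack attributes; the match flag loop above is equivalent to an all() over the filter dict. A small illustration with a hypothetical stack object:

filter_by = {'region': 'eu-west-1', 'profile': 'prod'}

class FakeStack:
    region = 'eu-west-1'
    profile = 'prod'

s = FakeStack()
print(all(hasattr(s, p) and getattr(s, p) == v for p, v in filter_by.items()))  # True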

View File

@@ -1,8 +1,6 @@
-import os
 import io
 import gzip
 import jinja2
-import oyaml as yaml
 import re
 import base64
@@ -75,10 +73,10 @@ def get_custom_att(context, att=None, ResourceName="FortyTwo", attributes={}, re
         return('{{ "Fn::GetAtt": ["{0}", "{1}"] }}'.format(ResourceName, att))
     elif config['cfn']['Mode'] == "AWSImport" and ResourceName == "FortyTwo":
         # AWS only allows - and :, so replace '.' with ":"
-        return('{{ "Fn::ImportValue": {{ "Fn::Sub": "${{Conglomerate}}:{0}" }} }}'.format(att.replace('.',':')))
+        return('{{ "Fn::ImportValue": {{ "Fn::Sub": "${{Conglomerate}}:{0}" }} }}'.format(att.replace('.', ':')))
     else:
         # We need to replace . with some PureAlphaNumeric thx AWS ...
-        return('{{ Ref: {0} }}'.format(att.replace('.','DoT')))
+        return('{{ Ref: {0} }}'.format(att.replace('.', 'DoT')))
 @jinja2.contextfunction
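The replace() calls above exist because CloudFormation restricts characters: export names may use ':' but not '.', and parameter/resource names must be purely alphanumeric, hence '.' becomes 'DoT'. For a hypothetical attribute 'VPC.Id', the AWSImport and Piped branches emit:

att = 'VPC.Id'
print('{{ "Fn::ImportValue": {{ "Fn::Sub": "${{Conglomerate}}:{0}" }} }}'.format(att.replace('.', ':')))
# -> { "Fn::ImportValue": { "Fn::Sub": "${Conglomerate}:VPC:Id" } }
print('{{ Ref: {0} }}'.format(att.replace('.', 'DoT')))
# -> { Ref: VPCDoTId }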
@@ -153,13 +151,15 @@ def regex_replace(value='', pattern='', replace='', ignorecase=False):
         flags = re.I
     else:
         flags = 0
-    return re.sub(pattern,replace,value,flags=flags)
+    return re.sub(pattern, replace, value, flags=flags)
 def pyminify(source, obfuscate=False, minify=True):
     # pyminifier options
-    options = types.SimpleNamespace(tabs=False,replacement_length=1,use_nonlatin=0,
-        obfuscate=0,obf_variables=1,obf_classes=0,obf_functions=0,obf_import_methods=0,obf_builtins=0)
+    options = types.SimpleNamespace(
+        tabs=False, replacement_length=1, use_nonlatin=0,
+        obfuscate=0, obf_variables=1, obf_classes=0, obf_functions=0,
+        obf_import_methods=0, obf_builtins=0)
     tokens = pyminifier.token_utils.listified_tokenizer(source)
@@ -170,10 +170,10 @@ def pyminify(source, obfuscate=False, minify=True):
     if obfuscate:
         name_generator = pyminifier.obfuscate.obfuscation_machine(use_unicode=False)
         pyminifier.obfuscate.obfuscate("__main__", tokens, options, name_generator=name_generator)
-        #source = pyminifier.obfuscate.apply_obfuscation(source)
+        # source = pyminifier.obfuscate.apply_obfuscation(source)
     source = pyminifier.token_utils.untokenize(tokens)
-    #logger.info(source)
+    # logger.info(source)
     minified_source = pyminifier.compression.gz_pack(source)
     logger.info("Compressed python code to {}".format(len(minified_source)))
     return minified_source
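regex_replace is a Jinja filter, but outside a template it behaves like plain re.sub; a usage sketch with assumed inputs:

import re

def regex_replace(value='', pattern='', replace='', ignorecase=False):
    flags = re.I if ignorecase else 0
    return re.sub(pattern, replace, value, flags=flags)

print(regex_replace('stack-name-01', '[^0-9a-zA-Z]', ''))         # stackname01
print(regex_replace('Prod.Web', 'prod', 'dev', ignorecase=True))  # dev.Web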

View File

@@ -5,6 +5,7 @@ import hashlib
 import oyaml as yaml
 import json
 import time
+import subprocess
 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
@@ -56,11 +57,9 @@ class Stack(object):
         self.default_lock = None
         self.multi_delete = True
     def dump_config(self):
         logger.debug("<Stack {}: {}>".format(self.id, vars(self)))
     def read_config(self):
         _config = read_yaml_file(self.path)
         for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides"]:
@@ -83,27 +82,26 @@ class Stack(object):
         logger.debug("Stack {} added.".format(self.id))
     def check_fortytwo(self, template):
         # Fail early if 42 is enabled but not available
         if self.cfn['Mode'] == "FortyTwo" and self.template != 'FortyTwo':
             try:
-                response = self.connection_manager.call('lambda', 'get_function', {'FunctionName': 'FortyTwo'},
+                response = self.connection_manager.call(
+                    'lambda', 'get_function', {'FunctionName': 'FortyTwo'},
                     profile=self.profile, region=self.region)
                 # Also verify version in case specified in the template's metadata
                 try:
                     req_ver = template['Metadata']['FortyTwo']['RequiredVersion']
                     if 'Release' not in response['Tags']:
-                        abort("Lambda FortyTwo has no Release Tag! Required: {}".format(req_ver))
-                    elif semver.compare(req_ver, re.sub("-.*$",'', response['Tags']['Release'])) > 0:
-                        abort("Lambda FortyTwo version is not recent enough! Required: {} vs. Found: {}".format(req_ver, response['Tags']['Release']))
+                        raise("Lambda FortyTwo has no Release Tag! Required: {}".format(req_ver))
+                    elif semver.compare(req_ver, re.sub("-.*$", '', response['Tags']['Release'])) > 0:
+                        raise("Lambda FortyTwo version is not recent enough! Required: {} vs. Found: {}".format(req_ver, response['Tags']['Release']))
                 except KeyError:
                     pass
             except botocore.exceptions.ClientError:
-                abort("No Lambda FortyTwo found in your account")
+                raise("No Lambda FortyTwo found in your account")
     def render(self):
         """Renders the cfn jinja template for this stack"""
@@ -114,56 +112,38 @@ class Stack(object):
         template_metadata = {
             'Template.Name': self.template,
-            'Template.Hash': 'unknown',
-            'Template.GitComment': 'unknown',
+            'Template.Hash': 'tbd',
             'CloudBender.Version': __version__
         }
-        jenv.globals['_config'] = { 'cfn': self.template_vars, 'Metadata': template_metadata }
+        jenv.globals['_config'] = {'cfn': self.template_vars, 'Metadata': template_metadata}
         # First render pass to calculate a md5 checksum
-        template_metadata['Template.Hash'] = hashlib.md5(template.render({ 'cfn': self.template_vars, 'Metadata': template_metadata }).encode('utf-8')).hexdigest()
+        template_metadata['Template.Hash'] = hashlib.md5(template.render({'cfn': self.template_vars, 'Metadata': template_metadata}).encode('utf-8')).hexdigest()
         # Reset and set Metadata for final render pass
         jenv.globals['get_custom_att'](context={'_config': self.template_vars}, reset=True)
         jenv.globals['render_once'](context={'_config': self.template_vars}, reset=True)
         jenv.globals['cloudbender_ctx'](context={'_config': self.template_vars}, reset=True)
-        # try to get local git info
+        # Try to add latest tag/commit for the template source, skip if not in git tree
         try:
-            self.template_vars['Metadata']['{}.Version'.format(PROJECT_NAME)] = subprocess.check_output('git describe --tags'.split(' '), universal_newlines=True)[:-1]
-        except:
-            pass
-        # Add latest tag/commit
-        try:
-            os.chdir(ROOT_DIR)
-            _version = subprocess.check_output('git describe --tags'.split(' '), universal_newlines=True)[:-1]
-            if _version:
-                self.template_vars['Metadata']['CloudBender.Version'] = _version
-            os.chdir(os.path.dirname(template.filename))
-            _comment = subprocess.check_output('git log -1 --pretty=%B {0}{1}'
-                .format(input_file, TEMPLATE_EXT).split(' ')).decode('utf-8').strip() \
-                .replace('"', '').replace('#', '').replace('\n', '').replace(':', ' ')
+            _comment = subprocess.check_output('git log -1 --pretty=%B {}'.format(template.filename).split(' ')).decode('utf-8').strip().replace('"', '').replace('#', '').replace('\n', '').replace(':', ' ')
             if _comment:
-                self.template_vars['Metadata']['Template.GitComment'] = _comment
-            os.chdir(PROJECT_DIR)
-        except:
+                template_metadata['Template.LastGitComment'] = _comment
+        except subprocess.CalledProcessError:
             pass
         logger.info('Rendering %s', template.filename)
-        rendered = template.render({ 'cfn': self.template_vars, 'Metadata': template_metadata })
+        rendered = template.render({'cfn': self.template_vars, 'Metadata': template_metadata})
         try:
             self.data = yaml.load(rendered)
-        except:
+        except Exception as e:
             # In case we rendered invalid yaml this helps to debug
             logger.error(rendered)
-            raise
+            raise e
         # Some sanity checks and final cosmetics
         # Check for empty top level Parameters, Outputs and Conditions and remove
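render() is two-pass: the template is first rendered with 'Template.Hash' set to 'tbd', the md5 of that provisional output becomes the real hash, and the final render embeds it. A minimal sketch of the idea with an assumed inline template:

import hashlib
import jinja2

template = jinja2.Environment().from_string(
    "Description: demo\n# Hash: {{ Metadata['Template.Hash'] }}\n")
metadata = {'Template.Hash': 'tbd'}
# First pass: hash the provisional render
metadata['Template.Hash'] = hashlib.md5(
    template.render(Metadata=metadata).encode('utf-8')).hexdigest()
# Final pass embeds the digest
print(template.render(Metadata=metadata))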
@@ -172,7 +152,7 @@ class Stack(object):
                 # Delete from data structure which also takes care of json
                 del self.data[key]
                 # but also remove from rendered for the yaml file
-                rendered = rendered.replace('\n'+key+":",'')
+                rendered = rendered.replace('\n' + key + ":", '')
         # Condense multiple empty lines to one
         self.cfn_template = re.sub(r'\n\s*\n', '\n\n', rendered)
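render() above also swapped the old chdir-based git lookup for a single "git log -1 --pretty=%B <file>" call that only swallows CalledProcessError instead of using a bare except. The same one-liner in isolation (path assumed; must run inside a git work tree):

import subprocess

def last_git_comment(path='templates/vpc.yaml'):
    try:
        return subprocess.check_output(
            'git log -1 --pretty=%B {}'.format(path).split(' ')
        ).decode('utf-8').strip().replace('\n', ' ')
    except subprocess.CalledProcessError:
        return None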
@@ -180,7 +160,6 @@ class Stack(object):
         # Update internal data structures
         self._parse_metadata()
     def _parse_metadata(self):
         # Extract dependencies if present
         try:
@@ -189,10 +168,9 @@ class Stack(object):
         except KeyError:
             pass
     def write_template_file(self):
         if self.cfn_template:
-            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
+            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
             self._ensure_dirs('template_path')
             with open(yaml_file, 'w') as yaml_contents:
                 yaml_contents.write(self.cfn_template)
@@ -201,20 +179,18 @@ class Stack(object):
         else:
             logger.error('No cfn template rendered yet for stack {}.'.format(self.stackname))
     def delete_template_file(self):
-        yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
+        yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
         try:
             os.remove(yaml_file)
             logger.debug('Deleted cfn template %s.', yaml_file)
         except OSError:
             pass
     def read_template_file(self):
         """ Reads rendered yaml template from disk and extracts metadata """
         if not self.cfn_template:
-            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
+            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
             with open(yaml_file, 'r') as yaml_contents:
                 self.cfn_template = yaml_contents.read()
             logger.debug('Read cfn template %s.', yaml_file)
@@ -225,7 +201,6 @@ class Stack(object):
         else:
             logger.debug('Using cached cfn template %s.', self.stackname)
     def validate(self):
         """Validates the rendered template via cfn-lint"""
         self.read_template_file()
@@ -237,15 +212,15 @@ class Stack(object):
         # Ignore some more checks around injected parameters as we generate these
         if self.template_vars['Mode'] == "Piped":
-            ignore_checks = ignore_checks+['W2505','W2509','W2507']
+            ignore_checks = ignore_checks + ['W2505', 'W2509', 'W2507']
-        filename = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
+        filename = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
         logger.info('Validating {0}'.format(filename))
         lint_args = ['--template', filename]
         if ignore_checks:
             lint_args.append('--ignore-checks')
-            lint_args = lint_args+ignore_checks
+            lint_args = lint_args + ignore_checks
             logger.info('Ignoring checks: {}'.format(','.join(ignore_checks)))
         (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args)
@@ -258,7 +233,6 @@ class Stack(object):
         else:
             logger.info("Passed.")
     def resolve_parameters(self):
         """ Renders parameters for the stack based on the source template and the environment configuration """
@@ -277,7 +251,7 @@ class Stack(object):
         self.cfn_parameters = []
         for p in self.data['Parameters']:
             # In Piped mode we try to resolve all Paramters first via stack_outputs
-            #if config['cfn']['Mode'] == "Piped":
+            # if config['cfn']['Mode'] == "Piped":
             #    try:
             #        # first reverse the rename due to AWS alphanumeric restriction for parameter names
             #        _p = p.replace('DoT','.')
@@ -291,18 +265,17 @@ class Stack(object):
             # Key name in config tree is: stacks.<self.stackname>.parameters.<parameter>
             try:
                 value = str(self.parameters[p])
-                self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value })
+                self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})
-                logger.info('Got {} = {}'.format(p,value))
+                logger.info('Got {} = {}'.format(p, value))
-            except KeyError as e:
+            except KeyError:
                 # If we have a Default defined in the CFN skip, as AWS will use it
                 if 'Default' in self.data['Parameters'][p]:
                     continue
                 else:
                     logger.error('Cannot find value for parameter {0}'.format(p))
     def write_parameter_file(self):
-        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname+".yaml")
+        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")
         # Render parameters as json for AWS CFN
         self._ensure_dirs('parameter_path')
@@ -315,16 +288,14 @@ class Stack(object):
         if os.path.isfile(parameter_file):
             os.remove(parameter_file)
     def delete_parameter_file(self):
-        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname+".yaml")
+        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")
         try:
             os.remove(parameter_file)
             logger.debug('Deleted parameter %s.', parameter_file)
         except OSError:
             pass
     def create(self):
         """Creates a stack """
@@ -334,17 +305,17 @@ class Stack(object):
         self.read_template_file()
         logger.info('Creating {0} {1}'.format(self.region, self.stackname))
-        response = self.connection_manager.call('cloudformation', 'create_stack',
-            {'StackName':self.stackname,
-            'TemplateBody':self.cfn_template,
-            'Parameters':self.cfn_parameters,
-            'Tags':[ {"Key": str(k), "Value": str(v)} for k, v in self.tags.items() ],
-            'Capabilities':['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
+        self.connection_manager.call(
+            'cloudformation', 'create_stack',
+            {'StackName': self.stackname,
+             'TemplateBody': self.cfn_template,
+             'Parameters': self.cfn_parameters,
+             'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
+             'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
             profile=self.profile, region=self.region)
         return self._wait_for_completion()
     def update(self):
         """Updates an existing stack """
@@ -355,12 +326,13 @@ class Stack(object):
         logger.info('Updating {0} {1}'.format(self.region, self.stackname))
         try:
-            response = self.connection_manager.call('cloudformation', 'update_stack',
-                {'StackName':self.stackname,
-                'TemplateBody':self.cfn_template,
-                'Parameters':self.cfn_parameters,
-                'Tags':[ {"Key": str(k), "Value": str(v)} for k, v in self.tags.items() ],
-                'Capabilities':['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
+            self.connection_manager.call(
+                'cloudformation', 'update_stack',
+                {'StackName': self.stackname,
+                 'TemplateBody': self.cfn_template,
+                 'Parameters': self.cfn_parameters,
+                 'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
+                 'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
                 profile=self.profile, region=self.region)
         except ClientError as e:
@@ -372,17 +344,16 @@ class Stack(object):
         return self._wait_for_completion()
     def delete(self):
         """Deletes a stack """
         logger.info('Deleting {0} {1}'.format(self.region, self.stackname))
-        response = self.connection_manager.call('cloudformation', 'delete_stack',
-            {'StackName':self.stackname}, profile=self.profile, region=self.region)
+        self.connection_manager.call(
+            'cloudformation', 'delete_stack', {'StackName': self.stackname},
+            profile=self.profile, region=self.region)
         return self._wait_for_completion()
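create/update/delete all funnel through BotoConnection.call from the second file above, which caches sessions and clients per (profile, region). A hedged usage sketch (profile, region, and stack name are hypothetical):

conn = BotoConnection(profile='prod', region='eu-west-1')
conn.call('cloudformation', 'delete_stack', {'StackName': 'demo-vpc'},
          profile='prod', region='eu-west-1')

# The Tags comprehension used in the calls turns the stack's tag dict
# into CloudFormation's list-of-maps form:
tags = {'Conglomerate': 'demo', 'Artifact': 'vpc'}
print([{"Key": str(k), "Value": str(v)} for k, v in tags.items()])
# -> [{'Key': 'Conglomerate', 'Value': 'demo'}, {'Key': 'Artifact', 'Value': 'vpc'}]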
     def create_change_set(self, change_set_name):
         """ Creates a Change Set with the name ``change_set_name``. """
@@ -392,17 +363,17 @@ class Stack(object):
         self.read_template_file()
         logger.info('Creating change set {0} for stack {1}'.format(change_set_name, self.stackname))
-        response = self.connection_manager.call('cloudformation', 'create_change_set',
-            {'StackName':self.stackname,
+        self.connection_manager.call(
+            'cloudformation', 'create_change_set',
+            {'StackName': self.stackname,
             'ChangeSetName': change_set_name,
-            'TemplateBody':self.cfn_template,
-            'Parameters':self.cfn_parameters,
-            'Tags':[ {"Key": str(k), "Value": str(v)} for k, v in self.tags.items() ],
-            'Capabilities':['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
+             'TemplateBody': self.cfn_template,
+             'Parameters': self.cfn_parameters,
+             'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
+             'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']},
             profile=self.profile, region=self.region)
         return self._wait_for_completion()
     def describe(self):
         """
         Returns the a description of the stack.
@@ -414,7 +385,6 @@ class Stack(object):
             {"StackName": self.stackname},
             profile=self.profile, region=self.region)
     def get_status(self):
         """
         Returns the stack's status.
@@ -429,7 +399,6 @@ class Stack(object):
             raise e
         return status
     def describe_events(self):
         """
         Returns a dictionary contianing the stack events.
@@ -449,7 +418,6 @@ class Stack(object):
         return status
     def _wait_for_completion(self, timeout=0):
         """
         Waits for a stack operation to finish. Prints CloudFormation events while it waits.
@@ -477,7 +445,6 @@ class Stack(object):
         return status
     @staticmethod
     def _get_simplified_status(status):
         """ Returns the simplified Stack Status. """
@@ -493,7 +460,6 @@ class Stack(object):
         else:
             return 'Unknown'
     def _log_new_events(self):
         """
         Log the latest stack events while the stack is being built.
@@ -517,7 +483,6 @@ class Stack(object):
             ]))
             self.most_recent_event_datetime = event["Timestamp"]
     def _ensure_dirs(self, path):
         # Ensure output dirs exist
         if not os.path.exists(os.path.join(self.ctx[path], self.rel_path)):

View File

@@ -13,7 +13,7 @@ class StackGroup(object):
         self.name = None
         self.ctx = ctx
         self.path = path
-        self.rel_path = os.path.relpath(path ,ctx['config_path'])
+        self.rel_path = os.path.relpath(path, ctx['config_path'])
         self.config = {}
         self.sgs = []
         self.stacks = []
@@ -21,7 +21,6 @@ class StackGroup(object):
         if self.rel_path == '.':
             self.rel_path = ''
     def dump_config(self):
         for sg in self.sgs:
             sg.dump_config()
@@ -31,7 +30,6 @@ class StackGroup(object):
         for s in self.stacks:
             s.dump_config()
     def read_config(self, parent_config={}):
         if not os.path.isdir(self.path):
@@ -66,18 +64,15 @@ class StackGroup(object):
             if stackname_prefix:
                 stackname = stackname_prefix + stackname
-            new_stack = Stack(name=stackname, template=template,
-                path=stack_path, rel_path=str(self.rel_path),
-                tags=dict(tags), parameters=dict(parameters),
-                template_vars=dict(template_vars),
-                region=str(region), profile=str(profile),
-                ctx=self.ctx
-            )
+            new_stack = Stack(
+                name=stackname, template=template, path=stack_path, rel_path=str(self.rel_path),
+                tags=dict(tags), parameters=dict(parameters), template_vars=dict(template_vars),
+                region=str(region), profile=str(profile), ctx=self.ctx)
             new_stack.read_config()
             self.stacks.append(new_stack)
         # Create StackGroups recursively
-        for sub_group in [f.path for f in os.scandir(self.path) if f.is_dir() ]:
+        for sub_group in [f.path for f in os.scandir(self.path) if f.is_dir()]:
             sg = StackGroup(sub_group, self.ctx)
             sg.read_config(_config)
@@ -86,7 +81,6 @@ class StackGroup(object):
         # Return raw, merged config to parent
         return _config
     def get_stacks(self, name=None, recursive=True, match_by='name'):
         """ Returns [stack] matching stack_name or [all] """
         stacks = []
@@ -105,11 +99,10 @@ class StackGroup(object):
         for sg in self.sgs:
             s = sg.get_stacks(name, recursive, match_by)
             if s:
-                stacks = stacks+s
+                stacks = stacks + s
         return stacks
     def get_stackgroup(self, name=None, recursive=True, match_by='name'):
         """ Returns stack group matching stackgroup_name or all if None """
         if not name or (self.name == name and match_by == 'name') or (self.path.endswith(name) and match_by == 'path'):
@@ -127,22 +120,19 @@ class StackGroup(object):
         return None
-    # TODO: Integrate properly into stackgroup class, borken for now
+    # TODO: Integrate properly into stackgroup class, broken for now
     # stackoutput inspection
-    def BROKEN_inspect_stacks(conglomerate):
+    def BROKEN_inspect_stacks(self, conglomerate):
         # Get all stacks of the conglomertate
-        client = Connection.get_connection('cloudformation')
-        running_stacks=client.describe_stacks()
+        response = self.connection_manager.call('cloudformation', 'decribe_stacks')
         stacks = []
-        for stack in running_stacks['Stacks']:
+        for stack in response['Stacks']:
             for tag in stack['Tags']:
                 if tag['Key'] == 'Conglomerate' and tag['Value'] == conglomerate:
                     stacks.append(stack)
                     break
         # Gather stack outputs, use Tag['Artifact'] as name space: Artifact.OutputName, same as FortyTwo
         stack_outputs = {}
         for stack in stacks:
@@ -160,10 +150,9 @@ class StackGroup(object):
             try:
                 for output in stack['Outputs']:
                     # Gather all outputs of the stack into one dimensional key=value structure
-                    stack_outputs[key_prefix+output['OutputKey']]=output['OutputValue']
+                    stack_outputs[key_prefix + output['OutputKey']] = output['OutputValue']
             except KeyError:
                 pass
         # Add outputs from stacks into the data for jinja under StackOutput
         return stack_outputs

View File

@@ -6,6 +6,7 @@ import boto3
 logger = logging.getLogger(__name__)
 def read_yaml_file(path):
     data = {}
     if os.path.exists(path):
@@ -16,7 +17,7 @@ def read_yaml_file(path):
             if _data:
                 data.update(_data)
         except Exception as e:
-            logger.warning("Error reading config file: {} ({})".format(path,e))
+            logger.warning("Error reading config file: {} ({})".format(path, e))
     return data
@@ -67,7 +68,6 @@ def setup_logging(debug):
         datefmt="%Y-%m-%d %H:%M:%S"
     )
     log_handler = logging.StreamHandler()
     log_handler.setFormatter(formatter)
     logger = logging.getLogger("cloudbender")
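The final hunk shows setup_logging attaching a dedicated StreamHandler to the 'cloudbender' logger; since cli() now calls setup_logging(debug) purely for its side effect, the return value no longer matters. A hedged reconstruction around the lines shown (the format string and level policy are assumptions):

import logging

def setup_logging(debug):
    formatter = logging.Formatter(
        fmt="[%(asctime)s] %(name)s %(message)s",  # assumed format
        datefmt="%Y-%m-%d %H:%M:%S")
    log_handler = logging.StreamHandler()
    log_handler.setFormatter(formatter)
    logger = logging.getLogger("cloudbender")
    logger.addHandler(log_handler)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
    return logger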