# cloudbender/stack.py
import os
import re
import hashlib
import oyaml as yaml
import json
import time
from datetime import datetime, timedelta
from dateutil.tz import tzutc
from botocore.exceptions import ClientError
from .utils import dict_merge
from .connection import BotoConnection
from .jinja import JinjaEnv, read_config_file
from . import __version__
import cfnlint.core
import logging
logger = logging.getLogger(__name__)
class StackStatus(object):
    """Simplified, provider-agnostic stack states.

    CloudFormation reports many fine-grained statuses; CloudBender only
    distinguishes these four buckets.
    """

    # Operation finished successfully
    COMPLETE = "complete"
    # Operation failed or was rolled back
    FAILED = "failed"
    # Operation still running
    IN_PROGRESS = "in progress"
    # Operation not yet started
    PENDING = "pending"
class Stack(object):
    """A single CloudFormation stack.

    Holds the stack's configuration (tags, parameters, options), renders its
    Jinja template into a CFN template, resolves and persists parameters, and
    drives the create / update / delete lifecycle through boto3 (via
    ``BotoConnection``).
    """

    def __init__(self, name, path, rel_path, tags=None, parameters=None, options=None, region='global', profile=None, template=None, ctx=None):
        """
        :param name: CloudFormation stack name
        :param path: path to the stack's yaml config file
        :param rel_path: path relative to the config root, reused for output files
        :param tags: dict of stack tags
        :param parameters: dict of stack parameter values
        :param options: dict of template options (may carry 'Mode', 'Legacy', ...)
        :param region: AWS region, 'global' if not region specific
        :param profile: AWS profile name
        :param template: template name (without the .yaml.jinja suffix)
        :param ctx: shared context dict (artifact_paths, template_path,
                    parameter_path, ...)
        """
        # BUGFIX: 'ctx' previously defaulted to a mutable '{}', which Python
        # evaluates once and shares across every Stack instance. Use a None
        # sentinel to give each instance its own dict.
        if ctx is None:
            ctx = {}

        self.id = (profile, region, name)
        self.stackname = name
        self.path = path
        self.rel_path = rel_path
        self.tags = tags
        self.parameters = parameters
        self.options = options
        self.region = region
        self.profile = profile
        self.template = template
        self.md5 = None
        self.mode = 'CloudBender'
        self.provides = template
        self.cfn_template = None       # rendered template text
        self.cfn_parameters = []       # resolved [{'ParameterKey':..,'ParameterValue':..}]
        self.cfn_data = None           # yaml.safe_load() of cfn_template
        self.connection_manager = BotoConnection(self.profile, self.region)
        self.ctx = ctx
        self.status = None
        self.dependencies = set()
        self.default_lock = None
        self.multi_delete = True

    def dump_config(self):
        """Log the full internal state of this stack for debugging."""
        logger.debug("<Stack {}: {}>".format(self.id, vars(self)))

    def read_config(self):
        """Read the stack's yaml config file and merge it into this object."""
        _config = read_config_file(self.path)

        # Plain scalar overrides
        for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides"]:
            if p in _config:
                setattr(self, p, _config[p])

        # Dict-valued settings are deep-merged with what we already have
        for p in ["parameters", "tags"]:
            if p in _config:
                setattr(self, p, dict_merge(getattr(self, p), _config[p]))

        # Inject Artifact if not explicitly set
        if 'Artifact' not in self.tags:
            self.tags['Artifact'] = self.provides

        # backwards comp: 'vars' is the old name for 'options'
        if 'vars' in _config:
            self.options = dict_merge(self.options, _config['vars'])

        if 'options' in _config:
            self.options = dict_merge(self.options, _config['options'])

        if 'Mode' in self.options:
            self.mode = self.options['Mode']

        if 'dependencies' in _config:
            for dep in _config['dependencies']:
                self.dependencies.add(dep)

        logger.debug("Stack {} added.".format(self.id))

    def render(self):
        """Renders the cfn jinja template for this stack"""
        template_metadata = {
            'Template.Name': self.template,
            # Placeholder, replaced by the final md5 at the end of render()
            'Template.Hash': "__HASH__",
            'CloudBender.Version': __version__
        }
        _config = {'mode': self.mode, 'options': self.options, 'metadata': template_metadata}

        jenv = JinjaEnv(self.ctx['artifact_paths'])
        jenv.globals['_config'] = _config

        template = jenv.get_template('{0}{1}'.format(self.template, '.yaml.jinja'))

        logger.info('Rendering %s', template.filename)

        try:
            self.cfn_template = template.render(_config)
            self.cfn_data = yaml.safe_load(self.cfn_template)
        except Exception:
            # In case we rendered invalid yaml this helps to debug
            if self.cfn_template:
                logger.error(self.cfn_template)
            # BUGFIX: bare raise preserves the original traceback
            raise

        if not re.search('CloudBender::', self.cfn_template):
            logger.info("CloudBender not required -> removing Transform and Conglomerate parameter")
            self.cfn_template = self.cfn_template.replace('Transform: [CloudBender]', '')

            # NOTE(review): yaml indentation below reconstructed from a
            # whitespace-mangled source — confirm against rendered templates
            _res = """
  Conglomerate:
    Type: String
    Description: Project / Namespace this stack is part of
"""
            self.cfn_template = re.sub(_res, '', self.cfn_template)

        # Add Legacy FortyTwo resource to prevent AWS from replacing existing resources for NO reason ;-(
        include = []
        search_refs(self.cfn_data, include)
        if include and 'Legacy' in self.options:
            _res = """
  FortyTwo:
    Type: Custom::FortyTwo
    Properties:
      ServiceToken:
        Fn::Sub: "arn:aws:lambda:${{AWS::Region}}:${{AWS::AccountId}}:function:FortyTwo"
      UpdateToken: __HASH__
      Include: {}""".format(sorted(set(include)))

            self.cfn_template = re.sub(r'Resources:', r'Resources:' + _res + '\n', self.cfn_template)
            logger.info("Legacy Mode -> added Custom::FortyTwo")

            # Re-read updated template
            self.cfn_data = yaml.safe_load(self.cfn_template)

        # Check for empty top level Parameters, Outputs and Conditions and remove
        for key in ['Parameters', 'Outputs', 'Conditions']:
            if key in self.cfn_data and not self.cfn_data[key]:
                del self.cfn_data[key]
                self.cfn_template = self.cfn_template.replace('\n' + key + ":", '')

        # Remove and condense multiple empty lines
        self.cfn_template = re.sub(r'\n\s*\n', '\n\n', self.cfn_template)
        self.cfn_template = re.sub(r'^\s*', '', self.cfn_template)
        self.cfn_template = re.sub(r'\s*$', '', self.cfn_template)

        # set md5 last, template text is final now
        self.md5 = hashlib.md5(self.cfn_template.encode('utf-8')).hexdigest()
        self.cfn_template = self.cfn_template.replace('__HASH__', self.md5)

        # Update internal data structures
        self._parse_metadata()
        # BUGFIX: debug leftover 'print(self.dependencies)' replaced by logger
        logger.debug("Stack %s dependencies: %s", self.stackname, self.dependencies)

    def _parse_metadata(self):
        """Extract dependency information from the parsed template."""
        # Explicitly declared dependencies
        try:
            for dep in self.cfn_data['Metadata']['CloudBender']['Dependencies']:
                self.dependencies.add(dep)
        except KeyError:
            pass

        # Implicit CloudBender or FortyTwo dependencies via references
        include = []
        search_refs(self.cfn_data, include)
        for ref in include:
            self.dependencies.add(ref.split('.')[0])

    def write_template_file(self):
        """Write the rendered cfn template to the template output tree."""
        if self.cfn_template:
            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
            self._ensure_dirs('template_path')
            with open(yaml_file, 'w') as yaml_contents:
                yaml_contents.write(self.cfn_template)
                logger.info('Wrote %s to %s', self.template, yaml_file)
        else:
            logger.error('No cfn template rendered yet for stack {}.'.format(self.stackname))

    def delete_template_file(self):
        """Remove the rendered cfn template file, ignoring a missing file."""
        yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
        try:
            os.remove(yaml_file)
            logger.debug('Deleted cfn template %s.', yaml_file)
        except OSError:
            pass

    def read_template_file(self):
        """ Reads rendered yaml template from disk and extracts metadata """
        if not self.cfn_template:
            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
            with open(yaml_file, 'r') as yaml_contents:
                self.cfn_template = yaml_contents.read()
                logger.debug('Read cfn template %s.', yaml_file)

            self.cfn_data = yaml.safe_load(self.cfn_template)
            self._parse_metadata()
        else:
            logger.debug('Using cached cfn template %s.', self.stackname)

    def validate(self):
        """Validates the rendered template via cfn-lint"""
        self.read_template_file()

        # Templates can opt out of individual checks via Metadata
        try:
            ignore_checks = self.cfn_data['Metadata']['cfnlint_ignore']
        except KeyError:
            ignore_checks = []

        # Ignore some more checks around injected parameters as we generate these
        if self.mode == "Piped":
            ignore_checks = ignore_checks + ['W2505', 'W2509', 'W2507']

        # Ignore checks regarding overloaded properties
        if self.mode == "CloudBender":
            ignore_checks = ignore_checks + ['E3035', 'E3002', 'E3012', 'W2001', 'E3001']

        filename = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
        logger.info('Validating {0}'.format(filename))

        lint_args = ['--template', filename]
        if ignore_checks:
            lint_args.append('--ignore-checks')
            lint_args = lint_args + ignore_checks
            logger.info('Ignoring checks: {}'.format(','.join(ignore_checks)))

        (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args)
        (template, rules, matches) = cfnlint.core.get_template_rules(filename, args)
        if not matches:
            matches.extend(cfnlint.core.run_cli(filename, template, rules, ['us-east-1'], None))
        if matches:
            for match in matches:
                logger.error(formatter._format(match))
        else:
            logger.info("Passed.")

    def resolve_parameters(self):
        """ Renders parameters for the stack based on the source template and the environment configuration """
        self.read_template_file()

        # NOTE: Piped mode used to resolve parameters from the outputs of the
        # running Conglomerate members first (via inspect_stacks) before
        # falling back to the config tree — disabled / work in progress.

        if 'Parameters' in self.cfn_data:
            self.cfn_parameters = []
            for p in self.cfn_data['Parameters']:
                # Key name in config tree is: stacks.<self.stackname>.parameters.<parameter>
                try:
                    value = str(self.parameters[p])
                    self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})
                    logger.info('{} {} Parameter {}={}'.format(self.region, self.stackname, p, value))
                except KeyError:
                    # If we have a Default defined in the CFN skip, as AWS will use it
                    if 'Default' in self.cfn_data['Parameters'][p]:
                        continue
                    else:
                        logger.error('Cannot find value for parameter {0}'.format(p))

    def write_parameter_file(self):
        """Write resolved parameters as json for AWS CFN, or remove a stale
        parameter file if this stack has no parameters."""
        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")

        if self.cfn_parameters:
            self._ensure_dirs('parameter_path')
            with open(parameter_file, 'w') as parameter_contents:
                parameter_contents.write(json.dumps(self.cfn_parameters, indent=2, separators=(',', ': '), sort_keys=True))
                logger.info('Wrote json parameters for %s to %s', self.stackname, parameter_file)
        else:
            # BUGFIX: previously an empty '[]' file was written and then
            # immediately deleted; now we only remove leftovers from previous runs
            if os.path.isfile(parameter_file):
                os.remove(parameter_file)

    def delete_parameter_file(self):
        """Remove the stack's parameter file, ignoring a missing file."""
        parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")
        try:
            os.remove(parameter_file)
            logger.debug('Deleted parameter %s.', parameter_file)
        except OSError:
            pass

    def create(self):
        """Creates a stack """
        # Prepare parameters
        self.resolve_parameters()
        self.write_parameter_file()
        self.read_template_file()

        logger.info('Creating {0} {1}'.format(self.region, self.stackname))
        self.connection_manager.call(
            'cloudformation', 'create_stack',
            {'StackName': self.stackname,
             'TemplateBody': self.cfn_template,
             'Parameters': self.cfn_parameters,
             'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
             'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
            profile=self.profile, region=self.region)

        return self._wait_for_completion()

    def update(self):
        """Updates an existing stack """
        # Prepare parameters
        self.resolve_parameters()
        self.write_parameter_file()
        self.read_template_file()

        logger.info('Updating {0} {1}'.format(self.region, self.stackname))
        try:
            self.connection_manager.call(
                'cloudformation', 'update_stack',
                {'StackName': self.stackname,
                 'TemplateBody': self.cfn_template,
                 'Parameters': self.cfn_parameters,
                 'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
                 'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
                profile=self.profile, region=self.region)

        except ClientError as e:
            # A no-op update is not an error for us
            if 'No updates are to be performed' in e.response['Error']['Message']:
                logger.info('No updates for {0}'.format(self.stackname))
                return StackStatus.COMPLETE
            else:
                # BUGFIX: bare raise preserves the original traceback
                raise

        return self._wait_for_completion()

    def delete(self):
        """Deletes a stack """
        logger.info('Deleting {0} {1}'.format(self.region, self.stackname))
        self.connection_manager.call(
            'cloudformation', 'delete_stack', {'StackName': self.stackname},
            profile=self.profile, region=self.region)

        return self._wait_for_completion()

    def create_change_set(self, change_set_name):
        """ Creates a Change Set with the name ``change_set_name``. """
        # Prepare parameters
        self.resolve_parameters()
        self.write_parameter_file()
        self.read_template_file()

        logger.info('Creating change set {0} for stack {1}'.format(change_set_name, self.stackname))
        self.connection_manager.call(
            'cloudformation', 'create_change_set',
            {'StackName': self.stackname,
             'ChangeSetName': change_set_name,
             'TemplateBody': self.cfn_template,
             'Parameters': self.cfn_parameters,
             'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
             # CAPABILITY_AUTO_EXPAND added for consistency with create()/update()
             'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']},
            profile=self.profile, region=self.region)
        return self._wait_for_completion()

    def describe(self):
        """
        Returns the a description of the stack.
        :returns: A stack description.
        """
        return self.connection_manager.call(
            "cloudformation",
            "describe_stacks",
            {"StackName": self.stackname},
            profile=self.profile, region=self.region)

    def get_status(self):
        """
        Returns the stack's status.
        :returns: The stack's status, or None if the stack does not exist.
        """
        try:
            status = self.describe()["Stacks"][0]["StackStatus"]
        except ClientError as e:
            if e.response["Error"]["Message"].endswith("does not exist"):
                return None
            else:
                # BUGFIX: bare raise preserves the original traceback
                raise
        return status

    def describe_events(self):
        """
        Returns a dictionary containing the stack events.
        :returns: The CloudFormation events for a stack, or None if it does not exist.
        """
        try:
            status = self.connection_manager.call(
                "cloudformation",
                "describe_stack_events",
                {"StackName": self.stackname},
                profile=self.profile, region=self.region)
        except ClientError as e:
            if e.response["Error"]["Message"].endswith("does not exist"):
                return None
            else:
                # BUGFIX: bare raise preserves the original traceback
                raise
        return status

    def _wait_for_completion(self, timeout=0):
        """
        Waits for a stack operation to finish. Prints CloudFormation events while it waits.

        :param timeout: Timeout in seconds before returning; 0 waits forever
        :returns: The final (simplified) stack status, or None if the stack vanished.
        """
        def timed_out(elapsed):
            return elapsed >= timeout if timeout else False

        status = StackStatus.IN_PROGRESS

        # Small overlap so we do not miss events raised just before we started
        self.most_recent_event_datetime = (
            datetime.now(tzutc()) - timedelta(seconds=3)
        )
        elapsed = 0
        while status == StackStatus.IN_PROGRESS and not timed_out(elapsed):
            status = self._get_simplified_status(self.get_status())
            if not status:
                return None

            self._log_new_events()
            time.sleep(4)
            elapsed += 4

        return status

    @staticmethod
    def _get_simplified_status(status):
        """ Returns the simplified Stack Status. """
        if status:
            # ROLLBACK_COMPLETE must be checked before the generic _COMPLETE suffix
            if status.endswith("ROLLBACK_COMPLETE"):
                return StackStatus.FAILED
            elif status.endswith("_COMPLETE"):
                return StackStatus.COMPLETE
            elif status.endswith("_IN_PROGRESS"):
                return StackStatus.IN_PROGRESS
            elif status.endswith("_FAILED"):
                return StackStatus.FAILED
            else:
                return 'Unknown'

    def _log_new_events(self):
        """
        Log the latest stack events while the stack is being built.
        """
        events = self.describe_events()
        if events:
            events = events["StackEvents"]
            events.reverse()
            new_events = [
                event for event in events
                if event["Timestamp"] > self.most_recent_event_datetime
            ]
            for event in new_events:
                logger.info(" ".join([
                    self.region,
                    self.stackname,
                    event["LogicalResourceId"],
                    event["ResourceType"],
                    event["ResourceStatus"],
                    event.get("ResourceStatusReason", "")
                ]))
                self.most_recent_event_datetime = event["Timestamp"]

    def _ensure_dirs(self, path):
        """Ensure the output dir self.ctx[path]/self.rel_path exists."""
        if not os.path.exists(os.path.join(self.ctx[path], self.rel_path)):
            os.makedirs(os.path.join(self.ctx[path], self.rel_path))
2019-04-18 16:30:50 +00:00
def search_refs(template, attributes):
    """Recursively walk *template* and collect external stack references.

    Appends to the *attributes* list:
      - the attribute name of any ``Fn::GetAtt`` aimed at ``FortyTwo``
      - the ``StackTags.Artifact`` value of any ``CloudBender::StackRef``
    """
    if isinstance(template, dict):
        for key, value in template.items():
            # Legacy FortyTwo lookup: Fn::GetAtt: [FortyTwo, <attr>]
            if key == "Fn::GetAtt" and isinstance(value, list) and value[0] == "FortyTwo":
                attributes.append(value[1])

            # Native CloudBender stack reference
            if key == "CloudBender::StackRef":
                try:
                    attributes.append(value['StackTags']['Artifact'])
                except KeyError:
                    pass

            if isinstance(value, (dict, list)):
                search_refs(value, attributes)

    elif isinstance(template, list):
        for item in template:
            if isinstance(item, (dict, list)):
                search_refs(item, attributes)