Allow dependencies to be resolved automatically and merged with stack configs, resolve global deps, code cleanup
commit 54df655648
parent 5e76eabbc5
@@ -126,14 +126,19 @@ def sort_stacks(ctx, stacks):
     data = {}
     for s in stacks:
-        # Resolve dependencies
+        # To resolve dependencies we have to read each template
+        s.read_template_file()
         deps = []
         for d in s.dependencies:
             # For now we assume deps are artifacts so we prepend them with our local profile and region to match stack.id
             for dep_stack in cb.filter_stacks({'region': s.region, 'profile': s.profile, 'provides': d}):
                 deps.append(dep_stack.id)
+            # also look for global services
+            for dep_stack in cb.filter_stacks({'region': 'global', 'profile': s.profile, 'provides': d}):
+                deps.append(dep_stack.id)
 
         data[s.id] = set(deps)
+        logger.debug("Stack {} depends on {}".format(s.id, deps))
 
     for k, v in data.items():
         v.discard(k)  # Ignore self dependencies
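
The map built in sort_stacks() — each stack id pointing at the set of stack ids it depends on, with self-dependencies discarded — is exactly the shape a topological sort consumes. A minimal sketch of ordering such a map, assuming Python 3.9+ graphlib purely for illustration (the stack ids are invented and the sorter CloudBender actually uses is not part of this hunk):

# Illustration only: ordering a {stack.id: set(dependency ids)} map.
from graphlib import TopologicalSorter

data = {
    "prod.eu-central-1.vpc": set(),
    "prod.global.dns": set(),
    "prod.eu-central-1.app": {"prod.eu-central-1.vpc", "prod.global.dns"},
}

# TopologicalSorter treats the mapped sets as predecessors, so
# static_order() yields every stack after the stacks it depends on.
print(list(TopologicalSorter(data).static_order()))
# e.g. ['prod.eu-central-1.vpc', 'prod.global.dns', 'prod.eu-central-1.app']
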
@@ -22,6 +22,9 @@ def get_custom_att(context, att=None, ResourceName="FortyTwo", attributes={}, fl
     attributes for the specified CustomResource to include them later in
     the actual CustomResource include property """
 
+    if ResourceName not in attributes:
+        attributes[ResourceName] = set()
+
     # If flush is set all we do is empty our state dict
     if flush:
         attributes.clear()
@@ -32,21 +35,24 @@ def get_custom_att(context, att=None, ResourceName="FortyTwo", attributes={}, fl
         return attributes
 
     # If dependencies, return all Artifacts this stack depends on, which are the attr of FortyTwo
+    config = context.get_all()['_config']
     if dependencies:
         deps = set()
-        if ResourceName in attributes:
-            for att in attributes[ResourceName]:
+        try:
+            for att in attributes['FortyTwo']:
                 deps.add(att.split('.')[0])
+        except KeyError:
+            pass
+
+        # Incl. FortyTwo itself if any FortyTwo function is used
+        if config['cfn']['Mode'] == "FortyTwo" and attributes:
+            deps.add('FortyTwo')
 
         return list(deps)
 
     # If call with an attribute, return fragement and register
     if att:
-        if ResourceName not in attributes:
-            attributes[ResourceName] = set()
-
         attributes[ResourceName].add(att)
-        config = context.get_all()['_config']
         if config['cfn']['Mode'] == "FortyTwo":
            return('{{ "Fn::GetAtt": ["{0}", "{1}"] }}'.format(ResourceName, att))
         elif config['cfn']['Mode'] == "AWSImport" and ResourceName == "FortyTwo":
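
For readers unfamiliar with the FortyTwo helper: get_custom_att() keeps a per-resource state dict of every attribute a template asks for, and with dependencies=True it turns each recorded "Stack.Output" string into the providing stack's name. A standalone sketch of that accumulate-then-extract pattern, using illustrative helper names rather than the real Jinja2 wiring:

# Standalone sketch of the pattern above; 'register'/'dependencies' are
# illustrative names, not functions from the CloudBender code base.
attributes = {}

def register(att, resource_name="FortyTwo"):
    # mirrors: attributes[ResourceName].add(att)
    attributes.setdefault(resource_name, set()).add(att)

def dependencies():
    # mirrors the dependencies=True branch: the stack name is whatever
    # precedes the first dot of each recorded "Stack.Output" attribute
    deps = set()
    try:
        for att in attributes['FortyTwo']:
            deps.add(att.split('.')[0])
    except KeyError:
        pass
    return list(deps)

register('vpc.VpcId')
register('vpc.PublicSubnetIds')
register('dns.HostedZoneId')
print(sorted(dependencies()))   # ['dns', 'vpc']
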
@@ -48,6 +48,7 @@ class Stack(object):
         self.provides = template
         self.cfn_template = None
         self.cfn_parameters = []
+        self.cfn_data = None
         self.connection_manager = BotoConnection(self.profile, self.region)
         self.ctx = ctx
         self.status = None
@@ -62,7 +63,7 @@ class Stack(object):
 
     def read_config(self):
         _config = read_yaml_file(self.path)
-        for p in ["region", "stackname", "template", "dependencies", "default_lock", "multi_delete", "provides"]:
+        for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides"]:
             if p in _config:
                 setattr(self, p, _config[p])
 
@@ -76,6 +77,10 @@ class Stack(object):
         if 'vars' in _config:
             self.template_vars = dict_merge(self.template_vars, _config['vars'])
 
+        if 'dependencies' in _config:
+            for dep in _config['dependencies']:
+                self.dependencies.add(dep)
+
         logger.debug("Stack {} added.".format(self.id))
 
 
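
Pulling "dependencies" out of the generic setattr() loop means config-declared dependencies are merged into the existing set instead of overwriting whatever was discovered elsewhere. A hedged sketch of that merge with an invented stack config (only the dependencies key is taken from the diff):

# Illustrative only: why 'dependencies' is added item by item instead of
# being assigned via setattr() like the other config keys.
import yaml

_config = yaml.safe_load("""
region: eu-central-1
template: app.yaml
dependencies:
  - vpc
  - dns
""")

dependencies = {'vpc'}              # e.g. already present from template metadata
if 'dependencies' in _config:
    for dep in _config['dependencies']:
        dependencies.add(dep)       # merge, never replace

print(sorted(dependencies))         # ['dns', 'vpc']
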
@@ -153,7 +158,7 @@ class Stack(object):
         rendered = template.render({ 'cfn': self.template_vars, 'Metadata': template_metadata })
 
         try:
-            data = yaml.load(rendered)
+            self.data = yaml.load(rendered)
         except:
             # In case we rendered invalid yaml this helps to debug
             logger.error(rendered)
@@ -162,15 +167,27 @@ class Stack(object):
         # Some sanity checks and final cosmetics
         # Check for empty top level Parameters, Outputs and Conditions and remove
         for key in ['Parameters', 'Outputs', 'Conditions']:
-            if key in data and data[key] is None:
+            if key in self.data and self.data[key] is None:
                 # Delete from data structure which also takes care of json
-                del data[key]
+                del self.data[key]
                 # but also remove from rendered for the yaml file
                 rendered = rendered.replace('\n'+key+":",'')
 
         # Condense multiple empty lines to one
         self.cfn_template = re.sub(r'\n\s*\n', '\n\n', rendered)
+
+        # Update internal data structures
+        self._parse_metadata()
+
+
+    def _parse_metadata(self):
+        # Extract dependencies if present
+        try:
+            for dep in self.data['Metadata']['CloudBender']['Dependencies']:
+                self.dependencies.add(dep)
+        except KeyError:
+            pass
+
 
     def write_template_file(self):
         if self.cfn_template:
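
The new _parse_metadata() only looks at one key path, Metadata -> CloudBender -> Dependencies, and silently accepts templates that do not carry it. A small sketch of a template with that block (the key path comes from the diff, the values are examples):

# Sketch of the Metadata block _parse_metadata() consumes; values are examples.
import yaml

data = yaml.safe_load("""
Metadata:
  CloudBender:
    Dependencies:
      - vpc
      - dns
Resources: {}
""")

dependencies = set()
try:
    for dep in data['Metadata']['CloudBender']['Dependencies']:
        dependencies.add(dep)
except KeyError:
    pass                            # no block means no declared dependencies

print(sorted(dependencies))         # ['dns', 'vpc']
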
@@ -194,20 +211,26 @@ class Stack(object):
 
 
     def read_template_file(self):
-        yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
-        with open(yaml_file, 'r') as yaml_contents:
-            self.cfn_template = yaml_contents.read()
-            logger.debug('Read cfn template %s.', yaml_file)
+        """ Reads rendered yaml template from disk and extracts metadata """
+        if not self.cfn_template:
+            yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname+".yaml")
+            with open(yaml_file, 'r') as yaml_contents:
+                self.cfn_template = yaml_contents.read()
+                logger.debug('Read cfn template %s.', yaml_file)
+
+            self.data = yaml.load(self.cfn_template)
+            self._parse_metadata()
+
+        else:
+            logger.debug('Using cached cfn template %s.', yaml_file)
 
 
     def validate(self):
         """Validates the rendered template via cfn-lint"""
-        if not self.cfn_template:
-            self.read_template_file()
+        self.read_template_file()
 
-        data = yaml.load(self.cfn_template)
         try:
-            ignore_checks = data['Metadata']['cfnlint_ignore']
+            ignore_checks = self.data['Metadata']['cfnlint_ignore']
         except KeyError:
             ignore_checks = []
 
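
read_template_file() is now a cached loader: the disk read, the yaml parse into self.data and the metadata extraction only happen while self.cfn_template is still empty, so callers such as validate() can invoke it unconditionally. A minimal sketch of that load-once idiom, with an illustrative class rather than the real Stack:

# Minimal load-once sketch; 'TemplateHolder' is illustrative, not a CloudBender class.
import yaml

class TemplateHolder:
    def __init__(self, path):
        self.path = path
        self.cfn_template = None
        self.data = None

    def read_template_file(self):
        if not self.cfn_template:
            with open(self.path, 'r') as f:
                self.cfn_template = f.read()               # cache raw text
            self.data = yaml.safe_load(self.cfn_template)  # cache parsed form

    def validate(self):
        self.read_template_file()                          # safe to call repeatedly
        return self.data.get('Metadata', {}).get('cfnlint_ignore', [])
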
@@ -238,10 +261,7 @@ class Stack(object):
     def resolve_parameters(self):
         """ Renders parameters for the stack based on the source template and the environment configuration """
 
-        if not self.cfn_template:
-            self.read_template_file()
-
-        data = yaml.load(self.cfn_template)
+        self.read_template_file()
 
         # Inspect all outputs of the running Conglomerate members
         # if we run in Piped Mode
@@ -252,9 +272,9 @@ class Stack(object):
         # except KeyError:
         # pass
 
-        if 'Parameters' in data:
+        if 'Parameters' in self.data:
             self.cfn_parameters = []
-            for p in data['Parameters']:
+            for p in self.data['Parameters']:
                 # In Piped mode we try to resolve all Paramters first via stack_outputs
                 #if config['cfn']['Mode'] == "Piped":
                 # try:
@@ -274,7 +294,7 @@ class Stack(object):
                     logger.info('Got {} = {}'.format(p,value))
                 except KeyError as e:
                     # If we have a Default defined in the CFN skip, as AWS will use it
-                    if 'Default' in data['Parameters'][p]:
+                    if 'Default' in self.data['Parameters'][p]:
                         continue
                     else:
                         logger.error('Cannot find value for parameter {0}'.format(p))
@@ -310,9 +330,7 @@ class Stack(object):
         # Prepare parameters
         self.resolve_parameters()
         self.write_parameter_file()
-
-        if not self.cfn_template:
-            self.read_template_file()
+        self.read_template_file()
 
         logger.info('Creating {0} {1}'.format(self.region, self.stackname))
         response = self.connection_manager.call('cloudformation', 'create_stack',
@@ -332,9 +350,7 @@ class Stack(object):
         # Prepare parameters
         self.resolve_parameters()
         self.write_parameter_file()
-
-        if not self.cfn_template:
-            self.read_template_file()
+        self.read_template_file()
 
         logger.info('Updating {0} {1}'.format(self.region, self.stackname))
         try:
@@ -372,9 +388,7 @@ class Stack(object):
         # Prepare parameters
         self.resolve_parameters()
         self.write_parameter_file()
-
-        if not self.cfn_template:
-            self.read_template_file()
+        self.read_template_file()
 
         logger.info('Creating change set {0} for stack {1}'.format(change_set_name, self.stackname))
         response = self.connection_manager.call('cloudformation', 'create_change_set',
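
Because read_template_file() now guards itself, the create, update and change-set paths above all drop their own `if not self.cfn_template` check and simply call it after preparing parameters. An illustrative call sequence under that assumption:

# Illustrative call order for the three call sites above; 'stack' stands in
# for a Stack instance, the method names come from the hunks themselves.
def deploy(stack):
    stack.resolve_parameters()    # itself calls read_template_file() internally
    stack.write_parameter_file()
    stack.read_template_file()    # no-op if the template is already cached
    # ...followed by the cloudformation create_stack / update_stack /
    # create_change_set call shown in the respective hunk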