2018-11-22 18:31:59 +00:00
import os
import re
import hashlib
2021-01-11 14:34:50 +00:00
import yaml
2018-11-22 18:31:59 +00:00
import time
2019-12-09 13:32:39 +00:00
import pathlib
2019-07-28 13:02:18 +00:00
import pprint
2021-09-20 14:19:14 +00:00
import pulumi
2018-11-22 18:31:59 +00:00
from datetime import datetime , timedelta
from dateutil . tz import tzutc
from botocore . exceptions import ClientError
2020-08-12 15:07:56 +00:00
from . utils import dict_merge , search_refs , ensure_dir , get_s3_url
2018-11-22 18:31:59 +00:00
from . connection import BotoConnection
2019-03-06 19:57:31 +00:00
from . jinja import JinjaEnv , read_config_file
2018-11-22 18:31:59 +00:00
from . import __version__
2021-03-11 18:25:02 +00:00
from . exceptions import ParameterNotFound , ParameterIllegalValue , ChecksumError
2021-09-20 14:19:14 +00:00
from . hooks import exec_hooks , pulumi_ws
from . pulumi import pulumi_init
2018-11-22 18:31:59 +00:00
import cfnlint . core
2020-07-31 21:35:14 +00:00
import cfnlint . template
import cfnlint . graph
2018-11-22 18:31:59 +00:00
2021-09-20 14:19:14 +00:00
import importlib . resources as pkg_resources
2020-06-05 10:08:19 +00:00
from . import templates
2018-11-22 18:31:59 +00:00
import logging
logger = logging . getLogger ( __name__ )
2021-01-11 14:34:50 +00:00
# Ignore any !<Constructors> during re-loading of CFN templates
class SafeLoaderIgnoreUnknown(yaml.SafeLoader):
    """yaml.SafeLoader variant that tolerates unknown !<Tag> constructors.

    Rendered CloudFormation templates contain intrinsic-function short forms
    (eg. !Ref, !Sub); instead of raising on them, any unrecognised tag is
    simply resolved to its tag string so the template can be re-parsed.
    """

    def ignore_unknown(self, node):
        # Resolve every unknown constructor to its plain tag name
        return node.tag


# Register the catch-all constructor (tag=None) on the custom loader
SafeLoaderIgnoreUnknown.add_constructor(None, SafeLoaderIgnoreUnknown.ignore_unknown)
2018-11-22 18:31:59 +00:00
class Stack ( object ) :
2019-09-02 11:13:40 +00:00
    def __init__(self, name, template, path, rel_path, ctx):
        """Represents a single stack managed by CloudBender.

        Args:
            name: logical stack name
            template: name of the source template this stack renders
            path: path to the stack's own config file
            rel_path: path relative to the config root, reused for template/output/doc locations
            ctx: shared context dict holding the various artifact paths
        """
        self.stackname = name
        self.template = template
        self.path = pathlib.Path(path)
        self.rel_path = rel_path
        self.ctx = ctx
        self.tags = {}            # tags applied to the CFN stack
        self.parameters = {}      # configured values for template parameters
        self.outputs = {}         # outputs of the deployed stack (filled by get_outputs)
        self.options = {}         # CloudBender options (Mode, StoreOutputs, ...)
        self.region = 'global'    # 'global' is a CloudBender pseudo region (see validate)
        self.profile = 'default'
        self.onfailure = 'DELETE'
        self.notfication_sns = []
        self.id = (self.profile, self.region, self.stackname)
        self.aws_stackid = None   # StackId returned by create/update/delete calls

        self.md5 = None           # checksum of the rendered template (set in render / _parse_metadata)
        self.mode = 'CloudBender'
        self.provides = template
        self.cfn_template = None  # rendered template text
        self.cfn_parameters = []  # parameters in boto3 ParameterKey/ParameterValue form
        self.cfn_data = None      # parsed YAML dict of the rendered template
        self.connection_manager = BotoConnection(self.profile, self.region)
        self.status = None
        self.store_outputs = False
        self.dependencies = set()
        # stack lifecycle hooks, populated from template Metadata (see _parse_metadata)
        self.hooks = {'post_create': [], 'post_update': [], 'pre_create': [], 'pre_update': []}
        self.default_lock = None
        self.multi_delete = True
        self.template_bucket_url = None
        self.work_dir = None      # scratch dir for Pulumi workspaces
        self.pulumi = {}          # Pulumi specific settings
2018-11-22 18:31:59 +00:00
def dump_config ( self ) :
2019-07-28 13:02:18 +00:00
logger . debug ( " <Stack {} : {} > " . format ( self . id , pprint . pformat ( vars ( self ) ) ) )
2018-11-22 18:31:59 +00:00
2019-09-02 11:13:40 +00:00
def read_config ( self , sg_config = { } ) :
""" reads stack config """
# First set various attributes based on parent stackgroup config
self . tags . update ( sg_config . get ( ' tags ' , { } ) )
self . parameters . update ( sg_config . get ( ' parameters ' , { } ) )
self . options . update ( sg_config . get ( ' options ' , { } ) )
2021-09-20 14:19:14 +00:00
self . pulumi . update ( sg_config . get ( ' pulumi ' , { } ) )
2019-09-02 11:13:40 +00:00
2020-08-12 15:07:56 +00:00
# by default inherit parent group settings
for p in [ ' region ' , ' profile ' , ' notfication_sns ' , ' template_bucket_url ' ] :
if p in sg_config :
setattr ( self , p , sg_config [ p ] )
2019-09-02 11:13:40 +00:00
2020-08-12 15:07:56 +00:00
# now override stack specific settings
2019-09-02 11:13:40 +00:00
_config = read_config_file ( self . path , sg_config . get ( ' variables ' , { } ) )
2020-08-12 15:07:56 +00:00
for p in [ " region " , " stackname " , " template " , " default_lock " , " multi_delete " , " provides " , " onfailure " , " notification_sns " , " template_bucket_url " ] :
2018-11-22 18:31:59 +00:00
if p in _config :
setattr ( self , p , _config [ p ] )
2021-09-20 14:19:14 +00:00
for p in [ " parameters " , " tags " , " pulumi " ] :
2018-11-22 18:31:59 +00:00
if p in _config :
setattr ( self , p , dict_merge ( getattr ( self , p ) , _config [ p ] ) )
2019-04-18 16:30:50 +00:00
# Inject Artifact if not explicitly set
if ' Artifact ' not in self . tags :
self . tags [ ' Artifact ' ] = self . provides
2018-11-22 18:31:59 +00:00
2019-04-18 16:30:50 +00:00
if ' options ' in _config :
self . options = dict_merge ( self . options , _config [ ' options ' ] )
2019-06-15 00:05:15 +00:00
2021-09-20 14:19:14 +00:00
if ' Mode ' in self . options :
2019-06-15 00:05:15 +00:00
self . mode = self . options [ ' Mode ' ]
2018-11-22 18:31:59 +00:00
2020-08-12 15:07:56 +00:00
if ' StoreOutputs ' in self . options and self . options [ ' StoreOutputs ' ] :
2020-06-22 11:30:54 +00:00
self . store_outputs = True
2019-02-04 15:43:34 +00:00
if ' dependencies ' in _config :
for dep in _config [ ' dependencies ' ] :
self . dependencies . add ( dep )
2019-07-03 13:15:18 +00:00
# Some sanity checks
if self . onfailure not in [ " DO_NOTHING " , " ROLLBACK " , " DELETE " ] :
raise ParameterIllegalValue ( " onfailure must be one of DO_NOTHING | ROLLBACK | DELETE " )
2019-01-21 15:24:18 +00:00
logger . debug ( " Stack {} added. " . format ( self . id ) )
2018-11-22 18:31:59 +00:00
    def render(self):
        """Renders the cfn jinja template for this stack"""

        # Metadata injected into the template; the real md5 replaces the
        # __HASH__ placeholder at the very end of render()
        template_metadata = {
            'Template.Name': self.template,
            'Template.Hash': "__HASH__",
            'CloudBender.Version': __version__
        }
        _config = {'mode': self.mode, 'options': self.options, 'metadata': template_metadata}

        jenv = JinjaEnv(self.ctx['artifact_paths'])
        jenv.globals['_config'] = _config

        template = jenv.get_template('{0}{1}'.format(self.template, '.yaml.jinja'))

        logger.info('Rendering %s', template.filename)

        try:
            self.cfn_template = template.render(_config)
            self.cfn_data = yaml.load(self.cfn_template, Loader=SafeLoaderIgnoreUnknown)
        except Exception as e:
            # In case we rendered invalid yaml this helps to debug:
            # dump the rendered output with line numbers before re-raising
            if self.cfn_template:
                _output = ""
                for i, line in enumerate(self.cfn_template.splitlines(), start=1):
                    _output = _output + '{}: {}\n'.format(i, line)
                logger.error(_output)
            raise e

        # If the rendered template uses no CloudBender features at all, strip
        # the Transform and the injected Conglomerate parameter again
        if not re.search('CloudBender::', self.cfn_template) and not re.search('Iterate:', self.cfn_template):
            logger.info("CloudBender not required -> removing Transform and Conglomerate parameter")
            self.cfn_template = self.cfn_template.replace('Transform: [CloudBender]', '')

            _res = """
  Conglomerate:
    Type: String
    Description: Project / Namespace this stack is part of
"""
            self.cfn_template = re.sub(_res, '', self.cfn_template)

        # In Piped mode inject one String parameter per remote stack reference
        include = []
        search_refs(self.cfn_data, include, self.mode)
        if self.mode == "Piped" and len(include):
            _res = ""
            for attr in include:
                _res = _res + """
  {0}:
    Type: String
    Description: Parameter to provide remote stack attribute {0}""".format(attr)

            self.cfn_template = re.sub(r'Parameters:', r'Parameters:' + _res + '\n', self.cfn_template)
            logger.info("Piped mode: Added parameters for remote stack references")

        # Re-read updated template
        self.cfn_data = yaml.load(self.cfn_template, Loader=SafeLoaderIgnoreUnknown)

        # Check for empty top level Parameters, Outputs and Conditions and remove
        for key in ['Parameters', 'Outputs', 'Conditions']:
            if key in self.cfn_data and not self.cfn_data[key]:
                del self.cfn_data[key]
                self.cfn_template = self.cfn_template.replace('\n' + key + ":", '')

        # Remove and condense multiple empty lines
        self.cfn_template = re.sub(r'\n\s*\n', '\n\n', self.cfn_template)
        self.cfn_template = re.sub(r'^\s*', '', self.cfn_template)
        self.cfn_template = re.sub(r'\s*$', '', self.cfn_template)

        # set md5 last, then substitute the placeholder so the stored hash is
        # the digest of the template WITH the placeholder (see _parse_metadata)
        self.md5 = hashlib.md5(self.cfn_template.encode('utf-8')).hexdigest()
        self.cfn_template = self.cfn_template.replace('__HASH__', self.md5)

        # Update internal data structures
        self._parse_metadata()
    def _parse_metadata(self):
        """Extracts dependencies, the template checksum and lifecycle hooks
        from the Metadata section of the parsed template (self.cfn_data)."""

        # Extract dependencies
        try:
            for dep in self.cfn_data['Metadata']['CloudBender']['Dependencies']:
                self.dependencies.add(dep)
        except KeyError:
            pass

        # Get checksum (only when not already set by render())
        if not self.md5:
            try:
                self.md5 = self.cfn_data['Metadata']['Template']['Hash']

                # Verify embedded md5 hash: restore the __HASH__ placeholder to
                # reconstruct the pre-substitution source, then compare digests
                source_cfn = re.sub('Hash: [0-9a-f]{32}', 'Hash: __HASH__', self.cfn_template)
                our_md5 = hashlib.md5(source_cfn.encode('utf-8')).hexdigest()
                if (our_md5 != self.md5):
                    raise ChecksumError("Template hash checksum mismatch! Expected: {} Got: {}".format(self.md5, our_md5)) from None

            except KeyError:
                raise ChecksumError("Template missing Hash checksum!") from None

        # Add CloudBender dependencies
        include = []
        search_refs(self.cfn_data, include, self.mode)
        for ref in include:
            if self.mode != "Piped":
                self.dependencies.add(ref.split('.')[0])
            else:
                # Piped mode renames '.' to 'DoT' due to AWS parameter-name rules
                self.dependencies.add(ref.split('DoT')[0])

        # Extract hooks
        try:
            for hook, func in self.cfn_data['Metadata']['Hooks'].items():
                if hook in ['post_update', 'post_create', 'pre_create', 'pre_update']:
                    # hooks may be declared as a single entry or a list
                    if isinstance(func, list):
                        self.hooks[hook].extend(func)
                    else:
                        self.hooks[hook].append(func)
        except KeyError:
            pass
2018-11-22 18:31:59 +00:00
def write_template_file ( self ) :
if self . cfn_template :
2019-02-07 15:36:16 +00:00
yaml_file = os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path , self . stackname + " .yaml " )
2020-06-19 16:54:48 +00:00
ensure_dir ( os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path ) )
2018-11-22 18:31:59 +00:00
with open ( yaml_file , ' w ' ) as yaml_contents :
yaml_contents . write ( self . cfn_template )
logger . info ( ' Wrote %s to %s ' , self . template , yaml_file )
2020-08-12 15:07:56 +00:00
# upload template to s3 if set
if self . template_bucket_url :
try :
( bucket , path ) = get_s3_url ( self . template_bucket_url , self . rel_path , self . stackname + " .yaml " )
self . connection_manager . call (
' s3 ' , ' put_object ' ,
{ ' Bucket ' : bucket ,
' Key ' : path ,
' Body ' : self . cfn_template ,
' ServerSideEncryption ' : ' AES256 ' } ,
profile = self . profile , region = self . region )
logger . info ( " Uploaded template to s3:// {} / {} " . format ( bucket , path ) )
except ClientError as e :
logger . error ( " Error trying to upload template so S3: {} , {} " . format ( self . template_bucket_url , e ) )
else :
if len ( self . cfn_template ) > 51200 :
logger . warning ( " template_bucket_url not set and rendered template exceeds maximum allowed size of 51200, actual size: {} ! " . format ( len ( self . cfn_template ) ) )
2018-11-22 18:31:59 +00:00
else :
logger . error ( ' No cfn template rendered yet for stack {} . ' . format ( self . stackname ) )
def delete_template_file ( self ) :
2019-02-07 15:36:16 +00:00
yaml_file = os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path , self . stackname + " .yaml " )
2018-11-22 18:31:59 +00:00
try :
os . remove ( yaml_file )
logger . debug ( ' Deleted cfn template %s . ' , yaml_file )
except OSError :
pass
2020-08-12 15:07:56 +00:00
if self . template_bucket_url :
try :
( bucket , path ) = get_s3_url ( self . template_bucket_url , self . rel_path , self . stackname + " .yaml " )
self . connection_manager . call (
' s3 ' , ' delete_object ' ,
{ ' Bucket ' : bucket ,
' Key ' : path } ,
profile = self . profile , region = self . region )
logger . info ( " Deleted template from s3:// {} / {} " . format ( bucket , path ) )
except ClientError as e :
logger . error ( " Error trying to delete template from S3: {} , {} " . format ( self . template_bucket_url , e ) )
2018-11-22 18:31:59 +00:00
def read_template_file ( self ) :
2020-08-12 15:07:56 +00:00
""" Reads rendered yaml template from disk or s3 and extracts metadata """
2019-02-04 15:43:34 +00:00
if not self . cfn_template :
2020-08-12 15:07:56 +00:00
if self . template_bucket_url :
try :
( bucket , path ) = get_s3_url ( self . template_bucket_url , self . rel_path , self . stackname + " .yaml " )
template = self . connection_manager . call (
' s3 ' , ' get_object ' ,
{ ' Bucket ' : bucket ,
' Key ' : path } ,
profile = self . profile , region = self . region )
logger . debug ( " Got template from s3:// {} / {} " . format ( bucket , path ) )
2020-12-28 23:12:55 +00:00
self . cfn_template = template [ ' Body ' ] . read ( ) . decode ( ' utf-8 ' )
# Overwrite local copy
yaml_file = os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path , self . stackname + " .yaml " )
ensure_dir ( os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path ) )
with open ( yaml_file , ' w ' ) as yaml_contents :
yaml_contents . write ( self . cfn_template )
2020-08-12 15:07:56 +00:00
except ClientError as e :
logger . error ( " Could not find template file on S3: {} / {} , {} " . format ( bucket , path , e ) )
else :
yaml_file = os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path , self . stackname + " .yaml " )
2019-02-04 15:43:34 +00:00
2020-08-12 15:07:56 +00:00
try :
with open ( yaml_file , ' r ' ) as yaml_contents :
self . cfn_template = yaml_contents . read ( )
logger . debug ( ' Read cfn template %s . ' , yaml_file )
except FileNotFoundError as e :
logger . warn ( " Could not find template file: {} " . format ( yaml_file ) )
raise e
2021-01-11 14:34:50 +00:00
self . cfn_data = yaml . load ( self . cfn_template , Loader = SafeLoaderIgnoreUnknown )
2020-08-12 15:07:56 +00:00
self . _parse_metadata ( )
2021-03-11 18:25:02 +00:00
2019-02-04 15:43:34 +00:00
else :
2019-02-04 15:59:28 +00:00
logger . debug ( ' Using cached cfn template %s . ' , self . stackname )
2018-11-22 18:31:59 +00:00
    def validate(self):
        """Validates the rendered template via cfn-lint.

        Returns:
            0 if the template passes, 1 if any lint match was found.
        """
        self.read_template_file()

        # Templates can opt out of specific checks via Metadata
        try:
            ignore_checks = self.cfn_data['Metadata']['cfnlint_ignore']
        except KeyError:
            ignore_checks = []

        # Ignore some more checks around injected parameters as we generate these
        if self.mode == "Piped":
            ignore_checks = ignore_checks + ['W2505', 'W2509', 'W2507']

        # Ignore checks regarding overloaded properties
        if self.mode == "CloudBender":
            ignore_checks = ignore_checks + ['E3035', 'E3002', 'E3012', 'W2001', 'E3001', 'E0002', 'E1012']

        filename = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
        logger.info('Validating {0}'.format(filename))

        lint_args = ['--template', filename]
        if ignore_checks:
            lint_args.append('--ignore-checks')
            lint_args = lint_args + ignore_checks
            logger.info('Ignoring checks: {}'.format(','.join(ignore_checks)))

        (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args)
        (template, rules, matches) = cfnlint.core.get_template_rules(filename, args)

        # cfn-lint needs a real AWS region; map CloudBender's pseudo 'global'
        region = self.region
        if region == 'global':
            region = 'us-east-1'

        if not matches:
            matches.extend(cfnlint.core.run_checks(filename, template, rules, [region]))

        if len(matches):
            for match in matches:
                logger.error(formatter._format(match))
            return 1
        else:
            logger.info("Passed.")
            return 0
2018-11-22 18:31:59 +00:00
2020-02-25 20:40:12 +00:00
def get_outputs ( self , include = ' .* ' , values = False ) :
2020-06-22 11:30:54 +00:00
""" gets outputs of the stack """
2020-02-25 20:40:12 +00:00
2021-09-20 14:19:14 +00:00
if self . mode == ' pulumi ' :
stack = pulumi_init ( self )
self . outputs = stack . outputs ( )
2020-02-25 20:40:12 +00:00
2021-09-20 14:19:14 +00:00
else :
self . read_template_file ( )
2020-02-25 20:40:12 +00:00
try :
2021-09-20 14:19:14 +00:00
stacks = self . connection_manager . call (
" cloudformation " ,
" describe_stacks " ,
{ ' StackName ' : self . stackname } ,
profile = self . profile , region = self . region ) [ ' Stacks ' ]
2020-02-25 20:40:12 +00:00
2021-09-20 14:19:14 +00:00
try :
for output in stacks [ 0 ] [ ' Outputs ' ] :
self . outputs [ output [ ' OutputKey ' ] ] = output [ ' OutputValue ' ]
logger . debug ( " Stack outputs for {} in {} : {} " . format ( self . stackname , self . region , self . outputs ) )
except KeyError :
pass
except ClientError :
logger . warn ( " Could not get outputs of {} " . format ( self . stackname ) )
pass
2020-02-25 20:40:12 +00:00
2020-06-22 13:14:11 +00:00
if self . outputs :
logger . info ( ' {} {} Outputs: \n {} ' . format ( self . region , self . stackname , pprint . pformat ( self . outputs , indent = 2 ) ) )
if self . store_outputs :
2021-02-22 18:38:44 +00:00
try :
filename = self . cfn_data [ ' Metadata ' ] [ ' CustomOutputs ' ] [ ' Name ' ]
my_template = self . cfn_data [ ' Metadata ' ] [ ' CustomOutputs ' ] [ ' Template ' ]
except ( TypeError , KeyError ) :
filename = self . stackname + " .yaml "
my_template = pkg_resources . read_text ( templates , ' outputs.yaml ' )
output_file = os . path . join ( self . ctx [ ' outputs_path ' ] , self . rel_path , filename )
ensure_dir ( os . path . join ( self . ctx [ ' outputs_path ' ] , self . rel_path ) )
jenv = JinjaEnv ( )
template = jenv . from_string ( my_template )
data = { ' stackname ' : " / " . join ( [ self . rel_path , self . stackname ] ) , ' timestamp ' : datetime . strftime ( datetime . now ( tzutc ( ) ) , " %d / % m/ % y % H: % M " ) , ' outputs ' : self . outputs , ' parameters ' : self . parameters }
with open ( output_file , ' w ' ) as output_contents :
output_contents . write ( template . render ( * * data ) )
logger . info ( ' Wrote outputs for %s to %s ' , self . stackname , output_file )
2020-06-22 11:30:54 +00:00
2020-07-31 21:35:14 +00:00
def create_docs ( self , template = False , graph = False ) :
2020-06-19 16:40:51 +00:00
""" Read rendered template, parse documentation fragments, eg. parameter description
2020-06-04 15:32:17 +00:00
and create a mardown doc file for the stack
same idea as eg . helm - docs for values . yaml
"""
2020-06-22 13:14:11 +00:00
try :
self . read_template_file ( )
except FileNotFoundError :
return
2020-06-04 15:32:17 +00:00
2020-06-05 10:08:19 +00:00
if not template :
doc_template = pkg_resources . read_text ( templates , ' stack-doc.md ' )
jenv = JinjaEnv ( )
template = jenv . from_string ( doc_template )
data = { }
else :
doc_template = template
2020-06-04 15:32:17 +00:00
data [ ' name ' ] = self . stackname
data [ ' description ' ] = self . cfn_data [ ' Description ' ]
data [ ' dependencies ' ] = self . dependencies
if ' Parameters ' in self . cfn_data :
data [ ' parameters ' ] = self . cfn_data [ ' Parameters ' ]
2020-06-22 15:16:38 +00:00
set_parameters = self . resolve_parameters ( )
for p in set_parameters :
data [ ' parameters ' ] [ p ] [ ' value ' ] = set_parameters [ p ]
2020-06-04 15:32:17 +00:00
2020-06-19 16:40:51 +00:00
if ' Outputs ' in self . cfn_data :
data [ ' outputs ' ] = self . cfn_data [ ' Outputs ' ]
2020-06-22 13:14:11 +00:00
# Check for existing outputs yaml, if found add current value column and set header to timestamp from outputs file
output_file = os . path . join ( self . ctx [ ' outputs_path ' ] , self . rel_path , self . stackname + " .yaml " )
try :
with open ( output_file , ' r ' ) as yaml_contents :
outputs = yaml . safe_load ( yaml_contents . read ( ) )
for p in outputs [ ' Outputs ' ] :
data [ ' outputs ' ] [ p ] [ ' last_value ' ] = outputs [ ' Outputs ' ] [ p ]
data [ ' timestamp ' ] = outputs [ ' TimeStamp ' ]
2020-07-31 21:57:25 +00:00
except ( FileNotFoundError , KeyError , TypeError ) :
2020-06-22 13:14:11 +00:00
pass
doc_file = os . path . join ( self . ctx [ ' docs_path ' ] , self . rel_path , self . stackname + " .md " )
2020-06-19 16:54:48 +00:00
ensure_dir ( os . path . join ( self . ctx [ ' docs_path ' ] , self . rel_path ) )
2020-06-04 15:32:17 +00:00
with open ( doc_file , ' w ' ) as doc_contents :
doc_contents . write ( template . render ( * * data ) )
logger . info ( ' Wrote documentation for %s to %s ' , self . stackname , doc_file )
2020-07-31 21:35:14 +00:00
# Write Graph in Dot format
if graph :
filename = os . path . join ( self . ctx [ ' template_path ' ] , self . rel_path , self . stackname + " .yaml " )
lint_args = [ ' --template ' , filename ]
( args , filenames , formatter ) = cfnlint . core . get_args_filenames ( lint_args )
( template , rules , matches ) = cfnlint . core . get_template_rules ( filename , args )
template_obj = cfnlint . template . Template ( filename , template , [ self . region ] )
path = os . path . join ( self . ctx [ ' docs_path ' ] , self . rel_path , self . stackname + " .dot " )
2020-07-31 21:57:25 +00:00
g = cfnlint . graph . Graph ( template_obj )
2020-07-31 21:35:14 +00:00
try :
g . to_dot ( path )
logger . info ( ' DOT representation of the graph written to %s ' , path )
except ImportError :
logger . error (
' Could not write the graph in DOT format. Please install either `pygraphviz` or `pydot` modules. ' )
2018-11-22 18:31:59 +00:00
    def resolve_parameters(self):
        """Renders parameters for the stack based on the source template and the environment configuration.

        Returns:
            dict of explicitly set parameters (NoEcho values masked as ****).

        Raises:
            ParameterNotFound: if a template parameter has neither a configured
                value nor a Default.
        """
        self.read_template_file()

        # if we run in Piped Mode, inspect all outputs of the running Conglomerate members
        if self.mode == "Piped":
            stack_outputs = {}
            try:
                stack_outputs = self._inspect_stacks(self.tags['Conglomerate'])
            except KeyError:
                pass

        _found = {}
        if 'Parameters' in self.cfn_data:
            _errors = []
            self.cfn_parameters = []
            for p in self.cfn_data['Parameters']:
                # In Piped mode we try to resolve all Paramters first via stack_outputs
                if self.mode == "Piped":
                    try:
                        # first reverse the rename due to AWS alphanumeric restriction for parameter names
                        _p = p.replace('DoT', '.')
                        value = str(stack_outputs[_p])
                        self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})
                        logger.info('Got {} = {} from running stack'.format(p, value))
                        continue
                    except KeyError:
                        # not resolvable via outputs, fall through to config tree
                        pass

                # Key name in config tree is: stacks.<self.stackname>.parameters.<parameter>
                if p in self.parameters:
                    value = str(self.parameters[p])
                    self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})

                    # Hide NoEcho parameters in shell output
                    if 'NoEcho' in self.cfn_data['Parameters'][p] and self.cfn_data['Parameters'][p]['NoEcho']:
                        value = '****'

                    _found[p] = value
                else:
                    # If we have a Default defined in the CFN skip, as AWS will use it
                    if 'Default' not in self.cfn_data['Parameters'][p]:
                        _errors.append(p)

            if _errors:
                raise ParameterNotFound('Cannot find value for parameters: {0}'.format(_errors))

            # Warning of excessive parameters, might be useful to spot typos early
            _warnings = []
            for p in self.parameters.keys():
                if p not in self.cfn_data['Parameters']:
                    _warnings.append(p)

            logger.info('{} {} set parameters:\n{}'.format(self.region, self.stackname, pprint.pformat(_found, indent=2)))

            if _warnings:
                logger.warning('Ignored additional parameters: {}.'.format(_warnings))

        # Return dict of explicitly set parameters
        return _found
2020-06-22 11:30:54 +00:00
2021-09-20 14:19:14 +00:00
@pulumi_ws
2020-06-22 11:30:54 +00:00
@exec_hooks
2018-11-22 18:31:59 +00:00
def create ( self ) :
""" Creates a stack """
2021-09-20 14:19:14 +00:00
if self . mode == ' pulumi ' :
stack = pulumi_init ( self )
stack . up ( on_output = self . _log_pulumi )
2018-11-22 18:31:59 +00:00
2021-09-20 14:19:14 +00:00
else :
# Prepare parameters
self . resolve_parameters ( )
2020-08-12 16:20:37 +00:00
2021-09-20 14:19:14 +00:00
logger . info ( ' Creating {0} {1} ' . format ( self . region , self . stackname ) )
kwargs = { ' StackName ' : self . stackname ,
' Parameters ' : self . cfn_parameters ,
' OnFailure ' : self . onfailure ,
' NotificationARNs ' : self . notfication_sns ,
' Tags ' : [ { " Key " : str ( k ) , " Value " : str ( v ) } for k , v in self . tags . items ( ) ] ,
' Capabilities ' : [ ' CAPABILITY_IAM ' , ' CAPABILITY_NAMED_IAM ' , ' CAPABILITY_AUTO_EXPAND ' ] }
kwargs = self . _add_template_arg ( kwargs )
2018-11-22 18:31:59 +00:00
2021-09-20 14:19:14 +00:00
self . aws_stackid = self . connection_manager . call (
' cloudformation ' , ' create_stack ' , kwargs , profile = self . profile , region = self . region )
2020-06-22 11:30:54 +00:00
2021-09-20 14:19:14 +00:00
status = self . _wait_for_completion ( )
self . get_outputs ( )
return status
2018-11-22 18:31:59 +00:00
2021-09-20 14:19:14 +00:00
    @pulumi_ws
    @exec_hooks
    def update(self):
        """Updates an existing stack.

        Returns:
            the final stack status, "COMPLETE" when AWS reports nothing to
            update, or None when bailing out due to Pulumi mode.
        """
        # We cannot migrate directly so bail out if CFN stack still exists
        if self.mode == 'pulumi':
            logger.error("Cloudformation stack {} still exists, cannot use Pulumi!".format(self.stackname))
            return

        # Prepare parameters
        self.resolve_parameters()

        logger.info('Updating {0} {1}'.format(self.region, self.stackname))
        try:
            kwargs = {'StackName': self.stackname,
                      'Parameters': self.cfn_parameters,
                      'NotificationARNs': self.notfication_sns,
                      'Tags': [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
                      'Capabilities': ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM', 'CAPABILITY_AUTO_EXPAND']}
            kwargs = self._add_template_arg(kwargs)

            self.aws_stackid = self.connection_manager.call(
                'cloudformation', 'update_stack', kwargs, profile=self.profile, region=self.region)

        except ClientError as e:
            # a no-op update is not an error for us
            if 'No updates are to be performed' in e.response['Error']['Message']:
                logger.info('No updates for {0}'.format(self.stackname))
                return "COMPLETE"
            else:
                raise e

        status = self._wait_for_completion()
        self.get_outputs()

        return status
2021-09-20 14:19:14 +00:00
@pulumi_ws
2020-06-22 11:30:54 +00:00
@exec_hooks
2018-11-22 18:31:59 +00:00
def delete ( self ) :
2021-09-20 14:19:14 +00:00
""" Deletes a stack """
2018-11-22 18:31:59 +00:00
2019-01-31 10:23:03 +00:00
logger . info ( ' Deleting {0} {1} ' . format ( self . region , self . stackname ) )
2021-09-20 14:19:14 +00:00
if self . mode == ' pulumi ' :
stack = pulumi_init ( self )
stack . destroy ( on_output = self . _log_pulumi )
return
2020-06-22 11:30:54 +00:00
self . aws_stackid = self . connection_manager . call (
2019-02-07 15:36:16 +00:00
' cloudformation ' , ' delete_stack ' , { ' StackName ' : self . stackname } ,
profile = self . profile , region = self . region )
2018-11-22 18:31:59 +00:00
2020-06-22 11:30:54 +00:00
status = self . _wait_for_completion ( )
return status
2018-11-22 18:31:59 +00:00
2021-09-20 14:19:14 +00:00
@pulumi_ws
def refresh ( self ) :
""" Refreshes a Pulumi stack """
stack = pulumi_init ( self )
stack . refresh ( on_output = self . _log_pulumi )
return
@pulumi_ws
def preview ( self ) :
""" Preview a Pulumi stack up operation """
stack = pulumi_init ( self )
stack . preview ( on_output = self . _log_pulumi )
return
    @pulumi_ws
    def set_config(self, key, value, secret):
        """Set a config or secret.

        Stores the value via Pulumi first, then persists the resulting
        encryption salt/key and the encrypted value back into the CloudBender
        stack config file so it survives workspace re-creation.
        """
        stack = pulumi_init(self)
        stack.set_config(key, pulumi.automation.ConfigValue(value, secret))

        # Store salt or key and encrypted value in CloudBender stack config
        settings = None
        pulumi_settings = stack.workspace.stack_settings(stack.name)._serialize()

        with open(self.path, "r") as file:
            settings = yaml.safe_load(file)

        if 'pulumi' not in settings:
            settings['pulumi'] = {}

        if 'encryptionsalt' in pulumi_settings:
            settings['pulumi']['encryptionsalt'] = pulumi_settings['encryptionsalt']
        if 'encryptedkey' in pulumi_settings:
            settings['pulumi']['encryptedkey'] = pulumi_settings['encryptedkey']

        if 'parameters' not in settings:
            settings['parameters'] = {}
        # Pulumi namespaces config keys as "<project>:<key>"
        settings['parameters'][key] = pulumi_settings['config']['{}:{}'.format(self.parameters['Conglomerate'], key)]

        with open(self.path, "w") as file:
            yaml.dump(settings, stream=file)

        return
@pulumi_ws
def get_config ( self , key ) :
""" Get a config or secret """
stack = pulumi_init ( self )
print ( stack . get_config ( key ) . value )
2019-01-30 13:00:06 +00:00
def create_change_set ( self , change_set_name ) :
""" Creates a Change Set with the name ``change_set_name``. """
# Prepare parameters
self . resolve_parameters ( )
2019-02-04 15:43:34 +00:00
self . read_template_file ( )
2019-01-30 13:00:06 +00:00
logger . info ( ' Creating change set {0} for stack {1} ' . format ( change_set_name , self . stackname ) )
2020-08-12 16:20:37 +00:00
kwargs = { ' StackName ' : self . stackname ,
' ChangeSetName ' : change_set_name ,
' Parameters ' : self . cfn_parameters ,
' Tags ' : [ { " Key " : str ( k ) , " Value " : str ( v ) } for k , v in self . tags . items ( ) ] ,
' Capabilities ' : [ ' CAPABILITY_IAM ' , ' CAPABILITY_NAMED_IAM ' ] }
kwargs = self . _add_template_arg ( kwargs )
2019-02-07 15:36:16 +00:00
self . connection_manager . call (
2020-08-12 16:20:37 +00:00
' cloudformation ' , ' create_change_set ' , kwargs , profile = self . profile , region = self . region )
2019-01-30 13:00:06 +00:00
return self . _wait_for_completion ( )
2018-11-22 18:31:59 +00:00
def get_status ( self ) :
"""
Returns the stack ' s status.
: returns : The stack ' s status.
"""
try :
2020-06-26 14:54:37 +00:00
status = self . connection_manager . call (
" cloudformation " ,
" describe_stacks " ,
{ " StackName " : self . stackname } ,
profile = self . profile , region = self . region ) [ " Stacks " ] [ 0 ] [ " StackStatus " ]
2018-11-22 18:31:59 +00:00
except ClientError as e :
if e . response [ " Error " ] [ " Message " ] . endswith ( " does not exist " ) :
return None
else :
raise e
return status
def describe_events ( self ) :
"""
Returns a dictionary contianing the stack events .
: returns : The CloudFormation events for a stack .
"""
try :
status = self . connection_manager . call (
" cloudformation " ,
" describe_stack_events " ,
{ " StackName " : self . stackname } ,
profile = self . profile , region = self . region )
except ClientError as e :
if e . response [ " Error " ] [ " Message " ] . endswith ( " does not exist " ) :
return None
else :
raise e
return status
def _wait_for_completion ( self , timeout = 0 ) :
"""
Waits for a stack operation to finish . Prints CloudFormation events while it waits .
: param timeout : Timeout before returning
: returns : The final stack status .
"""
def timed_out ( elapsed ) :
return elapsed > = timeout if timeout else False
2020-06-26 14:54:37 +00:00
status = " IN_PROGRESS "
2018-11-22 18:31:59 +00:00
self . most_recent_event_datetime = (
datetime . now ( tzutc ( ) ) - timedelta ( seconds = 3 )
)
elapsed = 0
2020-06-26 14:54:37 +00:00
while status == " IN_PROGRESS " and not timed_out ( elapsed ) :
2018-11-22 18:31:59 +00:00
status = self . _get_simplified_status ( self . get_status ( ) )
if not status :
return None
self . _log_new_events ( )
time . sleep ( 4 )
elapsed + = 4
return status
@staticmethod
def _get_simplified_status ( status ) :
""" Returns the simplified Stack Status. """
if status :
if status . endswith ( " ROLLBACK_COMPLETE " ) :
2020-06-26 14:54:37 +00:00
return " FAILED "
2018-11-22 18:31:59 +00:00
elif status . endswith ( " _COMPLETE " ) :
2020-06-26 14:54:37 +00:00
return " COMPLETE "
2018-11-22 18:31:59 +00:00
elif status . endswith ( " _IN_PROGRESS " ) :
2020-06-26 14:54:37 +00:00
return " IN_PROGRESS "
2018-11-22 18:31:59 +00:00
elif status . endswith ( " _FAILED " ) :
2020-06-26 14:54:37 +00:00
return " FAILED "
2018-11-22 18:31:59 +00:00
else :
return ' Unknown '
def _log_new_events ( self ) :
"""
Log the latest stack events while the stack is being built .
"""
events = self . describe_events ( )
if events :
events = events [ " StackEvents " ]
events . reverse ( )
new_events = [
event for event in events
if event [ " Timestamp " ] > self . most_recent_event_datetime
]
for event in new_events :
logger . info ( " " . join ( [
2019-01-31 10:23:03 +00:00
self . region ,
2018-11-22 18:31:59 +00:00
self . stackname ,
event [ " LogicalResourceId " ] ,
event [ " ResourceType " ] ,
event [ " ResourceStatus " ] ,
event . get ( " ResourceStatusReason " , " " )
] ) )
self . most_recent_event_datetime = event [ " Timestamp " ]
2019-06-27 13:31:51 +00:00
# stackoutput inspection
def _inspect_stacks ( self , conglomerate ) :
# Get all stacks of the conglomertate
running_stacks = self . connection_manager . call (
" cloudformation " ,
" describe_stacks " ,
profile = self . profile , region = self . region )
2019-04-18 16:30:50 +00:00
2019-06-27 13:31:51 +00:00
stacks = [ ]
for stack in running_stacks [ ' Stacks ' ] :
for tag in stack [ ' Tags ' ] :
if tag [ ' Key ' ] == ' Conglomerate ' and tag [ ' Value ' ] == conglomerate :
stacks . append ( stack )
break
2021-02-12 11:06:43 +00:00
# Gather stack outputs, use Tag['Artifact'] as name space: Artifact.OutputName
2019-06-27 13:31:51 +00:00
stack_outputs = { }
for stack in stacks :
# If stack has an Artifact Tag put resources into the namespace Artifact.Resource
artifact = None
for tag in stack [ ' Tags ' ] :
if tag [ ' Key ' ] == ' Artifact ' :
artifact = tag [ ' Value ' ]
if artifact :
key_prefix = " {} . " . format ( artifact )
else :
key_prefix = " "
try :
for output in stack [ ' Outputs ' ] :
# Gather all outputs of the stack into one dimensional key=value structure
stack_outputs [ key_prefix + output [ ' OutputKey ' ] ] = output [ ' OutputValue ' ]
except KeyError :
pass
# Add outputs from stacks into the data for jinja under StackOutput
return stack_outputs
2020-08-12 16:20:37 +00:00
def _add_template_arg ( self , kwargs ) :
if self . template_bucket_url :
# https://bucket-name.s3.Region.amazonaws.com/key name
# so we need the region, AWS as usual
( bucket , path ) = get_s3_url ( self . template_bucket_url , self . rel_path , self . stackname + " .yaml " )
bucket_region = self . connection_manager . call ( ' s3 ' , ' get_bucket_location ' , { ' Bucket ' : bucket } , profile = self . profile , region = self . region ) [ ' LocationConstraint ' ]
2021-03-11 18:25:02 +00:00
# If bucket is in us-east-1 AWS returns 'none' cause reasons grrr
if not bucket_region :
bucket_region = ' us-east-1 '
2020-08-12 16:20:37 +00:00
kwargs [ ' TemplateURL ' ] = ' https:// {} .s3. {} .amazonaws.com/ {} ' . format ( bucket , bucket_region , path )
else :
kwargs [ ' TemplateBody ' ] = self . cfn_template
return kwargs
2021-09-20 14:19:14 +00:00
def _log_pulumi ( self , text ) :
2021-10-05 10:47:29 +00:00
text = re . sub ( r ' pulumi:pulumi:Stack \ s* {} - {} \ s* ' . format ( self . parameters [ ' Conglomerate ' ] , self . stackname ) , ' ' , text )
if text and not text . isspace ( ) :
2021-10-04 15:51:16 +00:00
logger . info ( " " . join ( [ self . region , self . stackname , text ] ) )