Add S3 support

Stefan Reimer 2020-08-12 16:07:56 +01:00
parent 9d042d9a24
commit 16f5c2be90
4 changed files with 97 additions and 30 deletions

View File

@@ -1,5 +1,12 @@
 # Changelog
+## 0.9.5
+### New Features
+Support for uploading and retrieving rendered templates from S3!
+Enabled by setting `template_bucket_url` to a valid S3 location: ```s3://<bucket_name>[/<prefix>]```
+Templates will still be stored and updated in the local file system to allow tracking via git.
 ## 0.9.4
 - new option to generate Dot Graph files via `--graph` option for the create-docs command
 - fix validate command using latest cfn-lint library
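As a rough sketch of what the new `template_bucket_url` behaviour boils down to (not CloudBender's own API; bucket, key and template content below are made-up examples), the upload and later retrieval added to the Stack class in this commit amount to plain S3 calls:

```python
import boto3

s3 = boto3.client('s3')

# Hypothetical values: CloudBender derives bucket and key from
# template_bucket_url, the stack's relative path and "<stackname>.yaml".
bucket, key = 'my-cfn-templates', 'dev/vpc.yaml'
rendered = "AWSTemplateFormatVersion: '2010-09-09'\n"

# Upload of the rendered template, server-side encrypted as in this commit
s3.put_object(Bucket=bucket, Key=key, Body=rendered,
              ServerSideEncryption='AES256')

# Later retrieval of the same rendered template
body = s3.get_object(Bucket=bucket, Key=key)['Body'].read().decode('utf-8')
```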

View File

@@ -63,7 +63,6 @@ class CloudBender(object):
     def clean(self):
         for s in self.all_stacks:
             s.delete_template_file()
-            s.delete_parameter_file()

     def resolve_stacks(self, token):
         stacks = []

View File

@@ -11,7 +11,7 @@ from dateutil.tz import tzutc
 from botocore.exceptions import ClientError

-from .utils import dict_merge, search_refs, ensure_dir
+from .utils import dict_merge, search_refs, ensure_dir, get_s3_url
 from .connection import BotoConnection
 from .jinja import JinjaEnv, read_config_file
 from . import __version__
@@ -65,6 +65,7 @@ class Stack(object):
         self.hooks = {'post_create': [], 'post_update': [], 'pre_create': [], 'pre_update': []}
         self.default_lock = None
         self.multi_delete = True
+        self.template_bucket_url = None

     def dump_config(self):
         logger.debug("<Stack {}: {}>".format(self.id, pprint.pformat(vars(self))))
@@ -77,15 +78,14 @@
         self.parameters.update(sg_config.get('parameters', {}))
         self.options.update(sg_config.get('options', {}))

-        if 'region' in sg_config:
-            self.region = sg_config['region']
-        if 'profile' in sg_config:
-            self.profile = sg_config['profile']
-        if 'notfication_sns' in sg_config:
-            self.notfication_sns = sg_config['notfication_sns']
+        # by default inherit parent group settings
+        for p in ['region', 'profile', 'notfication_sns', 'template_bucket_url']:
+            if p in sg_config:
+                setattr(self, p, sg_config[p])

+        # now override stack specific settings
         _config = read_config_file(self.path, sg_config.get('variables', {}))
-        for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides", "onfailure", "notification_sns"]:
+        for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides", "onfailure", "notification_sns", "template_bucket_url"]:
             if p in _config:
                 setattr(self, p, _config[p])
@@ -97,18 +97,13 @@
         if 'Artifact' not in self.tags:
             self.tags['Artifact'] = self.provides

-        # backwards comp
-        if 'vars' in _config:
-            logger.warn("vars: in config is deprecated, please use options: instead")
-            self.options = dict_merge(self.options, _config['vars'])
-
         if 'options' in _config:
             self.options = dict_merge(self.options, _config['options'])

         if 'Mode' in self.options:
             self.mode = self.options['Mode']

-        if 'StoreOutputs' in self.options:
+        if 'StoreOutputs' in self.options and self.options['StoreOutputs']:
             self.store_outputs = True

         if 'dependencies' in _config:
@@ -244,9 +239,26 @@
             with open(yaml_file, 'w') as yaml_contents:
                 yaml_contents.write(self.cfn_template)
                 logger.info('Wrote %s to %s', self.template, yaml_file)

-            if len(self.cfn_template) > 51200:
-                logger.warning("Rendered template exceeds maximum allowed size of 51200, actual size: {} !".format(len(self.cfn_template)))
+            # upload template to s3 if set
+            if self.template_bucket_url:
+                try:
+                    (bucket, path) = get_s3_url(self.template_bucket_url, self.rel_path, self.stackname + ".yaml")
+                    self.connection_manager.call(
+                        's3', 'put_object',
+                        {'Bucket': bucket,
+                         'Key': path,
+                         'Body': self.cfn_template,
+                         'ServerSideEncryption': 'AES256'},
+                        profile=self.profile, region=self.region)
+                    logger.info("Uploaded template to s3://{}/{}".format(bucket, path))
+                except ClientError as e:
+                    logger.error("Error trying to upload template to S3: {}, {}".format(self.template_bucket_url, e))
+            else:
+                if len(self.cfn_template) > 51200:
+                    logger.warning("template_bucket_url not set and rendered template exceeds maximum allowed size of 51200, actual size: {} !".format(len(self.cfn_template)))

         else:
             logger.error('No cfn template rendered yet for stack {}.'.format(self.stackname))
@@ -258,21 +270,55 @@
             except OSError:
                 pass

+        if self.template_bucket_url:
+            try:
+                (bucket, path) = get_s3_url(self.template_bucket_url, self.rel_path, self.stackname + ".yaml")
+                self.connection_manager.call(
+                    's3', 'delete_object',
+                    {'Bucket': bucket,
+                     'Key': path},
+                    profile=self.profile, region=self.region)
+                logger.info("Deleted template from s3://{}/{}".format(bucket, path))
+            except ClientError as e:
+                logger.error("Error trying to delete template from S3: {}, {}".format(self.template_bucket_url, e))

     def read_template_file(self):
-        """ Reads rendered yaml template from disk and extracts metadata """
+        """ Reads rendered yaml template from disk or s3 and extracts metadata """
         if not self.cfn_template:
+            if self.template_bucket_url:
+                try:
+                    (bucket, path) = get_s3_url(self.template_bucket_url, self.rel_path, self.stackname + ".yaml")
+                    template = self.connection_manager.call(
+                        's3', 'get_object',
+                        {'Bucket': bucket,
+                         'Key': path},
+                        profile=self.profile, region=self.region)
+                    logger.debug("Got template from s3://{}/{}".format(bucket, path))
+                except ClientError as e:
+                    logger.error("Could not find template file on S3: {}/{}, {}".format(bucket, path, e))
+
+                self.cfn_template = template['Body'].read().decode('utf-8')
+
+                # Overwrite local copy
+                yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
+                ensure_dir(os.path.join(self.ctx['template_path'], self.rel_path))
+                with open(yaml_file, 'w') as yaml_contents:
+                    yaml_contents.write(self.cfn_template)
+            else:
                 yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
                 try:
                     with open(yaml_file, 'r') as yaml_contents:
                         self.cfn_template = yaml_contents.read()
                         logger.debug('Read cfn template %s.', yaml_file)
-                        self.cfn_data = yaml.safe_load(self.cfn_template)
-                        self._parse_metadata()
                 except FileNotFoundError as e:
                     logger.warn("Could not find template file: {}".format(yaml_file))
                     raise e
+
+            self.cfn_data = yaml.safe_load(self.cfn_template)
+            self._parse_metadata()
         else:
             logger.debug('Using cached cfn template %s.', self.stackname)

View File

@@ -1,6 +1,7 @@
 import os
 import copy
 import logging
+import re

 logger = logging.getLogger(__name__)
@@ -87,3 +88,17 @@ def search_refs(template, attributes, mode):
         for k in template:
             if isinstance(k, dict) or isinstance(k, list):
                 search_refs(k, attributes, mode)
+
+
+def get_s3_url(url, *args):
+    bucket = None
+    path = None
+
+    m = re.match('^(s3://)?([^/]*)(/.*)?', url)
+    bucket = m[2]
+    if m[3]:
+        path = m[3].lstrip('/')
+
+    path = os.path.join(path, *args)
+
+    return(bucket, path)
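
For reference, a quick hypothetical usage sketch of the helper above (the import path, bucket and prefix names are assumptions, not taken from this commit), matching the `s3://<bucket_name>[/<prefix>]` format described in the changelog:

```python
# Assumption: the helper above is importable as cloudbender.utils.get_s3_url
from cloudbender.utils import get_s3_url

bucket, path = get_s3_url('s3://my-bucket/some/prefix', 'dev', 'vpc.yaml')
print(bucket)  # my-bucket
print(path)    # some/prefix/dev/vpc.yaml

# the s3:// scheme is optional
bucket, path = get_s3_url('my-bucket/some/prefix', 'dev', 'vpc.yaml')
print(bucket)  # my-bucket
```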