2018-11-22 18:31:59 +00:00
|
|
|
import os
|
|
|
|
import copy
|
|
|
|
import logging
|
2020-08-12 15:07:56 +00:00
|
|
|
import re
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2019-02-07 15:36:16 +00:00
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
def dict_merge(a, b):
    """Deep-merge mapping *b* into mapping *a* to allow proper inheritance
    for config files.

    Returns a new dict; neither input is modified. Values from *b*
    override values from *a*, except when the existing value in *a* is a
    dict, in which case the two are merged recursively. If either
    argument is falsy the other is returned as-is (unchanged, not copied).

    Args:
        a: Base mapping (lower precedence).
        b: Overriding mapping (higher precedence).

    Returns:
        dict: The merged result.

    Raises:
        TypeError: If both arguments are truthy and either is not a dict.
    """
    if not a:
        return b

    if not b:
        return a

    if not isinstance(a, dict) or not isinstance(b, dict):
        # Include the offending types so config errors are debuggable.
        raise TypeError(
            "dict_merge expects dicts, got {} and {}".format(type(a), type(b)))

    # Deep-copy so the merge never aliases or mutates the inputs.
    result = copy.deepcopy(a)
    for k, v in b.items():
        if k in result and isinstance(result[k], dict):
            result[k] = dict_merge(result[k], v)
        else:
            result[k] = copy.deepcopy(v)

    return result
|
|
|
|
|
|
|
|
|
|
|
|
def ensure_dir(path):
    """Create directory *path* (including parents) if it does not exist.

    Uses EAFP instead of an exists()/makedirs() pair: the original check
    raced with concurrent creation of the same directory, which would
    make makedirs() raise despite the directory being there.

    Args:
        path: Directory path to create.
    """
    try:
        os.makedirs(path)
        logger.debug('Created directory: %s', path)
    except FileExistsError:
        # Already present (possibly created concurrently); nothing to do.
        # NOTE: also hit when *path* exists as a regular file, matching
        # the original code's silent behavior in that case.
        pass
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
|
|
|
|
def setup_logging(debug):
    """Configure and return the "cloudbender" logger.

    Args:
        debug: When True, log at DEBUG, keep botocore at INFO, and
            include the logger name in the output format. When False,
            log at INFO, silence botocore (CRITICAL), and omit the name.

    Returns:
        logging.Logger: The configured "cloudbender" logger.
    """
    # The two branches previously built near-identical Formatters; only
    # the level, the botocore verbosity and the fmt string differ.
    if debug:
        our_level = logging.DEBUG
        logging.getLogger("botocore").setLevel(logging.INFO)
        fmt = "[%(asctime)s] %(name)s %(message)s"
    else:
        our_level = logging.INFO
        logging.getLogger("botocore").setLevel(logging.CRITICAL)
        fmt = "[%(asctime)s] %(message)s"

    formatter = logging.Formatter(fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S")

    log_handler = logging.StreamHandler()
    log_handler.setFormatter(formatter)
    logger = logging.getLogger("cloudbender")
    logger.addHandler(log_handler)
    logger.setLevel(our_level)
    return logger
|
2019-06-27 13:31:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
def search_refs(template, attributes, mode):
    """Traverse *template* recursively and collect any remote references
    into the *attributes* list (mutated in place).

    Collected references:
      - ``Fn::GetAtt`` targets whose first element is "FortyTwo"
      - ``CloudBender::StackRef`` entries carrying StackTags/Artifact
      - in "Piped" mode, ``Ref`` values containing "DoT"
    """
    if isinstance(template, dict):
        for key, value in template.items():
            # FortyTwo Fn::GetAtt
            if key == "Fn::GetAtt" and isinstance(value, list):
                if value[0] == "FortyTwo":
                    attributes.append(value[1])

            # CloudBender::StackRef
            if key == "CloudBender::StackRef":
                try:
                    attributes.append(value['StackTags']['Artifact'])
                except KeyError:
                    # Ref without tags; nothing to collect.
                    pass

            # PipedMode Refs
            if mode == "Piped" and key == "Ref" and "DoT" in value:
                attributes.append(value)

            if isinstance(value, (dict, list)):
                search_refs(value, attributes, mode)

    elif isinstance(template, list):
        for item in template:
            if isinstance(item, (dict, list)):
                search_refs(item, attributes, mode)
|
2020-08-12 15:07:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_s3_url(url, *args):
    """Split an S3 URL into its bucket and key path.

    The ``s3://`` scheme prefix is optional. Any extra *args are joined
    onto the key path with os.path.join.

    Args:
        url: URL such as ``s3://bucket/some/prefix`` or ``bucket/prefix``.
        *args: Additional path components appended to the key path.

    Returns:
        tuple: ``(bucket, path)`` where ``path`` has no leading slash and
        is ``''`` when the URL carries no path component and no *args.
    """
    m = re.match(r'^(s3://)?([^/]*)(/.*)?', url)
    bucket = m[2]

    # Default to '' (not None): a URL without a path component, e.g.
    # "s3://bucket", previously crashed in os.path.join(None, ...).
    path = ''
    if m[3]:
        path = m[3].lstrip('/')

    path = os.path.join(path, *args)

    return (bucket, path)
|