2018-11-22 18:31:59 +00:00
|
|
|
import os
|
|
|
|
import logging
|
|
|
|
|
2019-03-06 19:57:31 +00:00
|
|
|
from .utils import ensure_dir
|
2018-11-22 18:31:59 +00:00
|
|
|
from .stackgroup import StackGroup
|
2019-03-06 19:57:31 +00:00
|
|
|
from .jinja import read_config_file
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2019-02-07 15:36:16 +00:00
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
class CloudBender(object):
    """ Config Class to handle recursive conf/* config tree """

    def __init__(self, root_path):
        """Set up the default context paths below *root_path*.

        Args:
            root_path: Path to the project folder; must contain a
                ``config`` sub-directory.

        Raises:
            NotADirectoryError: If ``<root_path>/config`` does not exist,
                i.e. *root_path* is not a valid project folder.
        """
        self.root = root_path
        self.sg = None
        self.all_stacks = []
        self.ctx = {
            "config_path": os.path.join(self.root, "config"),
            "template_path": os.path.join(self.root, "cloudformation"),
            "parameter_path": os.path.join(self.root, "parameters"),
            "artifact_paths": [os.path.join(self.root, "artifacts")]
        }

        if not os.path.isdir(self.ctx['config_path']):
            # BUGFIX: raising a plain string is a TypeError in Python 3 and
            # masked the intended message; raise a real exception instead.
            raise NotADirectoryError("Check '{0}' exists and is a valid project folder.".format(root_path))

    def read_config(self):
        """Load the <path>/config.yaml, <path>/*.yaml as stacks, sub-folders are sub-groups """

        # Read top level config.yaml and extract CloudBender CTX
        _config = read_config_file(os.path.join(self.ctx['config_path'], 'config.yaml'))
        if _config and _config.get('CloudBender'):
            self.ctx.update(_config.get('CloudBender'))

        # Make sure all paths are abs
        for k, v in self.ctx.items():
            if k in ['config_path', 'template_path', 'parameter_path', 'artifact_paths']:
                if isinstance(v, list):
                    self.ctx[k] = [
                        path if os.path.isabs(path)
                        else os.path.normpath(os.path.join(self.root, path))
                        for path in v]
                elif isinstance(v, str):
                    if not os.path.isabs(v):
                        self.ctx[k] = os.path.normpath(os.path.join(self.root, v))

                # Output locations are created if missing; config_path was
                # already verified in __init__.
                if k in ['template_path', 'parameter_path']:
                    ensure_dir(self.ctx[k])

        # Recursively read the whole config tree and flatten all stacks.
        self.sg = StackGroup(self.ctx['config_path'], self.ctx)
        self.sg.read_config()

        self.all_stacks = self.sg.get_stacks()

    def dump_config(self):
        """Log the resolved configuration of this project and all stack groups."""
        logger.debug("<CloudBender: {}>".format(vars(self)))
        self.sg.dump_config()

    def clean(self):
        """Remove all rendered template and parameter files for every known stack."""
        for s in self.all_stacks:
            s.delete_template_file()
            s.delete_parameter_file()

    def resolve_stacks(self, token):
        """Resolve *token* into a list of stacks.

        *token* is either a path to a stack yaml file (matched by path) or a
        path to a stack group, in which case all stacks below that group are
        returned. An optional leading ``config/`` is stripped first.

        Returns:
            list: Matching stacks; empty if nothing matched.
        """
        stacks = []

        # remove optional leading "config/" to allow bash path expansions
        if token.startswith("config/"):
            token = token[7:]

        # If path ends with yaml we look for stacks
        if token.endswith('.yaml'):
            stacks = self.sg.get_stacks(token, match_by='path')

        # otherwise assume we look for a group, if we find a group return all stacks below
        else:
            # Strip potential trailing slash
            token = token.rstrip('/')

            sg = self.sg.get_stackgroup(token, match_by='path')
            if sg:
                stacks = sg.get_stacks()

        return stacks

    def filter_stacks(self, filter_by, stacks=None):
        """Return the stacks matching every (property, value) pair in *filter_by*.

        Args:
            filter_by: dict mapping attribute names to required values; a
                stack matches only if it has every attribute with that value.
            stacks: optional iterable of stacks to search; defaults to all
                known stacks.

        Returns:
            list: Stacks satisfying all filter criteria.
        """
        # if no group of stacks provided, look in all available
        if not stacks:
            stacks = self.all_stacks

        matching_stacks = []
        for s in stacks:
            match = True

            for p, v in filter_by.items():
                if not (hasattr(s, p) and getattr(s, p) == v):
                    match = False
                    break

            if match:
                matching_stacks.append(s)

        return matching_stacks
|