CloudBender/cloudbender/core.py

import pathlib
import logging

from .stackgroup import StackGroup
from .jinja import read_config_file
from .exceptions import InvalidProjectDir

logger = logging.getLogger(__name__)


class CloudBender(object):
    """Config Class to handle recursive conf/* config tree"""

    def __init__(self, root_path, profile):
        self.root = pathlib.Path(root_path)

        self.sg = None
        self.all_stacks = []
        self.ctx = {
            "config_path": self.root.joinpath("config"),
            "template_path": self.root.joinpath("cloudformation"),
            "hooks_path": self.root.joinpath("hooks"),
            "docs_path": self.root.joinpath("docs"),
            "outputs_path": self.root.joinpath("outputs"),
            "artifact_paths": [self.root.joinpath("artifacts")],
            "profile": profile,
        }

        if profile:
            logger.info("Profile overwrite: using {}".format(self.ctx["profile"]))

        if not self.ctx["config_path"].is_dir():
            raise InvalidProjectDir(
                "Check '{0}' exists and is a valid CloudBender project folder.".format(
                    self.ctx["config_path"]
                )
            )
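
    # Illustrative project layout implied by the ctx paths above (a sketch added for
    # documentation, not part of the original source; the folder purposes are
    # assumptions drawn from their names -- only config/ is actually verified):
    #
    #   <root>/
    #     config/           stack group and stack definitions (required)
    #     cloudformation/   templates
    #     hooks/            hook scripts
    #     docs/             generated documentation
    #     outputs/          rendered outputs
    #     artifacts/        additional artifact search path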

    def read_config(self):
        """Load the <path>/config.yaml, <path>/*.yaml as stacks, sub-folders are sub-groups"""

        # Read top level config.yaml and extract CloudBender CTX
        _config = read_config_file(self.ctx["config_path"].joinpath("config.yaml"))

        # Legacy naming
        if _config and _config.get("CloudBender"):
            self.ctx.update(_config.get("CloudBender"))

        if _config and _config.get("cloudbender"):
            self.ctx.update(_config.get("cloudbender"))
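
        # Illustrative top-level config.yaml overriding ctx entries (an assumption based
        # on the keys handled above and the path keys normalized below; values are
        # hypothetical):
        #
        #   cloudbender:
        #     template_path: cloudformation
        #     artifact_paths:
        #       - artifacts
        #       - ../shared-artifacts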

        # Make sure all paths are abs
        for k, v in self.ctx.items():
            if k in [
                "config_path",
                "template_path",
                "hooks_path",
                "docs_path",
                "artifact_paths",
                "outputs_path",
            ]:
                if isinstance(v, list):
                    new_list = []
                    for p in v:
                        path = pathlib.Path(p)
                        if not path.is_absolute():
                            new_list.append(self.root.joinpath(path))
                        else:
                            new_list.append(path)
                    self.ctx[k] = new_list

                elif isinstance(v, str):
                    # Wrap the raw string in a Path first; plain str has no is_absolute()
                    path = pathlib.Path(v)
                    if not path.is_absolute():
                        self.ctx[k] = self.root.joinpath(path)

        self.sg = StackGroup(self.ctx["config_path"], self.ctx)
        self.sg.read_config()

        self.all_stacks = self.sg.get_stacks()

    def dump_config(self):
        logger.debug("<CloudBender: {}>".format(vars(self)))
        self.sg.dump_config()

    def clean(self):
        for s in self.all_stacks:
            s.delete_template_file()

    def resolve_stacks(self, token):
        stacks = []

        # Remove optional leading "config/" to allow bash path expansions
        if token.startswith("config/"):
            token = token[7:]

        # If the path ends with .yaml we look for stacks
        if token.endswith(".yaml"):
            stacks = self.sg.get_stacks(token, match_by="path")

        # Otherwise assume the token names a group; if found, return all stacks below it
        else:
            # Strip potential trailing slash
            token = token.rstrip("/")

            sg = self.sg.get_stackgroup(token, match_by="path")
            if sg:
                stacks = sg.get_stacks()

        return stacks
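
    # Token resolution, illustrated (the paths below are hypothetical examples):
    #   "config/prod/vpc.yaml" or "prod/vpc.yaml" -> stacks whose config file matches that path
    #   "config/prod" or "prod/"                  -> all stacks below the matching group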

    def filter_stacks(self, filter_by, stacks=None):
        # filter_by is a dict of { property: value } pairs
        # if no group of stacks is provided, look through all available stacks
        if not stacks:
            stacks = self.all_stacks

        matching_stacks = []
        for s in stacks:
            match = True

            for p, v in filter_by.items():
                if not (hasattr(s, p) and getattr(s, p) == v):
                    match = False
                    break

            if match:
                matching_stacks.append(s)

        return matching_stacks
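

# --- Illustrative usage sketch (added for documentation, not part of the original
# module). The project path, token and filter attribute are hypothetical placeholders;
# they only show the intended call order: construct, read the config tree, select stacks.
#
#   from cloudbender.core import CloudBender
#
#   cb = CloudBender("/path/to/project", profile=None)
#   cb.read_config()
#
#   # Select a single stack file or a whole group by its config path token
#   vpc_stacks = cb.resolve_stacks("config/prod/vpc.yaml")
#
#   # Or filter all known stacks by attribute/value pairs
#   filtered = cb.filter_stacks({"region": "us-east-1"})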