2018-11-22 18:31:59 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import hashlib
|
2022-02-21 20:04:38 +00:00
|
|
|
import json
|
2021-01-11 14:34:50 +00:00
|
|
|
import yaml
|
2018-11-22 18:31:59 +00:00
|
|
|
import time
|
2019-12-09 13:32:39 +00:00
|
|
|
import pathlib
|
2019-07-28 13:02:18 +00:00
|
|
|
import pprint
|
2021-09-20 14:19:14 +00:00
|
|
|
import pulumi
|
2022-06-28 11:15:45 +00:00
|
|
|
import importlib.resources as pkg_resources
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
from datetime import datetime, timedelta
|
|
|
|
from dateutil.tz import tzutc
|
|
|
|
|
|
|
|
from botocore.exceptions import ClientError
|
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
from .utils import dict_merge, search_refs, ensure_dir, get_s3_url
|
2018-11-22 18:31:59 +00:00
|
|
|
from .connection import BotoConnection
|
2019-03-06 19:57:31 +00:00
|
|
|
from .jinja import JinjaEnv, read_config_file
|
2018-11-22 18:31:59 +00:00
|
|
|
from . import __version__
|
2021-03-11 18:25:02 +00:00
|
|
|
from .exceptions import ParameterNotFound, ParameterIllegalValue, ChecksumError
|
2022-06-27 18:51:03 +00:00
|
|
|
from .hooks import exec_hooks
|
|
|
|
from .pulumi import pulumi_ws
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
import cfnlint.core
|
2020-07-31 21:35:14 +00:00
|
|
|
import cfnlint.template
|
|
|
|
import cfnlint.graph
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2020-06-05 10:08:19 +00:00
|
|
|
from . import templates
|
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
import logging
|
2022-02-22 10:04:29 +00:00
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
# Module-level logger; named after the module so it honors the application's logging hierarchy
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2021-01-11 14:34:50 +00:00
|
|
|
# Ignore any !<Constructors> during re-loading of CFN templates
class SafeLoaderIgnoreUnknown(yaml.SafeLoader):
    """A yaml.SafeLoader that tolerates unknown tags.

    CloudFormation templates contain custom short-form tags (e.g. ``!Ref``,
    ``!Sub``); instead of raising a ConstructorError, any node with an
    unregistered tag is loaded as its raw tag string.
    """

    def ignore_unknown(self, node):
        # Returning the tag keeps the document loadable without interpreting it.
        return node.tag


# Register the fallback constructor for every tag without an explicit handler.
SafeLoaderIgnoreUnknown.add_constructor(None, SafeLoaderIgnoreUnknown.ignore_unknown)
|
|
|
|
|
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
class Stack(object):
    """A single deployable stack (CloudFormation or Pulumi) within a StackGroup."""

    def __init__(self, name, template, path, rel_path, ctx):
        """Initialize stack state with defaults; config is applied later via read_config().

        Args:
            name: logical stack name.
            template: template name this stack renders from.
            path: filesystem path of the stack's config file.
            rel_path: path relative to the project root (used for output locations).
            ctx: project context dict (artifact_paths, template_path, ...).
        """
        # Identity / source location
        self.stackname = name
        self.template = template
        self.path = pathlib.Path(path)
        self.rel_path = rel_path
        self.ctx = ctx

        # Configuration containers, filled by read_config()
        self.tags = {}
        self.parameters = {}
        self.outputs = {}
        self.options = {}

        # AWS targeting defaults
        self.region = "global"
        self.profile = "default"
        self.onfailure = "DELETE"
        # NOTE: attribute name carries a historic typo ("notfication") — kept
        # for compatibility, other methods read this exact name.
        self.notfication_sns = []

        # Unique id of this stack within a project
        self.id = (self.profile, self.region, self.stackname)
        self.aws_stackid = None

        # Rendering / CloudFormation state
        self.md5 = None
        self.mode = "CloudBender"
        self.provides = template
        self.cfn_template = None
        self.cfn_parameters = []
        self.cfn_data = None
        self.connection_manager = BotoConnection(self.profile, self.region)
        self.status = None
        self.store_outputs = False
        self.dependencies = set()

        # Lifecycle hooks, populated from template metadata
        self.hooks = {
            hook: []
            for hook in ("post_create", "post_update", "pre_create", "pre_update")
        }

        self.default_lock = None
        self.multi_delete = True
        self.template_bucket_url = None

        # Pulumi-specific state
        self.work_dir = None
        self.pulumi = {}
        self._pulumi_stack = None
        self.pulumi_stackname = ""
        self.pulumi_config = {}
        self.pulumi_ws_opts = None
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
def dump_config(self):
|
2019-07-28 13:02:18 +00:00
|
|
|
logger.debug("<Stack {}: {}>".format(self.id, pprint.pformat(vars(self))))
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2019-09-02 11:13:40 +00:00
|
|
|
def read_config(self, sg_config={}):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""reads stack config"""
|
2019-09-02 11:13:40 +00:00
|
|
|
|
|
|
|
# First set various attributes based on parent stackgroup config
|
2022-02-22 10:04:29 +00:00
|
|
|
self.tags.update(sg_config.get("tags", {}))
|
|
|
|
self.parameters.update(sg_config.get("parameters", {}))
|
|
|
|
self.options.update(sg_config.get("options", {}))
|
|
|
|
self.pulumi.update(sg_config.get("pulumi", {}))
|
2019-09-02 11:13:40 +00:00
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
# by default inherit parent group settings
|
2022-02-22 10:04:29 +00:00
|
|
|
for p in ["region", "profile", "notfication_sns", "template_bucket_url"]:
|
2020-08-12 15:07:56 +00:00
|
|
|
if p in sg_config:
|
|
|
|
setattr(self, p, sg_config[p])
|
2019-09-02 11:13:40 +00:00
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
# now override stack specific settings
|
2022-02-22 10:04:29 +00:00
|
|
|
_config = read_config_file(self.path, sg_config.get("variables", {}))
|
|
|
|
for p in [
|
|
|
|
"region",
|
|
|
|
"stackname",
|
|
|
|
"template",
|
|
|
|
"default_lock",
|
|
|
|
"multi_delete",
|
|
|
|
"provides",
|
|
|
|
"onfailure",
|
|
|
|
"notification_sns",
|
|
|
|
"template_bucket_url",
|
|
|
|
]:
|
2018-11-22 18:31:59 +00:00
|
|
|
if p in _config:
|
|
|
|
setattr(self, p, _config[p])
|
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
for p in ["parameters", "tags", "pulumi"]:
|
2018-11-22 18:31:59 +00:00
|
|
|
if p in _config:
|
|
|
|
setattr(self, p, dict_merge(getattr(self, p), _config[p]))
|
|
|
|
|
2019-04-18 16:30:50 +00:00
|
|
|
# Inject Artifact if not explicitly set
|
2022-02-22 10:04:29 +00:00
|
|
|
if "Artifact" not in self.tags:
|
|
|
|
self.tags["Artifact"] = self.provides
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "options" in _config:
|
|
|
|
self.options = dict_merge(self.options, _config["options"])
|
2019-06-15 00:05:15 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "Mode" in self.options:
|
|
|
|
self.mode = self.options["Mode"]
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "StoreOutputs" in self.options and self.options["StoreOutputs"]:
|
2020-06-22 11:30:54 +00:00
|
|
|
self.store_outputs = True
|
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "dependencies" in _config:
|
|
|
|
for dep in _config["dependencies"]:
|
2019-02-04 15:43:34 +00:00
|
|
|
self.dependencies.add(dep)
|
|
|
|
|
2019-07-03 13:15:18 +00:00
|
|
|
# Some sanity checks
|
|
|
|
if self.onfailure not in ["DO_NOTHING", "ROLLBACK", "DELETE"]:
|
2022-02-22 10:04:29 +00:00
|
|
|
raise ParameterIllegalValue(
|
|
|
|
"onfailure must be one of DO_NOTHING | ROLLBACK | DELETE"
|
|
|
|
)
|
2019-07-03 13:15:18 +00:00
|
|
|
|
2019-01-21 15:24:18 +00:00
|
|
|
logger.debug("Stack {} added.".format(self.id))
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
    def render(self):
        """Renders the cfn jinja template for this stack.

        Produces self.cfn_template (final YAML text) and self.cfn_data
        (parsed dict), computes self.md5 and embeds it in place of the
        __HASH__ placeholder, then refreshes dependencies/hooks via
        _parse_metadata().
        """

        # Metadata injected into the template; __HASH__ is replaced by the
        # real md5 at the very end (so the hash excludes itself).
        template_metadata = {
            "Template.Name": self.template,
            "Template.Hash": "__HASH__",
            "CloudBender.Version": __version__,
        }
        _config = {
            "mode": self.mode,
            "options": self.options,
            "metadata": template_metadata,
        }

        jenv = JinjaEnv(self.ctx["artifact_paths"])
        jenv.globals["_config"] = _config

        template = jenv.get_template("{0}{1}".format(self.template, ".yaml.jinja"))

        logger.info("Rendering %s", template.filename)

        try:
            self.cfn_template = template.render(_config)
            self.cfn_data = yaml.load(self.cfn_template, Loader=SafeLoaderIgnoreUnknown)
        except Exception as e:
            # In case we rendered invalid yaml this helps to debug:
            # dump the rendered text with line numbers before re-raising.
            if self.cfn_template:
                _output = ""
                for i, line in enumerate(self.cfn_template.splitlines(), start=1):
                    _output = _output + "{}: {}\n".format(i, line)
                logger.error(_output)
            raise e

        # If the template uses no CloudBender macros at all, strip the
        # Transform and the Conglomerate parameter; otherwise record the
        # dependency on the CloudBender macro stack.
        if not re.search("CloudBender::", self.cfn_template) and not re.search(
            "Iterate:", self.cfn_template
        ):
            logger.info(
                "CloudBender not required -> removing Transform and Conglomerate parameter"
            )
            self.cfn_template = self.cfn_template.replace(
                "Transform: [CloudBender]", ""
            )

            _res = """
  Conglomerate:
    Type: String
    Description: Project / Namespace this stack is part of
"""
            # _res doubles as a regex pattern matching the parameter block
            self.cfn_template = re.sub(_res, "", self.cfn_template)
        else:
            self.dependencies.add("CloudBender")

        # In Piped mode, inject one String parameter per remote stack
        # reference found in the template.
        include = []
        search_refs(self.cfn_data, include, self.mode)
        if self.mode == "Piped" and len(include):
            _res = ""
            for attr in include:
                _res = (
                    _res
                    + """
  {0}:
    Type: String
    Description: Parameter to provide remote stack attribute {0}""".format(
                        attr
                    )
                )

            self.cfn_template = re.sub(
                r"Parameters:", r"Parameters:" + _res + "\n", self.cfn_template
            )
            logger.info("Piped mode: Added parameters for remote stack references")

        # Re-read updated template
        self.cfn_data = yaml.load(self.cfn_template, Loader=SafeLoaderIgnoreUnknown)

        # Check for empty top level Parameters, Outputs and Conditions and remove
        for key in ["Parameters", "Outputs", "Conditions"]:
            if key in self.cfn_data and not self.cfn_data[key]:
                del self.cfn_data[key]
                self.cfn_template = self.cfn_template.replace("\n" + key + ":", "")

        # Remove and condense multiple empty lines, strip leading/trailing whitespace
        self.cfn_template = re.sub(r"\n\s*\n", "\n\n", self.cfn_template)
        self.cfn_template = re.sub(r"^\s*", "", self.cfn_template)
        self.cfn_template = re.sub(r"\s*$", "", self.cfn_template)

        # set md5 last, then substitute the placeholder so the stored hash
        # matches a re-computation over the placeholder form
        self.md5 = hashlib.md5(self.cfn_template.encode("utf-8")).hexdigest()
        self.cfn_template = self.cfn_template.replace("__HASH__", self.md5)

        # Update internal data structures
        self._parse_metadata()
|
|
|
|
|
|
|
|
    def _parse_metadata(self):
        """Extract dependencies, checksum and hooks from the parsed template.

        Requires self.cfn_data / self.cfn_template to be populated (by
        render() or read_template_file()).

        Raises:
            ChecksumError: if the embedded Template.Hash is missing or does
                not match the recomputed md5 of the template body.
        """
        # Extract dependencies
        try:
            for dep in self.cfn_data["Metadata"]["CloudBender"]["Dependencies"]:
                self.dependencies.add(dep)
        except KeyError:
            # Metadata/CloudBender/Dependencies is optional
            pass

        # Get checksum (only when not already set by render())
        if not self.md5:
            try:
                self.md5 = self.cfn_data["Metadata"]["Template"]["Hash"]

                # Verify embedded md5 hash: restore the render-time
                # placeholder, recompute, and compare.
                source_cfn = re.sub(
                    "Hash: [0-9a-f]{32}", "Hash: __HASH__", self.cfn_template
                )
                our_md5 = hashlib.md5(source_cfn.encode("utf-8")).hexdigest()
                if our_md5 != self.md5:
                    # "from None" suppresses exception chaining noise
                    raise ChecksumError(
                        "Template hash checksum mismatch! Expected: {} Got: {}".format(
                            self.md5, our_md5
                        )
                    ) from None

            except KeyError:
                raise ChecksumError("Template missing Hash checksum!") from None

        # Add CloudBender dependencies from remote stack references
        include = []
        search_refs(self.cfn_data, include, self.mode)
        for ref in include:
            if self.mode != "Piped":
                # refs look like "<stack>.<attribute>"
                self.dependencies.add(ref.split(".")[0])
            else:
                # Piped mode encodes "." as "DoT" in parameter names
                self.dependencies.add(ref.split("DoT")[0])

        # Extract hooks; each entry may be a single command or a list
        try:
            for hook, func in self.cfn_data["Metadata"]["Hooks"].items():
                if hook in ["post_update", "post_create", "pre_create", "pre_update"]:
                    if isinstance(func, list):
                        self.hooks[hook].extend(func)
                    else:
                        self.hooks[hook].append(func)
        except KeyError:
            # Metadata/Hooks is optional
            pass
|
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
    def write_template_file(self):
        """Write the rendered template to disk and optionally upload it to S3.

        Logs an error (does not raise) if nothing has been rendered yet or
        the S3 upload fails. Warns when no template bucket is configured and
        the template exceeds the 51200-byte CloudFormation API body limit.
        """
        if self.cfn_template:
            yaml_file = os.path.join(
                self.ctx["template_path"], self.rel_path, self.stackname + ".yaml"
            )
            ensure_dir(os.path.join(self.ctx["template_path"], self.rel_path))
            with open(yaml_file, "w") as yaml_contents:
                yaml_contents.write(self.cfn_template)
                logger.info("Wrote %s to %s", self.template, yaml_file)

            # upload template to s3 if set
            if self.template_bucket_url:
                try:
                    (bucket, path) = get_s3_url(
                        self.template_bucket_url,
                        self.rel_path,
                        self.stackname + ".yaml",
                    )
                    self.connection_manager.call(
                        "s3",
                        "put_object",
                        {
                            "Bucket": bucket,
                            "Key": path,
                            "Body": self.cfn_template,
                            "ServerSideEncryption": "AES256",
                        },
                        profile=self.profile,
                        region=self.region,
                    )

                    logger.info("Uploaded template to s3://{}/{}".format(bucket, path))
                except ClientError as e:
                    # best-effort upload: log and continue
                    logger.error(
                        "Error trying to upload template so S3: {}, {}".format(
                            self.template_bucket_url, e
                        )
                    )

            else:
                # 51200 bytes is the CFN limit for inline TemplateBody
                if len(self.cfn_template) > 51200:
                    logger.warning(
                        "template_bucket_url not set and rendered template exceeds maximum allowed size of 51200, actual size: {} !".format(
                            len(self.cfn_template)
                        )
                    )
        else:
            logger.error(
                "No cfn template rendered yet for stack {}.".format(self.stackname)
            )
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
def delete_template_file(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
yaml_file = os.path.join(
|
|
|
|
self.ctx["template_path"], self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
2018-11-22 18:31:59 +00:00
|
|
|
try:
|
|
|
|
os.remove(yaml_file)
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.debug("Deleted cfn template %s.", yaml_file)
|
2018-11-22 18:31:59 +00:00
|
|
|
except OSError:
|
|
|
|
pass
|
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
if self.template_bucket_url:
|
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
(bucket, path) = get_s3_url(
|
|
|
|
self.template_bucket_url, self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"s3",
|
|
|
|
"delete_object",
|
|
|
|
{"Bucket": bucket, "Key": path},
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
|
|
|
|
logger.info("Deleted template from s3://{}/{}".format(bucket, path))
|
|
|
|
except ClientError as e:
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.error(
|
|
|
|
"Error trying to delete template from S3: {}, {}".format(
|
|
|
|
self.template_bucket_url, e
|
|
|
|
)
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
def read_template_file(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Reads rendered yaml template from disk or s3 and extracts metadata"""
|
2019-02-04 15:43:34 +00:00
|
|
|
if not self.cfn_template:
|
2020-08-12 15:07:56 +00:00
|
|
|
if self.template_bucket_url:
|
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
(bucket, path) = get_s3_url(
|
|
|
|
self.template_bucket_url,
|
|
|
|
self.rel_path,
|
|
|
|
self.stackname + ".yaml",
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
template = self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"s3",
|
|
|
|
"get_object",
|
|
|
|
{"Bucket": bucket, "Key": path},
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
logger.debug("Got template from s3://{}/{}".format(bucket, path))
|
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
self.cfn_template = template["Body"].read().decode("utf-8")
|
2020-12-28 23:12:55 +00:00
|
|
|
|
|
|
|
# Overwrite local copy
|
2022-02-22 10:04:29 +00:00
|
|
|
yaml_file = os.path.join(
|
|
|
|
self.ctx["template_path"],
|
|
|
|
self.rel_path,
|
|
|
|
self.stackname + ".yaml",
|
|
|
|
)
|
|
|
|
ensure_dir(os.path.join(self.ctx["template_path"], self.rel_path))
|
|
|
|
with open(yaml_file, "w") as yaml_contents:
|
2020-12-28 23:12:55 +00:00
|
|
|
yaml_contents.write(self.cfn_template)
|
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
except ClientError as e:
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.error(
|
|
|
|
"Could not find template file on S3: {}/{}, {}".format(
|
|
|
|
bucket, path, e
|
|
|
|
)
|
|
|
|
)
|
2020-08-12 15:07:56 +00:00
|
|
|
|
|
|
|
else:
|
2022-02-22 10:04:29 +00:00
|
|
|
yaml_file = os.path.join(
|
|
|
|
self.ctx["template_path"], self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
2019-02-04 15:43:34 +00:00
|
|
|
|
2020-08-12 15:07:56 +00:00
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
with open(yaml_file, "r") as yaml_contents:
|
2020-08-12 15:07:56 +00:00
|
|
|
self.cfn_template = yaml_contents.read()
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.debug("Read cfn template %s.", yaml_file)
|
2020-08-12 15:07:56 +00:00
|
|
|
except FileNotFoundError as e:
|
|
|
|
logger.warn("Could not find template file: {}".format(yaml_file))
|
|
|
|
raise e
|
|
|
|
|
2021-01-11 14:34:50 +00:00
|
|
|
self.cfn_data = yaml.load(self.cfn_template, Loader=SafeLoaderIgnoreUnknown)
|
2020-08-12 15:07:56 +00:00
|
|
|
self._parse_metadata()
|
2021-03-11 18:25:02 +00:00
|
|
|
|
2019-02-04 15:43:34 +00:00
|
|
|
else:
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.debug("Using cached cfn template %s.", self.stackname)
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
    def validate(self):
        """Validates the rendered template via cfn-lint.

        Returns:
            int: 0 when the template passes, 1 when cfn-lint reports matches.
        """
        self.read_template_file()

        # Template authors can disable checks via Metadata/cfnlint_ignore
        try:
            ignore_checks = self.cfn_data["Metadata"]["cfnlint_ignore"]
        except KeyError:
            ignore_checks = []

        # Ignore some more checks around injected parameters as we generate these
        if self.mode == "Piped":
            ignore_checks = ignore_checks + ["W2505", "W2509", "W2507"]

        # Ignore checks regarding overloaded properties
        if self.mode == "CloudBender":
            ignore_checks = ignore_checks + [
                "E3035",
                "E3002",
                "E3012",
                "W2001",
                "E3001",
                "E0002",
                "E1012",
            ]

        filename = os.path.join(
            self.ctx["template_path"], self.rel_path, self.stackname + ".yaml"
        )
        logger.info("Validating {0}".format(filename))

        lint_args = ["--template", filename]
        if ignore_checks:
            lint_args.append("--ignore-checks")
            lint_args = lint_args + ignore_checks
            logger.info("Ignoring checks: {}".format(",".join(ignore_checks)))

        (args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args)
        (template, rules, matches) = cfnlint.core.get_template_rules(filename, args)

        # cfn-lint needs a concrete AWS region; map our "global" placeholder
        region = self.region
        if region == "global":
            region = "us-east-1"

        # Only run the full checks if parsing produced no matches already
        if not matches:
            matches.extend(cfnlint.core.run_checks(filename, template, rules, [region]))
        if len(matches):
            for match in matches:
                # NOTE: _format is a private cfn-lint API — may break on upgrades
                logger.error(formatter._format(match))
            return 1
        else:
            logger.info("Passed.")
            return 0
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
def get_outputs(self, include=".*", values=False):
|
|
|
|
"""gets outputs of the stack"""
|
2020-02-25 20:40:12 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if self.mode == "pulumi":
|
2022-06-27 18:51:03 +00:00
|
|
|
self.outputs = self._get_pulumi_stack().outputs()
|
2020-02-25 20:40:12 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
else:
|
|
|
|
self.read_template_file()
|
2020-02-25 20:40:12 +00:00
|
|
|
try:
|
2021-09-20 14:19:14 +00:00
|
|
|
stacks = self.connection_manager.call(
|
|
|
|
"cloudformation",
|
|
|
|
"describe_stacks",
|
2022-02-22 10:04:29 +00:00
|
|
|
{"StackName": self.stackname},
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)["Stacks"]
|
2020-02-25 20:40:12 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
for output in stacks[0]["Outputs"]:
|
|
|
|
self.outputs[output["OutputKey"]] = output["OutputValue"]
|
|
|
|
logger.debug(
|
|
|
|
"Stack outputs for {} in {}: {}".format(
|
|
|
|
self.stackname, self.region, self.outputs
|
|
|
|
)
|
|
|
|
)
|
2021-09-20 14:19:14 +00:00
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
except ClientError:
|
|
|
|
logger.warn("Could not get outputs of {}".format(self.stackname))
|
|
|
|
pass
|
2020-02-25 20:40:12 +00:00
|
|
|
|
2020-06-22 13:14:11 +00:00
|
|
|
if self.outputs:
|
|
|
|
if self.store_outputs:
|
2022-06-28 11:15:45 +00:00
|
|
|
filename = self.stackname + ".yaml"
|
|
|
|
my_template = pkg_resources.read_text(templates, "outputs.yaml")
|
2021-02-22 18:38:44 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
output_file = os.path.join(
|
|
|
|
self.ctx["outputs_path"], self.rel_path, filename
|
|
|
|
)
|
|
|
|
ensure_dir(os.path.join(self.ctx["outputs_path"], self.rel_path))
|
2021-02-22 18:38:44 +00:00
|
|
|
|
2022-04-12 11:16:42 +00:00
|
|
|
# Blacklist at least AWS SecretKeys from leaking into git
|
|
|
|
# Pulumi to the rescue soon
|
|
|
|
blacklist = [".*SecretAccessKey.*"]
|
|
|
|
sanitized_outputs = {}
|
|
|
|
for k in self.outputs.keys():
|
|
|
|
sanitized_outputs[k] = self.outputs[k]
|
|
|
|
for val in blacklist:
|
|
|
|
if re.match(val, k, re.IGNORECASE):
|
|
|
|
sanitized_outputs[k] = "<Redacted>"
|
|
|
|
|
2021-02-22 18:38:44 +00:00
|
|
|
jenv = JinjaEnv()
|
|
|
|
template = jenv.from_string(my_template)
|
2022-02-22 10:04:29 +00:00
|
|
|
data = {
|
|
|
|
"stackname": "/".join([self.rel_path, self.stackname]),
|
|
|
|
"timestamp": datetime.strftime(
|
|
|
|
datetime.now(tzutc()), "%d/%m/%y %H:%M"
|
|
|
|
),
|
2022-04-12 11:16:42 +00:00
|
|
|
"outputs": sanitized_outputs,
|
2022-02-22 10:04:29 +00:00
|
|
|
"parameters": self.parameters,
|
|
|
|
}
|
|
|
|
|
|
|
|
with open(output_file, "w") as output_contents:
|
2021-02-22 18:38:44 +00:00
|
|
|
output_contents.write(template.render(**data))
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info(
|
|
|
|
"Wrote outputs for %s to %s", self.stackname, output_file
|
|
|
|
)
|
2022-03-15 10:18:38 +00:00
|
|
|
|
|
|
|
# If secrets replace with clear values for now, display ONLY
|
|
|
|
for k in self.outputs.keys():
|
2022-03-16 15:18:23 +00:00
|
|
|
if hasattr(self.outputs[k], "secret") and self.outputs[k].secret:
|
2022-03-15 10:18:38 +00:00
|
|
|
self.outputs[k] = self.outputs[k].value
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
"{} {} Outputs:\n{}".format(
|
|
|
|
self.region, self.stackname, pprint.pformat(self.outputs, indent=2)
|
|
|
|
)
|
|
|
|
)
|
2020-06-22 11:30:54 +00:00
|
|
|
|
2020-07-31 21:35:14 +00:00
|
|
|
def create_docs(self, template=False, graph=False):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Read rendered template, parse documentation fragments, eg. parameter description
|
|
|
|
and create a mardown doc file for the stack
|
|
|
|
same idea as eg. helm-docs for values.yaml
|
|
|
|
"""
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2020-06-22 13:14:11 +00:00
|
|
|
try:
|
|
|
|
self.read_template_file()
|
|
|
|
except FileNotFoundError:
|
|
|
|
return
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2020-06-05 10:08:19 +00:00
|
|
|
if not template:
|
2022-02-22 10:04:29 +00:00
|
|
|
doc_template = pkg_resources.read_text(templates, "stack-doc.md")
|
2020-06-05 10:08:19 +00:00
|
|
|
jenv = JinjaEnv()
|
|
|
|
template = jenv.from_string(doc_template)
|
|
|
|
data = {}
|
|
|
|
else:
|
|
|
|
doc_template = template
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
data["name"] = self.stackname
|
|
|
|
data["description"] = self.cfn_data["Description"]
|
|
|
|
data["dependencies"] = self.dependencies
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "Parameters" in self.cfn_data:
|
|
|
|
data["parameters"] = self.cfn_data["Parameters"]
|
2020-06-22 15:16:38 +00:00
|
|
|
set_parameters = self.resolve_parameters()
|
|
|
|
for p in set_parameters:
|
2022-02-22 10:04:29 +00:00
|
|
|
data["parameters"][p]["value"] = set_parameters[p]
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if "Outputs" in self.cfn_data:
|
|
|
|
data["outputs"] = self.cfn_data["Outputs"]
|
2020-06-19 16:40:51 +00:00
|
|
|
|
2020-06-22 13:14:11 +00:00
|
|
|
# Check for existing outputs yaml, if found add current value column and set header to timestamp from outputs file
|
2022-02-22 10:04:29 +00:00
|
|
|
output_file = os.path.join(
|
|
|
|
self.ctx["outputs_path"], self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
2020-06-22 13:14:11 +00:00
|
|
|
|
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
with open(output_file, "r") as yaml_contents:
|
2020-06-22 13:14:11 +00:00
|
|
|
outputs = yaml.safe_load(yaml_contents.read())
|
2022-02-22 10:04:29 +00:00
|
|
|
for p in outputs["Outputs"]:
|
|
|
|
data["outputs"][p]["last_value"] = outputs["Outputs"][p]
|
|
|
|
data["timestamp"] = outputs["TimeStamp"]
|
2020-07-31 21:57:25 +00:00
|
|
|
except (FileNotFoundError, KeyError, TypeError):
|
2020-06-22 13:14:11 +00:00
|
|
|
pass
|
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
doc_file = os.path.join(
|
|
|
|
self.ctx["docs_path"], self.rel_path, self.stackname + ".md"
|
|
|
|
)
|
|
|
|
ensure_dir(os.path.join(self.ctx["docs_path"], self.rel_path))
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
with open(doc_file, "w") as doc_contents:
|
2020-06-04 15:32:17 +00:00
|
|
|
doc_contents.write(template.render(**data))
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("Wrote documentation for %s to %s", self.stackname, doc_file)
|
2020-06-04 15:32:17 +00:00
|
|
|
|
2020-07-31 21:35:14 +00:00
|
|
|
# Write Graph in Dot format
|
|
|
|
if graph:
|
2022-02-22 10:04:29 +00:00
|
|
|
filename = os.path.join(
|
|
|
|
self.ctx["template_path"], self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
2020-07-31 21:35:14 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
lint_args = ["--template", filename]
|
2020-07-31 21:35:14 +00:00
|
|
|
(args, filenames, formatter) = cfnlint.core.get_args_filenames(lint_args)
|
|
|
|
(template, rules, matches) = cfnlint.core.get_template_rules(filename, args)
|
|
|
|
template_obj = cfnlint.template.Template(filename, template, [self.region])
|
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
path = os.path.join(
|
|
|
|
self.ctx["docs_path"], self.rel_path, self.stackname + ".dot"
|
|
|
|
)
|
2020-07-31 21:57:25 +00:00
|
|
|
g = cfnlint.graph.Graph(template_obj)
|
2020-07-31 21:35:14 +00:00
|
|
|
try:
|
|
|
|
g.to_dot(path)
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("DOT representation of the graph written to %s", path)
|
2020-07-31 21:35:14 +00:00
|
|
|
except ImportError:
|
|
|
|
logger.error(
|
2022-02-22 10:04:29 +00:00
|
|
|
"Could not write the graph in DOT format. Please install either `pygraphviz` or `pydot` modules."
|
|
|
|
)
|
2020-07-31 21:35:14 +00:00
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
    def resolve_parameters(self):
        """Renders parameters for the stack based on the source template and the environment configuration.

        Builds self.cfn_parameters (boto3-style ParameterKey/ParameterValue
        list) and returns the dict of explicitly set parameters (NoEcho
        values masked as "****" for display).

        Raises:
            ParameterNotFound: for template parameters with no configured
                value and no Default.
        """

        self.read_template_file()

        # if we run in Piped Mode, inspect all outputs of the running Conglomerate members
        if self.mode == "Piped":
            stack_outputs = {}
            try:
                stack_outputs = self._inspect_stacks(self.tags["Conglomerate"])
            except KeyError:
                # No Conglomerate tag set — nothing to inspect
                pass

        _found = {}
        if "Parameters" in self.cfn_data:
            _errors = []
            self.cfn_parameters = []
            for p in self.cfn_data["Parameters"]:
                # In Piped mode we try to resolve all Paramters first via stack_outputs
                if self.mode == "Piped":
                    try:
                        # first reverse the rename due to AWS alphanumeric restriction for parameter names
                        _p = p.replace("DoT", ".")
                        value = str(stack_outputs[_p])
                        self.cfn_parameters.append(
                            {"ParameterKey": p, "ParameterValue": value}
                        )
                        logger.info("Got {} = {} from running stack".format(p, value))
                        continue
                    except KeyError:
                        # Not provided by a running stack — fall through to config
                        pass

                # Key name in config tree is: stacks.<self.stackname>.parameters.<parameter>
                if p in self.parameters:
                    value = str(self.parameters[p])
                    self.cfn_parameters.append(
                        {"ParameterKey": p, "ParameterValue": value}
                    )

                    # Hide NoEcho parameters in shell output
                    if (
                        "NoEcho" in self.cfn_data["Parameters"][p]
                        and self.cfn_data["Parameters"][p]["NoEcho"]
                    ):
                        value = "****"

                    _found[p] = value
                else:
                    # If we have a Default defined in the CFN skip, as AWS will use it
                    if "Default" not in self.cfn_data["Parameters"][p]:
                        _errors.append(p)

            if _errors:
                raise ParameterNotFound(
                    "Cannot find value for parameters: {0}".format(_errors)
                )

            # Warning of excessive parameters, might be useful to spot typos early
            _warnings = []
            for p in self.parameters.keys():
                if p not in self.cfn_data["Parameters"]:
                    _warnings.append(p)

            logger.info(
                "{} {} set parameters:\n{}".format(
                    self.region, self.stackname, pprint.pformat(_found, indent=2)
                )
            )

            if _warnings:
                logger.warning("Ignored additional parameters: {}.".format(_warnings))

        # Return dict of explicitly set parameters
        return _found
|
2020-06-22 11:30:54 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
@pulumi_ws
|
2020-06-22 11:30:54 +00:00
|
|
|
@exec_hooks
|
2018-11-22 18:31:59 +00:00
|
|
|
def create(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Creates a stack"""
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if self.mode == "pulumi":
|
2022-06-23 13:55:37 +00:00
|
|
|
kwargs = self._set_pulumi_args()
|
2022-06-27 18:51:03 +00:00
|
|
|
self._get_pulumi_stack(create=True).up(**kwargs)
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
else:
|
|
|
|
# Prepare parameters
|
|
|
|
self.resolve_parameters()
|
2020-08-12 16:20:37 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("Creating {0} {1}".format(self.region, self.stackname))
|
|
|
|
kwargs = {
|
|
|
|
"StackName": self.stackname,
|
|
|
|
"Parameters": self.cfn_parameters,
|
|
|
|
"OnFailure": self.onfailure,
|
|
|
|
"NotificationARNs": self.notfication_sns,
|
|
|
|
"Tags": [
|
|
|
|
{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()
|
|
|
|
],
|
|
|
|
"Capabilities": [
|
|
|
|
"CAPABILITY_IAM",
|
|
|
|
"CAPABILITY_NAMED_IAM",
|
|
|
|
"CAPABILITY_AUTO_EXPAND",
|
|
|
|
],
|
|
|
|
}
|
2021-09-20 14:19:14 +00:00
|
|
|
kwargs = self._add_template_arg(kwargs)
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
self.aws_stackid = self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"cloudformation",
|
|
|
|
"create_stack",
|
|
|
|
kwargs,
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2020-06-22 11:30:54 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
status = self._wait_for_completion()
|
|
|
|
self.get_outputs()
|
|
|
|
|
|
|
|
return status
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
@pulumi_ws
|
2020-06-22 11:30:54 +00:00
|
|
|
@exec_hooks
|
2018-11-22 18:31:59 +00:00
|
|
|
def update(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Updates an existing stack"""
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
# We cannot migrate directly so bail out if CFN stack still exists
|
2022-02-22 10:04:29 +00:00
|
|
|
if self.mode == "pulumi":
|
|
|
|
logger.error(
|
|
|
|
"Cloudformation stack {} still exists, cannot use Pulumi!".format(
|
|
|
|
self.stackname
|
|
|
|
)
|
|
|
|
)
|
2021-09-20 14:19:14 +00:00
|
|
|
return
|
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
# Prepare parameters
|
|
|
|
self.resolve_parameters()
|
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("Updating {0} {1}".format(self.region, self.stackname))
|
2018-11-22 18:31:59 +00:00
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
kwargs = {
|
|
|
|
"StackName": self.stackname,
|
|
|
|
"Parameters": self.cfn_parameters,
|
|
|
|
"NotificationARNs": self.notfication_sns,
|
|
|
|
"Tags": [
|
|
|
|
{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()
|
|
|
|
],
|
|
|
|
"Capabilities": [
|
|
|
|
"CAPABILITY_IAM",
|
|
|
|
"CAPABILITY_NAMED_IAM",
|
|
|
|
"CAPABILITY_AUTO_EXPAND",
|
|
|
|
],
|
|
|
|
}
|
2020-08-12 16:20:37 +00:00
|
|
|
kwargs = self._add_template_arg(kwargs)
|
|
|
|
|
2020-06-22 11:30:54 +00:00
|
|
|
self.aws_stackid = self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"cloudformation",
|
|
|
|
"update_stack",
|
|
|
|
kwargs,
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
except ClientError as e:
|
2022-02-22 10:04:29 +00:00
|
|
|
if "No updates are to be performed" in e.response["Error"]["Message"]:
|
|
|
|
logger.info("No updates for {0}".format(self.stackname))
|
2020-06-26 14:54:37 +00:00
|
|
|
return "COMPLETE"
|
2018-11-22 18:31:59 +00:00
|
|
|
else:
|
|
|
|
raise e
|
|
|
|
|
2020-06-22 11:30:54 +00:00
|
|
|
status = self._wait_for_completion()
|
|
|
|
self.get_outputs()
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2020-06-22 11:30:54 +00:00
|
|
|
return status
|
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
@pulumi_ws
|
2020-06-22 11:30:54 +00:00
|
|
|
@exec_hooks
|
2018-11-22 18:31:59 +00:00
|
|
|
def delete(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Deletes a stack"""
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("Deleting {0} {1}".format(self.region, self.stackname))
|
2021-09-20 14:19:14 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
if self.mode == "pulumi":
|
2022-06-27 18:51:03 +00:00
|
|
|
pulumi_stack = self._get_pulumi_stack()
|
2022-03-16 15:18:23 +00:00
|
|
|
pulumi_stack.destroy(on_output=self._log_pulumi)
|
|
|
|
pulumi_stack.workspace.remove_stack(pulumi_stack.name)
|
2021-09-20 14:19:14 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
|
2020-06-22 11:30:54 +00:00
|
|
|
self.aws_stackid = self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"cloudformation",
|
|
|
|
"delete_stack",
|
|
|
|
{"StackName": self.stackname},
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2020-06-22 11:30:54 +00:00
|
|
|
status = self._wait_for_completion()
|
|
|
|
return status
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
@pulumi_ws
|
|
|
|
def refresh(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Refreshes a Pulumi stack"""
|
2021-09-20 14:19:14 +00:00
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
self._get_pulumi_stack().refresh(on_output=self._log_pulumi)
|
2021-09-20 14:19:14 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
|
|
|
|
@pulumi_ws
|
|
|
|
def preview(self):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Preview a Pulumi stack up operation"""
|
2021-09-20 14:19:14 +00:00
|
|
|
|
2022-06-23 13:55:37 +00:00
|
|
|
kwargs = self._set_pulumi_args()
|
2022-06-27 18:51:03 +00:00
|
|
|
self._get_pulumi_stack(create=True).preview(**kwargs)
|
2022-03-16 15:18:23 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
@pulumi_ws
|
|
|
|
def execute(self, function, args, listall=False):
|
|
|
|
"""Executes custom Python function within a Pulumi stack"""
|
|
|
|
|
|
|
|
# call all available functions and output built in help
|
|
|
|
if listall:
|
|
|
|
for k in vars(self._pulumi_code).keys():
|
|
|
|
if k.startswith("_execute_"):
|
|
|
|
docstring = vars(self._pulumi_code)[k](docstring=True)
|
|
|
|
print("{}: {}".format(k.lstrip("_execute_"), docstring))
|
|
|
|
return
|
|
|
|
|
|
|
|
else:
|
|
|
|
if not function:
|
|
|
|
logger.error("No function specified !")
|
|
|
|
return
|
|
|
|
|
|
|
|
exec_function = f"_execute_{function}"
|
|
|
|
if exec_function in vars(self._pulumi_code):
|
|
|
|
pulumi_stack = self._get_pulumi_stack()
|
|
|
|
vars(self._pulumi_code)[exec_function](
|
2022-06-28 13:30:13 +00:00
|
|
|
config=pulumi_stack.get_all_config(),
|
|
|
|
outputs=pulumi_stack.outputs(),
|
|
|
|
args=args,
|
2022-06-27 18:51:03 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
else:
|
|
|
|
logger.error(
|
|
|
|
"{} is not defined in {}".format(function, self._pulumi_code)
|
|
|
|
)
|
|
|
|
|
2022-03-16 15:18:23 +00:00
|
|
|
@pulumi_ws
|
|
|
|
def assimilate(self):
|
|
|
|
"""Import resources into Pulumi stack"""
|
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
pulumi_stack = self._get_pulumi_stack(create=True)
|
2022-03-16 15:18:23 +00:00
|
|
|
|
|
|
|
# now lets import each defined resource
|
|
|
|
for r in self._pulumi_code.RESOURCES:
|
2022-04-12 11:16:42 +00:00
|
|
|
r_id = r["id"]
|
|
|
|
if not r_id:
|
2022-06-23 13:55:37 +00:00
|
|
|
r_id = input(
|
|
|
|
"Please enter ID for {} ({}):".format(r["name"], r["type"])
|
|
|
|
)
|
2022-04-12 11:16:42 +00:00
|
|
|
|
|
|
|
logger.info("Importing {} ({}) as {}".format(r_id, r["type"], r["name"]))
|
|
|
|
|
|
|
|
args = ["import", r["type"], r["name"], r_id, "--yes"]
|
2022-03-16 15:18:23 +00:00
|
|
|
pulumi_stack._run_pulumi_cmd_sync(args)
|
2021-09-20 14:19:14 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
|
2022-02-21 20:04:38 +00:00
|
|
|
@pulumi_ws
|
2022-04-12 11:16:42 +00:00
|
|
|
def export(self, remove_pending_operations):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Exports a Pulumi stack"""
|
2022-02-21 20:04:38 +00:00
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
pulumi_stack = self._get_pulumi_stack()
|
2022-03-16 15:18:23 +00:00
|
|
|
deployment = pulumi_stack.export_stack()
|
2022-02-21 20:04:38 +00:00
|
|
|
|
2022-04-12 11:16:42 +00:00
|
|
|
if remove_pending_operations:
|
2022-02-22 10:04:29 +00:00
|
|
|
deployment.deployment.pop("pending_operations", None)
|
2022-03-16 15:18:23 +00:00
|
|
|
pulumi_stack.import_stack(deployment)
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info("Removed all pending_operations from %s" % self.stackname)
|
2022-02-21 20:04:38 +00:00
|
|
|
else:
|
|
|
|
print(json.dumps(deployment.deployment))
|
|
|
|
|
|
|
|
return
|
|
|
|
|
2021-09-20 14:19:14 +00:00
|
|
|
    @pulumi_ws
    def set_config(self, key, value, secret):
        """Set a config or secret.

        Sets the value on the live Pulumi stack, then copies the resulting
        (possibly encrypted) value plus the stack's encryption salt/key back
        into the CloudBender stack config file at ``self.path`` so it can be
        committed alongside the code.
        """

        pulumi_stack = self._get_pulumi_stack(create=True)
        pulumi_stack.set_config(key, pulumi.automation.ConfigValue(value, secret))

        # Store salt or key and encrypted value in CloudBender stack config
        settings = None
        pulumi_settings = pulumi_stack.workspace.stack_settings(
            pulumi_stack.name
        )._serialize()

        with open(self.path, "r") as file:
            settings = yaml.safe_load(file)

        if "pulumi" not in settings:
            settings["pulumi"] = {}

        # Preserve the encryption material so secrets remain decryptable
        if "encryptionsalt" in pulumi_settings:
            settings["pulumi"]["encryptionsalt"] = pulumi_settings["encryptionsalt"]
        if "encryptedkey" in pulumi_settings:
            settings["pulumi"]["encryptedkey"] = pulumi_settings["encryptedkey"]

        if "parameters" not in settings:
            settings["parameters"] = {}
        # Pulumi namespaces config keys as "<project>:<key>"
        settings["parameters"][key] = pulumi_settings["config"][
            "{}:{}".format(self.parameters["Conglomerate"], key)
        ]

        # Write the merged settings back to the stack config file
        with open(self.path, "w") as file:
            yaml.dump(settings, stream=file)

        return
|
|
|
|
|
|
|
|
@pulumi_ws
|
|
|
|
def get_config(self, key):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Get a config or secret"""
|
2021-09-20 14:19:14 +00:00
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
print(self._get_pulumi_stack().get_config(key).value)
|
2021-09-20 14:19:14 +00:00
|
|
|
|
2019-01-30 13:00:06 +00:00
|
|
|
def create_change_set(self, change_set_name):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Creates a Change Set with the name ``change_set_name``."""
|
2019-01-30 13:00:06 +00:00
|
|
|
|
|
|
|
# Prepare parameters
|
|
|
|
self.resolve_parameters()
|
2019-02-04 15:43:34 +00:00
|
|
|
self.read_template_file()
|
2019-01-30 13:00:06 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info(
|
|
|
|
"Creating change set {0} for stack {1}".format(
|
|
|
|
change_set_name, self.stackname
|
|
|
|
)
|
|
|
|
)
|
|
|
|
kwargs = {
|
|
|
|
"StackName": self.stackname,
|
|
|
|
"ChangeSetName": change_set_name,
|
|
|
|
"Parameters": self.cfn_parameters,
|
|
|
|
"Tags": [{"Key": str(k), "Value": str(v)} for k, v in self.tags.items()],
|
|
|
|
"Capabilities": ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"],
|
|
|
|
}
|
2020-08-12 16:20:37 +00:00
|
|
|
kwargs = self._add_template_arg(kwargs)
|
|
|
|
|
2019-02-07 15:36:16 +00:00
|
|
|
self.connection_manager.call(
|
2022-02-22 10:04:29 +00:00
|
|
|
"cloudformation",
|
|
|
|
"create_change_set",
|
|
|
|
kwargs,
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2019-01-30 13:00:06 +00:00
|
|
|
return self._wait_for_completion()
|
|
|
|
|
2018-11-22 18:31:59 +00:00
|
|
|
def get_status(self):
|
|
|
|
"""
|
|
|
|
Returns the stack's status.
|
|
|
|
:returns: The stack's status.
|
|
|
|
"""
|
|
|
|
try:
|
2020-06-26 14:54:37 +00:00
|
|
|
status = self.connection_manager.call(
|
|
|
|
"cloudformation",
|
|
|
|
"describe_stacks",
|
|
|
|
{"StackName": self.stackname},
|
2022-02-22 10:04:29 +00:00
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)["Stacks"][0]["StackStatus"]
|
2018-11-22 18:31:59 +00:00
|
|
|
except ClientError as e:
|
|
|
|
if e.response["Error"]["Message"].endswith("does not exist"):
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
raise e
|
|
|
|
return status
|
|
|
|
|
|
|
|
def describe_events(self):
|
|
|
|
"""
|
|
|
|
Returns a dictionary contianing the stack events.
|
|
|
|
:returns: The CloudFormation events for a stack.
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
status = self.connection_manager.call(
|
|
|
|
"cloudformation",
|
|
|
|
"describe_stack_events",
|
|
|
|
{"StackName": self.stackname},
|
2022-02-22 10:04:29 +00:00
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2018-11-22 18:31:59 +00:00
|
|
|
except ClientError as e:
|
|
|
|
if e.response["Error"]["Message"].endswith("does not exist"):
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
raise e
|
|
|
|
|
|
|
|
return status
|
|
|
|
|
|
|
|
def _wait_for_completion(self, timeout=0):
|
|
|
|
"""
|
|
|
|
Waits for a stack operation to finish. Prints CloudFormation events while it waits.
|
|
|
|
:param timeout: Timeout before returning
|
|
|
|
:returns: The final stack status.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def timed_out(elapsed):
|
|
|
|
return elapsed >= timeout if timeout else False
|
|
|
|
|
2020-06-26 14:54:37 +00:00
|
|
|
status = "IN_PROGRESS"
|
2018-11-22 18:31:59 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
self.most_recent_event_datetime = datetime.now(tzutc()) - timedelta(seconds=3)
|
2018-11-22 18:31:59 +00:00
|
|
|
elapsed = 0
|
2020-06-26 14:54:37 +00:00
|
|
|
while status == "IN_PROGRESS" and not timed_out(elapsed):
|
2018-11-22 18:31:59 +00:00
|
|
|
status = self._get_simplified_status(self.get_status())
|
|
|
|
if not status:
|
|
|
|
return None
|
|
|
|
|
|
|
|
self._log_new_events()
|
|
|
|
time.sleep(4)
|
|
|
|
elapsed += 4
|
|
|
|
|
|
|
|
return status
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _get_simplified_status(status):
|
2022-02-22 10:04:29 +00:00
|
|
|
"""Returns the simplified Stack Status."""
|
2018-11-22 18:31:59 +00:00
|
|
|
if status:
|
|
|
|
if status.endswith("ROLLBACK_COMPLETE"):
|
2020-06-26 14:54:37 +00:00
|
|
|
return "FAILED"
|
2018-11-22 18:31:59 +00:00
|
|
|
elif status.endswith("_COMPLETE"):
|
2020-06-26 14:54:37 +00:00
|
|
|
return "COMPLETE"
|
2018-11-22 18:31:59 +00:00
|
|
|
elif status.endswith("_IN_PROGRESS"):
|
2020-06-26 14:54:37 +00:00
|
|
|
return "IN_PROGRESS"
|
2018-11-22 18:31:59 +00:00
|
|
|
elif status.endswith("_FAILED"):
|
2020-06-26 14:54:37 +00:00
|
|
|
return "FAILED"
|
2018-11-22 18:31:59 +00:00
|
|
|
else:
|
2022-02-22 10:04:29 +00:00
|
|
|
return "Unknown"
|
2018-11-22 18:31:59 +00:00
|
|
|
|
|
|
|
def _log_new_events(self):
|
|
|
|
"""
|
|
|
|
Log the latest stack events while the stack is being built.
|
|
|
|
"""
|
|
|
|
events = self.describe_events()
|
|
|
|
if events:
|
|
|
|
events = events["StackEvents"]
|
|
|
|
events.reverse()
|
|
|
|
new_events = [
|
2022-02-22 10:04:29 +00:00
|
|
|
event
|
|
|
|
for event in events
|
2018-11-22 18:31:59 +00:00
|
|
|
if event["Timestamp"] > self.most_recent_event_datetime
|
|
|
|
]
|
|
|
|
for event in new_events:
|
2022-02-22 10:04:29 +00:00
|
|
|
logger.info(
|
|
|
|
" ".join(
|
|
|
|
[
|
|
|
|
self.region,
|
|
|
|
self.stackname,
|
|
|
|
event["LogicalResourceId"],
|
|
|
|
event["ResourceType"],
|
|
|
|
event["ResourceStatus"],
|
|
|
|
event.get("ResourceStatusReason", ""),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
)
|
2018-11-22 18:31:59 +00:00
|
|
|
self.most_recent_event_datetime = event["Timestamp"]
|
|
|
|
|
2019-06-27 13:31:51 +00:00
|
|
|
# stackoutput inspection
|
|
|
|
def _inspect_stacks(self, conglomerate):
|
|
|
|
# Get all stacks of the conglomertate
|
|
|
|
running_stacks = self.connection_manager.call(
|
|
|
|
"cloudformation",
|
|
|
|
"describe_stacks",
|
2022-02-22 10:04:29 +00:00
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)
|
2019-04-18 16:30:50 +00:00
|
|
|
|
2019-06-27 13:31:51 +00:00
|
|
|
stacks = []
|
2022-02-22 10:04:29 +00:00
|
|
|
for stack in running_stacks["Stacks"]:
|
|
|
|
for tag in stack["Tags"]:
|
|
|
|
if tag["Key"] == "Conglomerate" and tag["Value"] == conglomerate:
|
2019-06-27 13:31:51 +00:00
|
|
|
stacks.append(stack)
|
|
|
|
break
|
|
|
|
|
2021-02-12 11:06:43 +00:00
|
|
|
# Gather stack outputs, use Tag['Artifact'] as name space: Artifact.OutputName
|
2019-06-27 13:31:51 +00:00
|
|
|
stack_outputs = {}
|
|
|
|
for stack in stacks:
|
|
|
|
# If stack has an Artifact Tag put resources into the namespace Artifact.Resource
|
|
|
|
artifact = None
|
2022-02-22 10:04:29 +00:00
|
|
|
for tag in stack["Tags"]:
|
|
|
|
if tag["Key"] == "Artifact":
|
|
|
|
artifact = tag["Value"]
|
2019-06-27 13:31:51 +00:00
|
|
|
|
|
|
|
if artifact:
|
|
|
|
key_prefix = "{}.".format(artifact)
|
|
|
|
else:
|
|
|
|
key_prefix = ""
|
|
|
|
|
|
|
|
try:
|
2022-02-22 10:04:29 +00:00
|
|
|
for output in stack["Outputs"]:
|
2019-06-27 13:31:51 +00:00
|
|
|
# Gather all outputs of the stack into one dimensional key=value structure
|
2022-02-22 10:04:29 +00:00
|
|
|
stack_outputs[key_prefix + output["OutputKey"]] = output[
|
|
|
|
"OutputValue"
|
|
|
|
]
|
2019-06-27 13:31:51 +00:00
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
# Add outputs from stacks into the data for jinja under StackOutput
|
|
|
|
return stack_outputs
|
2020-08-12 16:20:37 +00:00
|
|
|
|
|
|
|
def _add_template_arg(self, kwargs):
|
|
|
|
if self.template_bucket_url:
|
|
|
|
# https://bucket-name.s3.Region.amazonaws.com/key name
|
|
|
|
# so we need the region, AWS as usual
|
2022-02-22 10:04:29 +00:00
|
|
|
(bucket, path) = get_s3_url(
|
|
|
|
self.template_bucket_url, self.rel_path, self.stackname + ".yaml"
|
|
|
|
)
|
|
|
|
bucket_region = self.connection_manager.call(
|
|
|
|
"s3",
|
|
|
|
"get_bucket_location",
|
|
|
|
{"Bucket": bucket},
|
|
|
|
profile=self.profile,
|
|
|
|
region=self.region,
|
|
|
|
)["LocationConstraint"]
|
2021-03-11 18:25:02 +00:00
|
|
|
# If bucket is in us-east-1 AWS returns 'none' cause reasons grrr
|
|
|
|
if not bucket_region:
|
2022-02-22 10:04:29 +00:00
|
|
|
bucket_region = "us-east-1"
|
2021-03-11 18:25:02 +00:00
|
|
|
|
2022-02-22 10:04:29 +00:00
|
|
|
kwargs["TemplateURL"] = "https://{}.s3.{}.amazonaws.com/{}".format(
|
|
|
|
bucket, bucket_region, path
|
|
|
|
)
|
2020-08-12 16:20:37 +00:00
|
|
|
else:
|
2022-02-22 10:04:29 +00:00
|
|
|
kwargs["TemplateBody"] = self.cfn_template
|
2020-08-12 16:20:37 +00:00
|
|
|
|
|
|
|
return kwargs
|
2021-09-20 14:19:14 +00:00
|
|
|
|
|
|
|
def _log_pulumi(self, text):
|
2022-02-22 10:04:29 +00:00
|
|
|
text = re.sub(
|
|
|
|
r"pulumi:pulumi:Stack\s*{}-{}\s*".format(
|
|
|
|
self.parameters["Conglomerate"], self.stackname
|
|
|
|
),
|
|
|
|
"",
|
|
|
|
text,
|
|
|
|
)
|
2021-10-05 10:47:29 +00:00
|
|
|
if text and not text.isspace():
|
2021-10-04 15:51:16 +00:00
|
|
|
logger.info(" ".join([self.region, self.stackname, text]))
|
2022-06-23 13:55:37 +00:00
|
|
|
|
2022-06-27 18:51:03 +00:00
|
|
|
def _get_pulumi_stack(self, create=False):
|
|
|
|
|
|
|
|
if create:
|
|
|
|
pulumi_stack = pulumi.automation.create_or_select_stack(
|
|
|
|
stack_name=self.pulumi_stackname,
|
|
|
|
project_name=self.parameters["Conglomerate"],
|
|
|
|
program=self._pulumi_code.pulumi_program,
|
|
|
|
opts=self.pulumi_ws_opts,
|
|
|
|
)
|
|
|
|
pulumi_stack.workspace.install_plugin(
|
|
|
|
"aws", pkg_resources.get_distribution("pulumi_aws").version
|
|
|
|
)
|
|
|
|
|
|
|
|
else:
|
|
|
|
pulumi_stack = pulumi.automation.select_stack(
|
|
|
|
stack_name=self.pulumi_stackname,
|
|
|
|
project_name=self.parameters["Conglomerate"],
|
|
|
|
program=self._pulumi_code.pulumi_program,
|
|
|
|
opts=self.pulumi_ws_opts,
|
|
|
|
)
|
|
|
|
|
|
|
|
return pulumi_stack
|
|
|
|
|
2022-06-23 13:55:37 +00:00
|
|
|
def _set_pulumi_args(self, kwargs={}):
|
|
|
|
kwargs["on_output"] = self._log_pulumi
|
|
|
|
kwargs["policy_packs"] = []
|
|
|
|
kwargs["policy_pack_configs"] = []
|
|
|
|
|
|
|
|
# Try to find policies in each artifact location
|
|
|
|
if "policies" in self.pulumi:
|
|
|
|
for policy in self.pulumi["policies"]:
|
|
|
|
found = False
|
|
|
|
for artifacts_path in self.ctx["artifact_paths"]:
|
2022-06-27 18:51:03 +00:00
|
|
|
path = "{}/pulumi/policies/{}".format(
|
|
|
|
artifacts_path.resolve(), policy
|
|
|
|
)
|
2022-06-23 13:55:37 +00:00
|
|
|
if os.path.exists(path):
|
|
|
|
kwargs["policy_packs"].append(path)
|
|
|
|
found = True
|
|
|
|
if not found:
|
|
|
|
logger.error(f"Could not find policy implementation for {policy}!")
|
|
|
|
raise FileNotFoundError
|
|
|
|
|
|
|
|
try:
|
|
|
|
kwargs["policy_pack_configs"] = self.pulumi["policy_configs"]
|
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return kwargs
|