Compare commits

21 Commits

| Author | SHA1 | Date |
|--------|------|------|
| | aacd2b1f07 | |
| | 31a9b6a7d5 | |
| | 638876381c | |
| | 266604b964 | |
| | 24c0346864 | |
| | 9a25dc34bb | |
| | 665a2cb09c | |
| | 6cd64e54a8 | |
| | 8c958d8fc2 | |
| | be403efa43 | |
| | 77918aa80d | |
| | 6d4c993fa0 | |
| | 97a67238e9 | |
| | 7976d35b76 | |
| | b58a495489 | |
| | 880d1be69b | |
| | 3834035a29 | |
| | 72dc20c16c | |
| | 224bd4bc90 | |
| | 5ab6069a37 | |
| | f8e5583f00 | |
````diff
@@ -14,7 +14,7 @@ include .ci/podman.mk

 Add subtree to your project:

 ```
-git subtree add --prefix .ci https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git master --squash
+git subtree add --prefix .ci https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git main --squash
 ```
````
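Several hunks in this compare track the rename of the shared ci-tools-lib default branch from master to main: the subtree add command above, the subtree pull in ci-pull-upstream further down, and the Jenkinsfile shared-library reference.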
```diff
@@ -1,3 +1,13 @@
+SHELL := bash
+.SHELLFLAGS := -eu -o pipefail -c
+.DELETE_ON_ERROR:
+.SILENT: ; # no need for @
+.ONESHELL: ; # recipes execute in same shell
+.NOTPARALLEL: ; # wait for this target to finish
+.EXPORT_ALL_VARIABLES: ; # send all vars to shell
+.PHONY: all # All targets are accessible for user
+.DEFAULT: help # Running Make will run the help target
+
 # Parse version from latest git semver tag
 GIT_TAG ?= $(shell git describe --tags --match v*.*.* 2>/dev/null || git rev-parse --short HEAD 2>/dev/null)
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD 2>/dev/null)
```
```diff
@@ -23,13 +33,6 @@ ifneq ($(TRIVY_REMOTE),)
 TRIVY_OPTS ::= --server $(TRIVY_REMOTE)
 endif

-.SILENT: ; # no need for @
-.ONESHELL: ; # recipes execute in same shell
-.NOTPARALLEL: ; # wait for this target to finish
-.EXPORT_ALL_VARIABLES: ; # send all vars to shell
-.PHONY: all # All targets are accessible for user
-.DEFAULT: help # Running Make will run the help target
-
 help: ## Show Help
 	grep -E '^[a-zA-Z_-]+:.*?## .*$$' .ci/podman.mk | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
```
```diff
@@ -40,27 +43,28 @@ fmt:: ## auto format source
 lint:: ## Lint source

 build: ## Build the app
-	buildah build --rm --layers -t $(IMAGE):$(TAG)-$(_ARCH) --build-arg TAG=$(TAG) --build-arg ARCH=$(_ARCH) --platform linux/$(_ARCH) .
+	podman build --rm --layers -t $(IMAGE):$(TAG)-$(_ARCH) --build-arg TAG=$(TAG) --build-arg ARCH=$(_ARCH) --platform linux/$(_ARCH) .

 test:: ## test built artificats

 scan: ## Scan image using trivy
 	echo "Scanning $(IMAGE):$(TAG)-$(_ARCH) using Trivy $(TRIVY_REMOTE)"
-	trivy image $(TRIVY_OPTS) --quiet --no-progress localhost/$(IMAGE):$(TAG)-$(_ARCH)
+	trivy image $(TRIVY_OPTS) --quiet --no-progress --ignorefile ./.trivyignore.yaml localhost/$(IMAGE):$(TAG)-$(_ARCH)

 # first tag and push all actual images
 # create new manifest for each tag and add all available TAG-ARCH before pushing
 push: ecr-login ## push images to registry
-	for t in $(TAG) latest $(EXTRA_TAGS); do \
+	for t in $(TAG) latest $(EXTRA_TAGS); do
 	echo "Tagging image with $(REGISTRY)/$(IMAGE):$${t}-$(ARCH)"
-	buildah tag $(IMAGE):$(TAG)-$(_ARCH) $(REGISTRY)/$(IMAGE):$${t}-$(_ARCH); \
-	buildah manifest rm $(IMAGE):$$t || true; \
-	buildah manifest create $(IMAGE):$$t; \
-	for a in $(ALL_ARCHS); do \
-	buildah manifest add $(IMAGE):$$t $(REGISTRY)/$(IMAGE):$(TAG)-$$a; \
-	done; \
+	podman tag $(IMAGE):$(TAG)-$(_ARCH) $(REGISTRY)/$(IMAGE):$${t}-$(_ARCH)
+	podman manifest rm $(IMAGE):$$t || true
+	podman manifest create $(IMAGE):$$t
+	for a in $(ALL_ARCHS); do
+	podman image exists $(REGISTRY)/$(IMAGE):$$t-$$a && \
+	podman manifest add $(IMAGE):$$t containers-storage:$(REGISTRY)/$(IMAGE):$$t-$$a
+	done
 	echo "Pushing manifest $(IMAGE):$$t"
-	buildah manifest push --all $(IMAGE):$$t docker://$(REGISTRY)/$(IMAGE):$$t; \
+	podman manifest push --all $(IMAGE):$$t docker://$(REGISTRY)/$(IMAGE):$$t
 	done

 ecr-login: ## log into AWS ECR public
```
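Two things change together in this hunk: buildah invocations become podman, and the `; \` shell continuations disappear from the push loop. The special-target block (including .ONESHELL, "recipes execute in same shell") only moved to the top of the file in the earlier hunks, so recipes already ran in a single bash invocation; the podman rewrite drops the now-redundant continuations the buildah loop still carried.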
```diff
@@ -73,12 +77,15 @@ rm-remote-untagged: ## delete all remote untagged and in-dev images, keep 10 tag
 clean:: ## clean up source folder

 rm-image:
-	test -z "$$(podman image ls -q $(IMAGE):$(TAG)-$(_ARCH))" || podman image rm -f $(IMAGE):$(TAG)-$(_ARCH) > /dev/null
-	test -z "$$(podman image ls -q $(IMAGE):$(TAG)-$(_ARCH))" || echo "Error: Removing image failed"
+	for t in $(TAG) latest $(EXTRA_TAGS); do
+	for a in $(ALL_ARCHS); do
+	podman image exists $(IMAGE):$$t-$$a && podman image rm -f $(IMAGE):$$t-$$a || true
+	done
+	done

 ## some useful tasks during development
 ci-pull-upstream: ## pull latest shared .ci subtree
-	git subtree pull --prefix .ci ssh://git@git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git master --squash -m "Merge latest ci-tools-lib"
+	git subtree pull --prefix .ci ssh://git@git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git main --squash -m "Merge latest ci-tools-lib"

 create-repo: ## create new AWS ECR public repository
 	aws ecr-public create-repository --repository-name $(IMAGE) --region $(REGION)
```
.trivyignore.yaml (new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+secrets:
+  - id: private-key
+    paths:
+      - "**/pulumi_aws/glue/connection.py"
+  - id: gcp-service-account
+    paths:
+      - "**/pulumi_aws/glue/connection.py"
+
```
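These entries mute trivy's private-key and gcp-service-account secret-scanner rules for pulumi_aws' glue/connection.py, presumably false positives on example credential strings embedded in that module; the scan target above wires the file in via --ignorefile.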
```diff
@@ -1,8 +1,8 @@
-ARG RUNTIME_VERSION="3.11"
-ARG DISTRO_VERSION="3.19"
+ARG RUNTIME_VERSION="3.12"
+ARG DISTRO_VERSION="3.20"

 FROM python:${RUNTIME_VERSION}-alpine${DISTRO_VERSION} AS builder
-ARG RUNTIME_VERSION="3.11"
+ARG RUNTIME_VERSION="3.12"

 RUN apk add --no-cache \
     autoconf \
```
Jenkinsfile (vendored, 2 lines changed)

```diff
@@ -1,4 +1,4 @@
-library identifier: 'zdt-lib@master', retriever: modernSCM(
+library identifier: 'zdt-lib@main', retriever: modernSCM(
   [$class: 'GitSCMSource',
    remote: 'https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git'])
```
```diff
@@ -1,4 +1,4 @@
-#  CloudBender
+#  CloudBender

 # About
```
```diff
@@ -55,12 +55,13 @@ def cli(ctx, profile, region, debug, directory):
         sys.exit(1)

     # Only load stackgroups to get profile and region
-    if ctx.invoked_subcommand == "wrap":
+    if ctx.invoked_subcommand in ["wrap", "list_stacks"]:
         cb.read_config(loadStacks=False)
     else:
         cb.read_config()

-    cb.dump_config()
+    if debug:
+        cb.dump_config()

     ctx.obj = cb
```
```diff
@@ -212,6 +213,21 @@ def execute(cb, stack_name, function, args):
     )


+@click.command('import')
+@click.argument("stack_name")
+@click.argument("pulumi_state_file")
+@click.pass_obj
+def _import(cb, stack_name, pulumi_state_file):
+    """Imports a Pulumi state file as stack"""
+    stacks = _find_stacks(cb, [stack_name])
+
+    for s in stacks:
+        if s.mode == "pulumi":
+            s._import(pulumi_state_file)
+        else:
+            logger.info("Cannot import as {} uses Cloudformation.".format(s.stackname))
+
+
 @click.command()
 @click.argument("stack_name")
 @click.option(
```
```diff
@@ -334,7 +350,16 @@ def wrap(cb, stack_group, cmd):
     """Execute custom external program"""

     sg = cb.sg.get_stackgroup(stack_group)
-    cb.wrap(sg, " ".join(cmd))
+    sg.wrap(" ".join(cmd))
+
+
+@click.command()
+@click.argument("stack_group", nargs=1, required=True)
+@click.pass_obj
+def list_stacks(cb, stack_group):
+    """List all Pulumi stacks"""
+    sg = cb.sg.get_stackgroup(stack_group)
+    sg.list_stacks()


 @click.command()
```
```diff
@@ -447,10 +472,10 @@ def _provision(cb, stacks):
         # Pulumi is still not thread safe
         if _anyPulumi(step):
             _threads = 1
-        else
+        else:
             _threads = len(step)

-        with ThreadPoolExecutor(max_workers=_threads)) as group:
+        with ThreadPoolExecutor(max_workers=_threads) as group:
             futures = []
             for stack in step:
                 if stack.mode != "pulumi":
```
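Besides fixing the missing colon and the stray parenthesis, this hunk encodes a concurrency rule worth spelling out: Pulumi's automation API is not thread safe, so any step containing a Pulumi stack is provisioned with a single worker. A minimal sketch of the pattern, with hypothetical stand-ins for the step list and the per-stack deploy function:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def provision_step(step, deploy):
    # Pulumi's automation API is not thread safe: collapse to one
    # worker if any stack in this step is a Pulumi stack.
    threads = 1 if any(s.mode == "pulumi" for s in step) else len(step)

    with ThreadPoolExecutor(max_workers=threads) as group:
        futures = [group.submit(deploy, stack) for stack in step]
        for future in as_completed(futures):
            future.result()  # re-raise any exception from the worker thread
```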
```diff
@@ -482,7 +507,9 @@ cli.add_command(refresh)
 cli.add_command(preview)
 cli.add_command(set_config)
 cli.add_command(get_config)
+cli.add_command(_import)
 cli.add_command(export)
+cli.add_command(list_stacks)
 cli.add_command(assimilate)
 cli.add_command(execute)
 cli.add_command(wrap)
```
```diff
@@ -1,9 +1,7 @@
 import pathlib
 import logging
-import pexpect

 from .stackgroup import StackGroup
-from .connection import BotoConnection
 from .jinja import read_config_file
 from .exceptions import InvalidProjectDir

```
```diff
@@ -133,17 +131,3 @@ class CloudBender(object):
                 matching_stacks.append(s)

         return matching_stacks
-
-    def wrap(self, stack_group, cmd):
-        """
-        Set AWS environment based on profile before executing a custom command, eg. steampipe
-        """
-
-        profile = stack_group.config.get("profile", "default")
-        region = stack_group.config.get("region", "global")
-
-        connection_manager = BotoConnection(profile, region)
-        connection_manager.exportProfileEnv()
-
-        child = pexpect.spawn(cmd)
-        child.interact()
```
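The wrap method removed here reappears nearly verbatim on StackGroup later in this compare. Its core mechanic, sketched below under the assumption that exportProfileEnv() simply publishes the resolved AWS credentials through environment variables (the os.environ line is a hypothetical stand-in for it):

```python
import os
import pexpect

def wrap(cmd: str) -> None:
    # Stand-in for BotoConnection.exportProfileEnv(): make the resolved
    # AWS profile visible to the child process via the environment.
    os.environ.setdefault("AWS_PROFILE", "default")

    # Spawn the command and hand it the user's terminal until it exits.
    child = pexpect.spawn(cmd)
    child.interact()
```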
```diff
@@ -189,7 +189,7 @@ def pulumi_ws(func):
     )

     project_settings = pulumi.automation.ProjectSettings(
-        name=project_name, runtime="python", backend={"url": pulumi_backend}
+        name=project_name, runtime="python", backend=pulumi.automation.ProjectBackend(url=pulumi_backend)
     )

     self.pulumi_ws_opts = pulumi.automation.LocalWorkspaceOptions(
@@ -199,6 +199,8 @@ def pulumi_ws(func):
         secrets_provider=secrets_provider,
     )

+    # self.pulumi_workspace = pulumi.automation.LocalWorkspace(self.pulumi_ws_opts)
+
     response = func(self, *args, **kwargs)

     # Cleanup temp workspace
```
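The backend argument moves from a plain dict to the typed pulumi.automation.ProjectBackend wrapper, consistent with the pulumi version bump in pyproject.toml below. A minimal sketch with a hypothetical project name and backend URL:

```python
from pulumi import automation as auto

project_settings = auto.ProjectSettings(
    name="my-project",   # hypothetical project name
    runtime="python",
    # typed wrapper instead of backend={"url": ...}
    backend=auto.ProjectBackend(url="s3://my-pulumi-state"),  # hypothetical URL
)
```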
```diff
@@ -8,7 +8,6 @@ import pathlib
 import pprint
 import pulumi
 import importlib
-import pkg_resources

 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
```
```diff
@@ -988,7 +987,7 @@ class Stack(object):
     def assimilate(self):
         """Import resources into Pulumi stack"""

-        pulumi_stack = self._get_pulumi_stack(create=True)
+        pulumi_stack = self._get_pulumi_stack()

         # now lets import each defined resource
         for r in self._pulumi_code.RESOURCES:
```
```diff
@@ -1025,6 +1024,19 @@ class Stack(object):

         return

+    @pulumi_ws
+    def _import(self, pulumi_state_file):
+        """Imports a Pulumi stack"""
+
+        pulumi_stack = self._get_pulumi_stack()
+
+        with open(pulumi_state_file, "r") as file:
+            state = json.loads(file.read())
+            deployment = pulumi.automation.Deployment(version=3, deployment=state)
+            pulumi_stack.import_stack(deployment)
+
+        return
+
     @pulumi_ws
     def set_config(self, key, value, secret):
         """Set a config or secret"""
```
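The state file this method consumes appears to be the deployment payload of a `pulumi stack export`: the code pins version=3 and passes the parsed JSON as the deployment body of an automation-API Deployment. A standalone sketch of the same flow against a hypothetical stack object:

```python
import json
from pulumi import automation as auto

def import_state(pulumi_stack: auto.Stack, state_file: str) -> None:
    # Parse the saved state and wrap it in a Deployment so the
    # automation API can restore it into the target stack.
    with open(state_file, "r") as f:
        state = json.load(f)

    deployment = auto.Deployment(version=3, deployment=state)
    pulumi_stack.import_stack(deployment)
```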
```diff
@@ -1059,11 +1071,18 @@ class Stack(object):
         if "parameters" not in settings:
             settings["parameters"] = {}

         # hack for bug above, we support one level of nested values for now
         _val = pulumi_settings["config"]["{}:{}".format(
             self.parameters["Conglomerate"], key)]
         if '.' in key:
-            (root, leaf) = key.split('.')
+            try:
+                (root, leaf) = key.split('.')
+            except ValueError:
+                raise ParameterIllegalValue(
+                    "Currently only one level hierachies within parameters are supported!"
+                )

             if root not in settings["parameters"]:
                 settings["parameters"][root] = {}
```
```diff
@@ -1314,7 +1333,7 @@ class Stack(object):
             opts=self.pulumi_ws_opts,
         )
         pulumi_stack.workspace.install_plugin(
-            "aws", pkg_resources.get_distribution("pulumi_aws").version
+            "aws", importlib.metadata.distribution("pulumi_aws").version
         )

     else:
```
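pkg_resources is deprecated, and importlib.metadata is its stdlib replacement. One caveat the import hunk above glosses over: a bare `import importlib` does not guarantee the importlib.metadata submodule is loaded; importing it explicitly is the safe form:

```python
import importlib.metadata

# Same value pkg_resources.get_distribution("pulumi_aws").version returned:
version = importlib.metadata.distribution("pulumi_aws").version
# or, more directly:
version = importlib.metadata.version("pulumi_aws")
```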
```diff
@@ -1,6 +1,13 @@
 import logging
 import pprint
+import pexpect
+import pulumi
+import tempfile
+
+import rich.table
+import rich.console

 from .connection import BotoConnection
 from .utils import dict_merge
 from .jinja import read_config_file
 from .stack import Stack
```
```diff
@@ -25,7 +32,7 @@ class StackGroup(object):
         for sg in self.sgs:
             sg.dump_config()

-        logger.debug(
+        logger.info(
             "StackGroup {}: {}".format(self.rel_path, pprint.pformat(self.config))
         )

```
```diff
@@ -135,3 +142,54 @@ class StackGroup(object):
                 return s

         return None
+
+    def wrap(self, cmd):
+        """
+        Set AWS environment based on profile before executing a custom command, eg. steampipe
+        """
+
+        profile = self.config.get("profile", "default")
+        region = self.config.get("region", "global")
+
+        connection_manager = BotoConnection(profile, region)
+        connection_manager.exportProfileEnv()
+
+        child = pexpect.spawn(cmd)
+        child.interact()
+
+    def list_stacks(self):
+        project_name = self.config["parameters"]["Conglomerate"]
+        pulumi_backend = "{}/{}/{}".format(self.config["pulumi"]["backend"], project_name, self.config["region"])
+
+        project_settings = pulumi.automation.ProjectSettings(
+            name=project_name, runtime="python", backend=pulumi.automation.ProjectBackend(url=pulumi_backend)
+        )
+
+        work_dir = tempfile.mkdtemp(
+            dir=tempfile.gettempdir(), prefix="cloudbender-"
+        )
+
+        # AWS setup
+        profile = self.config.get("profile", "default")
+        region = self.config.get("region", "global")
+
+        connection_manager = BotoConnection(profile, region)
+        connection_manager.exportProfileEnv()
+
+        pulumi_workspace = pulumi.automation.LocalWorkspace(
+            work_dir=work_dir,
+            project_settings=project_settings
+        )
+
+        stacks = pulumi_workspace.list_stacks()
+
+        table = rich.table.Table(title="Pulumi stacks")
+        table.add_column("Name")
+        table.add_column("Last Update")
+        table.add_column("Resources")
+
+        for s in stacks:
+            table.add_row(s.name, str(s.last_update), str(s.resource_count))
+
+        console = rich.console.Console()
+        console.print(table)
```
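list_stacks builds a throwaway LocalWorkspace purely to enumerate stacks in the configured backend; LocalWorkspace.list_stacks() returns StackSummary objects, which is where the Name, Last Update and Resources columns come from. A reduced sketch of the same rendering, assuming a Pulumi project in the current directory:

```python
from pulumi import automation as auto
import rich.console
import rich.table

workspace = auto.LocalWorkspace(work_dir=".")  # assumes a Pulumi project here

table = rich.table.Table(title="Pulumi stacks")
for column in ("Name", "Last Update", "Resources"):
    table.add_column(column)

# list_stacks() yields StackSummary objects
for summary in workspace.list_stacks():
    table.add_row(summary.name, str(summary.last_update), str(summary.resource_count))

rich.console.Console().print(table)
```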
```diff
@@ -11,21 +11,23 @@ authors = [
 description = "Deploy and maintain infrastructure in automated and trackable manner"
 readme = "README.md"
 license = "AGPL-3.0-or-later"
-requires-python = ">=3.9"
+requires-python = ">=3.12"
 dependencies = [
-    "boto3==1.34.89",
+    "boto3==1.35.70",
     "mock==5.1.0",
-    "Jinja2==3.1.3",
+    "Jinja2==3.1.4",
     "click==8.1.7",
     "pexpect==4.9.0",
-    "python-minifier==2.9.0",
-    "cfn-lint==0.86.4",
+    "python-minifier==2.11.3",
+    "cfn-lint==1.20.1",
     "ruamel.yaml==0.18.6",
-    "pulumi==3.113.3",
-    "pulumi-aws==6.32.0",
-    "pulumi-aws-native==0.103.0",
-    "pulumi-policy==1.11.0",
-    "pulumi-command==0.10.0",
+    "rich==13.9.4",
+    "pulumi==3.142.0",
+    "pulumi-aws==6.61.0",
+    "pulumi-aws-native==1.11.0",
+    "pulumi-policy==1.13.0",
+    "pulumi-command==1.0.1",
     "pulumi_random==4.16.7",
 ]

 classifiers = [
```
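The new rich pin backs the rich.table/rich.console output added in StackGroup.list_stacks, and the requires-python bump to 3.12 lines up with the Dockerfile's RUNTIME_VERSION change; the remaining edits are routine version bumps, including the move off the pre-1.0 pulumi-aws-native series.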