Compare commits

...

21 Commits

Author SHA1 Message Date
aacd2b1f07 ci: fix trivyignore path match
All checks were successful
ZeroDownTime/CloudBender/pipeline/head This commit looks good
2024-12-03 14:40:58 +00:00
31a9b6a7d5 ci: add trivyignore.yaml
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
2024-12-03 13:22:59 +00:00
638876381c feat: add list_stacks command to list existing pulumi stacks
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
ZeroDownTime/CloudBender/pipeline/tag There was a failure building this commit
2024-12-03 12:49:16 +00:00
266604b964 docs: improve error message
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
2024-12-02 12:49:59 +00:00
24c0346864 feat: add import task to pulumi stacks
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
2024-12-02 12:48:30 +00:00
9a25dc34bb fix: adjust to pulumi API changes
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
ZeroDownTime/CloudBender/pipeline/tag There was a failure building this commit
2024-11-27 17:50:09 +00:00
665a2cb09c fix: make Python3.12 work
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
ZeroDownTime/CloudBender/pipeline/tag There was a failure building this commit
2024-11-27 17:32:45 +00:00
6cd64e54a8 Merge latest ci-tools-lib
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
2024-11-27 17:15:00 +00:00
8c958d8fc2 Squashed '.ci/' changes from 2c44e4f..15e4d1f
15e4d1f ci: make work with main branch
3feaf6f chore: migrate to main branch
a392836 feat: migrate all buildah cmds to podman only
d67a80e feat: make push and rm-image more resilient, prevent exit codes
8e202d4 fix: do not add non-existent images
6ef8d28 feat: ensure bash and safe exec
06fcff5 feat: improve image cleanup to incl. all tags and repositories
47b4da4 feat: add suport for trivyignore file

git-subtree-dir: .ci
git-subtree-split: 15e4d1f589c8e055944b2a4b58a9a50728e245b4
2024-11-27 17:15:00 +00:00
be403efa43 fix: latest OS and fixes
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
2024-11-27 17:13:34 +00:00
77918aa80d Merge pull request 'fix(deps): update all non-major dependencies' (#1) from renovate/all-minor-patch into main
Some checks failed
ZeroDownTime/CloudBender/pipeline/head There was a failure building this commit
Reviewed-on: #1
2024-11-27 16:47:12 +00:00
6d4c993fa0 fix(deps): update all non-major dependencies
Some checks are pending
ZeroDownTime/CloudBender/pipeline/pr-main Build started...
2024-11-27 16:46:40 +00:00
97a67238e9 Merge pull request 'fix(deps): update dependency cfn-lint to v1' (#2) from renovate/cfn-lint-1.x into main
Reviewed-on: #2
2024-11-27 16:41:48 +00:00
7976d35b76 Merge pull request 'fix(deps): update dependency pulumi-aws-native to v1' (#3) from renovate/pulumi-aws-native-1.x into main
Some checks are pending
ZeroDownTime/CloudBender/pipeline/head Build queued...
Reviewed-on: #3
2024-11-27 16:41:34 +00:00
b58a495489 Merge pull request 'fix(deps): update dependency pulumi-command to v1' (#4) from renovate/pulumi-command-1.x into main
Some checks are pending
ZeroDownTime/CloudBender/pipeline/head Build started...
Reviewed-on: #4
2024-11-27 16:41:22 +00:00
880d1be69b fix(deps): update dependency pulumi-aws-native to v1
Some checks failed
ZeroDownTime/CloudBender/pipeline/pr-main There was a failure building this commit
2024-11-27 03:02:26 +00:00
3834035a29 fix(deps): update dependency cfn-lint to v1
All checks were successful
ZeroDownTime/CloudBender/pipeline/pr-main This commit looks good
2024-11-27 03:02:15 +00:00
72dc20c16c ci: fix pipeline after branch rename
All checks were successful
ZeroDownTime/CloudBender/pipeline/head This commit looks good
2024-11-18 17:29:50 +00:00
224bd4bc90 fix(deps): update dependency pulumi-command to v1
Some checks failed
ZeroDownTime/CloudBender/pipeline/pr-main There was a failure building this commit
2024-11-18 17:14:57 +00:00
5ab6069a37 feat: better error message, add pulumi_random 2024-11-18 13:34:01 +00:00
f8e5583f00 chore: fix link 2024-11-18 13:27:33 +00:00
12 changed files with 177 additions and 70 deletions

View File

@@ -14,7 +14,7 @@ include .ci/podman.mk
Add subtree to your project:
```
git subtree add --prefix .ci https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git master --squash
git subtree add --prefix .ci https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git main --squash
```

View File

@@ -1,3 +1,13 @@
SHELL := bash
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
.SILENT: ; # no need for @
.ONESHELL: ; # recipes execute in same shell
.NOTPARALLEL: ; # wait for this target to finish
.EXPORT_ALL_VARIABLES: ; # send all vars to shell
.PHONY: all # All targets are accessible for user
.DEFAULT: help # Running Make will run the help target
# Parse version from latest git semver tag
GIT_TAG ?= $(shell git describe --tags --match v*.*.* 2>/dev/null || git rev-parse --short HEAD 2>/dev/null)
GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD 2>/dev/null)
@@ -23,13 +33,6 @@ ifneq ($(TRIVY_REMOTE),)
TRIVY_OPTS ::= --server $(TRIVY_REMOTE)
endif
.SILENT: ; # no need for @
.ONESHELL: ; # recipes execute in same shell
.NOTPARALLEL: ; # wait for this target to finish
.EXPORT_ALL_VARIABLES: ; # send all vars to shell
.PHONY: all # All targets are accessible for user
.DEFAULT: help # Running Make will run the help target
help: ## Show Help
grep -E '^[a-zA-Z_-]+:.*?## .*$$' .ci/podman.mk | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@@ -40,27 +43,28 @@ fmt:: ## auto format source
lint:: ## Lint source
build: ## Build the app
buildah build --rm --layers -t $(IMAGE):$(TAG)-$(_ARCH) --build-arg TAG=$(TAG) --build-arg ARCH=$(_ARCH) --platform linux/$(_ARCH) .
podman build --rm --layers -t $(IMAGE):$(TAG)-$(_ARCH) --build-arg TAG=$(TAG) --build-arg ARCH=$(_ARCH) --platform linux/$(_ARCH) .
test:: ## test built artifacts
scan: ## Scan image using trivy
echo "Scanning $(IMAGE):$(TAG)-$(_ARCH) using Trivy $(TRIVY_REMOTE)"
trivy image $(TRIVY_OPTS) --quiet --no-progress localhost/$(IMAGE):$(TAG)-$(_ARCH)
trivy image $(TRIVY_OPTS) --quiet --no-progress --ignorefile ./.trivyignore.yaml localhost/$(IMAGE):$(TAG)-$(_ARCH)
# first tag and push all actual images
# create new manifest for each tag and add all available TAG-ARCH before pushing
push: ecr-login ## push images to registry
for t in $(TAG) latest $(EXTRA_TAGS); do \
for t in $(TAG) latest $(EXTRA_TAGS); do
echo "Tagging image with $(REGISTRY)/$(IMAGE):$${t}-$(ARCH)"
buildah tag $(IMAGE):$(TAG)-$(_ARCH) $(REGISTRY)/$(IMAGE):$${t}-$(_ARCH); \
buildah manifest rm $(IMAGE):$$t || true; \
buildah manifest create $(IMAGE):$$t; \
for a in $(ALL_ARCHS); do \
buildah manifest add $(IMAGE):$$t $(REGISTRY)/$(IMAGE):$(TAG)-$$a; \
done; \
podman tag $(IMAGE):$(TAG)-$(_ARCH) $(REGISTRY)/$(IMAGE):$${t}-$(_ARCH)
podman manifest rm $(IMAGE):$$t || true
podman manifest create $(IMAGE):$$t
for a in $(ALL_ARCHS); do
podman image exists $(REGISTRY)/$(IMAGE):$$t-$$a && \
podman manifest add $(IMAGE):$$t containers-storage:$(REGISTRY)/$(IMAGE):$$t-$$a
done
echo "Pushing manifest $(IMAGE):$$t"
buildah manifest push --all $(IMAGE):$$t docker://$(REGISTRY)/$(IMAGE):$$t; \
podman manifest push --all $(IMAGE):$$t docker://$(REGISTRY)/$(IMAGE):$$t
done
ecr-login: ## log into AWS ECR public
@@ -73,12 +77,15 @@ rm-remote-untagged: ## delete all remote untagged and in-dev images, keep 10 tag
clean:: ## clean up source folder
rm-image:
test -z "$$(podman image ls -q $(IMAGE):$(TAG)-$(_ARCH))" || podman image rm -f $(IMAGE):$(TAG)-$(_ARCH) > /dev/null
test -z "$$(podman image ls -q $(IMAGE):$(TAG)-$(_ARCH))" || echo "Error: Removing image failed"
for t in $(TAG) latest $(EXTRA_TAGS); do
for a in $(ALL_ARCHS); do
podman image exists $(IMAGE):$$t-$$a && podman image rm -f $(IMAGE):$$t-$$a || true
done
done
## some useful tasks during development
ci-pull-upstream: ## pull latest shared .ci subtree
git subtree pull --prefix .ci ssh://git@git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git master --squash -m "Merge latest ci-tools-lib"
git subtree pull --prefix .ci ssh://git@git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git main --squash -m "Merge latest ci-tools-lib"
create-repo: ## create new AWS ECR public repository
aws ecr-public create-repository --repository-name $(IMAGE) --region $(REGION)

.trivyignore.yaml Normal file
View File

@@ -0,0 +1,8 @@
secrets:
- id: private-key
paths:
- "**/pulumi_aws/glue/connection.py"
- id: gcp-service-account
paths:
- "**/pulumi_aws/glue/connection.py"

View File

@@ -1,8 +1,8 @@
ARG RUNTIME_VERSION="3.11"
ARG DISTRO_VERSION="3.19"
ARG RUNTIME_VERSION="3.12"
ARG DISTRO_VERSION="3.20"
FROM python:${RUNTIME_VERSION}-alpine${DISTRO_VERSION} AS builder
ARG RUNTIME_VERSION="3.11"
ARG RUNTIME_VERSION="3.12"
RUN apk add --no-cache \
autoconf \

Jenkinsfile vendored
View File

@@ -1,4 +1,4 @@
library identifier: 'zdt-lib@master', retriever: modernSCM(
library identifier: 'zdt-lib@main', retriever: modernSCM(
[$class: 'GitSCMSource',
remote: 'https://git.zero-downtime.net/ZeroDownTime/ci-tools-lib.git'])

View File

@@ -1,21 +1,21 @@
# ![Logo](https://git.zero-downtime.net/ZeroDownTime/CloudBender/media/branch/master/cloudbender.png) CloudBender
# ![Logo](https://git.zero-downtime.net/ZeroDownTime/CloudBender/media/branch/main/cloudbender.png) CloudBender
# About
Toolset to deploy and maintain infrastructure in an automated and trackable manner.
First-class support for:
- [Pulumi](https://www.pulumi.com/docs/)
- [AWS CloudFormation](https://aws.amazon.com/cloudformation)
# Installation
The preferred way of running CloudBender is using the public container. This ensures all tools and dependencies are in sync and underwent some basic testing during the development and build phase.
As a fallback, CloudBender and its dependencies can be installed locally; see step *1b* below.
## 1a. Containerized
The command below tests the ability to run containers within containers on your local setup.
(This most likely only works on a recent Linux box/VM capable of running rootless containers within containers; requires kernel >= 5.12, Cgroups V2, podman, ...)
@@ -36,7 +36,7 @@ and proceed with step 2)
- either `podman` or `docker` depending on your platform
## 2. Test cli
To verify that all pieces are in place, run:
```
cloudbender version
```
@@ -84,10 +84,10 @@ Commands:
The state for all Pulumi resources is stored on S3 in your account and in the same region as the resources being deployed.
No data is sent to nor shared with the official Pulumi-provided APIs.
CloudBender configures Pulumi with a local, temporary workspace on the fly. This includes the injection of various common parameters like the AWS account ID and region.
### Cloudformation
All state is handled by AWS CloudFormation.
The required account and region are determined by CloudBender automatically from the configuration.
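As an illustration of the Pulumi workspace handling described in the README excerpt above, here is a minimal sketch using the Pulumi Automation API, mirroring the pattern visible in the `stackgroup.py` and `pulumi.py` diffs further down; the project name, backend URL and region are hypothetical placeholders, not CloudBender's actual configuration:

```python
import tempfile
import pulumi.automation as auto

# Hypothetical values -- CloudBender derives these from its stack group config.
project_name = "my-conglomerate"
pulumi_backend = "s3://my-state-bucket/my-conglomerate/eu-central-1"

project_settings = auto.ProjectSettings(
    name=project_name,
    runtime="python",
    backend=auto.ProjectBackend(url=pulumi_backend),
)

# Throw-away local workspace, created on the fly and cleaned up afterwards
work_dir = tempfile.mkdtemp(prefix="cloudbender-")

workspace = auto.LocalWorkspace(
    work_dir=work_dir,
    project_settings=project_settings,
    # common parameters such as the AWS region would be injected here (placeholder)
    env_vars={"AWS_REGION": "eu-central-1"},
)

# List the stacks known to the S3 backend for this project
print([s.name for s in workspace.list_stacks()])
```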

View File

@@ -55,12 +55,13 @@ def cli(ctx, profile, region, debug, directory):
sys.exit(1)
# Only load stackgroups to get profile and region
if ctx.invoked_subcommand == "wrap":
if ctx.invoked_subcommand in ["wrap", "list_stacks"]:
cb.read_config(loadStacks=False)
else:
cb.read_config()
cb.dump_config()
if debug:
cb.dump_config()
ctx.obj = cb
@@ -212,6 +213,21 @@ def execute(cb, stack_name, function, args):
)
@click.command('import')
@click.argument("stack_name")
@click.argument("pulumi_state_file")
@click.pass_obj
def _import(cb, stack_name, pulumi_state_file):
"""Imports a Pulumi state file as stack"""
stacks = _find_stacks(cb, [stack_name])
for s in stacks:
if s.mode == "pulumi":
s._import(pulumi_state_file)
else:
logger.info("Cannot import as {} uses Cloudformation.".format(s.stackname))
@click.command()
@click.argument("stack_name")
@click.option(
@@ -334,7 +350,16 @@ def wrap(cb, stack_group, cmd):
"""Execute custom external program"""
sg = cb.sg.get_stackgroup(stack_group)
cb.wrap(sg, " ".join(cmd))
sg.wrap(" ".join(cmd))
@click.command()
@click.argument("stack_group", nargs=1, required=True)
@click.pass_obj
def list_stacks(cb, stack_group):
"""List all Pulumi stacks"""
sg = cb.sg.get_stackgroup(stack_group)
sg.list_stacks()
@click.command()
@@ -447,10 +472,10 @@ def _provision(cb, stacks):
# Pulumi is still not thread safe
if _anyPulumi(step):
_threads = 1
else
else:
_threads = len(step)
with ThreadPoolExecutor(max_workers=_threads)) as group:
with ThreadPoolExecutor(max_workers=_threads) as group:
futures = []
for stack in step:
if stack.mode != "pulumi":
@@ -482,7 +507,9 @@ cli.add_command(refresh)
cli.add_command(preview)
cli.add_command(set_config)
cli.add_command(get_config)
cli.add_command(_import)
cli.add_command(export)
cli.add_command(list_stacks)
cli.add_command(assimilate)
cli.add_command(execute)
cli.add_command(wrap)
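Based on the click decorators above, the two new subcommands would presumably be invoked roughly as follows (angle-bracket values are placeholders; depending on click's command-name normalization, `list_stacks` may be exposed as `list-stacks`):

```
cloudbender import <stack-name> <pulumi-state-file>
cloudbender list_stacks <stack-group>
```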

View File

@@ -1,9 +1,7 @@
import pathlib
import logging
import pexpect
from .stackgroup import StackGroup
from .connection import BotoConnection
from .jinja import read_config_file
from .exceptions import InvalidProjectDir
@@ -133,17 +131,3 @@ class CloudBender(object):
matching_stacks.append(s)
return matching_stacks
def wrap(self, stack_group, cmd):
"""
Set AWS environment based on profile before executing a custom command, eg. steampipe
"""
profile = stack_group.config.get("profile", "default")
region = stack_group.config.get("region", "global")
connection_manager = BotoConnection(profile, region)
connection_manager.exportProfileEnv()
child = pexpect.spawn(cmd)
child.interact()

View File

@@ -189,7 +189,7 @@ def pulumi_ws(func):
)
project_settings = pulumi.automation.ProjectSettings(
name=project_name, runtime="python", backend={"url": pulumi_backend}
name=project_name, runtime="python", backend=pulumi.automation.ProjectBackend(url=pulumi_backend)
)
self.pulumi_ws_opts = pulumi.automation.LocalWorkspaceOptions(
@@ -199,6 +199,8 @@
secrets_provider=secrets_provider,
)
# self.pulumi_workspace = pulumi.automation.LocalWorkspace(self.pulumi_ws_opts)
response = func(self, *args, **kwargs)
# Cleanup temp workspace

View File

@@ -8,7 +8,6 @@ import pathlib
import pprint
import pulumi
import importlib
import pkg_resources
from datetime import datetime, timedelta
from dateutil.tz import tzutc
@@ -988,7 +987,7 @@ class Stack(object):
def assimilate(self):
"""Import resources into Pulumi stack"""
pulumi_stack = self._get_pulumi_stack(create=True)
pulumi_stack = self._get_pulumi_stack()
# now lets import each defined resource
for r in self._pulumi_code.RESOURCES:
@@ -1025,6 +1024,19 @@ class Stack(object):
return
@pulumi_ws
def _import(self, pulumi_state_file):
"""Imports a Pulumi stack"""
pulumi_stack = self._get_pulumi_stack()
with open(pulumi_state_file, "r") as file:
state = json.loads(file.read())
deployment = pulumi.automation.Deployment(version=3, deployment=state)
pulumi_stack.import_stack(deployment)
return
@pulumi_ws
def set_config(self, key, value, secret):
"""Set a config or secret"""
@@ -1059,11 +1071,18 @@ class Stack(object):
if "parameters" not in settings:
settings["parameters"] = {}
# hack for bug above, we support one level of nested values for now
_val = pulumi_settings["config"]["{}:{}".format(
self.parameters["Conglomerate"], key)]
if '.' in key:
(root, leaf) = key.split('.')
try:
(root, leaf) = key.split('.')
except ValueError:
raise ParameterIllegalValue(
"Currently only one level hierachies within parameters are supported!"
)
if root not in settings["parameters"]:
settings["parameters"][root] = {}
@@ -1314,7 +1333,7 @@ class Stack(object):
opts=self.pulumi_ws_opts,
)
pulumi_stack.workspace.install_plugin(
"aws", pkg_resources.get_distribution("pulumi_aws").version
"aws", importlib.metadata.distribution("pulumi_aws").version
)
else:
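For context on the new `_import` task shown above: the file it consumes is an exported Pulumi deployment. A hedged round-trip sketch using the Automation API (stack and project names are hypothetical placeholders), showing how such a state file could be produced and then read back the same way `_import` does:

```python
import json
import pulumi.automation as auto

# Hypothetical: select an existing stack (names are placeholders)
stack = auto.select_stack(
    stack_name="dev",
    project_name="my-conglomerate",
    program=lambda: None,
)

# Export the current deployment and write its state to disk ...
deployment = stack.export_stack()
with open("pulumi-state.json", "w") as f:
    json.dump(deployment.deployment, f)

# ... which matches how the new `_import` task consumes the file:
with open("pulumi-state.json") as f:
    state = json.load(f)
stack.import_stack(auto.Deployment(version=3, deployment=state))
```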

View File

@@ -1,6 +1,13 @@
import logging
import pprint
import pexpect
import pulumi
import tempfile
import rich.table
import rich.console
from .connection import BotoConnection
from .utils import dict_merge
from .jinja import read_config_file
from .stack import Stack
@@ -25,7 +32,7 @@ class StackGroup(object):
for sg in self.sgs:
sg.dump_config()
logger.debug(
logger.info(
"StackGroup {}: {}".format(self.rel_path, pprint.pformat(self.config))
)
@@ -135,3 +142,54 @@ class StackGroup(object):
return s
return None
def wrap(self, cmd):
"""
Set AWS environment based on profile before executing a custom command, eg. steampipe
"""
profile = self.config.get("profile", "default")
region = self.config.get("region", "global")
connection_manager = BotoConnection(profile, region)
connection_manager.exportProfileEnv()
child = pexpect.spawn(cmd)
child.interact()
def list_stacks(self):
project_name = self.config["parameters"]["Conglomerate"]
pulumi_backend = "{}/{}/{}".format(self.config["pulumi"]["backend"], project_name, self.config["region"])
project_settings = pulumi.automation.ProjectSettings(
name=project_name, runtime="python", backend=pulumi.automation.ProjectBackend(url=pulumi_backend)
)
work_dir = tempfile.mkdtemp(
dir=tempfile.gettempdir(), prefix="cloudbender-"
)
# AWS setup
profile = self.config.get("profile", "default")
region = self.config.get("region", "global")
connection_manager = BotoConnection(profile, region)
connection_manager.exportProfileEnv()
pulumi_workspace = pulumi.automation.LocalWorkspace(
work_dir=work_dir,
project_settings=project_settings
)
stacks = pulumi_workspace.list_stacks()
table = rich.table.Table(title="Pulumi stacks")
table.add_column("Name")
table.add_column("Last Update")
table.add_column("Resources")
for s in stacks:
table.add_row(s.name, str(s.last_update), str(s.resource_count))
console = rich.console.Console()
console.print(table)

View File

@@ -11,21 +11,23 @@ authors = [
description = "Deploy and maintain infrastructure in automated and trackable manner"
readme = "README.md"
license = "AGPL-3.0-or-later"
requires-python = ">=3.9"
requires-python = ">=3.12"
dependencies = [
"boto3==1.34.89",
"boto3==1.35.70",
"mock==5.1.0",
"Jinja2==3.1.3",
"Jinja2==3.1.4",
"click==8.1.7",
"pexpect==4.9.0",
"python-minifier==2.9.0",
"cfn-lint==0.86.4",
"python-minifier==2.11.3",
"cfn-lint==1.20.1",
"ruamel.yaml==0.18.6",
"pulumi==3.113.3",
"pulumi-aws==6.32.0",
"pulumi-aws-native==0.103.0",
"pulumi-policy==1.11.0",
"pulumi-command==0.10.0",
"rich==13.9.4",
"pulumi==3.142.0",
"pulumi-aws==6.61.0",
"pulumi-aws-native==1.11.0",
"pulumi-policy==1.13.0",
"pulumi-command==1.0.1",
"pulumi_random==4.16.7",
]
classifiers = [