ci: more CI tuning

parent a315bddef2
commit 32f3efe7a8
.flake8  (new file, +3)
@@ -0,0 +1,3 @@
+[flake8]
+ignore = E501
+exclude = .git,__pycache__,build,dist,report
.gitignore  (new file, vendored, +61)
@@ -0,0 +1,61 @@
+# Vim
+*.swp
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+.pytest*
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+reports/
+
+# Translations
+*.mo
+*.pot
+
+# dotenv
+.env
+
+# virtualenv
+venv/
+ENV/
Dockerfile  (20 lines changed)
@@ -39,28 +39,10 @@ COPY app.py /app
 
 
 # Stage 3 - final runtime image
-FROM python-alpine as release
+FROM python-alpine
 
 WORKDIR /app
 COPY --from=build-image /app /app
 
 ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
 CMD [ "app.handler" ]
-
-
-# Tests
-FROM release as test
-
-# Get aws-lambda run time emulator
-ADD https://github.com/aws/aws-lambda-runtime-interface-emulator/releases/latest/download/aws-lambda-rie /usr/local/bin/aws-lambda-rie
-RUN chmod 0755 /usr/local/bin/aws-lambda-rie && \
-    mkdir -p tests
-
-# Install pytest
-RUN pip install pytest --target /app
-
-# Add our tests
-ADD tests /app/tests
-
-# Run tests
-RUN python -m pytest --capture=tee-sys tests
Dockerfile.test  (new file, +28)
@@ -0,0 +1,28 @@
+ARG REPOSITORY="sns-alert-hub"
+ARG TAG="latest"
+
+FROM ${REPOSITORY}:${TAG}
+
+# Install additional tools for tests
+COPY dev-requirements.txt .flake8 .
+RUN export MAKEFLAGS="-j$(nproc)" && \
+    pip install -r dev-requirements.txt
+
+# Unit Tests / Static / Style etc.
+COPY tests/ tests/
+RUN flake8 app.py tests && \
+    codespell app.py tests
+
+# Get aws-lambda run time emulator
+ADD https://github.com/aws/aws-lambda-runtime-interface-emulator/releases/latest/download/aws-lambda-rie /usr/local/bin/aws-lambda-rie
+RUN chmod 0755 /usr/local/bin/aws-lambda-rie && \
+    mkdir -p tests
+
+# Install pytest
+RUN pip install pytest --target /app
+
+# Add our tests
+ADD tests /app/tests
+
+# Run tests
+RUN python -m pytest tests --capture=tee-sys
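
Note (not part of this commit): Dockerfile.test only bundles aws-lambda-rie, it never starts it. A hedged Python sketch of how the packaged handler could be smoke-tested locally through the emulator's documented invocation endpoint; the docker run line, the port mapping, and the empty event payload are assumptions for illustration, not taken from the diff:

    # Assumes the test image is already running with the bundled emulator in
    # front of awslambdaric, e.g. (command and tag are illustrative only):
    #   docker run -p 9000:8080 --entrypoint /usr/local/bin/aws-lambda-rie \
    #       sns-alert-hub:latest-test /usr/local/bin/python -m awslambdaric app.handler
    import json
    import urllib.request

    # Minimal made-up event; a real invocation would carry SNS Records.
    req = urllib.request.Request(
        "http://localhost:9000/2015-03-31/functions/function/invocations",
        data=json.dumps({"Records": []}).encode(),
        headers={"Content-Type": "application/json"},
    )
    print(urllib.request.urlopen(req).read().decode())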
Makefile  (30 lines changed)
@@ -14,21 +14,31 @@ endif
 
 .PHONY: build push scan test
 
-all: build
+all: test
 
+# Ensure we run the tests by removing any previous runs
+.PHONY: rm-test-image
+rm-test-image:
+    @test -z "$$(docker image ls -q $(REPOSITORY):$(TAG)-test)" || docker image rm $(REPOSITORY):$(TAG)-test > /dev/null
+    @test -z "$$(docker image ls -q $(REPOSITORY):$(TAG)-test)" || echo "Error: Removing test image failed"
+
 build:
-    podman build --target release --rm -t $(REPOSITORY):$(TAG) -t $(REPOSITORY):latest .
+    sed -i -e "s/^__version__ =.*/__version__ = \"$(TAG)\"/" app.py
+    docker build --rm -t $(REPOSITORY):$(TAG) .
 
-test:
-    podman build --target test --rm -t $(REPOSITORY):$(TAG) -t $(REPOSITORY):latest .
+test: build rm-test-image
+    docker build --rm -t $(REPOSITORY):$(TAG)-test \
+        --build-arg REPOSITORY=$(REPOSITORY) \
+        --build-arg TAG=$(TAG) \
+        -f Dockerfile.test .
 
-scan:
+scan: build
     trivy $(TRIVY_OPTS) $(REPOSITORY):$(TAG)
 
-push:
-    aws ecr-public get-login-password --region $(REGION) | podman login --username AWS --password-stdin $(REGISTRY)
-    podman tag $(REPOSITORY):latest $(REGISTRY)/$(REPOSITORY):$(TAG) $(REGISTRY)/$(REPOSITORY):latest
-    podman push $(REGISTRY)/$(REPOSITORY):$(TAG)
-    podman push $(REGISTRY)/$(REPOSITORY):latest
+push: scan
+    aws ecr-public get-login-password --region $(REGION) | docker login --username AWS --password-stdin $(REGISTRY)
+    docker tag $(REPOSITORY):$(TAG) $(REGISTRY)/$(REPOSITORY):$(TAG) $(REGISTRY)/$(REPOSITORY):latest
+    docker push $(REGISTRY)/$(REPOSITORY):$(TAG)
+    docker push $(REGISTRY)/$(REPOSITORY):latest
     # Delete all untagged images
     # aws ecr-public batch-delete-image --repository-name $(REPOSITORY) --region $(REGION) --image-ids $$(for image in $$(aws ecr-public describe-images --repository-name $(REPOSITORY) --region $(REGION) --output json | jq -r '.imageDetails[] | select(.imageTags | not ).imageDigest'); do echo -n "imageDigest=$$image "; done)
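
Note (not part of this commit): the new build target stamps the release version into app.py with sed before docker build, which is why __version__ in app.py below is reduced to the placeholder "head". A rough Python equivalent of that substitution, assuming a hypothetical TAG of 0.7.3:

    import re

    line = '__version__ = "head"'
    print(re.sub(r'^__version__ =.*', '__version__ = "0.7.3"', line, flags=re.M))
    # __version__ = "0.7.3"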
app.py  (17 lines changed)
@@ -11,7 +11,7 @@ import apprise
 
 __author__ = "Stefan Reimer"
 __author_email__ = "stefan@zero-downtime.net"
-__version__ = "0.7.2"
+__version__ = "head"
 
 # Global alias lookup cache
 account_aliases = {}
@@ -39,11 +39,13 @@ else:
 
 # Ensure slack URLs use ?blocks=yes
 if "slack.com" in WEBHOOK_URL:
-    scheme, netloc, path, query_string, fragment = urllib.parse.urlsplit(WEBHOOK_URL)
+    scheme, netloc, path, query_string, fragment = urllib.parse.urlsplit(
+        WEBHOOK_URL)
     query_params = urllib.parse.parse_qs(query_string)
     query_params["blocks"] = ["yes"]
     new_query_string = urllib.parse.urlencode(query_params, doseq=True)
-    WEBHOOK_URL = urllib.parse.urlunsplit((scheme, netloc, path, new_query_string, fragment))
+    WEBHOOK_URL = urllib.parse.urlunsplit(
+        (scheme, netloc, path, new_query_string, fragment))
 
 # Setup apprise
 asset = apprise.AppriseAsset()
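
Note (not part of this commit): the hunk above is only a line-length reflow of the existing ?blocks=yes rewrite. A standalone sketch of what that rewrite does, with a made-up Slack webhook URL standing in for the configured WEBHOOK_URL:

    import urllib.parse

    url = "https://hooks.slack.com/services/T000/B000/XXXX"  # made-up example URL
    scheme, netloc, path, query_string, fragment = urllib.parse.urlsplit(url)
    query_params = urllib.parse.parse_qs(query_string)
    query_params["blocks"] = ["yes"]
    new_query_string = urllib.parse.urlencode(query_params, doseq=True)
    print(urllib.parse.urlunsplit((scheme, netloc, path, new_query_string, fragment)))
    # https://hooks.slack.com/services/T000/B000/XXXX?blocks=yes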
@@ -54,7 +56,8 @@ asset.app_url = "https://zero-downtime.net"
 asset.image_url_mask = (
     "https://cdn.zero-downtime.net/assets/zdt/apprise/{TYPE}-{XY}{EXTENSION}"
 )
-asset.app_id = "{} / {} {}".format("cloudbender", __version__, "zero-downtime.net")
+asset.app_id = "{} / {} {}".format("cloudbender",
+                                   __version__, "zero-downtime.net")
 
 apobj = apprise.Apprise(asset=asset)
 apobj.add(WEBHOOK_URL)
@@ -197,7 +200,8 @@ def handler(event, context):
         if alert["status"] == "resolved":
             body = body + "\nDuration: {}".format(
                 humanize.time.precisedelta(
-                    dateutil.parser.parse(alert["startsAt"]) - dateutil.parser.parse(alert["endsAt"])
+                    dateutil.parser.parse(
+                        alert["startsAt"]) - dateutil.parser.parse(alert["endsAt"])
                 )
             )
         else:
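
Note (not part of this commit): the reflowed call above renders how long a resolved alert was firing. A standalone sketch of the same humanize/dateutil combination, with made-up timestamps standing in for alert["startsAt"] and alert["endsAt"]:

    import dateutil.parser
    import humanize

    starts_at = dateutil.parser.parse("2021-12-01T10:00:00Z")  # made-up timestamps
    ends_at = dateutil.parser.parse("2021-12-01T10:42:30Z")
    print(humanize.time.precisedelta(ends_at - starts_at))
    # prints something like: 42 minutes and 30 seconds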
@@ -228,4 +232,5 @@ def handler(event, context):
 
     else:
         body = sns["Message"]
-        apobj.notify(body=body, title="Unknown message type", notify_type=apprise.NotifyType.WARNING)
+        apobj.notify(body=body, title="Unknown message type",
+                     notify_type=apprise.NotifyType.WARNING)
dev-requirements.txt
@@ -1,2 +1,3 @@
+pytest
 flake8
 codespell