feat: Initial commit v0.7.2
commit 618cb50feb

Dockerfile (Normal file, 55 lines)
@@ -0,0 +1,55 @@
# https://aws.amazon.com/blogs/aws/new-for-aws-lambda-container-image-support/
ARG FUNCTION_DIR="/app"
ARG RUNTIME_VERSION="3.9"
ARG DISTRO_VERSION="3.15"

# Stage 1 - bundle base image + runtime
# Grab a fresh copy of the image and install GCC
FROM python:${RUNTIME_VERSION}-alpine${DISTRO_VERSION} AS python-alpine
# Install GCC (Alpine uses musl but we compile and link dependencies with GCC)
RUN apk add --no-cache \
    libstdc++

# Stage 2 - build function and dependencies
FROM python-alpine AS build-image
# Install aws-lambda-cpp build dependencies
RUN apk add --no-cache \
    build-base \
    libtool \
    autoconf \
    automake \
    libexecinfo-dev \
    make \
    cmake \
    libcurl \
    libffi-dev \
    openssl-dev
    # cargo

# Include global args in this stage of the build
ARG FUNCTION_DIR
ARG RUNTIME_VERSION
# Create function directory
RUN mkdir -p ${FUNCTION_DIR}
# Copy handler function
COPY app/* ${FUNCTION_DIR}

# Install requirements
COPY requirements.txt requirements.txt
RUN python${RUNTIME_VERSION} -m pip install -r requirements.txt --target ${FUNCTION_DIR}

# Install Lambda Runtime Interface Client for Python
RUN python${RUNTIME_VERSION} -m pip install awslambdaric --target ${FUNCTION_DIR}

# Stage 3 - final runtime image
# Grab a fresh copy of the Python image
FROM python-alpine
# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Set working directory to function root directory
WORKDIR ${FUNCTION_DIR}
# Copy in the built dependencies
COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}

ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
CMD [ "app.handler" ]
Jenkinsfile (vendored, Normal file, 56 lines)
@@ -0,0 +1,56 @@
pipeline {
  agent { node { label 'podman-aws-trivy' } }

  stages {
    stage('Prepare'){
      // get tags
      steps {
        sh 'git fetch -q --tags ${GIT_URL} +refs/heads/${BRANCH_NAME}:refs/remotes/origin/${BRANCH_NAME}'
      }
    }

    // Build using rootless podman
    stage('Build'){
      steps {
        sh 'make build'
      }
    }

    stage('Test'){
      steps {
        sh 'make test'
      }
    }

    // Scan via trivy
    stage('Scan'){
      environment {
        TRIVY_FORMAT = "template"
        TRIVY_OUTPUT = "reports/trivy.html"
      }
      steps {
        sh 'mkdir -p reports'
        sh 'make scan'
        publishHTML target : [
          allowMissing: true,
          alwaysLinkToLastBuild: true,
          keepAll: true,
          reportDir: 'reports',
          reportFiles: 'trivy.html',
          reportName: 'TrivyScan',
          reportTitles: 'TrivyScan'
        ]

        // Scan again and fail on CRITICAL vulns
        sh 'TRIVY_EXIT_CODE=1 TRIVY_SEVERITY=CRITICAL make scan'
      }
    }

    // Push to ECR
    stage('Push'){
      steps {
        sh 'make push'
      }
    }
  }
}
Makefile (Normal file, 44 lines)
@@ -0,0 +1,44 @@
REGISTRY := public.ecr.aws/zero-downtime
REPOSITORY := sns-alert-hub
REGION := us-east-1

# Parse version from latest git semver tag
GTAG=$(shell git describe --tags --match v*.*.* 2>/dev/null || git rev-parse --short HEAD 2>/dev/null)
TAG ?= $(shell echo $(GTAG) | awk -F '-' '{ print $$1 "-" $$2 }' | sed -e 's/-$$//')
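# e.g. "v0.7.2-3-g618cb50" from git describe becomes "v0.7.2-3"; a build on the tag itself stays "v0.7.2"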

ifeq ($(TRIVY_REMOTE),)
TRIVY_OPTS := image
else
TRIVY_OPTS := client --remote ${TRIVY_REMOTE}
endif

.PHONY: build push scan test

all: build

build:
	podman build --rm --squash-all -t $(REPOSITORY):$(TAG) -t $(REPOSITORY):latest .

push:
	aws ecr-public get-login-password --region $(REGION) | podman login --username AWS --password-stdin $(REGISTRY)
	podman tag $(REPOSITORY):latest $(REGISTRY)/$(REPOSITORY):$(TAG) $(REGISTRY)/$(REPOSITORY):latest
	podman push $(REGISTRY)/$(REPOSITORY):$(TAG)
	podman push $(REGISTRY)/$(REPOSITORY):latest
	# Delete all untagged images
	# aws ecr-public batch-delete-image --repository-name $(REPOSITORY) --region $(REGION) --image-ids $$(for image in $$(aws ecr-public describe-images --repository-name $(REPOSITORY) --region $(REGION) --output json | jq -r '.imageDetails[] | select(.imageTags | not ).imageDigest'); do echo -n "imageDigest=$$image "; done)

scan:
	trivy $(TRIVY_OPTS) $(REPOSITORY):$(TAG)

test:
	flake8 --ignore=E501 app/app.py
	./test.py

aws_lambda:
	@[ -z $(AWS_LAMBDA) ] && { echo "Missing aws-lambda-rie in PATH"; exit 1;} || true

run: aws_lambda
	podman run --rm -v $(AWS_LAMBDA):/aws-lambda -p 8080:8080 --entrypoint /aws-lambda/aws-lambda-rie $(TAG) /usr/local/bin/python -m awslambdaric app.handler

dev: aws_lambda
	cd app && aws-lambda-rie /usr/bin/python -m awslambdaric app.handler
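With make run (containerized) or make dev (host Python) serving the handler behind the Lambda Runtime Interface Emulator on port 8080, it can be driven from any Python shell. A minimal sketch, mirroring what test.py does and assuming the emulator is already listening on localhost:8080:

import requests

# The emulator exposes every function under this fixed invocation path
RIE_URL = "http://localhost:8080/2015-03-31/functions/function/invocations"

# A plain-text SNS message exercises the fallback "unknown message type" branch of app.handler
event = {"Records": [{"Sns": {"Message": "hello from a local smoke test"}}]}

response = requests.post(RIE_URL, json=event, timeout=10)
print(response.status_code, response.text)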
SNSAlertHub.yaml (Normal file, 139 lines)
@@ -0,0 +1,139 @@
AWSTemplateFormatVersion: "2010-09-09"

Description: "SNS Topic and tools to fan out alerts to email and or Slack"

Conditions:

  IsSetEmail:
    Fn::Not:
      - Fn::Equals:
          - Ref: AlertEmail
          - ""
  IsSetSlack:
    Fn::Not:
      - Fn::Equals:
          - Ref: AlertSlackWebHook
          - ""

Resources:

  AlertHubTopic:
    Type: AWS::SNS::Topic
    Properties:
      TopicName: AlertHub

  # Email
  EmailAlertsSubscription:
    Type: AWS::SNS::Subscription
    Condition: IsSetEmail
    Properties:
      Endpoint: { Ref: AlertEmail }
      Protocol: email
      TopicArn: { Ref: AlertHubTopic }

  # Slack
  SlackAlertsSubscription:
    Type: AWS::SNS::Subscription
    Condition: IsSetSlack
    Properties:
      Endpoint: {"Fn::GetAtt": ["SNSAlertHubFunction", "Arn"] }
      Protocol: lambda
      TopicArn: { Ref: AlertHubTopic }

  IamRole:
    Type: AWS::IAM::Role
    Condition: IsSetSlack
    Properties:
      Policies:
        - PolicyName: ResolveAccountAlias
          PolicyDocument:
            Version: '2012-10-17'
            Statement:
              - Effect: Allow
                Action:
                  - iam:ListAccountAliases
                Resource:
                  - "*"

        - PolicyName: LogtoCloudwatchGroup
          PolicyDocument:
            Version: '2012-10-17'
            Statement:
              - Effect: Allow
                Action:
                  - logs:CreateLogStream
                  - logs:PutLogEvents
                Resource:
                  - Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/SNSAlertHub:log-stream:*"
              - Effect: Allow
                Action:
                  - logs:CreateLogGroup
                Resource:
                  - Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/SNSAlertHub:*"

      AssumeRolePolicyDocument:
        Version: '2012-10-17'
        Statement:
          - Action:
              - sts:AssumeRole
            Effect: Allow
            Principal:
              Service: [ lambda.amazonaws.com ]

  SNSAlertHubAllowed2Lambda:
    Type: AWS::Lambda::Permission
    Condition: IsSetSlack
    Properties:
      Action: lambda:InvokeFunction
      Principal: sns.amazonaws.com
      FunctionName: { Ref: SNSAlertHubFunction }
      SourceArn: { Ref: AlertHubTopic }

  SNSAlertHubFunction:
    Type: AWS::Lambda::Function
    Condition: IsSetSlack
    Properties:
      PackageType: Image
      Code:
        ImageUri: { "Fn::Sub": "${AWS::AccountId}.dkr.ecr.${AWS::Region}.amazonaws.com/${ImageTag}" }
      Description: "Lambda function to forward alerts from SNS to Slack"
      FunctionName: SNSAlertHub
      MemorySize: 128
      Role: { "Fn::GetAtt": ["IamRole", "Arn"] }
      Timeout: 10
      Environment:
        Variables:
          WEBHOOK_URL: { Ref: AlertSlackWebHook }
          # DEBUG: "1"

Metadata:
  Template:
    Name: sns-alert-hub
    Hash: 98fcf521f053f7412a90ce360ab62807
    AwsCfnLib: v0.2.1
  CloudBender:
    Version: 0.9.9

Parameters:

  ImageTag:
    Type: String
    Description: "(Optional) Overwrite default ImageTag"
    Default: "sns-alert-hub:v0.5.8"

  AlertEmail:
    Type: String
    Description: "(Optional) Email address to receive alerts via SMTP"
    Default: ""

  AlertSlackWebHook:
    Type: String
    Description: "(Optional) Encrypted (KMS Default key) Slack webhook to post alerts; deploys Slack Lambda function"
    Default: ""
    NoEcho: True

Outputs:

  AlertHubTopic:
    Value: { Ref: AlertHubTopic }
    Description: ARN of the SNS AlertHub Topic
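The Lambda pieces are created only when AlertSlackWebHook is non-empty, and because the template defines an IAM role the stack has to be created with the IAM capability. A minimal deployment sketch using boto3 (the stack name, region and email below are placeholder values, not part of the template):

import boto3

cfn = boto3.client("cloudformation", region_name="eu-central-1")  # region is an example

with open("SNSAlertHub.yaml") as f:
    template_body = f.read()

cfn.create_stack(
    StackName="sns-alert-hub",  # hypothetical stack name
    TemplateBody=template_body,
    Parameters=[
        # Only the email fan-out is enabled here; leaving AlertSlackWebHook empty skips the Lambda resources
        {"ParameterKey": "AlertEmail", "ParameterValue": "ops@example.com"},
    ],
    Capabilities=["CAPABILITY_IAM"],  # required because the template creates an AWS::IAM::Role
)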
app/app.py (Normal file, 231 lines)
@@ -0,0 +1,231 @@
#!/usr/bin/env python
import os
import json
import logging
import boto3
import dateutil.parser
import humanize
import urllib.parse

import apprise

__author__ = "Stefan Reimer"
__author_email__ = "stefan@zero-downtime.net"
__version__ = "0.7.2"

# Global alias lookup cache
account_aliases = {}

logger = logging.getLogger(__name__)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("boto3").setLevel(logging.WARNING)
logging.getLogger("botocore").setLevel(logging.WARNING)


def boolean(value):
    if value in ("t", "T", "true", "True", "TRUE", "1", 1, True):
        return True
    return False


DEBUG = boolean(os.getenv("DEBUG", default=False))
RESOLVE_ACCOUNT = boolean(os.getenv("RESOLVE_ACCOUNT", default=True))
WEBHOOK_URL = os.environ.get("WEBHOOK_URL", "dbus://")
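# WEBHOOK_URL is any apprise-style notification URL, e.g. "slack://TokenA/TokenB/TokenC" or a raw https://hooks.slack.com/services/... webhook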

if DEBUG:
    logging.getLogger().setLevel(logging.DEBUG)
else:
    logging.getLogger().setLevel(logging.INFO)

# Ensure slack URLs use ?blocks=yes
if "slack.com" in WEBHOOK_URL:
    scheme, netloc, path, query_string, fragment = urllib.parse.urlsplit(WEBHOOK_URL)
    query_params = urllib.parse.parse_qs(query_string)
    query_params["blocks"] = ["yes"]
    new_query_string = urllib.parse.urlencode(query_params, doseq=True)
    WEBHOOK_URL = urllib.parse.urlunsplit((scheme, netloc, path, new_query_string, fragment))
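    # e.g. https://hooks.slack.com/services/T0/B0/XXX becomes https://hooks.slack.com/services/T0/B0/XXX?blocks=yes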

# Setup apprise
asset = apprise.AppriseAsset()

# Set our app_id which is also used as User-Agent
asset.app_desc = "SNSAlertHub part of ZeroDownTime CloudBender"
asset.app_url = "https://zero-downtime.net"
asset.image_url_mask = (
    "https://cdn.zero-downtime.net/assets/zdt/apprise/{TYPE}-{XY}{EXTENSION}"
)
asset.app_id = "{} / {} {}".format("cloudbender", __version__, "zero-downtime.net")

apobj = apprise.Apprise(asset=asset)
apobj.add(WEBHOOK_URL)


def get_alias(account_id):
    """resolves aws account_id to account alias and caches for lifetime of lambda function"""
    if RESOLVE_ACCOUNT:
        try:
            if account_id not in account_aliases:
                iam = boto3.client("iam")
                account_aliases[account_id] = iam.list_account_aliases()[
                    "AccountAliases"
                ][0]

            return account_aliases[account_id]

        except (KeyError, IndexError):
            logger.warning("Could not resolve IAM account alias")
            pass

    return account_id


def handler(event, context):
    logger.debug(json.dumps({"aws.event": event}))  # sort_keys=True, indent=4

    (region, account_id) = context.invoked_function_arn.split(":")[3:5]

    sns = event["Records"][0]["Sns"]

    # Guess what we have, try to parse as json first
    try:
        msg = json.loads(sns["Message"])

    except json.decoder.JSONDecodeError:
        msg = {}
        pass

    # CloudWatch Alarm ?
    if "AlarmName" in msg:
        title = "AWS Cloudwatch Alarm"

        # Discard NewStateValue == OK && OldStateValue == INSUFFICIENT_DATA as these are triggered by installing new Alarms and only cause confusion
        if msg["NewStateValue"] == "OK" and msg["OldStateValue"] == "INSUFFICIENT_DATA":
            logger.info(
                "Discarding Cloudwatch Metrics Alarm as state is OK and previous state was insufficient data, most likely new alarm being installed"
            )
            return 0

        body = msg["AlarmDescription"]

        msg_context = "{account} - {region} -> <https://{region}.console.aws.amazon.com/cloudwatch/home?region={region}#alarmsV2:alarm/{alarm}|Alarm Details>".format(
            region=region,
            alarm=msg["AlarmName"],
            account=get_alias(msg["AWSAccountId"]),
        )

        try:
            notify_map = {
                "ok": apprise.NotifyType.SUCCESS,
                "alarm": apprise.NotifyType.FAILURE,
                "insufficient_data": apprise.NotifyType.INFO,
            }
            msg_type = notify_map[msg["NewStateValue"].lower()]
        except KeyError:
            msg_type = apprise.NotifyType.WARNING

        body = body + "\n\n_{}_".format(msg_context)
        apobj.notify(body=body, title=title, notify_type=msg_type)

    elif "Source" in msg and msg["Source"] == "CloudBender":
        title = "AWS EC2 - CloudBender"

        try:
            msg_context = "{account} - {region} - {host} ({instance}) <https://{region}.console.aws.amazon.com/ec2/autoscaling/home?region={region}#AutoScalingGroups:id={asg};view=history|{artifact} ASG>".format(
                account=get_alias(msg["AWSAccountId"]),
                region=msg["Region"],
                asg=msg["Asg"],
                instance=msg["Instance"],
                host=msg["Hostname"],
                artifact=msg["Artifact"],
            )
        except KeyError:
            msg_context = "{account} - {region}".format(
                account=get_alias(msg["AWSAccountId"]), region=msg["Region"]
            )

        try:
            notify_map = {
                "warning": apprise.NotifyType.WARNING,
                "error": apprise.NotifyType.FAILURE,
                "info": apprise.NotifyType.INFO,
                "success": apprise.NotifyType.SUCCESS,
            }
            msg_type = notify_map[msg["Level"].lower()]
        except KeyError:
            msg_type = apprise.NotifyType.WARNING

        if "Subject" in msg and msg["Subject"]:
            title = msg["Subject"]

        body = ""
        if "Message" in msg and msg["Message"]:
            body = msg["Message"]

        if "Attachment" in msg and msg["Attachment"]:
            body = body + "\n```{}```".format(msg["Attachment"])

        body = body + "\n\n_{}_".format(msg_context)
        apobj.notify(body=body, title=title, notify_type=msg_type)

    elif "receiver" in msg and msg["receiver"] == "alerthub-notifications":

        for alert in msg["alerts"]:

            # First msg_type
            msg_type = apprise.NotifyType.WARNING
            try:
                if alert["status"] == "resolved":
                    msg_type = apprise.NotifyType.SUCCESS
                else:
                    if alert["labels"]["severity"] == "critical":
                        msg_type = apprise.NotifyType.FAILURE
            except KeyError:
                pass

            # set title to Alertname
            try:
                title = alert["labels"]["alertname"]
            except KeyError:
                title = "Alertmanager"

            # assemble message body
            try:
                body = "{}\n{}".format(
                    alert["annotations"]["summary"], alert["annotations"]["description"]
                )

                if alert["status"] == "resolved":
                    body = body + "\nDuration: {}".format(
                        humanize.time.precisedelta(
                            dateutil.parser.parse(alert["startsAt"]) - dateutil.parser.parse(alert["endsAt"])
                        )
                    )
                else:
                    if "runbook_url" in alert["annotations"]:
                        body = body + " <{}|Runbook>".format(
                            alert["annotations"]["runbook_url"]
                        )
                    if "generatorURL" in alert["annotations"]:
                        body = body + " <{}|Source>".format(
                            alert["annotations"]["generatorURL"]
                        )
            except KeyError:
                body = "Unknown Alert:\n{}".format(alert)

            try:
                msg_context = "{account} - {region} - <{alert_manager_link}/#/alerts?receiver=alerthub-notifications|{cluster}>".format(
                    cluster=alert["labels"]["clusterName"],
                    region=alert["labels"]["awsRegion"],
                    account=get_alias(alert["labels"]["awsAccount"]),
                    alert_manager_link=msg["externalURL"],
                )
                body = body + "\n\n_{}_".format(msg_context)
            except KeyError:
                pass

            # Finally send each parsed alert
            apobj.notify(body=body, title=title, notify_type=msg_type)

    else:
        body = sns["Message"]
        apobj.notify(body=body, title="Unknown message type", notify_type=apprise.NotifyType.WARNING)
requirements-dev.txt (Normal file, 5 lines)
@@ -0,0 +1,5 @@
boto3
apprise
humanize
awslambdaric
flake8
requirements.txt (Normal file, 3 lines)
@@ -0,0 +1,3 @@
boto3
apprise
humanize
test.py (Executable file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env python
import json
import requests
import subprocess
import time


def send_event(event):
    requests.post('http://localhost:8080/2015-03-31/functions/function/invocations', json=event)


p = subprocess.Popen('cd app && aws-lambda-rie /usr/bin/python -m awslambdaric app.handler', shell=True)
time.sleep(3)

# Cloudwatch Alarm
event = json.loads(r' { "Records": [ { "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:EXAMPLE", "EventSource": "aws:sns", "Sns": { "SignatureVersion": "1", "Timestamp": "1970-01-01T00:00:00.000Z", "Signature": "EXAMPLE", "SigningCertUrl": "EXAMPLE", "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", "Message": "{\"AlarmName\":\"sns-slack-test-from-cloudwatch-total-cpu\",\"AlarmDescription\":\"Hello from AWS\",\"AWSAccountId\":\"123456789012\",\"NewStateValue\":\"OK\",\"NewStateReason\":\"Threshold Crossed: 1 datapoint (7.9053535353535365) was not greater than or equal to the threshold (8.0).\",\"StateChangeTime\":\"2015-11-09T21:19:43.454+0000\",\"Region\":\"US - N. Virginia\",\"OldStateValue\":\"ALARM\",\"Trigger\":{\"MetricName\":\"CPUUtilization\",\"Namespace\":\"AWS/EC2\",\"Statistic\":\"AVERAGE\",\"Unit\":null,\"Dimensions\":[],\"Period\":300,\"EvaluationPeriods\":1,\"ComparisonOperator\":\"GreaterThanOrEqualToThreshold\",\"Threshold\":8.0}}", "MessageAttributes": { "Test": { "Type": "String", "Value": "TestString" }, "TestBinary": { "Type": "Binary", "Value": "TestBinary" } }, "Type": "Notification", "UnsubscribeUrl": "EXAMPLE", "TopicArn": "arn:aws:sns:us-east-1:123456789012:production-notices", "Subject": "OK: sns-slack-test-from-cloudwatch-total-cpu" } } ] }')
send_event(event)


# CloudBender launch event
event = json.loads(r' { "Records": [ { "EventSource": "aws:sns", "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub:0c04d2e7-32ec-4933-b913-84c7172e6d90", "Sns": { "Type": "Notification", "MessageId": "25bb2fd0-3221-5c07-aea1-76acf75017c3", "TopicArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub", "Subject": null, "Message": "{\n \"Source\": \"CloudBender\",\n \"AWSAccountId\": \"123456789012\",\n \"Region\": \"eu-central-1\",\n \"Artifact\": \"bastion\",\n \"Hostname\": \"bastion\",\n \"Asg\": \"zdt-bastion-BastionAsg-UABA2PIZV4TI\",\n \"Instance\": \"i-0d9f08855cdfcd740\",\n \"Subject\": \"Test\",\n \"Level\": \"Info\",\n \"Message\": \"Hello\",\n \"Attachment\": \"\",\n \"Emoji\": \"\"\n}\n", "Timestamp": "2021-07-29T15:03:13.318Z", "SignatureVersion": "1", "Signature": "OhLoCz8RWazyZ+ZHK03QLby6M3jmdtZvLWoNFygAHIaljQ0ZHsd6mc4TskDTnqpUpCtd/iIl+TLIPN8hYyflbLk2/cN3LXXWcSQ0GWqQ/bWIEhej54oCmUgZjIzrVfRlgz7mlUkhRnjQoRWYpcXRycQczMWuF2DCeIDP6v3ON53BxR8NdCeQXiquwoFlHaAaZIviRoUMqwp2Cl1T0NaBLeL9zmsdPvJF6EaXRbu3rqC1hdrA6E+nV2lzYNKg09POxA9JVpURmMEq3AC4tXm1Gu73PWQgWgoDSOQx+SOjMrbMeCAqf5R6typBV1BRDsGPkNkt9n4oto0FR9iyDmuWog==", "SigningCertUrl": "https://sns.eu-central-1.amazonaws.com/SimpleNotificationService-010a507c1833636cd94bdb98bd93083a.pem", "UnsubscribeUrl": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-central-1:123456789012:AlertHub:0c04d2e7-32ec-4933-b913-84c7172e6d90", "MessageAttributes": {} } } ] }')
send_event(event)


# Alertmanager
event = json.loads(r' { "Records": [ { "EventSource": "aws:sns", "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub:0e7ce1ba-c3e4-4264-bae1-4eb71c91235a", "Sns": { "Type": "Notification", "MessageId": "10ae86eb-9ddc-5c2f-806c-df6ecb6bde42", "TopicArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub", "Subject": null, "Message": "{\"receiver\":\"alerthub-notifications\",\"status\":\"firing\",\"alerts\":[{\"status\":\"firing\",\"labels\":{\"alertname\":\"KubeVersionMismatch\",\"awsRegion\":\"eu-central-1\",\"clusterName\":\"test\",\"prometheus\":\"monitoring/metrics-kube-prometheus-st-prometheus\",\"severity\":\"warning\"},\"annotations\":{\"description\":\"There are 2 different semantic versions of Kubernetes components running.\",\"runbook_url\":\"https://github.com/kubernetes-monitoring/kubernetes-mixin/tree/master/runbook.md#alert-name-kubeversionmismatch\",\"summary\":\"Different semantic versions of Kubernetes components running.\"},\"startsAt\":\"2021-08-04T13:17:40.31Z\",\"endsAt\":\"0001-01-01T00:00:00Z\",\"generatorURL\":\"https://prometheus/graph?g0.expr=count%28count+by%28git_version%29+%28label_replace%28kubernetes_build_info%7Bjob%21~%22kube-dns%7Ccoredns%22%7D%2C+%22git_version%22%2C+%22%241%22%2C+%22git_version%22%2C+%22%28v%5B0-9%5D%2A.%5B0-9%5D%2A%29.%2A%22%29%29%29+%3E+1\\u0026g0.tab=1\",\"fingerprint\":\"5f94d4a22730c666\"}],\"groupLabels\":{},\"commonLabels\":{\"alertname\":\"KubeVersionMismatch\",\"awsRegion\":\"eu-central-1\",\"clusterName\":\"test\",\"prometheus\":\"monitoring/metrics-kube-prometheus-st-prometheus\",\"severity\":\"warning\"},\"commonAnnotations\":{\"description\":\"There are 2 different semantic versions of Kubernetes components running.\",\"runbook_url\":\"https://github.com/kubernetes-monitoring/kubernetes-mixin/tree/master/runbook.md#alert-name-kubeversionmismatch\",\"summary\":\"Different semantic versions of Kubernetes components running.\"},\"externalURL\":\"https://alertmanager\",\"version\":\"4\",\"groupKey\":\"{}:{}\",\"truncatedAlerts\":0}\n", "Timestamp": "2021-08-05T03:01:11.233Z", "SignatureVersion": "1", "Signature": "pSUYO7LDIfzCbBrp/S2HXV3/yzls3vfYy+2di6HsKG8Mf+CV97RLnen15ieAo3eKA8IfviZIzyREasbF0cwfUeruHPbW1B8kO572fDyV206zmUxvR63r6oM6OyLv9XKBmvyYHKawkOgHZHEMP3v1wMIIHK2W5KbJtXoUcks5DVamooVb9iFF58uqTf+Ccy31bOL4tFyMR9nr8NU55vEIlGEVno8A9Q21TujdZTg0V0BmRgPafcic96udWungjmfhZ005378N32u2hlLj6BRneTpHHSXHBw4wKZreKpX+INZwiZ4P8hzVfgRvAIh/4gXN9+0UJSHgnsaqUcLDNoLZTQ==", "SigningCertUrl": "https://sns.eu-central-1.amazonaws.com/SimpleNotificationService-010a507c1833636cd94bdb98bd93083a.pem", "UnsubscribeUrl": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-central-1:123456789012:AlertHub:0e7ce1ba-c3e4-4264-bae1-4eb71c91235a", "MessageAttributes": {} } } ] }')
send_event(event)

event = json.loads(r' { "Records": [ { "EventSource": "aws:sns", "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub:0e7ce1ba-c3e4-4264-bae1-4eb71c91235a", "Sns": { "Type": "Notification", "MessageId": "10ae86eb-9ddc-5c2f-806c-df6ecb6bde42", "TopicArn": "arn:aws:sns:eu-central-1:123456789012:AlertHub", "Subject": null, "Message": "{\"receiver\":\"alerthub-notifications\",\"status\":\"resolved\",\"alerts\":[{\"status\":\"resolved\",\"labels\":{\"alertname\":\"KubeDeploymentReplicasMismatch\",\"awsAccount\":\"668666974128\",\"awsRegion\":\"us-west-2\",\"clusterName\":\"mayne-aws-dev\",\"container\":\"kube-state-metrics\",\"deployment\":\"extrude-job\",\"endpoint\":\"http\",\"instance\":\"10.244.202.71:8080\",\"job\":\"kube-state-metrics\",\"namespace\":\"default\",\"pod\":\"metrics-kube-state-metrics-56546f44c7-h57jx\",\"prometheus\":\"monitoring/metrics-kube-prometheus-st-prometheus\",\"service\":\"metrics-kube-state-metrics\",\"severity\":\"warning\"},\"annotations\":{\"description\":\"Deployment default/extrude-job has not matched the expected number of replicas for longer than 15 minutes.\",\"runbook_url\":\"https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubedeploymentreplicasmismatch\",\"summary\":\"Deployment has not matched the expected number of replicas.\"},\"startsAt\":\"2021-09-29T12:36:11.394Z\",\"endsAt\":\"2021-09-29T14:51:11.394Z\",\"generatorURL\":\"https://prometheus.dev.mayneinc.com/graph?g0.expr=%28kube_deployment_spec_replicas%7Bjob%3D%22kube-state-metrics%22%7D+%3E+kube_deployment_status_replicas_available%7Bjob%3D%22kube-state-metrics%22%7D%29+and+%28changes%28kube_deployment_status_replicas_updated%7Bjob%3D%22kube-state-metrics%22%7D%5B10m%5D%29+%3D%3D+0%29\\u0026g0.tab=1\",\"fingerprint\":\"59ad2f1a4567b43b\"},{\"status\":\"firing\",\"labels\":{\"alertname\":\"KubeVersionMismatch\",\"awsRegion\":\"eu-central-1\",\"clusterName\":\"test\",\"prometheus\":\"monitoring/metrics-kube-prometheus-st-prometheus\",\"severity\":\"warning\"},\"annotations\":{\"description\":\"There are 2 different semantic versions of Kubernetes components running.\",\"runbook_url\":\"https://github.com/kubernetes-monitoring/kubernetes-mixin/tree/master/runbook.md#alert-name-kubeversionmismatch\",\"summary\":\"Different semantic versions of Kubernetes components running.\"},\"startsAt\":\"2021-08-04T13:17:40.31Z\",\"endsAt\":\"0001-01-01T00:00:00Z\",\"generatorURL\":\"https://prometheus/graph?g0.expr=count%28count+by%28git_version%29+%28label_replace%28kubernetes_build_info%7Bjob%21~%22kube-dns%7Ccoredns%22%7D%2C+%22git_version%22%2C+%22%241%22%2C+%22git_version%22%2C+%22%28v%5B0-9%5D%2A.%5B0-9%5D%2A%29.%2A%22%29%29%29+%3E+1\\u0026g0.tab=1\",\"fingerprint\":\"5f94d4a22730c666\"}],\"groupLabels\":{\"job\":\"kube-state-metrics\"},\"commonLabels\":{\"alertname\":\"KubeDeploymentReplicasMismatch\",\"awsAccount\":\"668666974128\",\"awsRegion\":\"us-west-2\",\"clusterName\":\"mayne-aws-dev\",\"container\":\"kube-state-metrics\",\"deployment\":\"extrude-job\",\"endpoint\":\"http\",\"instance\":\"10.244.202.71:8080\",\"job\":\"kube-state-metrics\",\"namespace\":\"default\",\"pod\":\"metrics-kube-state-metrics-56546f44c7-h57jx\",\"prometheus\":\"monitoring/metrics-kube-prometheus-st-prometheus\",\"service\":\"metrics-kube-state-metrics\",\"severity\":\"warning\"},\"commonAnnotations\":{\"description\":\"Deployment default/extrude-job has not matched the expected number of replicas for longer than 15 
minutes.\",\"runbook_url\":\"https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubedeploymentreplicasmismatch\",\"summary\":\"Deployment has not matched the expected number of replicas.\"},\"externalURL\":\"https://alertmanager.dev.mayneinc.com\",\"version\":\"4\",\"groupKey\":\"{}:{job=\\\"kube-state-metrics\\\"}\",\"truncatedAlerts\":0}\n", "Timestamp": "2021-08-05T03:01:11.233Z", "SignatureVersion": "1", "Signature": "pSUYO7LDIfzCbBrp/S2HXV3/yzls3vfYy+2di6HsKG8Mf+CV97RLnen15ieAo3eKA8IfviZIzyREasbF0cwfUeruHPbW1B8kO572fDyV206zmUxvR63r6oM6OyLv9XKBmvyYHKawkOgHZHEMP3v1wMIIHK2W5KbJtXoUcks5DVamooVb9iFF58uqTf+Ccy31bOL4tFyMR9nr8NU55vEIlGEVno8A9Q21TujdZTg0V0BmRgPafcic96udWungjmfhZ005378N32u2hlLj6BRneTpHHSXHBw4wKZreKpX+INZwiZ4P8hzVfgRvAIh/4gXN9+0UJSHgnsaqUcLDNoLZTQ==", "SigningCertUrl": "https://sns.eu-central-1.amazonaws.com/SimpleNotificationService-010a507c1833636cd94bdb98bd93083a.pem", "UnsubscribeUrl": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-central-1:123456789012:AlertHub:0e7ce1ba-c3e4-4264-bae1-4eb71c91235a", "MessageAttributes": {} } } ] }')
send_event(event)

p.terminate()