Convert python scripts to argparse

This removes the manual command-line handling and reformats the scripts
into main functions. This is paving the way for a more unified build tool.
This commit is contained in:
Mike Crute 2020-05-21 18:22:26 -07:00
parent d63409acce
commit a36d0616bf
3 changed files with 317 additions and 256 deletions

View File

@ -1,119 +1,131 @@
@PYTHON@
# vim: ts=4 et:
from datetime import datetime
from distutils.version import StrictVersion
import functools
import os
import re
import sys
import argparse
import textwrap
from datetime import datetime
from collections import defaultdict
from distutils.version import StrictVersion
import yaml
if len(sys.argv) != 2:
sys.exit("Usage: " + os.path.basename(__file__) + "<profile>")
PROFILE = sys.argv[1]
def find_repo_root():
path = os.getcwd()
RELEASE_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'..', 'releases'
)
while ".git" not in set(os.listdir(path)) and path != "/":
path = os.path.dirname(path)
README_MD = os.path.join( RELEASE_DIR, 'README.md')
RELEASE_YAML = os.path.join( RELEASE_DIR, PROFILE + '.yaml')
if path == "/":
raise Exception("No repo found, stopping at /")
# read in releases/<profile>.yaml
with open(RELEASE_YAML, 'r') as data:
RELEASES = yaml.safe_load(data)
sections = {}
for build, releases in RELEASES.items():
for release, amis in releases.items():
for name, info in amis.items():
version = info['version']
arch = info['arch']
built = info['build_time']
if version in sections:
ver = sections[version]
else:
ver = {
'release': '',
'built': {},
'name': {},
'ami': {}
}
if (arch not in ver['built'] or
ver['built'][arch] < built):
ver['release'] = release
ver['name'][arch] = name
ver['built'][arch] = built
for region, ami in info['artifacts'].items():
if region not in ver['ami']:
ver['ami'][region] = {}
ver['ami'][region][arch] = ami
sections[version] = ver
SECTION = """
### Alpine Linux {release} ({date})
<details><summary><i>click to show/hide</i></summary><p>
{rows}
</p></details>
"""
AMI = " [{id}](https://{r}.console.aws.amazon.com/ec2/home#Images:visibility=public-images;imageId={id}) " + \
"([launch](https://{r}.console.aws.amazon.com/ec2/home#launchAmi={id})) |"
ARCHS = ['x86_64', 'aarch64']
return path
# most -> least recent version, edge at end
def ver_cmp(a, b):
    """cmp-style comparator: newest version first, 'edge' sorted last.

    Returns -1 if a is newer than b, 1 if a is older, 0 if equal -- for
    use with functools.cmp_to_key().  Strings that don't parse as
    versions compare equal, except 'edge', which always sorts last
    (mirrors the previous StrictVersion/ValueError behavior).
    """
    def parse(v):
        # Local replacement for distutils.version.StrictVersion, which
        # was deprecated by PEP 632 and removed in Python 3.12.  Accepts
        # the same grammar: MAJOR.MINOR[.PATCH][{a|b}N].
        m = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?(?:([ab])(\d+))?$', v)
        if m is None:
            raise ValueError(f"invalid version number '{v}'")
        major, minor, patch = (int(g or 0) for g in m.groups()[:3])
        # A final release outranks any pre-release of the same version;
        # 'z' sorts after both the 'a' and 'b' markers.
        pre = (m.group(4), int(m.group(5))) if m.group(4) else ('z', 0)
        return (major, minor, patch, pre)

    try:
        key_a, key_b = parse(a), parse(b)
    except ValueError:
        # "edge" (and anything else non-numeric) doesn't parse
        if a == 'edge':
            return 1
        if b == 'edge':
            return -1
        return 0
    if key_a < key_b:
        return 1
    if key_a > key_b:
        return -1
    return 0
class ReleaseReadmeUpdater:
SECTION_TPL = textwrap.dedent("""
### Alpine Linux {release} ({date})
<details><summary><i>click to show/hide</i></summary><p>
ami_list = "## AMIs\n"
{rows}
for version in sorted(list(sections.keys()), key=functools.cmp_to_key(ver_cmp)):
info = sections[version]
release = info['release']
rows = []
rows.append('| Region |')
rows.append('| ------ |')
for arch in ARCHS:
if arch in info['name']:
rows[0] += ' {n} |'.format(n=info['name'][arch])
rows[1] += ' --- |'
for region, amis in info['ami'].items():
row = '| {r} |'.format(r=region)
for arch in ARCHS:
if arch in amis:
row += AMI.format(r=region, id=amis[arch])
rows.append(row)
ami_list += SECTION.format(
release=release.capitalize(),
date=datetime.utcfromtimestamp(max(info['built'].values())).date(),
rows="\n".join(rows)
</p></details>
""")
AMI_TPL = (
" [{id}](https://{r}.console.aws.amazon.com/ec2/home"
"#Images:visibility=public-images;imageId={id}) "
"([launch](https://{r}.console.aws.amazon.com/ec2/home"
"#launchAmi={id})) |"
)
with open(README_MD, 'r') as file:
readme = file.read()
def __init__(self, profile, archs=None):
    """Remember the profile to update and which architectures to list.

    archs defaults to x86_64 + aarch64 when not given (or empty).
    """
    self.profile = profile
    if archs:
        self.archs = archs
    else:
        self.archs = ["x86_64", "aarch64"]
readme_re = re.compile('## AMIs.*\Z', re.S)
def get_sorted_releases(self, release_data):
sections = defaultdict(lambda: {
"release": "",
"built": {},
"name": {},
"ami": defaultdict(dict)
})
with open(README_MD, 'w') as file:
file.write(readme_re.sub(ami_list, readme))
for build, releases in release_data.items():
for release, amis in releases.items():
for name, info in amis.items():
arch = info["arch"]
built = info["build_time"]
ver = sections[info["version"]]
if arch not in ver["built"] or ver["built"][arch] < built:
ver["release"] = release
ver["name"][arch] = name
ver["built"][arch] = built
for region, ami in info["artifacts"].items():
ver["ami"][region][arch] = ami
extract_ver = lambda x: StrictVersion(
"0.0" if x["release"] == "edge" else x["release"])
return sorted(sections.values(), key=extract_ver, reverse=True)
def make_ami_list(self, sorted_releases):
    """Render the markdown '## AMIs' section for the given releases.

    Each release becomes a collapsible table: one column per named
    architecture, one row per region, cells linking to the AMI console
    and launch pages via AMI_TPL.
    """
    parts = ["## AMIs\n"]
    for info in sorted_releases:
        header = "| Region |"
        divider = "| ------ |"
        for arch in self.archs:
            if arch in info["name"]:
                header += f" {info['name'][arch]} |"
                divider += " --- |"
        rows = [header, divider]
        for region, amis in info["ami"].items():
            cells = f"| {region} |"
            for arch in self.archs:
                if arch in amis:
                    cells += self.AMI_TPL.format(r=region, id=amis[arch])
            rows.append(cells)
        # section date = build time of the most recently built arch
        newest_built = max(info["built"].values())
        parts.append(self.SECTION_TPL.format(
            release=info["release"].capitalize(),
            date=datetime.utcfromtimestamp(newest_built).date(),
            rows="\n".join(rows)))
    return "".join(parts)
def update_markdown(self):
    """Rewrite the '## AMIs' section of releases/README.md for this profile.

    Reads releases/<profile>.yaml, renders the AMI tables, and replaces
    everything from the '## AMIs' heading to end-of-file in README.md.
    """
    release_dir = os.path.join(find_repo_root(), "releases")
    profile_file = os.path.join(release_dir, f"{self.profile}.yaml")
    with open(profile_file, "r") as data:
        sorted_releases = self.get_sorted_releases(yaml.safe_load(data))
    ami_list = self.make_ami_list(sorted_releases)
    readme_md = os.path.join(release_dir, "README.md")
    with open(readme_md, "r") as file:
        readme = file.read()
    with open(readme_md, "w") as file:
        # raw string: '\Z' in a plain literal is an invalid escape
        # sequence (SyntaxWarning on modern Python); a callable repl
        # keeps any backslashes in the generated markdown from being
        # misread as group references.
        file.write(re.sub(r"## AMIs.*\Z", lambda m: ami_list,
                          readme, flags=re.S))
def main():
    """CLI entry point: parse the profile argument and update the README."""
    arg_parser = argparse.ArgumentParser(description="Update release README")
    arg_parser.add_argument("profile", help="name of profile to update")
    opts = arg_parser.parse_args()
    updater = ReleaseReadmeUpdater(opts.profile)
    updater.update_markdown()

if __name__ == "__main__":
    main()

View File

@ -1,137 +1,168 @@
@PYTHON@
#@PYTHON@
# vim: ts=4 et:
from datetime import datetime
import os
import sys
import argparse
from datetime import datetime
import yaml
import boto3
from botocore.exceptions import ClientError
import yaml
# Pruning levels accepted on the command line, narrowest to broadest.
LEVELS = ['revision', 'release', 'version']
# Per-level descriptions shown in the argparse help output.
LEVEL_HELP = """\
revision - keep only the latest revision per release
release - keep only the latest release per version
version - keep only the versions that aren't end-of-life
"""
if 3 < len(sys.argv) > 4 or sys.argv[1] not in LEVELS:
sys.exit("Usage: " + os.path.basename(__file__) + """ <level> <profile> [<build>]
<level> :-
revision - keep only the latest revision per release
release - keep only the latest release per version
version - keep only the versions that aren't end-of-life""")
NOW = datetime.utcnow()
LEVEL = sys.argv[1]
PROFILE = sys.argv[2]
BUILD = None if len(sys.argv) == 3 else sys.argv[3]
def find_repo_root():
path = os.getcwd()
RELEASE_YAML = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'..', 'releases', PROFILE + '.yaml'
)
while ".git" not in set(os.listdir(path)) and path != "/":
path = os.path.dirname(path)
with open(RELEASE_YAML, 'r') as data:
BEFORE = yaml.safe_load(data)
if path == "/":
raise Exception("No repo found, stopping at /")
known = {}
prune = {}
after = {}
return path
# for all builds in the profile...
for build_name, releases in BEFORE.items():
# this is not the build that was specified
if BUILD is not None and BUILD != build_name:
print('< skipping {0}/{1}'.format(PROFILE, build_name))
# ensure its release data remains intact
after[build_name] = BEFORE[build_name]
continue
else:
print('> PRUNING {0}/{1} for {2}'.format(PROFILE, build_name, LEVEL))
def main(args):
parser = argparse.ArgumentParser(
description="Prune AMIs from AWS",
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
"level", choices=["revision", "release", "version"], help=LEVEL_HELP)
parser.add_argument("profile", help="profile to prune")
parser.add_argument(
"build", nargs="?", help="build within profile to prune")
args = parser.parse_args()
criteria = {}
now = datetime.utcnow()
# scan releases for pruning criteria
for release, amis in releases.items():
for ami_name, info in amis.items():
version = info['version']
if info['end_of_life']:
eol = datetime.fromisoformat(info['end_of_life'])
else:
eol = None
built = info['build_time']
for region, ami_id in info['artifacts'].items():
if region not in known:
known[region] = []
known[region].append(ami_id)
# path to the profile's release data; the committed version was missing
# the comma after find_repo_root(), a syntax error
release_yaml = os.path.join(
    find_repo_root(), "releases", f"{args.profile}.yaml")
if LEVEL == 'revision':
# find build timestamp of most recent revision, per release
if release not in criteria or built > criteria[release]:
criteria[release] = built
elif LEVEL == 'release':
# find build timestamp of most recent revision, per version
if version not in criteria or built > criteria[version]:
criteria[version] = built
elif LEVEL == 'version':
# find latest EOL date, per version
if (version not in criteria or not criteria[version]) or (
eol and eol > criteria[version]):
criteria[version] = eol
with open(release_yaml, "r") as data:
before = yaml.safe_load(data)
# rescan again to determine what doesn't make the cut
for release, amis in releases.items():
for ami_name, info in amis.items():
version = info['version']
if info['end_of_life']:
eol = datetime.fromisoformat(info['end_of_life'])
else:
eol = None
built = info['build_time']
if ((LEVEL == 'revision' and built < criteria[release]) or
(LEVEL == 'release' and built < criteria[version]) or
(LEVEL == 'version' and criteria[version] and (
(version != 'edge' and criteria[version] < NOW) or
(version == 'edge' and ((not eol) or (eol < NOW)))
))):
for region, ami_id in info['artifacts'].items():
if region not in prune:
prune[region] = []
prune[region].append(ami_id)
else:
if build_name not in after:
after[build_name] = {}
if release not in after[build_name]:
after[build_name][release] = {}
after[build_name][release][ami_name] = info
known = {}
prune = {}
after = {}
# scan all regions for AMIs
AWS = boto3.session.Session()
for region in AWS.get_available_regions('ec2'):
print("* scanning: " + region + '...')
EC2 = AWS.client('ec2', region_name=region)
# for all builds in the profile...
for build_name, releases in before.items():
try:
for image in EC2.describe_images(Owners=['self'])['Images']:
# this is not the build that was specified
if args.build is not None and args.build != build_name:
print(f"< skipping {args.profile}/{build_name}")
# ensure its release data remains intact
after[build_name] = before[build_name]
continue
else:
print(f"> PRUNING {args.profile}/{build_name} for {args.level}")
action = '? UNKNOWN'
if region in prune and image['ImageId'] in prune[region]:
action = '- REMOVING'
elif region in known and image['ImageId'] in known[region]:
action = '+ KEEPING'
criteria = {}
print(' ' + action + ': ' + image['Name'] +
"\n = " + image['ImageId'], end='', flush=True)
if action[0] == '-':
EC2.deregister_image(ImageId=image['ImageId'])
for blockdev in image['BlockDeviceMappings']:
if 'Ebs' in blockdev:
print(', ' + blockdev['Ebs']['SnapshotId'],
end='', flush=True)
if action[0] == '-':
EC2.delete_snapshot(
SnapshotId=blockdev['Ebs']['SnapshotId'])
print()
except ClientError as e:
print(e)
# scan releases for pruning criteria
for release, amis in releases.items():
for ami_name, info in amis.items():
version = info["version"]
built = info["build_time"]
# update releases/<profile>.yaml
with open(RELEASE_YAML, 'w') as data:
yaml.dump(after, data, sort_keys=False)
if info["end_of_life"]:
eol = datetime.fromisoformat(info["end_of_life"])
else:
eol = None
for region, ami_id in info["artifacts"].items():
if region not in known:
known[region] = []
known[region].append(ami_id)
if args.level == "revision":
# find build timestamp of most recent revision, per release
if release not in criteria or built > criteria[release]:
criteria[release] = built
elif args.level == "release":
# find build timestamp of most recent revision, per version
if version not in criteria or built > criteria[version]:
criteria[version] = built
elif args.level == "version":
# find latest EOL date, per version
if (version not in criteria or not criteria[version]) or (
eol and eol > criteria[version]):
criteria[version] = eol
# rescan again to determine what doesn't make the cut
for release, amis in releases.items():
for ami_name, info in amis.items():
version = info["version"]
built = info["build_time"]
if info["end_of_life"]:
eol = datetime.fromisoformat(info["end_of_life"])
else:
eol = None
if ((args.level == "revision" and built < criteria[release]) or
(args.level == "release" and built < criteria[version]) or
(args.level == "version" and criteria[version] and (
(version != "edge" and criteria[version] < now) or
(version == "edge" and ((not eol) or (eol < now)))
))):
for region, ami_id in info["artifacts"].items():
if region not in prune:
prune[region] = []
prune[region].append(ami_id)
else:
if build_name not in after:
after[build_name] = {}
if release not in after[build_name]:
after[build_name][release] = {}
after[build_name][release][ami_name] = info
# scan all regions for AMIs
AWS = boto3.session.Session()
for region in AWS.get_available_regions("ec2"):
print(f"* scanning: {region} ...")
EC2 = AWS.client("ec2", region_name=region)
try:
for image in EC2.describe_images(Owners=["self"])["Images"]:
action = "? UNKNOWN"
if region in prune and image["ImageId"] in prune[region]:
action = "- REMOVING"
elif region in known and image["ImageId"] in known[region]:
action = "+ KEEPING"
print(f" {action}: {image['Name']}\n = {image['ImageId']}",
end="", flush=True)
if action[0] == "-":
EC2.deregister_image(ImageId=image["ImageId"])
for blockdev in image["BlockDeviceMappings"]:
if "Ebs" in blockdev:
# f-prefix was missing, so the literal placeholder text was printed
# instead of the snapshot id
print(f", {blockdev['Ebs']['SnapshotId']}",
      end="", flush=True)
if action[0] == "-":
EC2.delete_snapshot(
SnapshotId=blockdev["Ebs"]["SnapshotId"])
print()
except ClientError as e:
print(e)
# update releases/<profile>.yaml
with open(release_yaml, "w") as data:
yaml.dump(after, data, sort_keys=False)
if __name__ == "__main__":
main(sys.argv)

View File

@ -1,62 +1,80 @@
@PYTHON@
# vim: set ts=4 et:
import json
import os
import re
import sys
import json
import argparse
import yaml
if len(sys.argv) != 3:
sys.exit("Usage: " + os.path.basename(__file__) + " <profile> <build>")
PROFILE = sys.argv[1]
BUILD = sys.argv[2]
def find_repo_root():
path = os.getcwd()
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MANIFEST_JSON = os.path.join(
SCRIPT_DIR, 'profile', PROFILE, BUILD, 'manifest.json'
)
while ".git" not in set(os.listdir(path)) and path != "/":
path = os.path.dirname(path)
RELEASE_DIR = os.path.join(SCRIPT_DIR, '..', 'releases')
RELEASE_YAML = os.path.join(RELEASE_DIR, PROFILE + '.yaml')
if path == "/":
raise Exception("No repo found, stopping at /")
if not os.path.exists(RELEASE_DIR):
os.makedirs(RELEASE_DIR)
return path
releases = {}
if os.path.exists(RELEASE_YAML):
with open(RELEASE_YAML, 'r') as data:
releases = yaml.safe_load(data)
with open(MANIFEST_JSON, 'r') as data:
MANIFEST = json.load(data)
def parse_artifact_ids(ids):
    """Split a 'region:ami,region:ami,...' string into a region->AMI dict."""
    tokens = iter(re.split(":|,", ids))
    # zipping one iterator with itself pairs consecutive tokens
    return dict(zip(tokens, tokens))
A = re.split(':|,', MANIFEST['builds'][0]['artifact_id'])
ARTIFACTS = dict(zip(A[0::2], A[1::2]))
BUILD_TIME = MANIFEST['builds'][0]['build_time']
DATA = MANIFEST['builds'][0]['custom_data']
RELEASE = DATA['release']
if BUILD not in releases:
releases[BUILD] = {}
if RELEASE not in releases[BUILD]:
releases[BUILD][RELEASE] = {}
def main(args):
parser = argparse.ArgumentParser(description="Update release YAML")
parser.add_argument("profile", help="name of profile to update")
parser.add_argument("build", help="name of build to update")
args = parser.parse_args()
REVISION = {
'description': DATA['ami_desc'],
'profile': PROFILE,
'profile_build': BUILD,
'version': DATA['version'],
'release': RELEASE,
'arch': DATA['arch'],
'revision': DATA['revision'],
'end_of_life': DATA['end_of_life'],
'build_time': BUILD_TIME,
'artifacts': ARTIFACTS
}
root = find_repo_root()
releases[BUILD][RELEASE][DATA['ami_name']] = REVISION
release_dir = os.path.join(root, "releases")
if not os.path.exists(release_dir):
os.makedirs(release_dir)
with open(RELEASE_YAML, 'w') as data:
yaml.dump(releases, data, sort_keys=False)
release_yaml = os.path.join(release_dir, f"{args.profile}.yaml")
releases = {}
if os.path.exists(release_yaml):
with open(release_yaml, "r") as data:
releases = yaml.safe_load(data)
manifest_json = os.path.join(
root, "build", "profile", args.profile, args.build, "manifest.json")
with open(manifest_json, "r") as data:
manifest = json.load(data)
data = manifest["builds"][0]["custom_data"]
release = data["release"]
if args.build not in releases:
releases[args.build] = {}
if release not in releases[args.build]:
releases[args.build][release] = {}
releases[args.build][release][data["ami_name"]] = {
"description": data["ami_desc"],
"profile": args.profile,
"profile_build": args.build,
"version": data["version"],
"release": release,
"arch": data["arch"],
"revision": data["revision"],
"end_of_life": data["end_of_life"],
"build_time": manifest["builds"][0]["build_time"],
"artifacts": parse_artifact_ids(manifest["builds"][0]["artifact_id"]),
}
with open(release_yaml, "w") as data:
yaml.dump(releases, data, sort_keys=False)
if __name__ == "__main__":
main(sys.argv)