Convert python scripts to argparse
This removes the manual command line handling and reformats the scripts into main methods. This is paving the way for a more unified build tool.
This commit is contained in:
parent
d63409acce
commit
a36d0616bf
|
@ -1,119 +1,131 @@
|
||||||
@PYTHON@
|
@PYTHON@
|
||||||
# vim: ts=4 et:
|
# vim: ts=4 et:
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from distutils.version import StrictVersion
|
|
||||||
import functools
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import argparse
|
||||||
|
import textwrap
|
||||||
|
from datetime import datetime
|
||||||
|
from collections import defaultdict
|
||||||
|
from distutils.version import StrictVersion
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
if len(sys.argv) != 2:
|
|
||||||
sys.exit("Usage: " + os.path.basename(__file__) + "<profile>")
|
|
||||||
|
|
||||||
PROFILE = sys.argv[1]
|
def find_repo_root():
|
||||||
|
path = os.getcwd()
|
||||||
|
|
||||||
RELEASE_DIR = os.path.join(
|
while ".git" not in set(os.listdir(path)) and path != "/":
|
||||||
os.path.dirname(os.path.realpath(__file__)),
|
path = os.path.dirname(path)
|
||||||
'..', 'releases'
|
|
||||||
)
|
|
||||||
|
|
||||||
README_MD = os.path.join( RELEASE_DIR, 'README.md')
|
if path == "/":
|
||||||
RELEASE_YAML = os.path.join( RELEASE_DIR, PROFILE + '.yaml')
|
raise Exception("No repo found, stopping at /")
|
||||||
|
|
||||||
# read in releases/<profile>.yaml
|
return path
|
||||||
with open(RELEASE_YAML, 'r') as data:
|
|
||||||
RELEASES = yaml.safe_load(data)
|
|
||||||
|
|
||||||
sections = {}
|
|
||||||
|
|
||||||
for build, releases in RELEASES.items():
|
|
||||||
for release, amis in releases.items():
|
|
||||||
for name, info in amis.items():
|
|
||||||
version = info['version']
|
|
||||||
arch = info['arch']
|
|
||||||
built = info['build_time']
|
|
||||||
if version in sections:
|
|
||||||
ver = sections[version]
|
|
||||||
else:
|
|
||||||
ver = {
|
|
||||||
'release': '',
|
|
||||||
'built': {},
|
|
||||||
'name': {},
|
|
||||||
'ami': {}
|
|
||||||
}
|
|
||||||
if (arch not in ver['built'] or
|
|
||||||
ver['built'][arch] < built):
|
|
||||||
ver['release'] = release
|
|
||||||
ver['name'][arch] = name
|
|
||||||
ver['built'][arch] = built
|
|
||||||
for region, ami in info['artifacts'].items():
|
|
||||||
if region not in ver['ami']:
|
|
||||||
ver['ami'][region] = {}
|
|
||||||
ver['ami'][region][arch] = ami
|
|
||||||
sections[version] = ver
|
|
||||||
|
|
||||||
SECTION = """
|
|
||||||
### Alpine Linux {release} ({date})
|
|
||||||
<details><summary><i>click to show/hide</i></summary><p>
|
|
||||||
|
|
||||||
{rows}
|
|
||||||
|
|
||||||
</p></details>
|
|
||||||
"""
|
|
||||||
|
|
||||||
AMI = " [{id}](https://{r}.console.aws.amazon.com/ec2/home#Images:visibility=public-images;imageId={id}) " + \
|
|
||||||
"([launch](https://{r}.console.aws.amazon.com/ec2/home#launchAmi={id})) |"
|
|
||||||
|
|
||||||
ARCHS = ['x86_64', 'aarch64']
|
|
||||||
|
|
||||||
|
|
||||||
# most -> least recent version, edge at end
|
class ReleaseReadmeUpdater:
|
||||||
def ver_cmp(a, b):
|
|
||||||
try:
|
|
||||||
if StrictVersion(a) < StrictVersion(b):
|
|
||||||
return 1
|
|
||||||
if StrictVersion(a) > StrictVersion(b):
|
|
||||||
return -1
|
|
||||||
return 0
|
|
||||||
except ValueError:
|
|
||||||
# "edge" doesn't work with StrictVersion
|
|
||||||
if a == 'edge':
|
|
||||||
return 1
|
|
||||||
if b == 'edge':
|
|
||||||
return -1
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
SECTION_TPL = textwrap.dedent("""
|
||||||
|
### Alpine Linux {release} ({date})
|
||||||
|
<details><summary><i>click to show/hide</i></summary><p>
|
||||||
|
|
||||||
ami_list = "## AMIs\n"
|
{rows}
|
||||||
|
|
||||||
for version in sorted(list(sections.keys()), key=functools.cmp_to_key(ver_cmp)):
|
</p></details>
|
||||||
info = sections[version]
|
""")
|
||||||
release = info['release']
|
|
||||||
rows = []
|
AMI_TPL = (
|
||||||
rows.append('| Region |')
|
" [{id}](https://{r}.console.aws.amazon.com/ec2/home"
|
||||||
rows.append('| ------ |')
|
"#Images:visibility=public-images;imageId={id}) "
|
||||||
for arch in ARCHS:
|
"([launch](https://{r}.console.aws.amazon.com/ec2/home"
|
||||||
if arch in info['name']:
|
"#launchAmi={id})) |"
|
||||||
rows[0] += ' {n} |'.format(n=info['name'][arch])
|
|
||||||
rows[1] += ' --- |'
|
|
||||||
for region, amis in info['ami'].items():
|
|
||||||
row = '| {r} |'.format(r=region)
|
|
||||||
for arch in ARCHS:
|
|
||||||
if arch in amis:
|
|
||||||
row += AMI.format(r=region, id=amis[arch])
|
|
||||||
rows.append(row)
|
|
||||||
ami_list += SECTION.format(
|
|
||||||
release=release.capitalize(),
|
|
||||||
date=datetime.utcfromtimestamp(max(info['built'].values())).date(),
|
|
||||||
rows="\n".join(rows)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
with open(README_MD, 'r') as file:
|
def __init__(self, profile, archs=None):
|
||||||
readme = file.read()
|
self.profile = profile
|
||||||
|
self.archs = archs or ["x86_64", "aarch64"]
|
||||||
|
|
||||||
readme_re = re.compile('## AMIs.*\Z', re.S)
|
def get_sorted_releases(self, release_data):
|
||||||
|
sections = defaultdict(lambda: {
|
||||||
|
"release": "",
|
||||||
|
"built": {},
|
||||||
|
"name": {},
|
||||||
|
"ami": defaultdict(dict)
|
||||||
|
})
|
||||||
|
|
||||||
with open(README_MD, 'w') as file:
|
for build, releases in release_data.items():
|
||||||
file.write(readme_re.sub(ami_list, readme))
|
for release, amis in releases.items():
|
||||||
|
for name, info in amis.items():
|
||||||
|
arch = info["arch"]
|
||||||
|
built = info["build_time"]
|
||||||
|
ver = sections[info["version"]]
|
||||||
|
|
||||||
|
if arch not in ver["built"] or ver["built"][arch] < built:
|
||||||
|
ver["release"] = release
|
||||||
|
ver["name"][arch] = name
|
||||||
|
ver["built"][arch] = built
|
||||||
|
|
||||||
|
for region, ami in info["artifacts"].items():
|
||||||
|
ver["ami"][region][arch] = ami
|
||||||
|
|
||||||
|
extract_ver = lambda x: StrictVersion(
|
||||||
|
"0.0" if x["release"] == "edge" else x["release"])
|
||||||
|
|
||||||
|
return sorted(sections.values(), key=extract_ver, reverse=True)
|
||||||
|
|
||||||
|
def make_ami_list(self, sorted_releases):
|
||||||
|
ami_list = "## AMIs\n"
|
||||||
|
|
||||||
|
for info in sorted_releases:
|
||||||
|
rows = ["| Region |", "| ------ |"]
|
||||||
|
|
||||||
|
for arch in self.archs:
|
||||||
|
if arch in info["name"]:
|
||||||
|
rows[0] += f" {info['name'][arch]} |"
|
||||||
|
rows[1] += " --- |"
|
||||||
|
|
||||||
|
for region, amis in info["ami"].items():
|
||||||
|
row = f"| {region} |"
|
||||||
|
for arch in self.archs:
|
||||||
|
if arch in amis:
|
||||||
|
row += self.AMI_TPL.format(r=region, id=amis[arch])
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
ami_list += self.SECTION_TPL.format(
|
||||||
|
release=info["release"].capitalize(),
|
||||||
|
date=datetime.utcfromtimestamp(
|
||||||
|
max(info["built"].values())).date(),
|
||||||
|
rows="\n".join(rows))
|
||||||
|
|
||||||
|
return ami_list
|
||||||
|
|
||||||
|
def update_markdown(self):
|
||||||
|
release_dir = os.path.join(find_repo_root(), "releases")
|
||||||
|
profile_file = os.path.join(release_dir, f"{self.profile}.yaml")
|
||||||
|
|
||||||
|
with open(profile_file, "r") as data:
|
||||||
|
sorted_releases = self.get_sorted_releases(yaml.safe_load(data))
|
||||||
|
|
||||||
|
readme_md = os.path.join(release_dir, "README.md")
|
||||||
|
|
||||||
|
with open(readme_md, "r") as file:
|
||||||
|
readme = file.read()
|
||||||
|
|
||||||
|
with open(readme_md, "w") as file:
|
||||||
|
file.write(
|
||||||
|
re.sub("## AMIs.*\Z", self.make_ami_list(sorted_releases),
|
||||||
|
readme, flags=re.S))
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(description="Update release README")
|
||||||
|
parser.add_argument("profile", help="name of profile to update")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
ReleaseReadmeUpdater(args.profile).update_markdown()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
|
@ -1,137 +1,168 @@
|
||||||
@PYTHON@
|
#@PYTHON@
|
||||||
# vim: ts=4 et:
|
# vim: ts=4 et:
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import argparse
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import yaml
|
||||||
import boto3
|
import boto3
|
||||||
from botocore.exceptions import ClientError
|
from botocore.exceptions import ClientError
|
||||||
import yaml
|
|
||||||
|
|
||||||
LEVELS = ['revision', 'release', 'version']
|
LEVEL_HELP = """\
|
||||||
|
revision - keep only the latest revision per release
|
||||||
|
release - keep only the latest release per version
|
||||||
|
version - keep only the versions that aren't end-of-life
|
||||||
|
"""
|
||||||
|
|
||||||
if 3 < len(sys.argv) > 4 or sys.argv[1] not in LEVELS:
|
|
||||||
sys.exit("Usage: " + os.path.basename(__file__) + """ <level> <profile> [<build>]
|
|
||||||
<level> :-
|
|
||||||
revision - keep only the latest revision per release
|
|
||||||
release - keep only the latest release per version
|
|
||||||
version - keep only the versions that aren't end-of-life""")
|
|
||||||
|
|
||||||
NOW = datetime.utcnow()
|
def find_repo_root():
|
||||||
LEVEL = sys.argv[1]
|
path = os.getcwd()
|
||||||
PROFILE = sys.argv[2]
|
|
||||||
BUILD = None if len(sys.argv) == 3 else sys.argv[3]
|
|
||||||
|
|
||||||
RELEASE_YAML = os.path.join(
|
while ".git" not in set(os.listdir(path)) and path != "/":
|
||||||
os.path.dirname(os.path.realpath(__file__)),
|
path = os.path.dirname(path)
|
||||||
'..', 'releases', PROFILE + '.yaml'
|
|
||||||
)
|
|
||||||
|
|
||||||
with open(RELEASE_YAML, 'r') as data:
|
if path == "/":
|
||||||
BEFORE = yaml.safe_load(data)
|
raise Exception("No repo found, stopping at /")
|
||||||
|
|
||||||
known = {}
|
return path
|
||||||
prune = {}
|
|
||||||
after = {}
|
|
||||||
|
|
||||||
# for all builds in the profile...
|
|
||||||
for build_name, releases in BEFORE.items():
|
|
||||||
|
|
||||||
# this is not the build that was specified
|
def main(args):
|
||||||
if BUILD is not None and BUILD != build_name:
|
parser = argparse.ArgumentParser(
|
||||||
print('< skipping {0}/{1}'.format(PROFILE, build_name))
|
description="Prune AMIs from AWS",
|
||||||
# ensure its release data remains intact
|
formatter_class=argparse.RawTextHelpFormatter)
|
||||||
after[build_name] = BEFORE[build_name]
|
parser.add_argument(
|
||||||
continue
|
"level", choices=["revision", "release", "version"], help=LEVEL_HELP)
|
||||||
else:
|
parser.add_argument("profile", help="profile to prune")
|
||||||
print('> PRUNING {0}/{1} for {2}'.format(PROFILE, build_name, LEVEL))
|
parser.add_argument(
|
||||||
|
"build", nargs="?", help="build within profile to prune")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
criteria = {}
|
now = datetime.utcnow()
|
||||||
|
|
||||||
# scan releases for pruning criteria
|
release_yaml = os.path.join(
|
||||||
for release, amis in releases.items():
|
find_repo_root(), "releases", f"{args.profile}.yaml")
|
||||||
for ami_name, info in amis.items():
|
|
||||||
version = info['version']
|
|
||||||
if info['end_of_life']:
|
|
||||||
eol = datetime.fromisoformat(info['end_of_life'])
|
|
||||||
else:
|
|
||||||
eol = None
|
|
||||||
built = info['build_time']
|
|
||||||
for region, ami_id in info['artifacts'].items():
|
|
||||||
if region not in known:
|
|
||||||
known[region] = []
|
|
||||||
known[region].append(ami_id)
|
|
||||||
|
|
||||||
if LEVEL == 'revision':
|
with open(release_yaml, "r") as data:
|
||||||
# find build timestamp of most recent revision, per release
|
before = yaml.safe_load(data)
|
||||||
if release not in criteria or built > criteria[release]:
|
|
||||||
criteria[release] = built
|
|
||||||
elif LEVEL == 'release':
|
|
||||||
# find build timestamp of most recent revision, per version
|
|
||||||
if version not in criteria or built > criteria[version]:
|
|
||||||
criteria[version] = built
|
|
||||||
elif LEVEL == 'version':
|
|
||||||
# find latest EOL date, per version
|
|
||||||
if (version not in criteria or not criteria[version]) or (
|
|
||||||
eol and eol > criteria[version]):
|
|
||||||
criteria[version] = eol
|
|
||||||
|
|
||||||
# rescan again to determine what doesn't make the cut
|
known = {}
|
||||||
for release, amis in releases.items():
|
prune = {}
|
||||||
for ami_name, info in amis.items():
|
after = {}
|
||||||
version = info['version']
|
|
||||||
if info['end_of_life']:
|
|
||||||
eol = datetime.fromisoformat(info['end_of_life'])
|
|
||||||
else:
|
|
||||||
eol = None
|
|
||||||
built = info['build_time']
|
|
||||||
if ((LEVEL == 'revision' and built < criteria[release]) or
|
|
||||||
(LEVEL == 'release' and built < criteria[version]) or
|
|
||||||
(LEVEL == 'version' and criteria[version] and (
|
|
||||||
(version != 'edge' and criteria[version] < NOW) or
|
|
||||||
(version == 'edge' and ((not eol) or (eol < NOW)))
|
|
||||||
))):
|
|
||||||
for region, ami_id in info['artifacts'].items():
|
|
||||||
if region not in prune:
|
|
||||||
prune[region] = []
|
|
||||||
prune[region].append(ami_id)
|
|
||||||
else:
|
|
||||||
if build_name not in after:
|
|
||||||
after[build_name] = {}
|
|
||||||
if release not in after[build_name]:
|
|
||||||
after[build_name][release] = {}
|
|
||||||
after[build_name][release][ami_name] = info
|
|
||||||
|
|
||||||
# scan all regions for AMIs
|
# for all builds in the profile...
|
||||||
AWS = boto3.session.Session()
|
for build_name, releases in before.items():
|
||||||
for region in AWS.get_available_regions('ec2'):
|
|
||||||
print("* scanning: " + region + '...')
|
|
||||||
EC2 = AWS.client('ec2', region_name=region)
|
|
||||||
|
|
||||||
try:
|
# this is not the build that was specified
|
||||||
for image in EC2.describe_images(Owners=['self'])['Images']:
|
if args.build is not None and args.build != build_name:
|
||||||
|
print(f"< skipping {args.profile}/{build_name}")
|
||||||
|
# ensure its release data remains intact
|
||||||
|
after[build_name] = before[build_name]
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
print(f"> PRUNING {args.profile}/{build_name} for {args.level}")
|
||||||
|
|
||||||
action = '? UNKNOWN'
|
criteria = {}
|
||||||
if region in prune and image['ImageId'] in prune[region]:
|
|
||||||
action = '- REMOVING'
|
|
||||||
elif region in known and image['ImageId'] in known[region]:
|
|
||||||
action = '+ KEEPING'
|
|
||||||
|
|
||||||
print(' ' + action + ': ' + image['Name'] +
|
# scan releases for pruning criteria
|
||||||
"\n = " + image['ImageId'], end='', flush=True)
|
for release, amis in releases.items():
|
||||||
if action[0] == '-':
|
for ami_name, info in amis.items():
|
||||||
EC2.deregister_image(ImageId=image['ImageId'])
|
version = info["version"]
|
||||||
for blockdev in image['BlockDeviceMappings']:
|
built = info["build_time"]
|
||||||
if 'Ebs' in blockdev:
|
|
||||||
print(', ' + blockdev['Ebs']['SnapshotId'],
|
|
||||||
end='', flush=True)
|
|
||||||
if action[0] == '-':
|
|
||||||
EC2.delete_snapshot(
|
|
||||||
SnapshotId=blockdev['Ebs']['SnapshotId'])
|
|
||||||
print()
|
|
||||||
except ClientError as e:
|
|
||||||
print(e)
|
|
||||||
|
|
||||||
# update releases/<profile>.yaml
|
if info["end_of_life"]:
|
||||||
with open(RELEASE_YAML, 'w') as data:
|
eol = datetime.fromisoformat(info["end_of_life"])
|
||||||
yaml.dump(after, data, sort_keys=False)
|
else:
|
||||||
|
eol = None
|
||||||
|
|
||||||
|
for region, ami_id in info["artifacts"].items():
|
||||||
|
if region not in known:
|
||||||
|
known[region] = []
|
||||||
|
known[region].append(ami_id)
|
||||||
|
|
||||||
|
if args.level == "revision":
|
||||||
|
# find build timestamp of most recent revision, per release
|
||||||
|
if release not in criteria or built > criteria[release]:
|
||||||
|
criteria[release] = built
|
||||||
|
elif args.level == "release":
|
||||||
|
# find build timestamp of most recent revision, per version
|
||||||
|
if version not in criteria or built > criteria[version]:
|
||||||
|
criteria[version] = built
|
||||||
|
elif args.level == "version":
|
||||||
|
# find latest EOL date, per version
|
||||||
|
if (version not in criteria or not criteria[version]) or (
|
||||||
|
eol and eol > criteria[version]):
|
||||||
|
criteria[version] = eol
|
||||||
|
|
||||||
|
# rescan again to determine what doesn't make the cut
|
||||||
|
for release, amis in releases.items():
|
||||||
|
for ami_name, info in amis.items():
|
||||||
|
version = info["version"]
|
||||||
|
built = info["build_time"]
|
||||||
|
|
||||||
|
if info["end_of_life"]:
|
||||||
|
eol = datetime.fromisoformat(info["end_of_life"])
|
||||||
|
else:
|
||||||
|
eol = None
|
||||||
|
|
||||||
|
if ((args.level == "revision" and built < criteria[release]) or
|
||||||
|
(args.level == "release" and built < criteria[version]) or
|
||||||
|
(args.level == "version" and criteria[version] and (
|
||||||
|
(version != "edge" and criteria[version] < now) or
|
||||||
|
(version == "edge" and ((not eol) or (eol < now)))
|
||||||
|
))):
|
||||||
|
for region, ami_id in info["artifacts"].items():
|
||||||
|
if region not in prune:
|
||||||
|
prune[region] = []
|
||||||
|
|
||||||
|
prune[region].append(ami_id)
|
||||||
|
else:
|
||||||
|
if build_name not in after:
|
||||||
|
after[build_name] = {}
|
||||||
|
|
||||||
|
if release not in after[build_name]:
|
||||||
|
after[build_name][release] = {}
|
||||||
|
|
||||||
|
after[build_name][release][ami_name] = info
|
||||||
|
|
||||||
|
# scan all regions for AMIs
|
||||||
|
AWS = boto3.session.Session()
|
||||||
|
for region in AWS.get_available_regions("ec2"):
|
||||||
|
print(f"* scanning: {region} ...")
|
||||||
|
EC2 = AWS.client("ec2", region_name=region)
|
||||||
|
|
||||||
|
try:
|
||||||
|
for image in EC2.describe_images(Owners=["self"])["Images"]:
|
||||||
|
|
||||||
|
action = "? UNKNOWN"
|
||||||
|
if region in prune and image["ImageId"] in prune[region]:
|
||||||
|
action = "- REMOVING"
|
||||||
|
elif region in known and image["ImageId"] in known[region]:
|
||||||
|
action = "+ KEEPING"
|
||||||
|
|
||||||
|
print(f" {action}: {image['Name']}\n = {image['ImageId']}",
|
||||||
|
end="", flush=True)
|
||||||
|
|
||||||
|
if action[0] == "-":
|
||||||
|
EC2.deregister_image(ImageId=image["ImageId"])
|
||||||
|
|
||||||
|
for blockdev in image["BlockDeviceMappings"]:
|
||||||
|
if "Ebs" in blockdev:
|
||||||
|
print(f", {blockdev['Ebs']['SnapshotId']}",
|
||||||
|
end="", flush=True)
|
||||||
|
if action[0] == "-":
|
||||||
|
EC2.delete_snapshot(
|
||||||
|
SnapshotId=blockdev["Ebs"]["SnapshotId"])
|
||||||
|
print()
|
||||||
|
except ClientError as e:
|
||||||
|
print(e)
|
||||||
|
|
||||||
|
# update releases/<profile>.yaml
|
||||||
|
with open(release_yaml, "w") as data:
|
||||||
|
yaml.dump(after, data, sort_keys=False)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main(sys.argv)
|
||||||
|
|
|
@ -1,62 +1,80 @@
|
||||||
@PYTHON@
|
@PYTHON@
|
||||||
# vim: set ts=4 et:
|
# vim: set ts=4 et:
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
import json
|
||||||
|
import argparse
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
if len(sys.argv) != 3:
|
|
||||||
sys.exit("Usage: " + os.path.basename(__file__) + " <profile> <build>")
|
|
||||||
|
|
||||||
PROFILE = sys.argv[1]
|
def find_repo_root():
|
||||||
BUILD = sys.argv[2]
|
path = os.getcwd()
|
||||||
|
|
||||||
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
|
while ".git" not in set(os.listdir(path)) and path != "/":
|
||||||
MANIFEST_JSON = os.path.join(
|
path = os.path.dirname(path)
|
||||||
SCRIPT_DIR, 'profile', PROFILE, BUILD, 'manifest.json'
|
|
||||||
)
|
|
||||||
|
|
||||||
RELEASE_DIR = os.path.join(SCRIPT_DIR, '..', 'releases')
|
if path == "/":
|
||||||
RELEASE_YAML = os.path.join(RELEASE_DIR, PROFILE + '.yaml')
|
raise Exception("No repo found, stopping at /")
|
||||||
|
|
||||||
if not os.path.exists(RELEASE_DIR):
|
return path
|
||||||
os.makedirs(RELEASE_DIR)
|
|
||||||
|
|
||||||
releases = {}
|
|
||||||
if os.path.exists(RELEASE_YAML):
|
|
||||||
with open(RELEASE_YAML, 'r') as data:
|
|
||||||
releases = yaml.safe_load(data)
|
|
||||||
|
|
||||||
with open(MANIFEST_JSON, 'r') as data:
|
def parse_artifact_ids(ids):
|
||||||
MANIFEST = json.load(data)
|
parsed = re.split(":|,", ids)
|
||||||
|
return dict(zip(parsed[0::2], parsed[1::2]))
|
||||||
|
|
||||||
A = re.split(':|,', MANIFEST['builds'][0]['artifact_id'])
|
|
||||||
ARTIFACTS = dict(zip(A[0::2], A[1::2]))
|
|
||||||
BUILD_TIME = MANIFEST['builds'][0]['build_time']
|
|
||||||
DATA = MANIFEST['builds'][0]['custom_data']
|
|
||||||
RELEASE = DATA['release']
|
|
||||||
|
|
||||||
if BUILD not in releases:
|
def main(args):
|
||||||
releases[BUILD] = {}
|
parser = argparse.ArgumentParser(description="Update release YAML")
|
||||||
if RELEASE not in releases[BUILD]:
|
parser.add_argument("profile", help="name of profile to update")
|
||||||
releases[BUILD][RELEASE] = {}
|
parser.add_argument("build", help="name of build to update")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
REVISION = {
|
root = find_repo_root()
|
||||||
'description': DATA['ami_desc'],
|
|
||||||
'profile': PROFILE,
|
|
||||||
'profile_build': BUILD,
|
|
||||||
'version': DATA['version'],
|
|
||||||
'release': RELEASE,
|
|
||||||
'arch': DATA['arch'],
|
|
||||||
'revision': DATA['revision'],
|
|
||||||
'end_of_life': DATA['end_of_life'],
|
|
||||||
'build_time': BUILD_TIME,
|
|
||||||
'artifacts': ARTIFACTS
|
|
||||||
}
|
|
||||||
|
|
||||||
releases[BUILD][RELEASE][DATA['ami_name']] = REVISION
|
release_dir = os.path.join(root, "releases")
|
||||||
|
if not os.path.exists(release_dir):
|
||||||
|
os.makedirs(release_dir)
|
||||||
|
|
||||||
with open(RELEASE_YAML, 'w') as data:
|
release_yaml = os.path.join(release_dir, f"{args.profile}.yaml")
|
||||||
yaml.dump(releases, data, sort_keys=False)
|
releases = {}
|
||||||
|
if os.path.exists(release_yaml):
|
||||||
|
with open(release_yaml, "r") as data:
|
||||||
|
releases = yaml.safe_load(data)
|
||||||
|
|
||||||
|
manifest_json = os.path.join(
|
||||||
|
root, "build", "profile", args.profile, args.build, "manifest.json")
|
||||||
|
with open(manifest_json, "r") as data:
|
||||||
|
manifest = json.load(data)
|
||||||
|
|
||||||
|
data = manifest["builds"][0]["custom_data"]
|
||||||
|
release = data["release"]
|
||||||
|
|
||||||
|
if args.build not in releases:
|
||||||
|
releases[args.build] = {}
|
||||||
|
|
||||||
|
if release not in releases[args.build]:
|
||||||
|
releases[args.build][release] = {}
|
||||||
|
|
||||||
|
releases[args.build][release][data["ami_name"]] = {
|
||||||
|
"description": data["ami_desc"],
|
||||||
|
"profile": args.profile,
|
||||||
|
"profile_build": args.build,
|
||||||
|
"version": data["version"],
|
||||||
|
"release": release,
|
||||||
|
"arch": data["arch"],
|
||||||
|
"revision": data["revision"],
|
||||||
|
"end_of_life": data["end_of_life"],
|
||||||
|
"build_time": manifest["builds"][0]["build_time"],
|
||||||
|
"artifacts": parse_artifact_ids(manifest["builds"][0]["artifact_id"]),
|
||||||
|
}
|
||||||
|
|
||||||
|
with open(release_yaml, "w") as data:
|
||||||
|
yaml.dump(releases, data, sort_keys=False)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main(sys.argv)
|
||||||
|
|
Loading…
Reference in New Issue
Block a user