Code cleanup for PEP8, plus some fixes along the way

This commit is contained in:
Stefan Reimer 2019-02-07 15:36:16 +00:00
parent 3587451170
commit 47b492a65e
7 changed files with 119 additions and 172 deletions

View File

@ -11,13 +11,14 @@ from .utils import setup_logging
import logging
logger = logging.getLogger(__name__)
@click.group()
@click.version_option(version=__version__, prog_name="CloudBender")
@click.option("--debug", is_flag=True, help="Turn on debug logging.")
@click.option("--dir", "directory", help="Specify cloudbender project directory.")
@click.pass_context
def cli(ctx, debug, directory):
logger = setup_logging(debug)
setup_logging(debug)
# Read global config
cb = CloudBender(directory if directory else os.getcwd())
@ -86,7 +87,7 @@ def provision(ctx, stack_names, multi):
futures.append(group.submit(stack.update))
for future in as_completed(futures):
result = future.result()
future.result()
@click.command()
@ -109,7 +110,7 @@ def delete(ctx, stack_names, multi):
futures.append(group.submit(stack.delete))
for future in as_completed(futures):
result = future.result()
future.result()
@click.command()
@ -140,8 +141,9 @@ def sort_stacks(ctx, stacks):
data[s.id] = set(deps)
logger.debug("Stack {} depends on {}".format(s.id, deps))
# Ignore self dependencies
for k, v in data.items():
v.discard(k) # Ignore self dependencies
v.discard(k)
extra_items_in_deps = functools.reduce(set.union, data.values()) - set(data.keys())
data.update({item: set() for item in extra_items_in_deps})
@ -154,7 +156,8 @@ def sort_stacks(ctx, stacks):
result = []
for o in ordered:
for s in stacks:
if s.id == o: result.append(s)
if s.id == o:
result.append(s)
yield result
data = {item: (dep - ordered) for item, dep in data.items()
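The hunks above implement the classic Kahn-style topological sort over a dict of {stack_id: set(dependencies)}. A minimal standalone sketch of the same recipe, using hypothetical stack names rather than anything from the commit:

    import functools

    def toposort(data):
        # Yield batches of items whose dependencies are already satisfied
        for k, v in data.items():
            v.discard(k)  # ignore self dependencies
        # Items that only ever appear as dependencies get an empty dependency set
        extra = functools.reduce(set.union, data.values()) - set(data.keys())
        data.update({item: set() for item in extra})
        while data:
            ordered = {item for item, dep in data.items() if not dep}
            if not ordered:
                raise ValueError("Circular dependency between {}".format(set(data)))
            yield ordered
            data = {item: (dep - ordered) for item, dep in data.items()
                    if item not in ordered}

    # Example: vpc has no dependencies, app and db both depend on vpc
    print(list(toposort({'app': {'vpc'}, 'db': {'vpc'}, 'vpc': set()})))
    # -> [{'vpc'}, {'app', 'db'}]  (order inside a batch is arbitrary)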

View File

@ -9,6 +9,7 @@ import logging
logger = logging.getLogger(__name__)
class BotoConnection():
_sessions = {}
_clients = {}
@ -17,7 +18,6 @@ class BotoConnection():
self.region = region
self.profile = profile
def _get_session(self, profile=None, region=None):
if self._sessions.get((profile, region)):
return self._sessions[(profile, region)]
@ -39,7 +39,6 @@ class BotoConnection():
return session
def _get_client(self, service, profile=None, region=None):
if self._clients.get((profile, region, service)):
return self._clients[(profile, region, service)]
@ -50,7 +49,6 @@ class BotoConnection():
self._clients[(profile, region, service)] = client
return client
def call(self, service, command, kwargs={}, profile=None, region=None):
while True:
try:
@ -64,4 +62,3 @@ class BotoConnection():
pass
else:
raise e
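For context, BotoConnection memoizes boto3 sessions and clients per (profile, region) so repeated calls reuse the same connection. A simplified sketch of that caching pattern (boto3 assumed; the real class adds the retry loop visible in call() above):

    import boto3

    class BotoConnection:
        _sessions = {}
        _clients = {}

        def __init__(self, profile=None, region=None):
            self.profile = profile
            self.region = region

        def _get_session(self, profile=None, region=None):
            key = (profile, region)
            if key not in self._sessions:
                # One boto3 Session per (profile, region) pair
                self._sessions[key] = boto3.session.Session(
                    profile_name=profile, region_name=region)
            return self._sessions[key]

        def _get_client(self, service, profile=None, region=None):
            key = (profile, region, service)
            if key not in self._clients:
                self._clients[key] = self._get_session(profile, region).client(service)
            return self._clients[key]

        def call(self, service, command, kwargs={}, profile=None, region=None):
            # Dispatch e.g. call('cloudformation', 'describe_stacks', {...})
            client = self._get_client(service, profile, region)
            return getattr(client, command)(**kwargs)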

View File

@ -1,14 +1,12 @@
import os
import glob
import logging
from .utils import read_yaml_file, ensure_dir
from .stack import Stack
from .stackgroup import StackGroup
from .connection import BotoConnection
logger = logging.getLogger(__name__)
class CloudBender(object):
""" Config Class to handle recursive conf/* config tree """
def __init__(self, root_path):
@ -28,7 +26,6 @@ class CloudBender(object):
if not os.path.isdir(self.root):
raise "Check '{0}' exists and is a valid project folder.".format(root_path)
def read_config(self):
"""Load the <path>/config.yaml, <path>/*.yaml as stacks, sub-folders are child groups """
@ -66,18 +63,15 @@ class CloudBender(object):
# _config = { "vars": { 'Azs': {'TestAZ': 'Next'}, 'Segments': {'Testnet': 'internet'}, "Mode": "Piped" } }
# self.vars.update(_config.get('vars'))
def dump_config(self):
logger.debug("<CloudBender: {}>".format(vars(self)))
self.sg.dump_config()
def clean(self):
for s in self.all_stacks:
s.delete_template_file()
s.delete_parameter_file()
def resolve_stacks(self, token):
stacks = []
@ -100,7 +94,6 @@ class CloudBender(object):
return stacks
def filter_stacks(self, filter_by, stacks=None):
# filter_by is a dict { property, value }

View File

@ -1,8 +1,6 @@
import os
import io
import gzip
import jinja2
import oyaml as yaml
import re
import base64
@ -158,8 +156,10 @@ def regex_replace(value='', pattern='', replace='', ignorecase=False):
def pyminify(source, obfuscate=False, minify=True):
# pyminifier options
options = types.SimpleNamespace(tabs=False,replacement_length=1,use_nonlatin=0,
obfuscate=0,obf_variables=1,obf_classes=0,obf_functions=0,obf_import_methods=0,obf_builtins=0)
options = types.SimpleNamespace(
tabs=False, replacement_length=1, use_nonlatin=0,
obfuscate=0, obf_variables=1, obf_classes=0, obf_functions=0,
obf_import_methods=0, obf_builtins=0)
tokens = pyminifier.token_utils.listified_tokenizer(source)
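The SimpleNamespace here stands in for the argparse options object pyminifier normally builds from its own CLI. A rough sketch of how the tokens and options then feed the minifier, assuming pyminifier's minification.minify(tokens, options) helper:

    import types
    import pyminifier.token_utils
    import pyminifier.minification

    def pyminify(source):
        # Mimic pyminifier's CLI options object
        options = types.SimpleNamespace(
            tabs=False, replacement_length=1, use_nonlatin=0,
            obfuscate=0, obf_variables=1, obf_classes=0, obf_functions=0,
            obf_import_methods=0, obf_builtins=0)
        tokens = pyminifier.token_utils.listified_tokenizer(source)
        return pyminifier.minification.minify(tokens, options)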

View File

@ -5,6 +5,7 @@ import hashlib
import oyaml as yaml
import json
import time
import subprocess
from datetime import datetime, timedelta
from dateutil.tz import tzutc
@ -56,11 +57,9 @@ class Stack(object):
self.default_lock = None
self.multi_delete = True
def dump_config(self):
logger.debug("<Stack {}: {}>".format(self.id, vars(self)))
def read_config(self):
_config = read_yaml_file(self.path)
for p in ["region", "stackname", "template", "default_lock", "multi_delete", "provides"]:
@ -83,27 +82,26 @@ class Stack(object):
logger.debug("Stack {} added.".format(self.id))
def check_fortytwo(self, template):
# Fail early if 42 is enabled but not available
if self.cfn['Mode'] == "FortyTwo" and self.template != 'FortyTwo':
try:
response = self.connection_manager.call('lambda', 'get_function', {'FunctionName': 'FortyTwo'},
response = self.connection_manager.call(
'lambda', 'get_function', {'FunctionName': 'FortyTwo'},
profile=self.profile, region=self.region)
# Also verify version in case specified in the template's metadata
try:
req_ver = template['Metadata']['FortyTwo']['RequiredVersion']
if 'Release' not in response['Tags']:
abort("Lambda FortyTwo has no Release Tag! Required: {}".format(req_ver))
raise("Lambda FortyTwo has no Release Tag! Required: {}".format(req_ver))
elif semver.compare(req_ver, re.sub("-.*$", '', response['Tags']['Release'])) > 0:
abort("Lambda FortyTwo version is not recent enough! Required: {} vs. Found: {}".format(req_ver, response['Tags']['Release']))
raise("Lambda FortyTwo version is not recent enough! Required: {} vs. Found: {}".format(req_ver, response['Tags']['Release']))
except KeyError:
pass
except botocore.exceptions.ClientError:
abort("No Lambda FortyTwo found in your account")
raise("No Lambda FortyTwo found in your account")
def render(self):
"""Renders the cfn jinja template for this stack"""
@ -114,8 +112,7 @@ class Stack(object):
template_metadata = {
'Template.Name': self.template,
'Template.Hash': 'unknown',
'Template.GitComment': 'unknown',
'Template.Hash': 'tbd',
'CloudBender.Version': __version__
}
@ -129,30 +126,13 @@ class Stack(object):
jenv.globals['render_once'](context={'_config': self.template_vars}, reset=True)
jenv.globals['cloudbender_ctx'](context={'_config': self.template_vars}, reset=True)
# try to get local git info
# Try to add latest tag/commit for the template source, skip if not in git tree
try:
self.template_vars['Metadata']['{}.Version'.format(PROJECT_NAME)] = subprocess.check_output('git describe --tags'.split(' '), universal_newlines=True)[:-1]
except:
pass
# Add latest tag/commit
try:
os.chdir(ROOT_DIR)
_version = subprocess.check_output('git describe --tags'.split(' '), universal_newlines=True)[:-1]
if _version:
self.template_vars['Metadata']['CloudBender.Version'] = _version
os.chdir(os.path.dirname(template.filename))
_comment = subprocess.check_output('git log -1 --pretty=%B {0}{1}'
.format(input_file, TEMPLATE_EXT).split(' ')).decode('utf-8').strip() \
.replace('"', '').replace('#', '').replace('\n', '').replace(':', ' ')
_comment = subprocess.check_output('git log -1 --pretty=%B {}'.format(template.filename).split(' ')).decode('utf-8').strip().replace('"', '').replace('#', '').replace('\n', '').replace(':', ' ')
if _comment:
self.template_vars['Metadata']['Template.GitComment'] = _comment
template_metadata['Template.LastGitComment'] = _comment
os.chdir(PROJECT_DIR)
except:
except subprocess.CalledProcessError:
pass
logger.info('Rendering %s', template.filename)
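The rewritten metadata block shells out to git twice: git describe --tags for the CloudBender version and git log -1 --pretty=%B <template> for the last commit message touching the template. A hedged standalone sketch of those calls, without the directory switching done above (the template filename is only an example):

    import subprocess

    def git_metadata(template_file):
        meta = {}
        try:
            # Latest tag/commit of the current working tree
            meta['CloudBender.Version'] = subprocess.check_output(
                ['git', 'describe', '--tags'], universal_newlines=True).strip()
            # Subject of the last commit that touched the template, stripped of
            # characters that would upset the CloudFormation metadata block
            comment = subprocess.check_output(
                ['git', 'log', '-1', '--pretty=%B', template_file],
                universal_newlines=True).strip()
            meta['Template.LastGitComment'] = (comment.replace('"', '').replace('#', '')
                                               .replace('\n', '').replace(':', ' '))
        except subprocess.CalledProcessError:
            # Not inside a git tree (or file untracked): keep the defaults
            pass
        return meta

    # e.g. git_metadata('vpc.yaml.jinja')  # filename is hypothetical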
@ -160,10 +140,10 @@ class Stack(object):
try:
self.data = yaml.load(rendered)
except:
except Exception as e:
# In case we rendered invalid yaml this helps to debug
logger.error(rendered)
raise
raise e
# Some sanity checks and final cosmetics
# Check for empty top level Parameters, Outputs and Conditions and remove
@ -180,7 +160,6 @@ class Stack(object):
# Update internal data structures
self._parse_metadata()
def _parse_metadata(self):
# Extract dependencies if present
try:
@ -189,7 +168,6 @@ class Stack(object):
except KeyError:
pass
def write_template_file(self):
if self.cfn_template:
yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
@ -201,7 +179,6 @@ class Stack(object):
else:
logger.error('No cfn template rendered yet for stack {}.'.format(self.stackname))
def delete_template_file(self):
yaml_file = os.path.join(self.ctx['template_path'], self.rel_path, self.stackname + ".yaml")
try:
@ -210,7 +187,6 @@ class Stack(object):
except OSError:
pass
def read_template_file(self):
""" Reads rendered yaml template from disk and extracts metadata """
if not self.cfn_template:
@ -225,7 +201,6 @@ class Stack(object):
else:
logger.debug('Using cached cfn template %s.', self.stackname)
def validate(self):
"""Validates the rendered template via cfn-lint"""
self.read_template_file()
@ -258,7 +233,6 @@ class Stack(object):
else:
logger.info("Passed.")
def resolve_parameters(self):
""" Renders parameters for the stack based on the source template and the environment configuration """
@ -293,14 +267,13 @@ class Stack(object):
value = str(self.parameters[p])
self.cfn_parameters.append({'ParameterKey': p, 'ParameterValue': value})
logger.info('Got {} = {}'.format(p, value))
except KeyError as e:
except KeyError:
# If we have a Default defined in the CFN skip, as AWS will use it
if 'Default' in self.data['Parameters'][p]:
continue
else:
logger.error('Cannot find value for parameter {0}'.format(p))
def write_parameter_file(self):
parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")
@ -315,7 +288,6 @@ class Stack(object):
if os.path.isfile(parameter_file):
os.remove(parameter_file)
def delete_parameter_file(self):
parameter_file = os.path.join(self.ctx['parameter_path'], self.rel_path, self.stackname + ".yaml")
try:
@ -324,7 +296,6 @@ class Stack(object):
except OSError:
pass
def create(self):
"""Creates a stack """
@ -334,7 +305,8 @@ class Stack(object):
self.read_template_file()
logger.info('Creating {0} {1}'.format(self.region, self.stackname))
response = self.connection_manager.call('cloudformation', 'create_stack',
self.connection_manager.call(
'cloudformation', 'create_stack',
{'StackName': self.stackname,
'TemplateBody': self.cfn_template,
'Parameters': self.cfn_parameters,
@ -344,7 +316,6 @@ class Stack(object):
return self._wait_for_completion()
def update(self):
"""Updates an existing stack """
@ -355,7 +326,8 @@ class Stack(object):
logger.info('Updating {0} {1}'.format(self.region, self.stackname))
try:
response = self.connection_manager.call('cloudformation', 'update_stack',
self.connection_manager.call(
'cloudformation', 'update_stack',
{'StackName': self.stackname,
'TemplateBody': self.cfn_template,
'Parameters': self.cfn_parameters,
@ -372,17 +344,16 @@ class Stack(object):
return self._wait_for_completion()
def delete(self):
"""Deletes a stack """
logger.info('Deleting {0} {1}'.format(self.region, self.stackname))
response = self.connection_manager.call('cloudformation', 'delete_stack',
{'StackName':self.stackname}, profile=self.profile, region=self.region)
self.connection_manager.call(
'cloudformation', 'delete_stack', {'StackName': self.stackname},
profile=self.profile, region=self.region)
return self._wait_for_completion()
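create(), update(), and delete() all funnel the same CloudFormation operations through the connection manager. For reference, the equivalent direct boto3 calls look roughly like this (stack name, region and parameters are made-up examples):

    import boto3

    cfn = boto3.client('cloudformation', region_name='eu-west-1')

    cfn.create_stack(
        StackName='example-stack',
        TemplateBody=open('example-stack.yaml').read(),
        Parameters=[{'ParameterKey': 'VpcId', 'ParameterValue': 'vpc-12345678'}],
        Tags=[{'Key': 'Conglomerate', 'Value': 'example'}],
        Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'])

    cfn.delete_stack(StackName='example-stack')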
def create_change_set(self, change_set_name):
""" Creates a Change Set with the name ``change_set_name``. """
@ -392,7 +363,8 @@ class Stack(object):
self.read_template_file()
logger.info('Creating change set {0} for stack {1}'.format(change_set_name, self.stackname))
response = self.connection_manager.call('cloudformation', 'create_change_set',
self.connection_manager.call(
'cloudformation', 'create_change_set',
{'StackName': self.stackname,
'ChangeSetName': change_set_name,
'TemplateBody': self.cfn_template,
@ -402,7 +374,6 @@ class Stack(object):
profile=self.profile, region=self.region)
return self._wait_for_completion()
def describe(self):
"""
Returns a description of the stack.
@ -414,7 +385,6 @@ class Stack(object):
{"StackName": self.stackname},
profile=self.profile, region=self.region)
def get_status(self):
"""
Returns the stack's status.
@ -429,7 +399,6 @@ class Stack(object):
raise e
return status
def describe_events(self):
"""
Returns a dictionary containing the stack events.
@ -449,7 +418,6 @@ class Stack(object):
return status
def _wait_for_completion(self, timeout=0):
"""
Waits for a stack operation to finish. Prints CloudFormation events while it waits.
@ -477,7 +445,6 @@ class Stack(object):
return status
@staticmethod
def _get_simplified_status(status):
""" Returns the simplified Stack Status. """
@ -493,7 +460,6 @@ class Stack(object):
else:
return 'Unknown'
def _log_new_events(self):
"""
Log the latest stack events while the stack is being built.
@ -517,7 +483,6 @@ class Stack(object):
]))
self.most_recent_event_datetime = event["Timestamp"]
def _ensure_dirs(self, path):
# Ensure output dirs exist
if not os.path.exists(os.path.join(self.ctx[path], self.rel_path)):

View File

@ -21,7 +21,6 @@ class StackGroup(object):
if self.rel_path == '.':
self.rel_path = ''
def dump_config(self):
for sg in self.sgs:
sg.dump_config()
@ -31,7 +30,6 @@ class StackGroup(object):
for s in self.stacks:
s.dump_config()
def read_config(self, parent_config={}):
if not os.path.isdir(self.path):
@ -66,13 +64,10 @@ class StackGroup(object):
if stackname_prefix:
stackname = stackname_prefix + stackname
new_stack = Stack(name=stackname, template=template,
path=stack_path, rel_path=str(self.rel_path),
tags=dict(tags), parameters=dict(parameters),
template_vars=dict(template_vars),
region=str(region), profile=str(profile),
ctx=self.ctx
)
new_stack = Stack(
name=stackname, template=template, path=stack_path, rel_path=str(self.rel_path),
tags=dict(tags), parameters=dict(parameters), template_vars=dict(template_vars),
region=str(region), profile=str(profile), ctx=self.ctx)
new_stack.read_config()
self.stacks.append(new_stack)
@ -86,7 +81,6 @@ class StackGroup(object):
# Return raw, merged config to parent
return _config
def get_stacks(self, name=None, recursive=True, match_by='name'):
""" Returns [stack] matching stack_name or [all] """
stacks = []
@ -109,7 +103,6 @@ class StackGroup(object):
return stacks
def get_stackgroup(self, name=None, recursive=True, match_by='name'):
""" Returns stack group matching stackgroup_name or all if None """
if not name or (self.name == name and match_by == 'name') or (self.path.endswith(name) and match_by == 'path'):
@ -127,22 +120,19 @@ class StackGroup(object):
return None
# TODO: Integrate properly into stackgroup class, borken for now
# TODO: Integrate properly into stackgroup class, broken for now
# stackoutput inspection
def BROKEN_inspect_stacks(conglomerate):
def BROKEN_inspect_stacks(self, conglomerate):
# Get all stacks of the conglomerate
client = Connection.get_connection('cloudformation')
running_stacks=client.describe_stacks()
response = self.connection_manager.call('cloudformation', 'decribe_stacks')
stacks = []
for stack in running_stacks['Stacks']:
for stack in response['Stacks']:
for tag in stack['Tags']:
if tag['Key'] == 'Conglomerate' and tag['Value'] == conglomerate:
stacks.append(stack)
break
# Gather stack outputs, use Tag['Artifact'] as name space: Artifact.OutputName, same as FortyTwo
stack_outputs = {}
for stack in stacks:
@ -166,4 +156,3 @@ class StackGroup(object):
# Add outputs from stacks into the data for jinja under StackOutput
return stack_outputs
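The broken helper's intent, going by its comments, is to collect the outputs of every stack tagged with the conglomerate and expose them to Jinja as Artifact.OutputName. A hypothetical reconstruction of that gathering step (not the actual implementation; field names follow the describe_stacks response shape):

    def gather_stack_outputs(stacks):
        # stacks: list of stack dicts as returned by CloudFormation describe_stacks
        stack_outputs = {}
        for stack in stacks:
            # Use the Artifact tag as the namespace, if present
            artifact = next((t['Value'] for t in stack.get('Tags', [])
                             if t['Key'] == 'Artifact'), None)
            for output in stack.get('Outputs', []):
                key = output['OutputKey']
                if artifact:
                    key = '{}.{}'.format(artifact, key)
                stack_outputs[key] = output['OutputValue']
        return stack_outputs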

View File

@ -6,6 +6,7 @@ import boto3
logger = logging.getLogger(__name__)
def read_yaml_file(path):
data = {}
if os.path.exists(path):
@ -67,7 +68,6 @@ def setup_logging(debug):
datefmt="%Y-%m-%d %H:%M:%S"
)
log_handler = logging.StreamHandler()
log_handler.setFormatter(formatter)
logger = logging.getLogger("cloudbender")
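This is why the cli.py change at the top (logger = setup_logging(debug) becoming a bare setup_logging(debug)) is safe: each module already gets its own child logger via logging.getLogger(__name__), and those inherit the handler attached to the "cloudbender" parent logger. A minimal sketch of the pattern (the format string is illustrative, not the project's exact one):

    import logging

    def setup_logging(debug):
        level = logging.DEBUG if debug else logging.INFO
        formatter = logging.Formatter(
            fmt="[%(asctime)s] %(name)s %(levelname)s %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S")
        log_handler = logging.StreamHandler()
        log_handler.setFormatter(formatter)
        logger = logging.getLogger("cloudbender")
        logger.addHandler(log_handler)
        logger.setLevel(level)
        return logger

    # In each module:
    # logger = logging.getLogger(__name__)   # e.g. "cloudbender.stack"
    # messages propagate up to the "cloudbender" handler configured above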