# NOTE: the classes below assume module-level imports of os, sys, json, collections,
# datetime, threading, requests and memcache, plus the project's helper classes
# (AtlasBase, AtlasHelper, AwsHelper, AwsModule, Cloudability, SessionHandler,
# ObjectHelper, ...); those import paths are project-specific and not shown here.


class CloudabilityModule(AtlasBase):

    def __init__(self, request=None, environment=None):
        self.module = "cloudability_module"
        self.ah_obj = AtlasHelper()
        self.cloud_obj = Cloudability()
        self.awshelper_obj = AwsHelper()
        self.request = request
        self.aws_object = AwsModule(request, environment)
        self.instance_cost = 0.0
        self.storage_cost = 0.0

    def get_information(self, environment=None, **kwargs):
        organization_list = self.awshelper_obj.get_organizations()
        if environment is None:
            if 'env_cost_dict' in kwargs:
                if kwargs['env_cost_dict'] == 'true':
                    for organization in organization_list:
                        env_cost_dict = self.cloud_obj.get_cloudability_costs()['environment_costs'][organization]
                        env_cost_dict['all'] = self.get_information(ec2_cost_dict='true')['region_zone']
                        return env_cost_dict
            if 'ec2_cost_dict' in kwargs:
                if kwargs['ec2_cost_dict'] == 'true':
                    for organization in organization_list:
                        ec2_costs = self.cloud_obj.get_cloudability_costs()['ec2_costs'][organization]
                        return ec2_costs
        else:
            if 'env_cost_dict' in kwargs:
                if kwargs['env_cost_dict'] == 'true':
                    env_costs = 0
                    for organization in organization_list:
                        env_cost_dict = self.cloud_obj.get_cloudability_costs()['environment_costs'][organization]
                        environment_groups = self.ah_obj.get_atlas_config_data("global_config_data", "environment_groups")
                        if environment_groups and environment in environment_groups[1].keys():
                            if environment == 'all':
                                env_group_for_environment = self.awshelper_obj.get_environments(organization)
                            else:
                                env_group_for_environment = environment_groups[1][environment]
                            for env in env_group_for_environment:
                                env_costs += env_cost_dict[env]
                            env_cost_dict[environment] = env_costs
                        return env_cost_dict
            if 'apps_in_environment' in kwargs:
                if kwargs['apps_in_environment'] == 'true':
                    return self.aws_object.get_information(environment, apps_in_environment='true')
            if 'instance_data' in kwargs:
                if kwargs['instance_data'] == 'true':
                    return self.aws_object.get_information(environment, instance_data='true')
            if 'instances_cost_dict' in kwargs:
                if kwargs['instances_cost_dict'] == 'true':
                    for organization in organization_list:
                        return self.cloud_obj.get_cloudability_costs()['instances_costs'][organization]
            if 'ebs_cost_dict' in kwargs:
                if kwargs['ebs_cost_dict'] == 'true':
                    for organization in organization_list:
                        return self.cloud_obj.get_cloudability_costs()['ebs_costs'][organization]
            if 'aws_info_dict' in kwargs:
                if kwargs['aws_info_dict'] == 'true':
                    return self.aws_object.get_information(environment, 'aws_info_dict')
            if 'application_subnets' in kwargs:
                if kwargs['application_subnets'] == 'true':
                    return self.aws_object.get_information(environment, 'application_subnets')

    def get_configuration_data(self, key):
        value = self.ah_obj.get_atlas_config_data(self.module, key)
        if isinstance(value, dict):
            return value[0]
        else:
            return value

    def get_stack_attributes(self, environment=None):
        """ Get stack attributes from config file. """
        stack_attribute_list = []
        stack_attributes_dict = self.ah_obj.get_atlas_config_data('cloudability_module', 'stack_attributes')[1]
        for attribute, details in stack_attributes_dict.iteritems():
            stack_attribute_list.append((attribute, details['editable']))
        return (stack_attribute_list, stack_attributes_dict)

    def get_attribute_values(self, environment=None):
        return self.__get_detailed_instances_cost_dict(environment, 'stack_costs')

    def get_status(self, environment=None):
        status_information = self.get_configuration_data('status')
        cloud_status_dict = {}
        organization_list = self.awshelper_obj.get_organizations()
        environment_list = []
        if environment is None:
            env_cost_dict = self.get_information(env_cost_dict='true')
            cloud_status_dict = {environment: ["$" + str(env_cost_dict[environment])] for environment in env_cost_dict.keys()}
        else:
            env_cost_dict = self.get_information(environment, env_cost_dict='true')
            region_vpc_selection = self.aws_object.get_information(environment, region_vpc_dict='true')
            if environment == "uncategorized":
                region_list = self.awshelper_obj.get_regions()
                for region in region_vpc_selection:
                    if region == 'east':
                        cloud_status_dict[region] = ["$" + str(env_cost_dict[environment])]
                    else:
                        cloud_status_dict[region] = ["$" + "0.0"]
            else:
                for vpc in ['ame1']:  # should be changed later to include all vpcs.
                    cloud_status_dict[vpc] = ["$" + str(env_cost_dict[environment])]
        return (status_information, cloud_status_dict)

    def get_tabs(self, environment=None):
        pass

    def get_instance_actions(self, environment=None):
        pass

    def get_environment_actions(self, environment=None):
        pass

    def get_instance_group_actions(self, environment=None):
        pass

    def get_stack_actions(self, environment=None):
        pass

    def get_vpc_actions(self):
        pass

    def get_action_status(self, json_data, environment=None):
        pass

    def perform_instance_actions(self, environment=None):
        pass

    def perform_instancegroup_actions(self):
        pass

    def perform_stack_actions(self):
        pass

    def perform_vpc_actions(self, json_data):
        pass

    def perform_environment_actions(self, environment=None):
        pass

    def get_columns(self, environment=None):
        column_list = self.ah_obj.get_atlas_config_data(self.module, 'columns')
        column_dict = self.ah_obj.create_nested_defaultdict()
        if column_list:
            column_dict = self.__get_detailed_instances_cost_dict(environment, 'instances_cost')
        return (column_list, self.ah_obj.defaultdict_to_dict(column_dict))

    def get_action_parameters(self, action_type, environment=None):
        pass

    def load_session(self, request, environment=None):
        pass

    def save_session(self, request, environment=None):
        pass

    def get_defaults(self):
        pass

    def get_aggregates(self, environment=None):
        aggregates = self.ah_obj.get_atlas_config_data(self.module, 'aggregates')
        if environment is None:
            aggregate_list = ["$" + str(self.get_information(ec2_cost_dict='true')['region_zone'])]
            return (aggregates, aggregate_list)
        else:
            aggregate_dict = collections.defaultdict(dict)
            for agg_key in aggregates:
                if agg_key == 'cost':
                    aggregate_dict[agg_key] = self.get_information(env_cost_dict='true')[environment]
            return dict(aggregate_dict)

    def refresh_information(self, environment=None):
        self.cloud_obj.cache_cloud_costs()
        return

    def __get_detailed_instances_cost_dict(self, environment, cost_type):
        instances_cost_dict = self.get_information(environment, instances_cost_dict='true')
        ebs_cost_dict = self.get_information(environment, ebs_cost_dict='true')
        aws_tabs_dict = self.aws_object.get_tabs(environment)[1]
        instance_information = self.aws_object.get_information(environment, instance_data='true')
        organization_list = self.awshelper_obj.get_organizations()
        (stack_attr_list, stack_attr_details) = self.get_configuration_data('stack_attributes')
        apps_in_environment = self.get_information(environment, apps_in_environment='true')
        application_subnets = self.get_information(environment, application_subnets='true')
        region, vpc, subnet = "", "", ""
        instance_cost_column_dict = self.ah_obj.create_nested_defaultdict()
        ebs_cost_column_dict = self.ah_obj.create_nested_defaultdict()
        stack_cost_dict = self.ah_obj.create_nested_defaultdict()
        stack_cost_string_dict = self.ah_obj.create_nested_defaultdict()
        name_tag_value, fqdn_tag_value = "", ""
        for instance, aws_tabs_dict in aws_tabs_dict.iteritems():
            attribute_cost = {}
            if 'Name' in aws_tabs_dict['aws_tags']:
                name_tag_value = aws_tabs_dict['aws_tags']["Name"]
            if 'fqdn' in aws_tabs_dict['aws_tags']:
                fqdn_tag_value = aws_tabs_dict['aws_tags']['fqdn']
            instance_details = self.ah_obj.get_nested_attribute_values(instance_information, instance)[1]
            region = instance_details['region'] if "region" in instance_details else "none"
            subnet = instance_details['subnet'] if "subnet" in instance_details else "none"
            attribute_cost[subnet] = {}
            vpc = instance_details['vpc'] if "vpc" in instance_details else "none"
            attribute_cost[vpc] = {}
            attribute_cost[vpc][subnet] = {}
            stack = instance_details['application'] if "application" in instance_details else "none"
            if cost_type == 'instances_cost' or cost_type == 'stack_costs':
                if fqdn_tag_value in ebs_cost_dict:
                    self.storage_cost = ebs_cost_dict[fqdn_tag_value]
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['storage_cost'] = "$" + str(ebs_cost_dict[fqdn_tag_value]) + "/m"
                    else:
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['storage_cost'] = "$" + str(ebs_cost_dict[fqdn_tag_value]) + "/m"
                else:
                    self.storage_cost = 0.0
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['storage_cost'] = "$" + "0.0" + "/m"
                    else:
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['storage_cost'] = "$" + "0.0" + "/m"
                if fqdn_tag_value in instances_cost_dict:
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = "$" + str(instances_cost_dict[fqdn_tag_value]) + "/m"
                    else:
                        self.instance_cost = instances_cost_dict[fqdn_tag_value]
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = "$" + str(instances_cost_dict[fqdn_tag_value]) + "/m"
                elif name_tag_value in instances_cost_dict:
                    self.instance_cost = instances_cost_dict[name_tag_value]
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = "$" + str(instances_cost_dict[name_tag_value]) + "/m"
                    else:
                        # Look up by the Name tag here; the fqdn tag is not present in
                        # instances_cost_dict in this branch.
                        self.instance_cost = instances_cost_dict[name_tag_value]
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = "$" + str(instances_cost_dict[name_tag_value]) + "/m"
                else:
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = "(empty)"
                    else:
                        self.instance_cost = 0.0
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = "(empty)"
            if cost_type == 'stack_costs':
                attr_cost = 0.0
                for attribute in stack_attr_list:
                    if attribute == 'instance_cost':
                        attr_cost = self.instance_cost
                    elif attribute == 'storage_cost':
                        attr_cost = self.storage_cost
                    elif attribute == 'total_cost':
                        attr_cost = self.instance_cost + self.storage_cost
                    if stack_attr_details[attribute]['stack'] == ['all']:
                        for apps in apps_in_environment:
                            if stack == apps:
                                if not stack_cost_dict[region][vpc][subnet][stack][attribute]:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] = attr_cost
                                else:
                                    cost = stack_cost_dict[region][vpc][subnet][stack][attribute]
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] += attr_cost
                    else:
                        for attr_stack in stack_attr_details[attribute]['stack']:
                            if attr_stack == stack:
                                if not stack_cost_dict[region][vpc][subnet][stack][attribute]:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] = attr_cost
                                else:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] += attr_cost
                    stack_cost_string_dict[region][vpc][subnet][stack][attribute] = \
                        "$" + str(stack_cost_dict[region][vpc][subnet][stack][attribute]) + "/m"
        if cost_type == 'stack_costs':
            return self.ah_obj.defaultdict_to_dict(stack_cost_string_dict)
        if cost_type == 'instances_cost':
            return self.ah_obj.defaultdict_to_dict(instance_cost_column_dict)
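
# --- Usage sketch (illustrative, not part of the module) ----------------------------
# A minimal example of how a dashboard-style caller might use CloudabilityModule.
# The environment name 'dev' and request=None are placeholders for illustration; the
# methods and kwargs ('env_cost_dict', get_aggregates, get_columns) are the ones
# defined above.
def _cloudability_module_usage_sketch():
    cloud_module = CloudabilityModule(request=None, environment=None)

    # Per-environment costs plus an 'all' total, keyed by environment name.
    env_costs = cloud_module.get_information(env_cost_dict='true')

    # Dashboard aggregates: (labels from config, ["$<total ec2 cost>"]).
    aggregates, aggregate_values = cloud_module.get_aggregates()

    # Per-instance cost columns for one environment ('dev' is a placeholder name).
    column_list, column_data = cloud_module.get_columns('dev')
    return env_costs, aggregates, aggregate_values, column_list, column_data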
class Cloudability:

    # constructor
    def __init__(self):
        self.cloudability_dict = {}
        self.ah_obj = AtlasHelper()
        self.aws_helper_object = AwsHelper()
        self.module = "cloudability_module"
        self.auth_token = os.environ.get('CLOUDABILITY_AUTH_TOKEN')
        self.cl_base_url = self.ah_obj.get_atlas_config_data(self.module, "cloudability_base_url")
        self.cl_cost_url = self.ah_obj.get_atlas_config_data(self.module, "cloudability_cost_url")
        self.report_query = ""
        self.memcache_var = memcache.Client(
            [self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location')], debug=1)
        self.environment_subnets_details = self.aws_helper_object.get_environment_subnets_details()

    def construct_cost_query(self, query_parameters):
        try:
            self.report_query = self.cl_base_url + self.cl_cost_url + query_parameters + self.auth_token
            return self.report_query
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "construct_cost_query()", exp_object, exc_type, exc_obj, exc_tb)
            return

    def generate_report(self, query):
        try:
            report_json = {}
            response = requests.get(query)
            if response.status_code == 200:
                report_json = json.loads(response.text)  # convert the JSON into a Python dictionary
            return report_json
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_previous_period(self, start_date, end_date):
        try:
            start = datetime.datetime.strptime(start_date, '%Y-%m-%d')
            end = datetime.datetime.strptime(end_date, '%Y-%m-%d')
            period = ((end - start).days) + 1
            previous_start_date = (start - datetime.timedelta(days=period)).strftime('%Y-%m-%d')
            previous_end_date = (end - datetime.timedelta(days=period)).strftime('%Y-%m-%d')
            return (previous_start_date, previous_end_date)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_previous_period()", exp_object, exc_type, exc_obj, exc_tb)
            return

    # get EC2 costs
    def get_ec2_costs(self, start_date, end_date):
        ec2_costs = {'region_zone': 0.0}
        try:
            query_parameters = ("verbose=1&start_date=" + start_date + "&end_date=" + end_date +
                                "&dimensions=linked_account_name&metrics=invoiced_cost&sort_by=invoiced_cost"
                                "&order=desc&max_results=50&offset=0&auth_token=")
            ec2_costs_query = self.construct_cost_query(query_parameters)
            ec2_cost_dict = self.generate_report(ec2_costs_query)
            if ec2_cost_dict:
                ec2_costs['region_zone'] = round(
                    float(ec2_cost_dict['meta']['aggregates'][0]['value'].strip('$').replace(',', '')), 2)
            if ec2_costs['region_zone']:
                return ec2_costs
            else:
                raise Exception("Could not generate EC2 costs")
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_ec2_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return ec2_costs

    # get aggregate EC2 cost for the specified period and the period before it
    def get_current_prev_ec2_costs(self, start_date, end_date):
        # query parameters should be moved to the config file
        ec2_cost_dict = {'current_period': 0.0, 'previous_period': 0.0}
        try:
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[0], previous_period[1]
            ec2_cost_dict['current_period'] = self.get_ec2_costs(start_date, end_date)
            ec2_cost_dict['previous_period'] = self.get_ec2_costs(previous_start_date, previous_end_date)
            return ec2_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_current_prev_ec2_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return

    def create_envcost_dict(self):
        try:
            env_subnet_zip = self.environment_subnets_details
            per_environment_costs = {}
            for env_subnet_tuple in env_subnet_zip:
                per_environment_costs[env_subnet_tuple[0]] = 0
            return per_environment_costs
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "create_envcost_dict()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_environment_costs(self, start_date, end_date):
        try:
            query_parameters = ("verbose=1&start_date=" + start_date + "&end_date=" + end_date +
                                "&dimensions=tag2&metrics=invoiced_cost&sort_by=invoiced_cost&order=desc&auth_token=")
            subnets_cost_query = self.construct_cost_query(query_parameters)
            subnet_cost_dict = self.generate_report(subnets_cost_query)
            cost_dict = self.create_envcost_dict()
            env_subnet_zip = self.environment_subnets_details
            subnet_details = subnet_cost_dict['results']
            for subnet_index in subnet_details:
                for env_subnet_tuple in env_subnet_zip:
                    if subnet_index['tag2'] in env_subnet_tuple[1]:
                        # strip the $ symbol and commas, convert the string to a float
                        subnet_cost = float((subnet_index['invoiced_cost'].strip('$')).strip(',').replace(",", ""))
                        env_cost = cost_dict[env_subnet_tuple[0]]
                        if subnet_index['tag2'] in cost_dict.keys():
                            cost_dict[env_subnet_tuple[0]] += round((env_cost + subnet_cost), 2)
                        else:
                            cost_dict[env_subnet_tuple[0]] = round((env_cost + subnet_cost), 2)
            if cost_dict:
                return cost_dict
            else:
                raise Exception('Could not calculate environment costs')
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_environment_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_environment_costs(self, start_date, end_date):
        # query parameters should be moved to the config file
        try:
            env_cost_dict = {}
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[0], previous_period[1]
            env_cost_dict['current_period'] = self.get_environment_costs(start_date, end_date)
            env_cost_dict['previous_period'] = self.get_environment_costs(previous_start_date, previous_end_date)
            return env_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_current_prev_environment_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return

    # calculate costs per subnet
    def get_subnet_costs(self, start_date, end_date):
        subnet_cost_dict = {}
        try:
            query_parameters = ("verbose=1&start_date=" + start_date + "&end_date=" + end_date +
                                "&dimensions=tag2&metrics=invoiced_cost&sort_by=invoiced_cost&order=desc&auth_token=")
            subnets_cost_query = self.construct_cost_query(query_parameters)
            subnet_cost_json = self.generate_report(subnets_cost_query)
            env_subnet_zip = self.environment_subnets_details
            subnet_details = subnet_cost_json['results']
            for subnet_index in subnet_details:
                for env_subnet_tuple in env_subnet_zip:
                    if subnet_index['tag2'] in env_subnet_tuple[1]:
                        # strip the $ symbol and commas, convert the string to a float
                        subnet_cost = float((subnet_index['invoiced_cost'].strip('$')).strip(',').replace(",", ""))
                        if subnet_index['tag2'] in subnet_cost_dict.keys():
                            subnet_cost_dict[subnet_index['tag2']] += round(subnet_cost, 2)
                        else:
                            subnet_cost_dict[subnet_index['tag2']] = round(subnet_cost, 2)
            return subnet_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_subnet_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_subnet_costs(self, start_date, end_date):
        # query parameters should be moved to the config file
        subnet_cost_dict = {}
        try:
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[0], previous_period[1]
            subnet_cost_dict['current_period'] = self.get_subnet_costs(start_date, end_date)
            subnet_cost_dict['previous_period'] = self.get_subnet_costs(previous_start_date, previous_end_date)
            return subnet_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_current_prev_subnet_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def split_string(self, input_string, delimiters):
        # split a string on multiple delimiters
        delimiters = tuple(delimiters)
        string_list = [input_string, ]
        for delimiter in delimiters:
            for index1, input_sub_string in enumerate(string_list):
                temp_var = input_sub_string.split(delimiter)
                string_list.pop(index1)
                for index2, input_sub_string in enumerate(temp_var):
                    string_list.insert(index1 + index2, input_sub_string)
        return string_list

    def get_ebs_costs(self, start_date, end_date):
        try:
            query_parameters = ("&start_date=" + start_date + "&end_date=" + end_date +
                                "&filters=usage_type=@EBS&dimensions=usage_type,tag1,&metrics=invoiced_cost&order=desc&auth_token=")
            ebs_cost_query = self.construct_cost_query(query_parameters)
            ebs_cost_json = self.generate_report(ebs_cost_query)
            ebs_details = ebs_cost_json['results']
            ebs_cost_dict = collections.defaultdict(dict)
            for instance_index in ebs_details:
                if instance_index['tag1'] in ebs_cost_dict:
                    ebs_cost_dict[instance_index['tag1']] += round(
                        float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
                else:
                    ebs_cost_dict[instance_index['tag1']] = round(
                        float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
            return dict(ebs_cost_dict)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_ebs_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_instances_costs(self, start_date, end_date):
        try:
            query_parameters = ("&start_date=" + start_date + "&end_date=" + end_date +
                                "&filters=service_key==AmazonEC2&dimensions=tag1,tag3,&metrics=invoiced_cost&order=desc&auth_token=")
            instance_cost_query = self.construct_cost_query(query_parameters)
            instance_cost_json = self.generate_report(instance_cost_query)
            instance_details = instance_cost_json['results']
            instance_cost_dict = collections.defaultdict(dict)
            for instance_index in instance_details:
                if 'tag3' in instance_index:
                    if instance_index['tag3'] in instance_cost_dict:
                        instance_cost_dict[instance_index['tag3']] += round(
                            float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
                    else:
                        instance_cost_dict[instance_index['tag3']] = round(
                            float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
                elif 'tag1' in instance_index:
                    if instance_index['tag1'] in instance_cost_dict:
                        instance_cost_dict[instance_index['tag1']] += round(
                            float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
                    else:
                        instance_cost_dict[instance_index['tag1']] = round(
                            float(instance_index['invoiced_cost'].strip('$').replace(',', '')), 2)
            return dict(instance_cost_dict)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_instances_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_instances_costs(self, start_date, end_date):
        try:
            instance_cost_dict = {}
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[0], previous_period[1]
            instance_cost_dict['current_period'] = self.get_instances_costs(start_date, end_date)
            instance_cost_dict['previous_period'] = self.get_instances_costs(previous_start_date, previous_end_date)
            return instance_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_current_prev_instances_costs()", exp_object, exc_type, exc_obj, exc_tb)
            return

    def get_percentage_change(self, cost_dict):
        percentage_dict, current_costs_dict, previous_costs_dict = {}, {}, {}
        try:
            if cost_dict:
                if cost_dict.has_key('current_period'):
                    current_costs_dict = cost_dict['current_period']
                if cost_dict.has_key('previous_period'):
                    previous_costs_dict = cost_dict['previous_period']
            else:
                raise Exception("Invalid value: No values for current and previous costs")
            for key in current_costs_dict:
                if key in previous_costs_dict.keys():
                    current_cost = current_costs_dict[key]
                    previous_cost = previous_costs_dict[key]
                    difference = current_cost - previous_cost
                    if difference < 0:
                        tag = 'decrease'
                    elif difference > 0:
                        tag = 'increase'
                    else:
                        tag = 'equal'
                    if previous_cost == 0.0:
                        percentage = round((abs(difference)), 2)
                    else:
                        percentage = round((abs(difference) * 100 / previous_cost), 2)
                    percentage_dict[key] = (current_cost, tag, percentage)
                else:
                    percentage_dict[key] = (current_costs_dict[key], '', 0)
            return percentage_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_percentage_change()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_cloudability_costs(self):
        cloud_cost_dict = self.memcache_var.get('cloud_costs')
        if cloud_cost_dict is None:
            cloud_cost_dict = self.memcache_var.get('global_cloudability_costs')
            if cloud_cost_dict is not None:
                self.memcache_var.set("cloud_costs", cloud_cost_dict, 600)
            with threading.RLock():
                thread = threading.Thread(target=self.cache_cloud_costs)
                thread.start()
        return cloud_cost_dict

    def cache_cloud_costs(self):
        try:
            cloudability_dict = self.get_cloud_costs()
            self.memcache_var.set("cloud_costs", cloudability_dict, 2 * 60 * 60)
            if cloudability_dict is None:
                raise Exception("Cloudability data is not available. Please ensure data is available and populate the cache.")
            if cloudability_dict is not None:
                self.memcache_var.set("global_cloudability_costs", cloudability_dict, 86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "cache_cloud_costs()", exp_object, exc_type, exc_obj, exc_tb)
            self.memcache_var.disconnect_all()

    def get_cloud_costs(self):
        current_date = date.today().strftime('%Y-%m-%d')
        month = date.today().month
        if month in [1, 3, 5, 7, 8, 10, 12]:
            previous_date = (date.today() - timedelta(days=30)).strftime('%Y-%m-%d')
        elif month in [2]:
            previous_date = (date.today() - timedelta(days=28)).strftime('%Y-%m-%d')
        else:
            previous_date = (date.today() - timedelta(days=29)).strftime('%Y-%m-%d')
        organization_list = self.aws_helper_object.get_organizations()
        region_list = self.aws_helper_object.get_regions()
        self.cloudability_dict = self.ah_obj.create_nested_defaultdict()
        for organization in organization_list:
            for region in region_list:
                vpc_list = self.aws_helper_object.get_vpc_in_region(region)
                if vpc_list:
                    for vpc in ["ame1"]:
                        if vpc:
                            ec2_costs = self.get_ec2_costs(previous_date, current_date)
                            self.cloudability_dict = self.ah_obj.create_nested_defaultdict()
                            self.cloudability_dict['ec2_costs'][organization] = ec2_costs
                            ec2_costs = self.get_current_prev_ec2_costs(previous_date, current_date)
                            ec2_percentage_change = self.get_percentage_change(ec2_costs)
                            self.cloudability_dict['ec2_percentage_change'][organization] = ec2_percentage_change
                            environment_costs = self.get_environment_costs(previous_date, current_date)
                            self.cloudability_dict['environment_costs'][organization] = environment_costs
                            environment_costs = self.get_current_prev_environment_costs(previous_date, current_date)
                            env_percentage_change = self.get_percentage_change(environment_costs)
                            self.cloudability_dict['env_percentage_change'][organization] = env_percentage_change
                            subnet_costs = self.get_subnet_costs(previous_date, current_date)
                            self.cloudability_dict['subnet_costs'][organization] = subnet_costs
                            subnet_costs = self.get_current_prev_subnet_costs(previous_date, current_date)
                            subnet_percentage_change = self.get_percentage_change(subnet_costs)
                            self.cloudability_dict['subnet_percentage_change'][organization] = subnet_percentage_change
                            instances_costs = self.get_instances_costs(previous_date, current_date)
                            self.cloudability_dict['instances_costs'][organization] = instances_costs
                            instances_costs = self.get_current_prev_instances_costs(previous_date, current_date)
                            instances_percentage_change = self.get_percentage_change(instances_costs)
                            self.cloudability_dict['instances_percentage_change'][organization] = instances_percentage_change
                            ebs_costs = self.get_ebs_costs(previous_date, current_date)
                            self.cloudability_dict['ebs_costs'][organization] = ebs_costs
        return self.ah_obj.defaultdict_to_dict(self.cloudability_dict)
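
# --- Usage sketch (illustrative, not part of the module) ----------------------------
# How the two-tier memcache caching above is intended to be consumed: the short-lived
# 'cloud_costs' key is served when present, otherwise the day-long
# 'global_cloudability_costs' copy is returned while cache_cloud_costs() rebuilds the
# data in a background thread. CLOUDABILITY_AUTH_TOKEN must be set in the environment;
# the top-level keys referenced below are the ones populated in get_cloud_costs().
def _cloudability_cache_usage_sketch():
    cloud = Cloudability()
    costs = cloud.get_cloudability_costs()  # may be None on a completely cold cache
    if costs is None:
        # Force a synchronous rebuild of both cache keys instead of waiting for the
        # background thread started by get_cloudability_costs().
        cloud.cache_cloud_costs()
        costs = cloud.get_cloudability_costs() or {}
    per_org_env_costs = costs.get('environment_costs', {})  # {organization: {environment: cost}}
    per_org_ec2_totals = costs.get('ec2_costs', {})         # {organization: {'region_zone': total}}
    return per_org_env_costs, per_org_ec2_totals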
class JenkinsActions:

    def __init__(self, request=None, environment=None):
        self.ah_obj = AtlasHelper()
        self.module = "jenkins_module"
        self.python_jenkinsurl = self.ah_obj.get_atlas_config_data(self.module, "python_jenkins_url")
        self.build_record_count = self.ah_obj.get_atlas_config_data(self.module, "build_record_count")
        self.jenkins_password = os.environ.get('JENKINS_PASSWORD')
        self.jenkins_username = os.environ.get('JENKINS_USERNAME')
        self.jenkinsurl = os.environ.get('JENKINS_URL')
        self.python_jenkinsurl = self.jenkinsurl + "/job/"
        self.memcache_var = memcache.Client(
            [self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location')], debug=0)
        if environment:
            self.aws_obj = AwsModule(request, environment)

    """ helper methods """

    def get_jenkins_job_folder(self, jobname):
        job_folder_information = self.ah_obj.get_atlas_config_data(self.module, "folders")[1]
        for folder, job_list in job_folder_information.iteritems():
            if jobname in job_list:
                return folder

    def cache_jenkins_build_userinfo(self):
        try:
            jobname = 'AWS-Build-Dev-Deploy-Dev'
            build_userinfo_dict = self.jenkins_build_userinfo(jobname)
            self.memcache_var.set(jobname + '_build_userinfo', build_userinfo_dict, 15 * 60)
            if build_userinfo_dict is None:
                raise Exception("Source data from Jenkins server is unavailable. Please ensure data is available and populate the cache.")
            if build_userinfo_dict is not None:
                self.memcache_var.set('global_' + jobname + '_build_userinfo', build_userinfo_dict, 86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "cache_jenkins_build_userinfo()", exp_object, exc_type, exc_obj, exc_tb)
            return

    def jenkins_build_userinfo(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password)
        build_user_info_dict = collections.defaultdict(dict)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                build_information_list = job_info['builds']
                latest_build_number = build_information_list[0]['number']
                build_info = jenkins_obj.get_build_info(jobname, latest_build_number)
                for build_number in range(latest_build_number - self.build_record_count, latest_build_number + 1):
                    try:
                        build_info_dict = jenkins_obj.get_build_info(jobname, build_number)
                        build_user_info_dict[build_number] = {
                            'deployed_by': "",
                            'branch': "",
                            'last_deployed': "",
                            'subnet': "",
                            'commit_hash': ""
                        }
                        branch = ""
                        if 'actions' in build_info_dict:
                            if 'parameters' in build_info_dict['actions'][0]:
                                for parameter_dict in build_info_dict['actions'][0]['parameters']:
                                    if parameter_dict['name'] == 'subnet':
                                        build_user_info_dict[build_number]['subnet'] = parameter_dict['value']
                                    if parameter_dict['name'] == 'branch':
                                        build_user_info_dict[build_number]['branch'] = parameter_dict['value']
                                        branch = parameter_dict['value']
                            if 'causes' in build_info_dict['actions'][1]:
                                actions = build_info_dict['actions'][1]
                                if 'userName' in actions['causes'][0]:
                                    build_user_info_dict[build_number]['deployed_by'] = build_info_dict['actions'][1]['causes'][0]['userName']
                            if 'buildsByBranchName' in build_info_dict['actions'][2]:
                                commit_hash = build_info_dict['actions'][2]['buildsByBranchName']['origin/develop']['revision']['SHA1'][:7]
                                build_user_info_dict[build_number]['commit_hash'] = commit_hash
                        if 'timestamp' in build_info_dict:
                            timestamp = str(datetime.datetime.now() - datetime.datetime.fromtimestamp(build_info_dict['timestamp'] / 1000))
                            deployed_before = ""
                            if isinstance(timestamp, list):
                                hours_minutes = timestamp[1].split(":")[:2]
                                deployed_before = timestamp[0] + " " + hours_minutes[0] + "hrs " + hours_minutes[1] + "mins"
                            else:
                                hours_minutes = timestamp.split(":")[:2]
                                deployed_before = hours_minutes[0] + " hrs " + hours_minutes[1] + " mins"
                            build_user_info_dict[build_number]['last_deployed'] = deployed_before
                    except:
                        continue
            return self.ah_obj.defaultdict_to_dict(build_user_info_dict)
        except Exception as exp_object:
            return {}

    def get_jenkins_build_userinfo(self, jobname):
        build_userinfo_dict = self.memcache_var.get(jobname + '_build_userinfo')
        if not build_userinfo_dict:
            build_userinfo_dict = self.memcache_var.get('global_' + jobname + '_build_userinfo')
            if build_userinfo_dict is not None:
                self.memcache_var.set(jobname + '_build_userinfo', build_userinfo_dict, 3 * 60 * 60)
            with threading.Lock():
                thread = threading.Thread(target=self.cache_jenkins_build_userinfo)
                thread.start()
        return build_userinfo_dict

    def get_jenkins_job_info(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                job_info_dict = {
                    'last_successful_build_number': job_info['lastSuccessfulBuild']['number'],
                    'last_successful_build_url': job_info['lastSuccessfulBuild']['url'],
                    'last_unsuccessful_build_number': job_info['lastUnsuccessfulBuild']['number'],
                    'last_unsuccessful_build_url': job_info['lastUnsuccessfulBuild']['url'],
                    'last_completed_build_number': job_info['lastCompletedBuild']['number'],
                    'last_completed_build_url': job_info['lastCompletedBuild']['url'],
                    'last_unstable_build_number': job_info['lastUnstableBuild'],
                    'last_unstable_build_url': job_info['lastUnstableBuild'],
                    'last_stable_build_number': job_info['lastStableBuild']['number'],
                    'last_stable_build_url': job_info['lastStableBuild']['url'],
                    'last_build': job_info['lastBuild']['url'],
                    'last_build-number': job_info['lastBuild']['number'],
                    'nextBuildNumber': job_info['nextBuildNumber']
                }
            return job_info_dict
        except Exception as exp_object:
            return {}

    def get_console_output(self, build):
        console_output = build.get_console()
        if console_output:
            return console_output

    def check_build_status(self, job_name):
        status_dict = {}
        try:
            jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
            job = jenkinsapi_obj.get_job(job_name)
            build = job.get_last_build()
            other_info = self.get_jenkins_job_info(job_name)
            if other_info:
                status_dict['other_info'] = self.get_jenkins_job_info(job_name)
            status_dict['console_output'] = self.get_console_output(build)
            if build.is_running():
                status_dict['exit_status'] = "Build not complete"
                status_dict['action_state'] = "action_in_progress"
            else:
                if build.is_good():
                    status_dict['exit_status'] = "Build Successful"
                    status_dict['action_state'] = "action_completed"
            return status_dict
        except Exception as exp_object:
            status_dict['action_state'] = 'action_failed'
            return status_dict

    """ action methods """

    def server_create_test(self, subnet, profile, node_name):
        """ Create a server on aws_obj. """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        if profile == "ops-general":
            jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile, 'name': node_name})
        else:
            jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile})

    def echo_contents(self, text1, text2):
        """ Echo contents sample jenkins job. """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        jenkinsapi_obj.build_job('echo_contents', {'text1': text1, 'text2': text2})

    def initiate_actions(self, action, parameters):
        """ Initiate jenkins actions. """
        initial_status = {}
        try:
            if parameters is None or parameters == '':
                return
            other_info = self.get_jenkins_job_info(action)
            if other_info:
                initial_status['other_info'] = other_info
            if action == 'echo_contents':
                self.echo_contents(parameters['text1'], parameters['text2'])
            if action == 'server_create_test':
                self.server_create_test(parameters['subnet'], parameters['profile'], parameters['node_name'])
            initial_status = self.check_build_status(action)
            initial_status['action_state'] = 'action_initiated'
            return initial_status
        except Exception as exp_object:
            return initial_status

    def action_state(self, action):
        """ Check the status of builds. """
        action_state = self.check_build_status(action)
        return action_state

    def parameter_values(self, action, parameter, environment=None):
        """ Return parameter values for each build to be displayed as options to the user. """
        if action == 'server_create_test':
            if parameter == 'subnet':
                return self.aws_obj.get_information(environment, env_subnet_list='true')
            if parameter == 'profile':
                return self.aws_obj.get_information(environment, profiles='true')
            if parameter == 'name':
                return ""
        if action == 'echo_contents':
            if parameter == 'text1':
                return ""
            if parameter == 'text2':
                return ""

    def action_parameters(self, action_type, environment=None):
        """ Get parameters for each action. """
        action_parameters_dict = {}
        if action_type == 'vpc_actions':
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module, 'vpc_actions')[1], environment)
        elif action_type == 'instance_actions':
            pass
        elif action_type == 'instance_group_actions':
            pass
        elif action_type == 'stack_actions':
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module, 'stack_actions')[1], environment)
        return action_parameters_dict

    def unpack_action_parameters(self, action_parameters_dict, environment=None):
        parameter_dict = {}
        for key, values in action_parameters_dict.iteritems():
            parameter_list = values['parameters']
            parameter_dict[key] = {}
            for parameter in parameter_list:
                temp_list = []
                temp_parameter = parameter.split(',')
                temp_list.append(temp_parameter[1])
                temp_list.append(self.parameter_values(key, temp_parameter[0], environment))
                parameter_dict[key][temp_parameter[0]] = temp_list
        return parameter_dict
class ViewsHelper():

    def __init__(self):
        self.awshelper_obj = AwsHelper()
        self.session_obj = SessionHandler()
        self.objhelper_obj = ObjectHelper()
        self.ah_obj = AtlasHelper()

    def context_for_custom_processor(self, request):
        """ Help context processor. """
        if request.user.username:
            up = UserProfile.objects.get(user__username=request.user.username)
        region_vpc_dict = {}
        region_list = self.awshelper_obj.get_regions()
        vpc_list = []
        for region in region_list:
            vpc_list = self.awshelper_obj.get_vpc_in_region(region)
            if vpc_list:
                region_vpc_dict[region] = vpc_list
            else:
                region_vpc_dict[region] = []
        user_region_vpc_dict = self.session_obj.load_region_session(request)
        user_region_list = []
        user_vpc_list = []
        if not user_region_vpc_dict:
            user_region_vpc_dict = region_vpc_dict
        for key, values in user_region_vpc_dict.iteritems():
            user_region_list.append(key)
            if values:
                for index in values:
                    user_vpc_list.append(index)
        dash_environments = self.awshelper_obj.get_dash_environments()
        if request.is_secure():
            url_scheme = 'https://'
        else:
            url_scheme = 'http://'
        return {
            'region_vpc_dict': json.dumps(region_vpc_dict),
            'user_region_list': user_region_list,
            'user_vpc_list': user_vpc_list,
            'default_regions': region_list,
            'default_vpc': vpc_list,
            'dash_environments': dash_environments,
            'user_region_vpc_dict': user_region_vpc_dict,
            'default_region_vpc_dict': region_vpc_dict,
            'home_url': url_scheme + request.get_host(),
        }

    def create_module_list(self):
        """ Get the list of modules. It is required for the dashboard; do not delete it. """
        module_list = ['aws_module']  # initialize default module
        other_modules = self.ah_obj.get_atlas_config_data("global_config_data", 'modules')[0]
        other_modules.remove('aws_module')
        module_list.extend(other_modules)
        return module_list

    def refresh_environment_information(self, request):
        module_list = self.create_module_list()
        for module in module_list:
            module_object = self.objhelper_obj.get_module_object(module, request)
            module_object.refresh_information()

    def handle_dashboard_post_requests(self, request):
        """ Handle ajax post requests and send the status. """
        if (int(request.POST.get('refresh_atlas_data', 0)) == 1) and (request.POST.get('refresh_flag') == "refresh"):
            self.refresh_environment_information(request)
            return HttpResponse(status=202)
        if int(request.POST.get('session_var_save', 0)) == 1:
            module_object = self.objhelper_obj.get_module_object(request.POST.get('module'), request)
            module_object.save_session(request)
            return HttpResponse(status=202)
        if request.POST.get('module'):
            module = request.POST.get('module')
            module_obj = self.objhelper_obj.get_module_object(module, request)
            if module_obj:
                module_obj.save_session(environment=None, request=request)
            return HttpResponse(status=202)

    def get_dashboard_status(self, module_object, request):
        status_list, status_dict = [], {}
        status_icon_list, icon_position_list = [], []
        module_status_list, module_style_dict = [], {}
        status = module_object.get_status()
        if isinstance(status, tuple) and status is not None:
            if status[0] and status[1]:
                status_dict = status[1]
                module_status_info = status[0]
                module_style_dict = module_status_info[1]
                status_icon_list, icon_position_list = [], []
                if module_style_dict:
                    for status_key, style_dict in module_style_dict.iteritems():
                        status_list.append(status_key)
                        for key in style_dict.keys():
                            if key == "icon_file":
                                status_icon_list.append(style_dict[key])
                            if key == "position":
                                icon_position_list.append(style_dict[key])
        return [status_list, status_icon_list, icon_position_list, status_dict]

    def load_dashboard_session(self, module_object, request):
        module_session_dict = module_object.load_session(request)
        if module_session_dict is not None:
            for sessions, session_dict in module_session_dict.iteritems():
                for session_key, session_value in session_dict.iteritems():
                    request.session[session_key] = session_value

    def get_dashboard_aggregates(self, module_object, request):
        """ Get all aggregate values for dashboard display. """
        aggregates = module_object.get_aggregates()
        if isinstance(aggregates, tuple):
            return aggregates

    def dashboard_data_for_module(self, request, module, dash_environments):
        module_object = self.objhelper_obj.get_module_object(module, request)
        # load dashboard session variables
        self.load_dashboard_session(module_object, request)
        # get dashboard aggregates
        dashboard_aggregates = self.get_dashboard_aggregates(module_object, request)
        # get dashboard status
        mod_status = self.get_dashboard_status(module_object, request)
        return (dashboard_aggregates, mod_status)

    def generate_dashboard_data(self, request):
        """ Generate required data for each module for dashboard display. """
        # modules list
        module_list = self.create_module_list()
        # environment list
        dash_environments = self.awshelper_obj.get_dash_environments()
        # initialize variables
        status_info_dict = collections.defaultdict(list)
        module_status_list = []
        aggregate_list, aggregate_values_list = [], []
        for module in module_list:
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                future = executor.submit(self.dashboard_data_for_module, request, module, dash_environments)
                (dashboard_aggregates, mod_status) = future.result()
                module_status_list.extend(mod_status[0])
                for env in dash_environments:
                    if env in mod_status[3]:
                        status_info_dict[env].extend(zip(mod_status[0], mod_status[1], mod_status[2], mod_status[3][env]))
                if dashboard_aggregates is not None:
                    aggregate_list.extend(dashboard_aggregates[0])
                    aggregate_values_list.extend(dashboard_aggregates[1])
        return {
            'dash_statuses': module_status_list,
            'dash_status_info': self.ah_obj.defaultdict_to_dict(status_info_dict),
            'aggregate_values': zip(aggregate_list, aggregate_values_list),
            'selected_dash_status': None if not request.session.has_key('config_selected') else request.session['config_selected'],
            'selected_dash_env': None if not request.session.has_key('env_selected') else request.session['env_selected'],
        }

    def save_environment_session_data(self, request, environment):
        """ Save session data for user related to environment. """
        module_object = self.objhelper_obj.get_module_object(request.POST.get('module'), request, environment)
        module_object.save_session(request, environment)

    def perform_environment_actions(self, request, environment):
        """ Perform instance, instance_group, stack or vpc actions. """
        actions_json = ast.literal_eval(request.POST.get('actions_data', "{}"))
        actions_json['user'] = request.user.username
        module = actions_json["module"]
        module_object = self.objhelper_obj.get_module_object(module, request, environment)
        actions_json['username'] = request.user.username
        actions_json['environment'] = environment
        if actions_json["action_type"] == "instance_action":
            if 'edit_flag' in actions_json and int(actions_json['edit_flag']) == 1:
                actions_status = module_object.perform_instance_actions(actions_json)
                return actions_status
            instance_actions = module_object.get_instance_actions()
            if instance_actions:
                if actions_json["action"] in instance_actions:
                    actions_status = module_object.perform_instance_actions(actions_json)
                    return actions_status
        elif actions_json["action_type"] == "instance_group_action":
            if 'edit_flag' in actions_json and int(actions_json['edit_flag']) == 1:
                actions_status = module_object.perform_instance_actions(actions_json)
                return actions_status
            group_actions = module_object.get_instance_group_actions()
            if group_actions:
                if actions_json["action"] in group_actions:
                    actions_status = module_object.perform_instancegroup_actions(actions_json, environment)
                    return actions_status
        elif actions_json["action_type"] == "vpc_action":
            vpc_actions = module_object.get_vpc_actions()
            if vpc_actions:
                if actions_json["action"] in vpc_actions:
                    actions_status = module_object.perform_vpc_actions(actions_json, environment)
                    return actions_status
        elif actions_json["action_type"] == "stack_action":
            stack_actions = module_object.get_stack_actions()
            if stack_actions:
                if actions_json["action"] in stack_actions:
                    actions_status = module_object.perform_stack_actions(actions_json, environment)
                    return actions_status

    def check_environment_action_status(self, request, environment):
        """ Check the status of actions performed: whether they have completed or are in progress. """
        actions_json = ast.literal_eval(request.POST.get('actions_data', "{}"))
        action_type = actions_json['action_type']
        module = actions_json["module"]
        module_object = self.objhelper_obj.get_module_object(module, request, environment)
        if action_type == 'instance_action':
            if actions_json["action"] in module_object.get_instance_actions():
                actions_status = module_object.get_instance_status(actions_json, environment)
                return actions_status
        if action_type == 'instance_group_action':
            if actions_json["action"] in module_object.get_instance_group_actions():
                actions_status = module_object.get_instancegroup_status(actions_json, environment)
                return actions_status
        if action_type == 'vpc_action':
            if actions_json["action"] in module_object.get_vpc_actions():
                actions_status = module_object.get_action_status(actions_json, environment)
                return actions_status
        if action_type == 'stack_action':
            if actions_json["action"] in module_object.get_stack_actions():
                actions_status = module_object.get_action_status(actions_json, environment)
                return actions_status

    def load_env_session_variables(self, request, environment, module_object):
        """ Load session variables for each module. """
        module_session_dict = module_object.load_session(request, environment)
        if module_session_dict is not None:
            for sessions, session_dict in module_session_dict.iteritems():
                for session_key, session_value in session_dict.iteritems():
                    request.session[session_key] = session_value

    def get_tabsinfo_for_environment(self, request, environment, module_object):
        """ Get a list of tabs and information for each tab. """
        tabs_list, tabs_info_list, instances_list = [], [], []
        tab_details = module_object.get_tabs(environment)
        if tab_details:
            tabs_list = tab_details[0]
            if tab_details[1]:
                tab_details_dict = {}
                if module_object.__class__ is AwsModule:
                    tabs_info_list = tab_details[1]
                    instances_list = tab_details[1].keys()
                else:
                    for instances in instances_list:
                        if tab_details[1].has_key(instances):
                            tab_details_dict[instances] = tab_details[1][instances]
                    if collections.Counter(tab_details_dict.keys()) == collections.Counter(instances_list):
                        tabs_info_list = tab_details[1]
        return (tabs_list, tabs_info_list)

    def get_column_data_for_environment(self, request, environment, module_object):
        """ Get columns and data for columns for each environment. """
        return module_object.get_columns(environment)

    def get_aggregates_value_for_environment(self, request, environment, module_object):
        """ Get aggregates value for environment. """
        aggregates_zip = []
        aggregates_dict = module_object.get_aggregates(environment)
        if aggregates_dict:
            return (zip(aggregates_dict.keys(), aggregates_dict.values()))

    def get_statusinfo_for_environment(self, request, environment, module_object):
        """ Get status value for each environment.
""" module_status_list = [] status_details_dict = collections.defaultdict(list) module_status = module_object.get_status(environment) if module_status: if module_status[0] and module_status[1]: module_status_info = module_status[0] module_status_list = module_status_info[0] module_style_dict = module_status_info[1] status_icon_list, icon_position_list = [], [] if module_style_dict: for status_key, style_dict in module_style_dict.iteritems( ): for key in style_dict.keys(): if key == "icon_file": status_icon_list.append(style_dict[key]) if key == "position": icon_position_list.append(style_dict[key]) for key in module_status[1]: #change this status_details_dict[key] = zip(module_status_list, status_icon_list, icon_position_list, module_status[1][key]) return (module_status_list, status_details_dict) def get_stackattributes_for_environment(self, request, environment, module_object): """ Get stack attributes and corresponding values for each attribute. """ stack_attributes_list, stack_attributes_dict, attribute_values_dict =[], {}, {} stack_attributes = module_object.get_stack_attributes(environment) attributes = [] if stack_attributes: stack_attributes_list = stack_attributes[0] stack_attributes_dict = stack_attributes[1] attribute_values = module_object.get_attribute_values(environment) if attribute_values is not None: attribute_values_dict = attribute_values return (stack_attributes_list, stack_attributes_dict, attribute_values_dict) def get_instance_action_data(self, request, environment, module_object): """ Get a list of instance actions for each module. """ return module_object.get_instance_actions() def get_instance_group_action_data(self, request, environment, module_object): """ Get instance group action data. """ return module_object.get_instance_group_actions() def get_stack_action_data(self, request, environment, module_object): """ Get a list of stack action and stack action parameters. """ stack_actions, stack_action_parameters = [], {} if module_object.__class__ != ChefModule: s_actions_list = module_object.get_stack_actions() if s_actions_list is not None: stack_actions.extend(s_actions_list) action_parameters = module_object.get_action_parameters( action_type="stack_actions", environment=environment) if action_parameters: for key, values in action_parameters.iteritems(): stack_action_parameters[key] = values return (stack_actions, stack_action_parameters) def get_vpc_action_data(self, request, environment, module_object): """ Get a list of vpc actions and parameters. 
""" vpc_actions, vpc_action_parameters = [], {} vpc_actions_list = module_object.get_vpc_actions() if vpc_actions_list: vpc_actions.extend(vpc_actions_list) action_parameters = module_object.get_action_parameters( action_type="vpc_actions", environment=environment) if action_parameters: for key, values in action_parameters.iteritems(): vpc_action_parameters[key] = values return (vpc_actions, vpc_action_parameters) def environment_data_for_module(self, request, environment, module): module_object = self.objhelper_obj.get_module_object( module, request, environment) module_details_dict = {} if module_object: self.load_env_session_variables(request, environment, module_object) module_details_dict[ 'module_aggregates'] = self.get_aggregates_value_for_environment( request, environment, module_object) module_details_dict[ 'module_status'] = self.get_statusinfo_for_environment( request, environment, module_object) module_details_dict[ 'module_tabs'] = self.get_tabsinfo_for_environment( request, environment, module_object) module_details_dict[ 'module_columns'] = self.get_column_data_for_environment( request, environment, module_object) module_details_dict[ 'module_stack_attributes'] = self.get_stackattributes_for_environment( request, environment, module_object) module_details_dict[ 'module_inst_actions'] = self.get_instance_action_data( request, environment, module_object) module_details_dict[ 'module_group_actions'] = self.get_instance_group_action_data( request, environment, module_object) module_details_dict[ 'module_stack_actions'] = self.get_stack_action_data( request, environment, module_object) module_details_dict[ 'module_vpc_actions'] = self.get_vpc_action_data( request, environment, module_object) return module_details_dict def generate_environment_data(self, request, environment): """ Generate data to be displayed for each environment. 
""" aggregates_zip = [] status_list, status_values_dict = [], collections.defaultdict(list) column_list, column_data_dict = [], {} instance_actions, stack_actions, vpc_actions, group_actions = [], [], [], [] module_actions_dict = {} vpc_action_parameters, stack_action_parameters = {}, {} tabs_list, tabs_info_list = [], [] selected_apps, selected_subnets = [], [] stack_attributes_list, stack_attributes_dict = [], {} attribute_values_dict = {} module_list = self.create_module_list() for module in module_list: with concurrent.futures.ThreadPoolExecutor( max_workers=2) as executor: future = executor.submit(self.environment_data_for_module, request, environment, module) module_details_dict = future.result() module_aggregates = module_details_dict['module_aggregates'] if module_aggregates is not None: aggregates_zip.extend(module_aggregates) (module_status_list, status_details_dict) = module_details_dict['module_status'] status_list.append(module_status_list) for vpc in status_details_dict: if vpc in status_details_dict: status_values_dict[vpc].extend( status_details_dict[vpc]) (module_tabs_list, module_tabs_info_list) = module_details_dict['module_tabs'] tabs_list.append(module_tabs_list) tabs_info_list.append(module_tabs_info_list) column_data = module_details_dict['module_columns'] if column_data is not None and column_data[0] is not None: column_list.extend(column_data[0]) if column_data[1] is not None: generator_object = self.ah_obj.merge_dictionaries( column_data_dict, column_data[1]) column_data_dict = { key: value for key, value in generator_object } (stack_attr_list, stack_attributes, attribute_values ) = module_details_dict['module_stack_attributes'] if stack_attr_list is not None: stack_attributes_list.extend(stack_attr_list) if stack_attributes_dict is None: stack_attributes_dict.update(stack_attributes) else: generator_object = self.ah_obj.merge_dictionaries( stack_attributes_dict, stack_attributes) stack_attributes_dict = { key: value for key, value in generator_object } if attribute_values_dict is None: attribute_values_dict.update(attribute_values) else: generator_object = self.ah_obj.merge_dictionaries( attribute_values_dict, attribute_values) attribute_values_dict = { key: value for key, value in generator_object } module_actions_dict[module] = {} inst_actions_list = module_details_dict['module_inst_actions'] if inst_actions_list is not None: instance_actions.extend(inst_actions_list) module_actions_dict[module][ 'instance_actions'] = inst_actions_list group_actions_list = module_details_dict[ 'module_group_actions'] if group_actions_list is not None: group_actions.extend(group_actions_list) module_actions_dict[module][ 'instance_group_actions'] = group_actions_list (stack_actions_list, stack_action_parameters ) = module_details_dict['module_stack_actions'] stack_actions.extend(stack_actions_list) (vpc_actions_list, vpc_action_parameters ) = module_details_dict['module_vpc_actions'] vpc_actions.extend(vpc_actions_list) module_actions_dict[module][ 'stack_actions'] = stack_actions_list module_actions_dict[module]['vpc_actions'] = vpc_actions_list return { 'environment': environment, 'aggregate_info_zip': aggregates_zip, 'status_values_dict': dict(status_values_dict), 'status_list': status_list, 'table_columns': column_list, 'column_data_dict': column_data_dict, 'tabs_list': tabs_list, 'tabs_list_string': json.dumps(tabs_list), 'tabs_info_string': json.dumps(tabs_info_list), 'instance_actions': json.dumps(instance_actions), 'stack_actions': json.dumps(stack_actions), 
'group_actions': json.dumps(group_actions), 'vpc_actions': vpc_actions, 'vpc_action_parameters': json.dumps(vpc_action_parameters), 'stack_action_parameters': json.dumps(stack_action_parameters), 'module_actions_dict': json.dumps(module_actions_dict), 'stack_attributes_list': stack_attributes_list, 'stack_attributes_dict': stack_attributes_dict, 'attribute_values_dict': dict(attribute_values_dict), 'attribute_values_string': json.dumps(attribute_values_dict) }
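# --- Usage sketch (not part of the original module) --------------------------
# A minimal, hedged illustration of how the per-module fan-out in ViewsHelper
# could be driven. Note that generate_dashboard_data() and
# generate_environment_data() above create a fresh ThreadPoolExecutor for each
# module and call future.result() immediately, so modules are effectively
# processed one at a time; the sketch below submits every module to a single
# shared pool first. The helper name is hypothetical; `request` is assumed to
# be a normal Django request object and error handling is omitted.
import concurrent.futures

def gather_environment_data_concurrently(views_helper, request, environment):
    # Submit all modules up front, then collect results as they finish.
    module_list = views_helper.create_module_list()
    results = {}
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(module_list)) as executor:
        futures = {
            executor.submit(views_helper.environment_data_for_module,
                            request, environment, module): module
            for module in module_list
        }
        for future in concurrent.futures.as_completed(futures):
            results[futures[future]] = future.result()
    return results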
class GraphiteHelper(): def __init__(self, request=None, environment=None): self.module = 'graphite_module' self.ah_obj = AtlasHelper() self.aws_helperobj = AwsHelper() self.module_config_data = self.ah_obj.get_atlas_configuration_data( self.module) self.graphite_url = " " self.framework = "" self.parameters_list = [] self.time_interval = 0.0 self.server_monitored = [] self.format = "" self.from_time = "" self.to_time = "" self.memcache_var = memcache.Client([ self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location') ], debug=0) if environment is not None: self.aws_moduleobj = AwsModule(request=request, environment=environment) def get_subnet_list(self, environment): """ Get the subnets for environment which has instances and decide if an attribute should be displayed on a subnet. """ if environment != 'uncategorized': subnets_with_instances = self.aws_moduleobj.get_information( environment, subnets_with_instances='true') subnet_list = [] for subnet, stack_list in subnets_with_instances.iteritems(): for attribute, attr_details in self.module_config_data[ 'stack_attributes'].iteritems(): if attr_details['stack'] == 'all' or set( attr_details['stack']).issubset(set(stack_list)): if subnet not in subnet_list: subnet_list.append(subnet) return subnet_list def get_query_parameters(self): """Get the query parameters from atlas config yaml""" self.graphite_url = self.module_config_data['others'][ 'graphite_url'] + "render/?" self.framework = self.module_config_data['others']['framework'] self.servers_monitored = self.module_config_data['others'][ 'server_name'] self.database = self.module_config_data['others']['database'] self.time_interval = self.module_config_data['others']['time_duration'] if 'from' in self.time_interval: self.from_time = self.time_interval['from'] if 'to' in self.time_interval: self.to_time = self.time_interval['to'] if self.to_time is not None and self.from_time is not None: self.time_string = "&from=" + str(self.from_time) + "&to=" + str( self.to_time) if self.from_time is None: self.time_string = "&to=" + str(self.to_time) if self.to_time is None: self.time_string = "&from=" + str(self.from_time) self.parameters_list = self.module_config_data['others']['parameters'] self.format = self.module_config_data['others']['format'] def queries_for_graphite(self, subnet_list): """Construct queries for grahite""" query_dict = collections.defaultdict(dict) self.get_query_parameters() for subnet in subnet_list: for server in self.servers_monitored: for parameter in self.parameters_list: target = self.framework + "." + subnet + ".ms." + server + "." + self.database + "." 
+ parameter query_dict[subnet][ parameter] = self.graphite_url + "target=" + target + self.time_string + "&format=" + self.format return dict(query_dict) def generate_report(self, query): """Retrieve query results from the graphite server.""" try: report_json = {} response = requests.get(query) if response.status_code == 200: report_json = json.loads( response.text) #convert the json into a python dictionary return report_json except ConnectionError as exp_object: exc_type, exc_obj, exc_tb = sys.exc_info() self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb) except HTTPError as exp_object: exc_type, exc_obj, exc_tb = sys.exc_info() self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb) except Exception as exp_object: exc_type, exc_obj, exc_tb = sys.exc_info() self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb) return {} def get_stack_attributes(self, environment): """Get all stack attributes.""" stack_attribute_list, stack_attribute_dict = [], {} for attribute, details in self.module_config_data[ 'stack_attributes'].iteritems(): stack_attribute_list.append( (details['display_name'], details['editable'])) stack_attribute_dict[details['display_name']] = details return (stack_attribute_list, stack_attribute_dict) def get_stack_attribute_values(self, environment): """Get stack attribute values from cache. If it does not exist, get it from the global cache.""" stack_attribute_values = self.memcache_var.get( str(environment + "graphite_stack_attributes")) if not stack_attribute_values: stack_attribute_values = self.memcache_var.get( str(environment + "global_graphite_stack_attributes")) if stack_attribute_values is not None: self.memcache_var.set( str(environment + "graphite_stack_attributes"), stack_attribute_values, 10 * 60) with threading.Lock(): thread = threading.Thread( target=self.cache_stack_attribute_values, args=[environment]) thread.start() return stack_attribute_values def cache_stack_attribute_values(self, environment): """Cache stack attribute values.""" try: stack_attribute_values = self.stack_attribute_values(environment) self.memcache_var.set( str(environment + "graphite_stack_attributes"), stack_attribute_values, 10 * 60) if stack_attribute_values is None: raise Exception( "The graphite attribute values for environment " + environment + " has not been fetched. Please make sure the cache is populated !!!" 
) if stack_attribute_values is not None: self.memcache_var.set( str(environment + "global_graphite_stack_attributes"), stack_attribute_values, 15 * 60) self.memcache_var.disconnect_all() except Exception as exp_object: exc_type, exc_obj, exc_tb = sys.exc_info() self.ah_obj.print_exception("graphite_helper.py", "cache_stack_attribute_values()", exp_object, exc_type, exc_obj, exc_tb) return {} def stack_attribute_values(self, environment): """get stack attribute values from graphite server and parse it.""" if environment != 'uncategorized': stack_attribute_dict = self.ah_obj.create_nested_defaultdict() organization_list = self.aws_helperobj.get_organizations() region_list = self.aws_helperobj.get_regions() stack_attributes_from_config = self.module_config_data[ 'stack_attributes'] attributes_list = stack_attributes_from_config.keys() subnet_list = self.get_subnet_list(environment) graphite_query_dict = self.queries_for_graphite(subnet_list) for organization in organization_list: for region in region_list: vpc_list = self.aws_helperobj.get_vpc_in_region(region) if vpc_list: for vpc in vpc_list: for subnet in subnet_list: for attribute in stack_attributes_from_config: stack_list = stack_attributes_from_config[ attribute]['stack'] attribute_value = "" suffix = "" if 'suffix' in stack_attributes_from_config[ attribute]: suffix = stack_attributes_from_config[ attribute]['suffix'] display_name = "" if 'display_name' in stack_attributes_from_config[ attribute]: display_name = stack_attributes_from_config[ attribute]['display_name'] report = self.generate_report( graphite_query_dict[subnet] [attribute]) if report: target = self.ah_obj.split_string( report[0]['target'], ('.')) if subnet in target and attribute in target: for index in range( len(report[0] ['datapoints']) - 1, 0, -1): if report and report[0][ 'datapoints'][index][ 0] is not None: attribute_value = str( int(report[0] ['datapoints'] [index][0]) ) + " " + suffix break else: attribute_value = "null" else: attribute_value = "null" for stack in stack_list: stack_attribute_dict[region][vpc][ subnet][stack][ display_name] = attribute_value return self.ah_obj.defaultdict_to_dict(stack_attribute_dict)
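# --- Usage sketch (not part of the original module) --------------------------
# A hedged example of how GraphiteHelper builds render queries and fetches one
# report. The subnet names and the 'cpu' parameter key are illustrative
# placeholders; the real values come from the 'graphite_module' section of the
# atlas config YAML (framework, server_name, database, parameters,
# time_duration, format).
def _example_graphite_report(helper):
    # `helper` is assumed to be a GraphiteHelper instance.
    queries = helper.queries_for_graphite(['subnet-a', 'subnet-b'])
    # Each entry looks roughly like:
    #   <graphite_url>render/?target=<framework>.subnet-a.ms.<server>.<database>.cpu&from=...&to=...&format=<format>
    return helper.generate_report(queries['subnet-a']['cpu'])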
class JenkinsActions: def __init__(self, request=None, environment=None): self.ah_obj = AtlasHelper() self.module="jenkins_module" self.python_jenkinsurl = self.ah_obj.get_atlas_config_data(self.module, "python_jenkins_url") self.build_record_count = self.ah_obj.get_atlas_config_data(self.module, "build_record_count") self.jenkins_password = os.environ.get('JENKINS_PASSWORD') self.jenkins_username = os.environ.get('JENKINS_USERNAME') self.jenkinsurl = os.environ.get('JENKINS_URL') self.python_jenkinsurl = self.jenkinsurl+"/job/" self.memcache_var = memcache.Client([self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location') ], debug=0) if environment: self.aws_obj = AwsModule(request, environment) """ helper methods """ def get_jenkins_job_folder(self, jobname): job_folder_information = self.ah_obj.get_atlas_config_data(self.module, "folders")[1] for folder, job_list in job_folder_information.iteritems(): if jobname in job_list: return folder def cache_jenkins_build_userinfo(self): try: jobname = 'AWS-Build-Dev-Deploy-Dev' build_userinfo_dict = self.jenkins_build_userinfo(jobname) self.memcache_var.set(jobname+'_build_userinfo', build_userinfo_dict,15*60) if build_userinfo_dict is None: raise Exception("Source data from Jenkins server is unavailable. Please ensure data is available and populate the cache.") if build_userinfo_dict is not None: self.memcache_var.set('global_'+jobname+'_build_userinfo', build_userinfo_dict,86400) self.memcache_var.disconnect_all() except Exception as exp_object: exc_type, exc_obj, exc_tb = sys.exc_info() self.ah_obj.print_exception("jenkins_module.py", "cache_jenkins_build_userinfo()", exp_object, exc_type, exc_obj, exc_tb) return def jenkins_build_userinfo(self, jobname): job_info_dict, job_info = {}, {} jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password) jenkins_url = self.python_jenkinsurl+self.get_jenkins_job_folder(jobname) jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password) build_user_info_dict = collections.defaultdict(dict) try: if jenkins_obj.job_exists(jobname): job_info = jenkins_obj.get_job_info(jobname) build_information_list = job_info['builds'] latest_build_number = build_information_list[0]['number'] build_info = jenkins_obj.get_build_info(jobname, latest_build_number) for build_number in range(latest_build_number-self.build_record_count, latest_build_number+1): try: build_info_dict = jenkins_obj.get_build_info(jobname, build_number) build_user_info_dict[build_number] = {'deployed_by':"", 'branch':"", 'last_deployed': "", 'subnet':"", 'commit_hash':""} branch= "" if 'actions' in build_info_dict: if 'parameters' in build_info_dict['actions'][0]: for parameter_dict in build_info_dict['actions'][0]['parameters']: if parameter_dict['name'] == 'subnet': build_user_info_dict[build_number]['subnet'] = parameter_dict['value'] if parameter_dict['name'] == 'branch': build_user_info_dict[build_number]['branch'] = parameter_dict['value'] branch = parameter_dict['value'] if 'causes' in build_info_dict['actions'][1]: actions = build_info_dict['actions'][1] if 'userName' in actions['causes'][0]: build_user_info_dict[build_number]['deployed_by'] = build_info_dict['actions'][1]['causes'][0]['userName'] if 'buildsByBranchName' in build_info_dict['actions'][2]: commit_hash = build_info_dict['actions'][2]['buildsByBranchName']['origin/develop']['revision']['SHA1'][:7] build_user_info_dict[build_number]['commit_hash'] = commit_hash if 
'timestamp' in build_info_dict: timestamp = str(datetime.datetime.now() - datetime.datetime.fromtimestamp(build_info_dict['timestamp']/1000)) deployed_before = "" if isinstance(timestamp, list): hours_minutes = timestamp[1].split(":")[:2] deployed_before = timestamp[0] + " "+hours_minutes[0]+"hrs "+hours_minutes[1]+"mins" else: hours_minutes = timestamp.split(":")[:2] deployed_before = hours_minutes[0]+" hrs "+hours_minutes[1]+" mins" build_user_info_dict[build_number]['last_deployed'] = deployed_before except: continue return self.ah_obj.defaultdict_to_dict(build_user_info_dict) except Exception as exp_object: return {} def get_jenkins_build_userinfo(self, jobname): build_userinfo_dict = self.memcache_var.get(jobname+'_build_userinfo') if not build_userinfo_dict: build_userinfo_dict = self.memcache_var.get('global_'+jobname+'_build_userinfo') if build_userinfo_dict is not None: self.memcache_var.set(jobname+'_build_userinfo', build_userinfo_dict, 3*60*60) with threading.Lock(): thread = threading.Thread(target=self.cache_jenkins_build_userinfo) thread.start() return build_userinfo_dict def get_jenkins_job_info(self, jobname): job_info_dict, job_info = {}, {} jenkins_url = self.python_jenkinsurl+self.get_jenkins_job_folder(jobname) jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password) try: if jenkins_obj.job_exists(jobname): job_info = jenkins_obj.get_job_info(jobname) job_info_dict= {'last_successful_build_number':job_info['lastSuccessfulBuild']['number'], 'last_successful_build_url': job_info['lastSuccessfulBuild']['url'], 'last_unsuccessful_build_number': job_info['lastUnsuccessfulBuild']['number'], 'last_unsuccessful_build_url': job_info['lastUnsuccessfulBuild']['url'], 'last_completed_build_number':job_info['lastCompletedBuild']['number'], 'last_completed_build_url':job_info['lastCompletedBuild']['url'], 'last_unstable_build_number':job_info['lastUnstableBuild'], 'last_unstable_build_url':job_info['lastUnstableBuild'], 'last_stable_build_number':job_info['lastStableBuild']['number'], 'last_stable_build_url':job_info['lastStableBuild']['url'], 'last_build': job_info['lastBuild']['url'], 'last_build-number': job_info['lastBuild']['number'], 'nextBuildNumber':job_info['nextBuildNumber'] } return job_info_dict except Exception as exp_object: return {} def get_console_output(self,build): console_output = build.get_console() if console_output: return console_output def check_build_status(self, job_name): status_dict = {} try: jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password) job = jenkinsapi_obj.get_job(job_name) build = job.get_last_build() other_info = self.get_jenkins_job_info(job_name) if other_info: status_dict['other_info'] = self.get_jenkins_job_info(job_name) status_dict['console_output'] = self.get_console_output(build) if build.is_running(): status_dict['exit_status'] = "Build not complete" status_dict['action_state'] = "action_in_progress" else: if build.is_good(): status_dict['exit_status'] = "Build Successful" status_dict['action_state'] = "action_completed" return status_dict except Exception as exp_object: status_dict['action_state'] = 'action_failed' return status_dict """ action methods """ def server_create_test(self, subnet, profile, node_name): """ Create a server on aws_obj. 
""" jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password) if profile == "ops-general": jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile, 'name':node_name}) else: jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile}) def echo_contents(self, text1, text2): """ Echo contents sample jenkins job. """ jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password) jenkinsapi_obj.build_job('echo_contents', {'text1': text1, 'text2': text2}) def initiate_actions(self, action, parameters): """ Initiate jenkins actions. """ initial_status = {} try: if parameters is None or parameters =='': return other_info = self.get_jenkins_job_info(action) if other_info: initial_status['other_info'] = other_info if action =='echo_contents': self.echo_contents(parameters['text1'], parameters['text2']) if action == 'server_create_test': self.server_create_test(parameters['subnet'], parameters['profile'], parameters['node_name']) initial_status = self.check_build_status(action) initial_status['action_state'] = 'action_initiated' return initial_status except Exception as exp_object: return initial_status def action_state(self, action): """ Check the status of builds. """ action_state = self.check_build_status(action) return action_state def parameter_values(self, action, parameter, environment=None): """ Return parameter values for each build to be displayed as options to user. """ if action == 'server_create_test': if parameter == 'subnet': return self.aws_obj.get_information(environment, env_subnet_list='true') if parameter == 'profile': return self.aws_obj.get_information(environment, profiles='true') if parameter == 'name': return "" if action == 'echo_contents': if parameter == 'text1': return "" if parameter == 'text2': return "" def action_parameters(self, action_type, environment=None): """ Get parameters for each action. """ action_parameters_dict={} if (action_type=='vpc_actions'): action_parameters_dict = self.unpack_action_parameters(self.ah_obj.get_atlas_config_data(self.module, 'vpc_actions')[1], environment) elif action_type == 'instance_actions': pass elif action_type == 'instance_group_actions': pass elif action_type == 'stack_actions': action_parameters_dict = self.unpack_action_parameters(self.ah_obj.get_atlas_config_data(self.module, 'stack_actions')[1], environment) return action_parameters_dict def unpack_action_parameters(self, action_parameters_dict, environment=None): parameter_dict = {} for key, values in action_parameters_dict.iteritems(): parameter_list = values['parameters'] parameter_dict[key] = {} for parameter in parameter_list: temp_list = [] temp_parameter = parameter.split(',') temp_list.append(temp_parameter[1]) temp_list.append(self.parameter_values(key, temp_parameter[0], environment)) parameter_dict[key][temp_parameter[0]] = temp_list return parameter_dict
class JenkinsModule(AtlasBase): def __init__(self, request=None, environment=None): self.ah_obj = AtlasHelper() self.aws_helperobj = AwsHelper() self.module = "jenkins_module" if environment is None: self.awsact_obj = AwsActions() self.jenkinsact_obj = JenkinsActions(request) else: self.jenkinsact_obj = JenkinsActions(request,environment) def get_configuration_data(self, key): value = self.ah_obj.get_atlas_config_data(self.module, key) if isinstance(value, dict): return value[0] else: return value def get_information(self, environment, **kwargs): pass def get_status(self, environment=None): pass def get_tabs(self, environment=None): pass def get_instance_actions(self, environment=None): pass def get_instance_group_actions(self): pass def get_stack_actions(self, environment=None): stack_actions = self.get_configuration_data('stack_actions') if stack_actions: return stack_actions[0] def get_vpc_actions(self): vpc_actions = self.get_configuration_data("vpc_actions") if vpc_actions: return vpc_actions[0] def get_parameter_values(self, environment, parameter): return self.jenkinsact_obj.parameters_values(environment, parameter) def get_action_parameters(self, action_type, environment=None): return self.jenkinsact_obj.action_parameters(action_type, environment) def perform_instance_actions(): pass def perform_instancegroup_actions(): pass def perform_stack_actions(self, json_data, environment=None): action = json_data['action'] parameters_dict = json_data['parameters'] initial_status = self.jenkinsact_obj.initiate_actions(action, parameters_dict) if initial_status is not None: initial_status['action_type'] = json_data['action_type'] initial_status['start_time'] = json_data['start_time'] initial_status['action'] = json_data['action'] return initial_status def perform_vpc_actions(self, json_data, environment=None): action = json_data['action'] action_type = json_data['action_type'] parameters_dict = json_data['parameters'] initial_status = self.jenkinsact_obj.initiate_actions(action, parameters_dict) if initial_status is not None: initial_status['action_type'] = json_data['action_type'] initial_status['start_time'] = json_data['start_time'] initial_status['action'] = json_data['action'] return initial_status def get_action_status(self, json_data, environment=None): action = json_data['action'] action_type = json_data['action_type'] parameter_dict = json_data['parameters'] action_status = self.jenkinsact_obj.action_state(action) if action_status is not None: action_status['action_type'] = json_data['action_type'] action_status['start_time'] = json_data['start_time'] action_status['action'] = json_data['action'] return action_status def get_columns(self, environment=None): pass def load_session(self, request, environment=None): pass def save_session(self, request, environment=None): pass def get_defaults(): pass def get_aggregates(self, environment=None): pass def get_stack_attributes(self, environment=None, stack=None): stack_attribute_list = [] stack_attributes_dict = self.ah_obj.get_atlas_config_data('jenkins_module', 'stack_attributes')[1] for attribute, details in stack_attributes_dict.iteritems(): stack_attribute_list.append((attribute, details['editable'])) return(stack_attribute_list, stack_attributes_dict) def get_attribute_values(self, environment=None): jenkins_build_infodict = self.jenkinsact_obj.get_jenkins_build_userinfo('AWS-Build-Dev-Deploy-Dev') rev_sorted_buildno_list = list(reversed(sorted(jenkins_build_infodict.keys()))) (stack_attr_list, stack_attr_details) = 
self.get_configuration_data("stack_attributes") stack_attr_values_dict = self.ah_obj.create_nested_defaultdict() organization_list = self.aws_helperobj.get_organizations() temp_subnet_list = [] for organization in organization_list: region_list = self.aws_helperobj.get_regions() for region in region_list: vpc_list = self.aws_helperobj.get_vpc_in_region(region) if vpc_list: for vpc in vpc_list: for attribute, attr_details in stack_attr_details.iteritems(): for build_number in rev_sorted_buildno_list: subnet = jenkins_build_infodict[build_number]['subnet'] if subnet not in temp_subnet_list: temp_subnet_list.append(subnet) for attribute in stack_attr_list: for stack in stack_attr_details[attribute]['stack']: if attribute in jenkins_build_infodict[build_number]: stack_attr_values_dict[region][vpc][subnet][stack][attribute] = \ jenkins_build_infodict[build_number][attribute] return self.ah_obj.defaultdict_to_dict(stack_attr_values_dict) def refresh_information(self, environment=None): self.jenkinsact_obj.cache_jenkins_build_userinfo()
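# --- Payload sketch (not part of the original module) ------------------------
# A hedged illustration of the 'actions_data' payload that
# ViewsHelper.perform_environment_actions() literal_eval()s from the POST body
# before dispatching to JenkinsModule.perform_stack_actions(). Whether
# 'server_create_test' is configured as a stack action depends on the atlas
# config YAML; the timestamp and parameter values are illustrative only.
EXAMPLE_STACK_ACTION_PAYLOAD = {
    'module': 'jenkins_module',
    'action_type': 'stack_action',
    'action': 'server_create_test',
    'start_time': '2015-01-01 00:00:00',
    'parameters': {'subnet': 'subnet-a',
                   'profile': 'ops-general',
                   'node_name': 'web-01'},
}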