def __init__(self, request=None, environment=None): self.module = "cloudability_module" self.ah_obj = AtlasHelper() self.cloud_obj = Cloudability() self.awshelper_obj = AwsHelper() self.request = request self.aws_object = AwsModule(request, environment) self.instance_cost = 0.0 self.storage_cost = 0.0
def __init__(self, request=None,environment=None): self.module = "cloudability_module" self.ah_obj = AtlasHelper() self.cloud_obj = Cloudability() self.awshelper_obj = AwsHelper() self.request = request self.aws_object = AwsModule(request, environment) self.instance_cost = 0.0 self.storage_cost = 0.0
def __init__(self, request=None, environment=None): self.ah_obj = AtlasHelper() self.module = "jenkins_module" self.python_jenkinsurl = self.ah_obj.get_atlas_config_data( self.module, "python_jenkins_url") self.build_record_count = self.ah_obj.get_atlas_config_data( self.module, "build_record_count") self.jenkins_password = os.environ.get('JENKINS_PASSWORD') self.jenkins_username = os.environ.get('JENKINS_USERNAME') self.jenkinsurl = os.environ.get('JENKINS_URL') self.python_jenkinsurl = self.jenkinsurl + "/job/" self.memcache_var = memcache.Client([ self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location') ], debug=0) if environment: self.aws_obj = AwsModule(request, environment)
def get_module_object(self, module_name, request=None, environment=None):
    """Create and return objects for each module."""
    if module_name == 'aws_module':
        return AwsModule(request=request, environment=environment)
    elif module_name == 'chef_module':
        return ChefModule(request=request, environment=environment)
    elif module_name == 'cloudability_module':
        return CloudabilityModule(request=request, environment=environment)
    elif module_name == 'jenkins_module':
        return JenkinsModule(request=request, environment=environment)
    elif module_name == 'graphite_module':
        return GraphiteModule(request=request, environment=environment)
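# Illustrative usage sketch (not part of the original source). get_module_object() is a
# method, so it is assumed here to be called on an existing instance of its owning
# helper/dispatcher class, referred to below as `dispatcher`; the module-name strings are
# the keys handled in the branches above, and 'dev' is a placeholder environment name.
#
#   cloudability = dispatcher.get_module_object('cloudability_module', request=None, environment='dev')
#   jenkins = dispatcher.get_module_object('jenkins_module', request=None, environment='dev')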
def stack_attribute_values(self, request, environment, region_vpc_dict):
    """Get attributes and values for each stack."""
    stack_attribute_dict = collections.defaultdict(dict)
    awsmodule_obj = AwsModule(request, environment)
    (stack_attr_list, stack_attr_details) = self.ah_obj.get_atlas_config_data(self.module, 'stack_attributes')
    application_subnets = awsmodule_obj.get_information(environment, application_subnets='true')
    apps_in_environment = awsmodule_obj.get_information(environment, apps_in_environment='true')
    if application_subnets is not None and apps_in_environment is not None:
        for region, vpc_list in region_vpc_dict.iteritems():
            stack_attribute_dict[region] = {}
            if vpc_list is not None:
                for vpc in vpc_list:
                    stack_attribute_dict[region][vpc] = {}
                    for subnet in application_subnets:
                        stack_attribute_dict[region][vpc][subnet] = {}
                        for stack in apps_in_environment:
                            stack_attribute_dict[region][vpc][subnet][stack] = {}
                            for attribute in stack_attr_list:
                                details = stack_attr_details[attribute]
                                stack_attribute_dict[region][vpc][subnet][stack].update(
                                    self.get_values_for_attribute(environment, region, vpc,
                                                                  subnet, stack, attribute, details))
    return dict(stack_attribute_dict)
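# The structure built above is returned as a plain dict shaped as
#   {region: {vpc: {subnet: {stack: {attribute: value, ...}}}}}
# with the innermost attribute/value pairs supplied by get_values_for_attribute().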
class CloudabilityModule(AtlasBase):
    def __init__(self, request=None, environment=None):
        self.module = "cloudability_module"
        self.ah_obj = AtlasHelper()
        self.cloud_obj = Cloudability()
        self.awshelper_obj = AwsHelper()
        self.request = request
        self.aws_object = AwsModule(request, environment)
        self.instance_cost = 0.0
        self.storage_cost = 0.0

    def get_information(self, environment=None, **kwargs):
        organization_list = self.awshelper_obj.get_organizations()
        if environment is None:
            if kwargs.get('env_cost_dict') == 'true':
                for organization in organization_list:
                    env_cost_dict = self.cloud_obj.get_cloudability_costs()['environment_costs'][organization]
                    env_cost_dict['all'] = self.get_information(ec2_cost_dict='true')['region_zone']
                    return env_cost_dict
            if kwargs.get('ec2_cost_dict') == 'true':
                for organization in organization_list:
                    ec2_costs = self.cloud_obj.get_cloudability_costs()['ec2_costs'][organization]
                    return ec2_costs
        else:
            if kwargs.get('env_cost_dict') == 'true':
                env_costs = 0
                for organization in organization_list:
                    env_cost_dict = self.cloud_obj.get_cloudability_costs()['environment_costs'][organization]
                    environment_groups = self.ah_obj.get_atlas_config_data("global_config_data", "environment_groups")
                    if environment_groups and environment in environment_groups[1].keys():
                        if environment == 'all':
                            env_group_for_environment = self.awshelper_obj.get_environments(organization)
                        else:
                            env_group_for_environment = environment_groups[1][environment]
                        for env in env_group_for_environment:
                            env_costs += env_cost_dict[env]
                        env_cost_dict[environment] = env_costs
                    return env_cost_dict
            if kwargs.get('apps_in_environment') == 'true':
                return self.aws_object.get_information(environment, apps_in_environment='true')
            if kwargs.get('instance_data') == 'true':
                return self.aws_object.get_information(environment, instance_data='true')
            if kwargs.get('instances_cost_dict') == 'true':
                for organization in organization_list:
                    return self.cloud_obj.get_cloudability_costs()['instances_costs'][organization]
            if kwargs.get('ebs_cost_dict') == 'true':
                for organization in organization_list:
                    return self.cloud_obj.get_cloudability_costs()['ebs_costs'][organization]
            if kwargs.get('aws_info_dict') == 'true':
                return self.aws_object.get_information(environment, 'aws_info_dict')
            if kwargs.get('application_subnets') == 'true':
                return self.aws_object.get_information(environment, 'application_subnets')

    def get_configuration_data(self, key):
        value = self.ah_obj.get_atlas_config_data(self.module, key)
        if isinstance(value, dict):
            return value[0]
        else:
            return value

    def get_stack_attributes(self, environment=None):
        """Get stack attributes from the config file."""
        stack_attribute_list = []
        stack_attributes_dict = self.ah_obj.get_atlas_config_data('cloudability_module', 'stack_attributes')[1]
        for attribute, details in stack_attributes_dict.iteritems():
            stack_attribute_list.append((attribute, details['editable']))
        return (stack_attribute_list, stack_attributes_dict)

    def get_attribute_values(self, environment=None):
        return self.__get_detailed_instances_cost_dict(environment, 'stack_costs')

    def get_status(self, environment=None):
        status_information = self.get_configuration_data('status')
        cloud_status_dict = {}
        organization_list = self.awshelper_obj.get_organizations()
        environment_list = []
        if environment is None:
            env_cost_dict = self.get_information(env_cost_dict='true')
            cloud_status_dict = {env: ["$" + str(env_cost_dict[env])] for env in env_cost_dict.keys()}
        else:
            env_cost_dict = self.get_information(environment, env_cost_dict='true')
            region_vpc_selection = self.aws_object.get_information(environment, region_vpc_dict='true')
            if environment == "uncategorized":
                region_list = self.awshelper_obj.get_regions()
                for region in region_vpc_selection:
                    if region == 'east':
                        cloud_status_dict[region] = ["$" + str(env_cost_dict[environment])]
                    else:
                        cloud_status_dict[region] = ["$" + "0.0"]
            else:
                for vpc in ['ame1']:  # should be changed later to include all vpcs
                    cloud_status_dict[vpc] = ["$" + str(env_cost_dict[environment])]
        return (status_information, cloud_status_dict)

    def get_tabs(self, environment=None):
        pass

    def get_instance_actions(self, environment=None):
        pass

    def get_environment_actions(self, environment=None):
        pass

    def get_instance_group_actions(self, environment=None):
        pass

    def get_stack_actions(self, environment=None):
        pass

    def get_vpc_actions(self):
        pass

    def get_action_status(self, json_data, environment=None):
        pass

    def perform_instance_actions(self, environment=None):
        pass

    def perform_instancegroup_actions(self):
        pass

    def perform_stack_actions(self):
        pass

    def perform_vpc_actions(self, json_data):
        pass

    def perform_environment_actions(self, environment=None):
        pass

    def get_columns(self, environment=None):
        column_list = self.ah_obj.get_atlas_config_data(self.module, 'columns')
        column_dict = self.ah_obj.create_nested_defaultdict()
        if column_list:
            column_dict = self.__get_detailed_instances_cost_dict(environment, 'instances_cost')
        return (column_list, self.ah_obj.defaultdict_to_dict(column_dict))

    def get_action_parameters(self, action_type, environment=None):
        pass

    def load_session(self, request, environment=None):
        pass

    def save_session(self, request, environment=None):
        pass

    def get_defaults(self):
        pass

    def get_aggregates(self, environment=None):
        aggregates = self.ah_obj.get_atlas_config_data(self.module, 'aggregates')
        if environment is None:
            aggregate_list = ["$" + str(self.get_information(ec2_cost_dict='true')['region_zone'])]
            return (aggregates, aggregate_list)
        else:
            aggregate_dict = collections.defaultdict(dict)
            for agg_key in aggregates:
                if agg_key == 'cost':
                    aggregate_dict[agg_key] = self.get_information(env_cost_dict='true')[environment]
            return dict(aggregate_dict)

    def refresh_information(self, environment=None):
        self.cloud_obj.cache_cloud_costs()
        return

    def __get_detailed_instances_cost_dict(self, environment, cost_type):
        instances_cost_dict = self.get_information(environment, instances_cost_dict='true')
        ebs_cost_dict = self.get_information(environment, ebs_cost_dict='true')
        aws_tabs_dict = self.aws_object.get_tabs(environment)[1]
        instance_information = self.aws_object.get_information(environment, instance_data='true')
        organization_list = self.awshelper_obj.get_organizations()
        (stack_attr_list, stack_attr_details) = self.get_configuration_data('stack_attributes')
        apps_in_environment = self.get_information(environment, apps_in_environment='true')
        application_subnets = self.get_information(environment, application_subnets='true')
        region, vpc, subnet = "", "", ""
        instance_cost_column_dict = self.ah_obj.create_nested_defaultdict()
        ebs_cost_column_dict = self.ah_obj.create_nested_defaultdict()
        stack_cost_dict = self.ah_obj.create_nested_defaultdict()
        stack_cost_string_dict = self.ah_obj.create_nested_defaultdict()
        name_tag_value, fqdn_tag_value = "", ""
        # one tab entry per instance; the loop value is renamed so it does not shadow
        # the aws_tabs_dict being iterated
        for instance, instance_tabs in aws_tabs_dict.iteritems():
            attribute_cost = {}
            if 'Name' in instance_tabs['aws_tags']:
                name_tag_value = instance_tabs['aws_tags']["Name"]
            if 'fqdn' in instance_tabs['aws_tags']:
                fqdn_tag_value = instance_tabs['aws_tags']['fqdn']
            instance_details = self.ah_obj.get_nested_attribute_values(instance_information, instance)[1]
            region = instance_details['region'] if "region" in instance_details else "none"
            subnet = instance_details['subnet'] if "subnet" in instance_details else "none"
            attribute_cost[subnet] = {}
            vpc = instance_details['vpc'] if "vpc" in instance_details else "none"
            attribute_cost[vpc] = {}
            attribute_cost[vpc][subnet] = {}
            stack = instance_details['application'] if "application" in instance_details else "none"
            if cost_type == 'instances_cost' or cost_type == 'stack_costs':
                if fqdn_tag_value in ebs_cost_dict:
                    self.storage_cost = ebs_cost_dict[fqdn_tag_value]
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['storage_cost'] = \
                            "$" + str(ebs_cost_dict[fqdn_tag_value]) + "/m"
                    else:
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['storage_cost'] = \
                            "$" + str(ebs_cost_dict[fqdn_tag_value]) + "/m"
                else:
                    self.storage_cost = 0.0
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['storage_cost'] = "$0.0/m"
                    else:
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['storage_cost'] = "$0.0/m"
                if fqdn_tag_value in instances_cost_dict:
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = \
                            "$" + str(instances_cost_dict[fqdn_tag_value]) + "/m"
                    else:
                        self.instance_cost = instances_cost_dict[fqdn_tag_value]
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = \
                            "$" + str(instances_cost_dict[fqdn_tag_value]) + "/m"
                elif name_tag_value in instances_cost_dict:
                    self.instance_cost = instances_cost_dict[name_tag_value]
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = \
                            "$" + str(instances_cost_dict[name_tag_value]) + "/m"
                    else:
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = \
                            "$" + str(instances_cost_dict[name_tag_value]) + "/m"
                else:
                    if environment == "uncategorized":
                        instance_cost_column_dict[region]['subnets']['none']['instance_attributes'][instance]['instance_cost'] = "(empty)"
                    else:
                        self.instance_cost = 0.0
                        instance_cost_column_dict[vpc]['subnets'][subnet]['instance_attributes'][instance]['instance_cost'] = "(empty)"
            if cost_type == 'stack_costs':
                attr_cost = 0.0
                for attribute in stack_attr_list:
                    if attribute == 'instance_cost':
                        attr_cost = self.instance_cost
                    elif attribute == 'storage_cost':
                        attr_cost = self.storage_cost
                    elif attribute == 'total_cost':
                        attr_cost = self.instance_cost + self.storage_cost
                    if stack_attr_details[attribute]['stack'] == ['all']:
                        for apps in apps_in_environment:
                            if stack == apps:
                                if not stack_cost_dict[region][vpc][subnet][stack][attribute]:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] = attr_cost
                                else:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] += attr_cost
                    else:
                        for attr_stack in stack_attr_details[attribute]['stack']:
                            if attr_stack == stack:
                                if not stack_cost_dict[region][vpc][subnet][stack][attribute]:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] = attr_cost
                                else:
                                    stack_cost_dict[region][vpc][subnet][stack][attribute] += attr_cost
                    stack_cost_string_dict[region][vpc][subnet][stack][attribute] = \
                        "$" + str(stack_cost_dict[region][vpc][subnet][stack][attribute]) + "/m"
        if cost_type == 'stack_costs':
            return self.ah_obj.defaultdict_to_dict(stack_cost_string_dict)
        if cost_type == 'instances_cost':
            return self.ah_obj.defaultdict_to_dict(instance_cost_column_dict)
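# Illustrative usage sketch (not part of the original source). Assumes the atlas config,
# memcache server and Cloudability/AWS credentials the class relies on are available;
# 'dev' is a placeholder environment name.
def example_cloudability_usage(environment='dev'):
    cloudability = CloudabilityModule(request=None, environment=environment)
    status_information, cloud_status_dict = cloudability.get_status(environment)
    column_list, column_values = cloudability.get_columns(environment)
    return status_information, cloud_status_dict, column_list, column_values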
class JenkinsActions:
    def __init__(self, request=None, environment=None):
        self.ah_obj = AtlasHelper()
        self.module = "jenkins_module"
        self.python_jenkinsurl = self.ah_obj.get_atlas_config_data(self.module, "python_jenkins_url")
        self.build_record_count = self.ah_obj.get_atlas_config_data(self.module, "build_record_count")
        self.jenkins_password = os.environ.get('JENKINS_PASSWORD')
        self.jenkins_username = os.environ.get('JENKINS_USERNAME')
        self.jenkinsurl = os.environ.get('JENKINS_URL')
        self.python_jenkinsurl = self.jenkinsurl + "/job/"
        self.memcache_var = memcache.Client(
            [self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location')], debug=0)
        if environment:
            self.aws_obj = AwsModule(request, environment)

    # --- helper methods ---

    def get_jenkins_job_folder(self, jobname):
        job_folder_information = self.ah_obj.get_atlas_config_data(self.module, "folders")[1]
        for folder, job_list in job_folder_information.iteritems():
            if jobname in job_list:
                return folder

    def cache_jenkins_build_userinfo(self):
        try:
            jobname = 'AWS-Build-Dev-Deploy-Dev'
            build_userinfo_dict = self.jenkins_build_userinfo(jobname)
            self.memcache_var.set(jobname + '_build_userinfo', build_userinfo_dict, 15 * 60)
            if build_userinfo_dict is None:
                raise Exception("Source data from Jenkins server is unavailable. "
                                "Please ensure data is available and populate the cache.")
            if build_userinfo_dict is not None:
                self.memcache_var.set('global_' + jobname + '_build_userinfo', build_userinfo_dict, 86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "construct_cost_query()",
                                        exp_object, exc_type, exc_obj, exc_tb)
        return

    def jenkins_build_userinfo(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username,
                                     password=self.jenkins_password)
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username,
                                      password=self.jenkins_password)
        build_user_info_dict = collections.defaultdict(dict)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                build_information_list = job_info['builds']
                latest_build_number = build_information_list[0]['number']
                build_info = jenkins_obj.get_build_info(jobname, latest_build_number)
                for build_number in range(latest_build_number - self.build_record_count,
                                          latest_build_number + 1):
                    try:
                        build_info_dict = jenkins_obj.get_build_info(jobname, build_number)
                        build_user_info_dict[build_number] = {'deployed_by': "", 'branch': "",
                                                              'last_deployed': "", 'subnet': "",
                                                              'commit_hash': ""}
                        branch = ""
                        if 'actions' in build_info_dict:
                            if 'parameters' in build_info_dict['actions'][0]:
                                for parameter_dict in build_info_dict['actions'][0]['parameters']:
                                    if parameter_dict['name'] == 'subnet':
                                        build_user_info_dict[build_number]['subnet'] = parameter_dict['value']
                                    if parameter_dict['name'] == 'branch':
                                        build_user_info_dict[build_number]['branch'] = parameter_dict['value']
                                        branch = parameter_dict['value']
                            if 'causes' in build_info_dict['actions'][1]:
                                actions = build_info_dict['actions'][1]
                                if 'userName' in actions['causes'][0]:
                                    build_user_info_dict[build_number]['deployed_by'] = \
                                        build_info_dict['actions'][1]['causes'][0]['userName']
                            if 'buildsByBranchName' in build_info_dict['actions'][2]:
                                commit_hash = build_info_dict['actions'][2]['buildsByBranchName']['origin/develop']['revision']['SHA1'][:7]
                                build_user_info_dict[build_number]['commit_hash'] = commit_hash
                        if 'timestamp' in build_info_dict:
                            timestamp = str(datetime.datetime.now() -
                                            datetime.datetime.fromtimestamp(build_info_dict['timestamp'] / 1000))
                            deployed_before = ""
                            if isinstance(timestamp, list):
                                hours_minutes = timestamp[1].split(":")[:2]
                                deployed_before = timestamp[0] + " " + hours_minutes[0] + "hrs " + hours_minutes[1] + "mins"
                            else:
                                hours_minutes = timestamp.split(":")[:2]
                                deployed_before = hours_minutes[0] + " hrs " + hours_minutes[1] + " mins"
                            build_user_info_dict[build_number]['last_deployed'] = deployed_before
                    except:
                        continue
            return self.ah_obj.defaultdict_to_dict(build_user_info_dict)
        except Exception as exp_object:
            return {}

    def get_jenkins_build_userinfo(self, jobname):
        build_userinfo_dict = self.memcache_var.get(jobname + '_build_userinfo')
        if not build_userinfo_dict:
            build_userinfo_dict = self.memcache_var.get('global_' + jobname + '_build_userinfo')
            if build_userinfo_dict is not None:
                self.memcache_var.set(jobname + '_build_userinfo', build_userinfo_dict, 3 * 60 * 60)
            with threading.Lock():
                thread = threading.Thread(target=self.cache_jenkins_build_userinfo)
                thread.start()
        return build_userinfo_dict

    def get_jenkins_job_info(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username,
                                      password=self.jenkins_password)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                job_info_dict = {
                    'last_successful_build_number': job_info['lastSuccessfulBuild']['number'],
                    'last_successful_build_url': job_info['lastSuccessfulBuild']['url'],
                    'last_unsuccessful_build_number': job_info['lastUnsuccessfulBuild']['number'],
                    'last_unsuccessful_build_url': job_info['lastUnsuccessfulBuild']['url'],
                    'last_completed_build_number': job_info['lastCompletedBuild']['number'],
                    'last_completed_build_url': job_info['lastCompletedBuild']['url'],
                    'last_unstable_build_number': job_info['lastUnstableBuild'],
                    'last_unstable_build_url': job_info['lastUnstableBuild'],
                    'last_stable_build_number': job_info['lastStableBuild']['number'],
                    'last_stable_build_url': job_info['lastStableBuild']['url'],
                    'last_build': job_info['lastBuild']['url'],
                    'last_build-number': job_info['lastBuild']['number'],
                    'nextBuildNumber': job_info['nextBuildNumber'],
                }
            return job_info_dict
        except Exception as exp_object:
            return {}

    def get_console_output(self, build):
        console_output = build.get_console()
        if console_output:
            return console_output

    def check_build_status(self, job_name):
        status_dict = {}
        try:
            jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username,
                                         password=self.jenkins_password)
            job = jenkinsapi_obj.get_job(job_name)
            build = job.get_last_build()
            other_info = self.get_jenkins_job_info(job_name)
            if other_info:
                status_dict['other_info'] = self.get_jenkins_job_info(job_name)
            status_dict['console_output'] = self.get_console_output(build)
            if build.is_running():
                status_dict['exit_status'] = "Build not complete"
                status_dict['action_state'] = "action_in_progress"
            else:
                if build.is_good():
                    status_dict['exit_status'] = "Build Successful"
                    status_dict['action_state'] = "action_completed"
            return status_dict
        except Exception as exp_object:
            status_dict['action_state'] = 'action_failed'
            return status_dict

    # --- action methods ---

    def server_create_test(self, subnet, profile, node_name):
        """Create a server through the server_create_test Jenkins job."""
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username,
                                     password=self.jenkins_password)
        if profile == "ops-general":
            jenkinsapi_obj.build_job('server_create_test',
                                     {'subnet': subnet, 'profile': profile, 'name': node_name})
        else:
            jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile})

    def echo_contents(self, text1, text2):
        """Echo contents sample Jenkins job."""
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username,
                                     password=self.jenkins_password)
        jenkinsapi_obj.build_job('echo_contents', {'text1': text1, 'text2': text2})

    def initiate_actions(self, action, parameters):
        """Initiate Jenkins actions."""
        initial_status = {}
        try:
            if parameters is None or parameters == '':
                return
            other_info = self.get_jenkins_job_info(action)
            if other_info:
                initial_status['other_info'] = other_info
            if action == 'echo_contents':
                self.echo_contents(parameters['text1'], parameters['text2'])
            if action == 'server_create_test':
                self.server_create_test(parameters['subnet'], parameters['profile'], parameters['node_name'])
            initial_status = self.check_build_status(action)
            initial_status['action_state'] = 'action_initiated'
            return initial_status
        except Exception as exp_object:
            return initial_status

    def action_state(self, action):
        """Check the status of builds."""
        action_state = self.check_build_status(action)
        return action_state

    def parameter_values(self, action, parameter, environment=None):
        """Return parameter values for each build, displayed as options to the user."""
        if action == 'server_create_test':
            if parameter == 'subnet':
                return self.aws_obj.get_information(environment, env_subnet_list='true')
            if parameter == 'profile':
                return self.aws_obj.get_information(environment, profiles='true')
            if parameter == 'name':
                return ""
        if action == 'echo_contents':
            if parameter == 'text1':
                return ""
            if parameter == 'text2':
                return ""

    def action_parameters(self, action_type, environment=None):
        """Get parameters for each action."""
        action_parameters_dict = {}
        if action_type == 'vpc_actions':
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module, 'vpc_actions')[1], environment)
        elif action_type == 'instance_actions':
            pass
        elif action_type == 'instance_group_actions':
            pass
        elif action_type == 'stack_actions':
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module, 'stack_actions')[1], environment)
        return action_parameters_dict

    def unpack_action_parameters(self, action_parameters_dict, environment=None):
        parameter_dict = {}
        for key, values in action_parameters_dict.iteritems():
            parameter_list = values['parameters']
            parameter_dict[key] = {}
            for parameter in parameter_list:
                temp_list = []
                temp_parameter = parameter.split(',')
                temp_list.append(temp_parameter[1])
                temp_list.append(self.parameter_values(key, temp_parameter[0], environment))
                parameter_dict[key][temp_parameter[0]] = temp_list
        return parameter_dict
class GraphiteHelper():
    def __init__(self, request=None, environment=None):
        self.module = 'graphite_module'
        self.ah_obj = AtlasHelper()
        self.aws_helperobj = AwsHelper()
        self.module_config_data = self.ah_obj.get_atlas_configuration_data(self.module)
        self.graphite_url = ""
        self.framework = ""
        self.parameters_list = []
        self.time_interval = 0.0
        self.servers_monitored = []
        self.format = ""
        self.from_time = ""
        self.to_time = ""
        self.memcache_var = memcache.Client(
            [self.ah_obj.get_atlas_config_data("global_config_data", 'memcache_server_location')], debug=0)
        if environment is not None:
            self.aws_moduleobj = AwsModule(request=request, environment=environment)

    def get_subnet_list(self, environment):
        """
        Get the subnets in the environment that have instances, and decide whether an
        attribute should be displayed for a subnet.
        """
        if environment != 'uncategorized':
            subnets_with_instances = self.aws_moduleobj.get_information(environment,
                                                                        subnets_with_instances='true')
            subnet_list = []
            for subnet, stack_list in subnets_with_instances.iteritems():
                for attribute, attr_details in self.module_config_data['stack_attributes'].iteritems():
                    if attr_details['stack'] == 'all' or set(attr_details['stack']).issubset(set(stack_list)):
                        if subnet not in subnet_list:
                            subnet_list.append(subnet)
            return subnet_list

    def get_query_parameters(self):
        """Get the query parameters from the atlas config YAML."""
        self.graphite_url = self.module_config_data['others']['graphite_url'] + "render/?"
        self.framework = self.module_config_data['others']['framework']
        self.servers_monitored = self.module_config_data['others']['server_name']
        self.database = self.module_config_data['others']['database']
        self.time_interval = self.module_config_data['others']['time_duration']
        if 'from' in self.time_interval:
            self.from_time = self.time_interval['from']
        if 'to' in self.time_interval:
            self.to_time = self.time_interval['to']
        if self.to_time is not None and self.from_time is not None:
            self.time_string = "&from=" + str(self.from_time) + "&to=" + str(self.to_time)
        if self.from_time is None:
            self.time_string = "&to=" + str(self.to_time)
        if self.to_time is None:
            self.time_string = "&from=" + str(self.from_time)
        self.parameters_list = self.module_config_data['others']['parameters']
        self.format = self.module_config_data['others']['format']

    def queries_for_graphite(self, subnet_list):
        """Construct render queries for Graphite."""
        query_dict = collections.defaultdict(dict)
        self.get_query_parameters()
        for subnet in subnet_list:
            for server in self.servers_monitored:
                for parameter in self.parameters_list:
                    target = self.framework + "." + subnet + ".ms." + server + "." + self.database + "." + parameter
                    query_dict[subnet][parameter] = \
                        self.graphite_url + "target=" + target + self.time_string + "&format=" + self.format
        return dict(query_dict)

    def generate_report(self, query):
        """Retrieve query results from the Graphite server."""
        try:
            report_json = {}
            response = requests.get(query)
            if response.status_code == 200:
                report_json = json.loads(response.text)  # convert the JSON into a Python dictionary
            return report_json
        except ConnectionError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()",
                                        exp_object, exc_type, exc_obj, exc_tb)
        except HTTPError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()",
                                        exp_object, exc_type, exc_obj, exc_tb)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()",
                                        exp_object, exc_type, exc_obj, exc_tb)
        return {}

    def get_stack_attributes(self, environment):
        """Get all stack attributes."""
        stack_attribute_list, stack_attribute_dict = [], {}
        for attribute, details in self.module_config_data['stack_attributes'].iteritems():
            stack_attribute_list.append((details['display_name'], details['editable']))
            stack_attribute_dict[details['display_name']] = details
        return (stack_attribute_list, stack_attribute_dict)

    def get_stack_attribute_values(self, environment):
        """Get stack attribute values from the cache; if not present, fall back to the global cache."""
        stack_attribute_values = self.memcache_var.get(str(environment + "graphite_stack_attributes"))
        if not stack_attribute_values:
            stack_attribute_values = self.memcache_var.get(str(environment + "global_graphite_stack_attributes"))
            if stack_attribute_values is not None:
                self.memcache_var.set(str(environment + "graphite_stack_attributes"),
                                      stack_attribute_values, 10 * 60)
            with threading.Lock():
                thread = threading.Thread(target=self.cache_stack_attribute_values, args=[environment])
                thread.start()
        return stack_attribute_values

    def cache_stack_attribute_values(self, environment):
        """Cache stack attribute values."""
        try:
            stack_attribute_values = self.stack_attribute_values(environment)
            self.memcache_var.set(str(environment + "graphite_stack_attributes"),
                                  stack_attribute_values, 10 * 60)
            if stack_attribute_values is None:
                raise Exception("The graphite attribute values for environment " + environment +
                                " have not been fetched. Please make sure the cache is populated.")
            if stack_attribute_values is not None:
                self.memcache_var.set(str(environment + "global_graphite_stack_attributes"),
                                      stack_attribute_values, 15 * 60)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "cache_stack_attribute_values()",
                                        exp_object, exc_type, exc_obj, exc_tb)
        return {}

    def stack_attribute_values(self, environment):
        """Get stack attribute values from the Graphite server and parse them."""
        if environment != 'uncategorized':
            stack_attribute_dict = self.ah_obj.create_nested_defaultdict()
            organization_list = self.aws_helperobj.get_organizations()
            region_list = self.aws_helperobj.get_regions()
            stack_attributes_from_config = self.module_config_data['stack_attributes']
            attributes_list = stack_attributes_from_config.keys()
            subnet_list = self.get_subnet_list(environment)
            graphite_query_dict = self.queries_for_graphite(subnet_list)
            for organization in organization_list:
                for region in region_list:
                    vpc_list = self.aws_helperobj.get_vpc_in_region(region)
                    if vpc_list:
                        for vpc in vpc_list:
                            for subnet in subnet_list:
                                for attribute in stack_attributes_from_config:
                                    stack_list = stack_attributes_from_config[attribute]['stack']
                                    attribute_value = ""
                                    suffix = ""
                                    if 'suffix' in stack_attributes_from_config[attribute]:
                                        suffix = stack_attributes_from_config[attribute]['suffix']
                                    display_name = ""
                                    if 'display_name' in stack_attributes_from_config[attribute]:
                                        display_name = stack_attributes_from_config[attribute]['display_name']
                                    report = self.generate_report(graphite_query_dict[subnet][attribute])
                                    if report:
                                        target = self.ah_obj.split_string(report[0]['target'], ('.'))
                                        if subnet in target and attribute in target:
                                            # walk the datapoints backwards to find the most recent non-null value
                                            for index in range(len(report[0]['datapoints']) - 1, 0, -1):
                                                if report and report[0]['datapoints'][index][0] is not None:
                                                    attribute_value = str(int(report[0]['datapoints'][index][0])) + " " + suffix
                                                    break
                                        else:
                                            attribute_value = "null"
                                    else:
                                        attribute_value = "null"
                                    for stack in stack_list:
                                        stack_attribute_dict[region][vpc][subnet][stack][display_name] = attribute_value
            return self.ah_obj.defaultdict_to_dict(stack_attribute_dict)
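# Illustrative usage sketch (not part of the original source). Assumes the graphite_module
# section of the atlas config, a reachable memcache server and a Graphite endpoint;
# 'dev' is a placeholder environment name.
def example_graphite_usage(environment='dev'):
    graphite_helper = GraphiteHelper(request=None, environment=environment)
    attribute_list, attribute_details = graphite_helper.get_stack_attributes(environment)
    attribute_values = graphite_helper.get_stack_attribute_values(environment)
    return attribute_list, attribute_details, attribute_values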