Example 1
# Standard-library and third-party imports needed by this example
import collections
import datetime
import json
import os
import sys
import threading
from datetime import date, timedelta

import memcache
import requests

# AtlasHelper and AwsHelper are project-specific helpers; their import paths
# depend on the surrounding project layout and are not shown here.


class Cloudability:
    #constructor

    def __init__(self):
        self.cloudability_dict = {}
        self.ah_obj = AtlasHelper()
        self.aws_helper_object = AwsHelper()
        self.module = "cloudability_module"
        self.auth_token = os.environ.get('CLOUDABILITY_AUTH_TOKEN')
        self.cl_base_url = self.ah_obj.get_atlas_config_data(
            self.module, "cloudability_base_url")
        self.cl_cost_url = self.ah_obj.get_atlas_config_data(
            self.module, "cloudability_cost_url")
        self.report_query = ""
        self.memcache_var = memcache.Client([
            self.ah_obj.get_atlas_config_data("global_config_data",
                                              'memcache_server_location')
        ],
                                            debug=1)
        self.environment_subnets_details = self.aws_helper_object.get_environment_subnets_details(
        )

    def construct_cost_query(self, query_parameters):
        try:
            self.report_query = self.cl_base_url + self.cl_cost_url + query_parameters + self.auth_token
            return self.report_query
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "construct_cost_query()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return
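
    # Note: construct_cost_query() simply concatenates
    #   cl_base_url + cl_cost_url + query_parameters + auth_token,
    # so every caller builds query_parameters ending in "auth_token=" and the
    # token read from the environment is appended directly to the URL.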

    def generate_report(self, query):
        try:
            report_json = {}
            response = requests.get(query)
            if response.status_code == 200:
                report_json = json.loads(
                    response.text)  #convert the JSON response into a Python dictionary
            return report_json
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "generate_report()",
                                        exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def get_previous_period(self, start_date, end_date):
        try:
            start = datetime.datetime.strptime(start_date, '%Y-%m-%d')
            end = datetime.datetime.strptime(end_date, '%Y-%m-%d')
            period = ((end - start).days) + 1
            previous_start_date = (
                start - datetime.timedelta(days=period)).strftime('%Y-%m-%d')
            previous_end_date = (
                end - datetime.timedelta(days=period)).strftime('%Y-%m-%d')
            return (previous_start_date, previous_end_date)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_previous_period()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return
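
    # Worked example (illustrative values): for start_date='2023-01-08' and
    # end_date='2023-01-14' the window spans 7 days, so get_previous_period()
    # returns ('2023-01-01', '2023-01-07'), i.e. the immediately preceding
    # 7-day window.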

    #get ec2 costs
    def get_ec2_costs(self, start_date, end_date):
        ec2_costs = {'region_zone': 0.0}
        try:
            query_parameters = "verbose=1&start_date=" + start_date + "&end_date=" + end_date + "&dimensions=linked_account_name&metrics=invoiced_cost&sort_by=invoiced_cost&order=desc&max_results=50&offset=0&auth_token="
            ec2_costs_query = self.construct_cost_query(query_parameters)
            ec2_cost_dict = self.generate_report(ec2_costs_query)
            if ec2_cost_dict:
                ec2_costs['region_zone'] = round(
                    float(ec2_cost_dict['meta']['aggregates'][0]
                          ['value'].strip('$').replace(',', '')), 2)
            if ec2_costs['region_zone']:
                return ec2_costs
            else:
                raise Exception("Could not generate EC2 costs")
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "calculate_ec2_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return ec2_costs

    #get aggregate cost spent for ec2 for the specified period
    def get_current_prev_ec2_costs(self, start_date, end_date):
        #query parameters should be moved to config file
        ec2_cost_dict = {'current_period': 0.0, 'previous_period': 0.0}
        try:
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[
                0], previous_period[1]
            ec2_cost_dict['current_period'] = self.get_ec2_costs(
                start_date, end_date)
            ec2_cost_dict['previous_period'] = self.get_ec2_costs(
                previous_start_date, previous_end_date)
            return ec2_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "get_ec2_costs()",
                                        exp_object, exc_type, exc_obj, exc_tb)
            return

    def create_envcost_dict(self):
        try:
            env_subnet_zip = self.environment_subnets_details
            per_environment_costs = {}
            for env_subnet_tuple in env_subnet_zip:
                per_environment_costs[env_subnet_tuple[0]] = 0
            return per_environment_costs
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "create_envcost_dict()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_environment_costs(self, start_date, end_date):
        try:
            query_parameters = "verbose=1&start_date=" + start_date + "&end_date=" + end_date + "&dimensions=tag2&metrics=invoiced_cost&sort_by=invoiced_cost&order=desc&auth_token="
            subnets_cost_query = self.construct_cost_query(query_parameters)
            subnet_cost_dict = self.generate_report(subnets_cost_query)
            cost_dict = self.create_envcost_dict()
            env_subnet_zip = self.environment_subnets_details
            subnet_details = subnet_cost_dict['results']
            for subnet_index in subnet_details:
                for env_subnet_tuple in env_subnet_zip:
                    if subnet_index['tag2'] in env_subnet_tuple[1]:
                        #strip the $ symbol and commas, then convert to a float
                        subnet_cost = float(subnet_index['invoiced_cost']
                                            .strip('$').replace(",", ""))
                        #accumulate the subnet cost onto its environment total
                        env_cost = cost_dict[env_subnet_tuple[0]]
                        cost_dict[env_subnet_tuple[0]] = round(
                            env_cost + subnet_cost, 2)
            if cost_dict:
                return cost_dict
            else:
                raise Exception('Could not calculate environment costs')
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_environment_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_environment_costs(self, start_date, end_date):
        #query parameters should be moved to config file
        try:
            env_cost_dict = {}
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[
                0], previous_period[1]
            env_cost_dict['current_period'] = self.get_environment_costs(
                start_date, end_date)
            env_cost_dict['previous_period'] = self.get_environment_costs(
                previous_start_date, previous_end_date)
            return env_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_subnet_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return

    #calculate costs subnet wise
    def get_subnet_costs(self, start_date, end_date):
        subnet_cost_dict = {}
        try:
            query_parameters = "verbose=1&start_date=" + start_date + "&end_date=" + end_date + "&dimensions=tag2&metrics=invoiced_cost&sort_by=invoiced_cost&order=desc&auth_token="
            subnets_cost_query = self.construct_cost_query(query_parameters)
            subnet_cost_json = self.generate_report(subnets_cost_query)
            env_subnet_zip = self.environment_subnets_details
            subnet_details = subnet_cost_json['results']
            for subnet_index in subnet_details:
                for env_subnet_tuple in env_subnet_zip:
                    if subnet_index['tag2'] in env_subnet_tuple[1]:
                        #strip the $ symbol and commas, then convert to a float
                        subnet_cost = float(subnet_index['invoiced_cost']
                                            .strip('$').replace(",", ""))
                        if subnet_index['tag2'] in subnet_cost_dict.keys():
                            subnet_cost_dict[subnet_index['tag2']] += round(
                                subnet_cost, 2)
                        else:
                            subnet_cost_dict[subnet_index['tag2']] = round(
                                subnet_cost, 2)
            return subnet_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_subnetwise_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_subnet_costs(self, start_date, end_date):
        #query parameters should be moved to config file
        subnet_cost_dict = {}
        try:
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[
                0], previous_period[1]
            subnet_cost_dict['current_period'] = self.get_subnet_costs(
                start_date, end_date)
            subnet_cost_dict['previous_period'] = self.get_subnet_costs(
                previous_start_date, previous_end_date)
            return subnet_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_subnet_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def split_string(self, input_string, delimiters):
        #this function splits a string on multiple delimiters
        delimiters = tuple(delimiters)
        string_list = [
            input_string,
        ]
        for delimiter in delimiters:
            for index1, input_sub_string in enumerate(string_list):
                temp_var = input_sub_string.split(delimiter)
                string_list.pop(index1)
                for index2, input_sub_string in enumerate(temp_var):
                    string_list.insert(index1 + index2, input_sub_string)
        return string_list
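
    # Illustrative example: split_string("a-b_c", "-_") returns ['a', 'b', 'c'],
    # splitting first on '-' and then on '_'.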

    def get_ebs_costs(self, start_date, end_date):
        try:
            query_parameters = "&start_date=" + start_date + "&end_date=" + end_date + "&filters=usage_type=@EBS&dimensions=usage_type,tag1,&metrics=invoiced_cost&order=desc&auth_token="
            ebs_cost_query = self.construct_cost_query(query_parameters)
            ebs_cost_json = self.generate_report(ebs_cost_query)
            ebs_details = ebs_cost_json['results']
            ebs_cost_dict = collections.defaultdict(dict)
            for instance_index in ebs_details:
                if instance_index['tag1'] in ebs_cost_dict:
                    ebs_cost_dict[instance_index['tag1']] += round(
                        float(
                            instance_index['invoiced_cost'].strip('$').replace(
                                ',', '')), 2)
                else:
                    ebs_cost_dict[instance_index['tag1']] = round(
                        float(
                            instance_index['invoiced_cost'].strip('$').replace(
                                ',', '')), 2)
            return dict(ebs_cost_dict)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_instances_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_instances_costs(self, start_date, end_date):
        try:
            query_parameters = "&start_date=" + start_date + "&end_date=" + end_date + "&filters=service_key==AmazonEC2&dimensions=tag1,tag3,&metrics=invoiced_cost&order=desc&auth_token="
            instance_cost_query = self.construct_cost_query(query_parameters)
            instance_cost_json = self.generate_report(instance_cost_query)
            instance_details = instance_cost_json['results']
            instance_cost_dict = collections.defaultdict(dict)
            for instance_index in instance_details:
                if 'tag3' in instance_index:
                    if instance_index['tag3'] in instance_cost_dict:
                        instance_cost_dict[instance_index['tag3']] += round(
                            float(instance_index['invoiced_cost'].strip(
                                '$').replace(',', '')), 2)
                    else:
                        instance_cost_dict[instance_index['tag3']] = round(
                            float(instance_index['invoiced_cost'].strip(
                                '$').replace(',', '')), 2)
                elif 'tag1' in instance_index:
                    if instance_index['tag1'] in instance_cost_dict:
                        instance_cost_dict[instance_index['tag1']] += round(
                            float(instance_index['invoiced_cost'].strip(
                                '$').replace(',', '')), 2)
                    else:
                        instance_cost_dict[instance_index['tag1']] = round(
                            float(instance_index['invoiced_cost'].strip(
                                '$').replace(',', '')), 2)
            return dict(instance_cost_dict)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_instances_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_current_prev_instances_costs(self, start_date, end_date):
        try:
            instance_cost_dict = {}
            previous_period = self.get_previous_period(start_date, end_date)
            previous_start_date, previous_end_date = previous_period[
                0], previous_period[1]
            instance_cost_dict['current_period'] = self.get_instances_costs(
                start_date, end_date)
            instance_cost_dict['previous_period'] = self.get_instances_costs(
                previous_start_date, previous_end_date)
            return instance_cost_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_subnet_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return

    def get_percentage_change(self, cost_dict):

        percentage_dict, current_costs_dict, previous_costs_dict = {}, {}, {}
        try:
            if cost_dict:
                if 'current_period' in cost_dict:
                    current_costs_dict = cost_dict['current_period']
                if 'previous_period' in cost_dict:
                    previous_costs_dict = cost_dict['previous_period']
            else:
                raise Exception(
                    "Invalid value: No values for current and previous costs")
            for key in current_costs_dict:
                if key in previous_costs_dict.keys():
                    current_cost = current_costs_dict[key]
                    previous_cost = previous_costs_dict[key]
                    difference = current_cost - previous_cost

                    if difference < 0:
                        tag = 'decrease'
                    elif difference > 0:
                        tag = 'increase'
                    else:
                        tag = 'equal'
                    if previous_cost == 0.0:
                        percentage = round((abs(difference)), 2)
                    else:
                        percentage = round(
                            (abs(difference) * 100 / previous_cost), 2)
                    percentage_dict[key] = (current_cost, tag, percentage)
                else:
                    percentage_dict[key] = (current_costs_dict[key], '', 0)
            return percentage_dict
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "get_subnet_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}
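
    # Illustrative example of the structure returned by get_percentage_change()
    # (assumed input values, shown only to document the tuple layout):
    #   cost_dict = {'current_period': {'dev': 120.0},
    #                'previous_period': {'dev': 100.0}}
    #   get_percentage_change(cost_dict) -> {'dev': (120.0, 'increase', 20.0)}
    # i.e. each value is (current_cost, 'increase'|'decrease'|'equal', percent_change).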

    def get_cloudability_costs(self):
        cloud_cost_dict = self.memcache_var.get('cloud_costs')
        if cloud_cost_dict is None:
            cloud_cost_dict = self.memcache_var.get(
                'global_cloudability_costs')
            if cloud_cost_dict is not None:
                self.memcache_var.set("cloud_costs", cloud_cost_dict, 600)
            with threading.RLock():
                thread = threading.Thread(target=self.cache_cloud_costs)
                thread.start()
        return cloud_cost_dict

    def cache_cloud_costs(self):
        try:
            cloudability_dict = self.get_cloud_costs()
            if cloudability_dict is None:
                raise Exception(
                    "Cloudability data is not available. Please ensure data is available and populate the cache."
                )
            self.memcache_var.set("cloud_costs", cloudability_dict,
                                  2 * 60 * 60)
            self.memcache_var.set("global_cloudability_costs",
                                  cloudability_dict, 86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "cache_cloud_costs()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            self.memcache_var.disconnect_all()

    def get_cloud_costs(self):
        current_date = date.today().strftime('%Y-%m-%d')
        #approximate a one-month reporting window ending today
        month = date.today().month
        if month in [1, 3, 5, 7, 8, 10, 12]:
            previous_date = (date.today() -
                             timedelta(days=30)).strftime('%Y-%m-%d')
        elif month in [2]:
            previous_date = (date.today() -
                             timedelta(days=28)).strftime('%Y-%m-%d')
        else:
            previous_date = (date.today() -
                             timedelta(days=29)).strftime('%Y-%m-%d')
        organization_list = self.aws_helper_object.get_organizations()
        region_list = self.aws_helper_object.get_regions()
        self.cloudability_dict = self.ah_obj.create_nested_defaultdict()
        for organization in organization_list:
            for region in region_list:
                vpc_list = self.aws_helper_object.get_vpc_in_region(region)
                if vpc_list:
                    for vpc in ["ame1"]:
                        if vpc:

                            ec2_costs = self.get_ec2_costs(
                                previous_date, current_date)
                            self.cloudability_dict['ec2_costs'][
                                organization] = ec2_costs
                            ec2_costs = self.get_current_prev_ec2_costs(
                                previous_date, current_date)
                            ec2_percentage_change = self.get_percentage_change(
                                ec2_costs)
                            self.cloudability_dict['ec2_percentage_change'][
                                organization] = ec2_percentage_change

                            environment_costs = self.get_environment_costs(
                                previous_date, current_date)
                            self.cloudability_dict['environment_costs'][
                                organization] = environment_costs
                            environment_costs = self.get_current_prev_environment_costs(
                                previous_date, current_date)
                            env_percentage_change = self.get_percentage_change(
                                environment_costs)
                            self.cloudability_dict['env_percentage_change'][
                                organization] = env_percentage_change

                            subnet_costs = self.get_subnet_costs(
                                previous_date, current_date)
                            self.cloudability_dict['subnet_costs'][
                                organization] = subnet_costs
                            subnet_costs = self.get_current_prev_subnet_costs(
                                previous_date, current_date)
                            subnet_percentage_change = self.get_percentage_change(
                                subnet_costs)
                            self.cloudability_dict['subnet_percentage_change'][
                                organization] = subnet_percentage_change

                            instances_costs = self.get_instances_costs(
                                previous_date, current_date)
                            self.cloudability_dict['instances_costs'][
                                organization] = instances_costs
                            instances_costs = self.get_current_prev_instances_costs(
                                previous_date, current_date)
                            instances_percentage_change = self.get_percentage_change(
                                instances_costs)
                            self.cloudability_dict[
                                'instances_percentage_change'][
                                    organization] = instances_percentage_change

                            ebs_costs = self.get_ebs_costs(
                                previous_date, current_date)
                            self.cloudability_dict['ebs_costs'][
                                organization] = ebs_costs
        return self.ah_obj.defaultdict_to_dict(self.cloudability_dict)
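
A minimal usage sketch for the class above (assumptions: the CLOUDABILITY_AUTH_TOKEN environment variable is set, a memcached server is reachable at the configured location, and the AtlasHelper/AwsHelper dependencies are importable):

cloudability = Cloudability()
# populate the memcache entries, then read the aggregated cost breakdown back
cloudability.cache_cloud_costs()
costs = cloudability.get_cloudability_costs() or {}
print(costs.get('ec2_costs', {}))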
Example 2
# Standard-library and third-party imports needed by this example
import collections
import datetime
import os
import sys
import threading

import jenkins  # python-jenkins client (jenkins.Jenkins)
import memcache
# `api` below is assumed to be the jenkinsapi client, e.g.:
# from jenkinsapi import jenkins as api

# AtlasHelper and AwsModule are project-specific helpers; their import paths
# depend on the surrounding project layout and are not shown here.


class JenkinsActions:
    def __init__(self, request=None, environment=None):

        self.ah_obj = AtlasHelper()
        self.module = "jenkins_module"
        self.python_jenkinsurl = self.ah_obj.get_atlas_config_data(
            self.module, "python_jenkins_url")
        self.build_record_count = self.ah_obj.get_atlas_config_data(
            self.module, "build_record_count")
        self.jenkins_password = os.environ.get('JENKINS_PASSWORD')
        self.jenkins_username = os.environ.get('JENKINS_USERNAME')
        self.jenkinsurl = os.environ.get('JENKINS_URL')
        self.python_jenkinsurl = self.jenkinsurl + "/job/"
        self.memcache_var = memcache.Client([
            self.ah_obj.get_atlas_config_data("global_config_data",
                                              'memcache_server_location')
        ],
                                            debug=0)
        if environment:
            self.aws_obj = AwsModule(request, environment)

    """
    helper methods
    """

    def get_jenkins_job_folder(self, jobname):
        job_folder_information = self.ah_obj.get_atlas_config_data(
            self.module, "folders")[1]
        for folder, job_list in job_folder_information.items():
            if jobname in job_list:
                return folder

    def cache_jenkins_build_userinfo(self):
        try:
            jobname = 'AWS-Build-Dev-Deploy-Dev'
            build_userinfo_dict = self.jenkins_build_userinfo(jobname)
            if build_userinfo_dict is None:
                raise Exception(
                    "Source data from Jenkins server is unavailable. Please ensure data is available and populate the cache."
                )
            self.memcache_var.set(jobname + '_build_userinfo',
                                  build_userinfo_dict, 15 * 60)
            self.memcache_var.set('global_' + jobname + '_build_userinfo',
                                  build_userinfo_dict, 86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py",
                                        "construct_cost_query()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return

    def jenkins_build_userinfo(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl,
                                     username=self.jenkins_username,
                                     password=self.jenkins_password)
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(
            jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url,
                                      username=self.jenkins_username,
                                      password=self.jenkins_password)
        build_user_info_dict = collections.defaultdict(dict)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                build_information_list = job_info['builds']
                latest_build_number = build_information_list[0]['number']
                build_info = jenkins_obj.get_build_info(
                    jobname, latest_build_number)
                for build_number in range(
                        latest_build_number - self.build_record_count,
                        latest_build_number + 1):
                    try:
                        build_info_dict = jenkins_obj.get_build_info(
                            jobname, build_number)
                        build_user_info_dict[build_number] = {
                            'deployed_by': "",
                            'branch': "",
                            'last_deployed': "",
                            'subnet': "",
                            'commit_hash': ""
                        }
                        branch = ""
                        if 'actions' in build_info_dict:
                            if 'parameters' in build_info_dict['actions'][0]:
                                for parameter_dict in build_info_dict[
                                        'actions'][0]['parameters']:
                                    if parameter_dict['name'] == 'subnet':
                                        build_user_info_dict[build_number][
                                            'subnet'] = parameter_dict['value']
                                    if parameter_dict['name'] == 'branch':
                                        build_user_info_dict[build_number][
                                            'branch'] = parameter_dict['value']
                                        branch = parameter_dict['value']
                            if 'causes' in build_info_dict['actions'][1]:
                                actions = build_info_dict['actions'][1]
                                if 'userName' in actions['causes'][0]:
                                    build_user_info_dict[build_number][
                                        'deployed_by'] = build_info_dict[
                                            'actions'][1]['causes'][0][
                                                'userName']
                            if 'buildsByBranchName' in build_info_dict[
                                    'actions'][2]:
                                commit_hash = build_info_dict['actions'][2][
                                    'buildsByBranchName']['origin/develop'][
                                        'revision']['SHA1'][:7]
                                build_user_info_dict[build_number][
                                    'commit_hash'] = commit_hash
                        if 'timestamp' in build_info_dict:
                            timestamp = str(datetime.datetime.now() -
                                            datetime.datetime.fromtimestamp(
                                                build_info_dict['timestamp'] /
                                                1000))
                            deployed_before = ""
                            #the timedelta string looks like "H:MM:SS.ffffff", or
                            #"N days, H:MM:SS.ffffff" when the build is older than a day
                            if "," in timestamp:
                                days_part, time_part = timestamp.split(", ")
                                hours_minutes = time_part.split(":")[:2]
                                deployed_before = days_part + " " + hours_minutes[
                                    0] + "hrs " + hours_minutes[1] + "mins"
                            else:
                                hours_minutes = timestamp.split(":")[:2]
                                deployed_before = hours_minutes[
                                    0] + " hrs " + hours_minutes[1] + " mins"
                            build_user_info_dict[build_number][
                                'last_deployed'] = deployed_before
                    except Exception:
                        continue
            return self.ah_obj.defaultdict_to_dict(build_user_info_dict)
        except Exception as exp_object:
            return {}

    def get_jenkins_build_userinfo(self, jobname):
        build_userinfo_dict = self.memcache_var.get(jobname +
                                                    '_build_userinfo')
        if not build_userinfo_dict:
            build_userinfo_dict = self.memcache_var.get('global_' + jobname +
                                                        '_build_userinfo')
            if build_userinfo_dict is not None:
                self.memcache_var.set(jobname + '_build_userinfo',
                                      build_userinfo_dict, 3 * 60 * 60)
                with threading.Lock():
                    thread = threading.Thread(
                        target=self.cache_jenkins_build_userinfo)
                    thread.start()
        return build_userinfo_dict
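
    # Caching strategy used above: the short-lived '<jobname>_build_userinfo'
    # key is tried first; on a miss, the long-lived
    # 'global_<jobname>_build_userinfo' copy is served while a background
    # thread refreshes both keys via cache_jenkins_build_userinfo().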

    def get_jenkins_job_info(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkins_url = self.python_jenkinsurl + self.get_jenkins_job_folder(
            jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url,
                                      username=self.jenkins_username,
                                      password=self.jenkins_password)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
            job_info_dict = {
                'last_successful_build_number':
                job_info['lastSuccessfulBuild']['number'],
                'last_successful_build_url':
                job_info['lastSuccessfulBuild']['url'],
                'last_unsuccessful_build_number':
                job_info['lastUnsuccessfulBuild']['number'],
                'last_unsuccessful_build_url':
                job_info['lastUnsuccessfulBuild']['url'],
                'last_completed_build_number':
                job_info['lastCompletedBuild']['number'],
                'last_completed_build_url':
                job_info['lastCompletedBuild']['url'],
                'last_unstable_build_number':
                job_info['lastUnstableBuild'],
                'last_unstable_build_url':
                job_info['lastUnstableBuild'],
                'last_stable_build_number':
                job_info['lastStableBuild']['number'],
                'last_stable_build_url':
                job_info['lastStableBuild']['url'],
                'last_build':
                job_info['lastBuild']['url'],
                'last_build-number':
                job_info['lastBuild']['number'],
                'nextBuildNumber':
                job_info['nextBuildNumber']
            }
            return job_info_dict
        except Exception as exp_object:
            return {}

    def get_console_output(self, build):
        console_output = build.get_console()
        if console_output:
            return console_output

    def check_build_status(self, job_name):
        status_dict = {}
        try:
            jenkinsapi_obj = api.Jenkins(self.jenkinsurl,
                                         username=self.jenkins_username,
                                         password=self.jenkins_password)
            job = jenkinsapi_obj.get_job(job_name)
            build = job.get_last_build()
            other_info = self.get_jenkins_job_info(job_name)
            if other_info:
                status_dict['other_info'] = self.get_jenkins_job_info(job_name)
            status_dict['console_output'] = self.get_console_output(build)
            if build.is_running():
                status_dict['exit_status'] = "Build not complete"
                status_dict['action_state'] = "action_in_progress"
            else:
                if build.is_good():
                    status_dict['exit_status'] = "Build Successful"
                    status_dict['action_state'] = "action_completed"
            return status_dict
        except Exception as exp_object:
            status_dict['action_state'] = 'action_failed'
            return status_dict

    """
    action methods    
    """

    def server_create_test(self, subnet, profile, node_name):
        """
        Create a server on aws_obj.
        """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl,
                                     username=self.jenkins_username,
                                     password=self.jenkins_password)
        if profile == "ops-general":
            jenkinsapi_obj.build_job('server_create_test', {
                'subnet': subnet,
                'profile': profile,
                'name': node_name
            })
        else:
            jenkinsapi_obj.build_job('server_create_test', {
                'subnet': subnet,
                'profile': profile
            })

    def echo_contents(self, text1, text2):
        """
        Echo contents sample jenkins job.
        """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl,
                                     username=self.jenkins_username,
                                     password=self.jenkins_password)
        jenkinsapi_obj.build_job('echo_contents', {
            'text1': text1,
            'text2': text2
        })

    def initiate_actions(self, action, parameters):
        """
        Initiate jenkins actions. 
        """
        initial_status = {}
        try:
            if parameters is None or parameters == '':
                return
            other_info = self.get_jenkins_job_info(action)
            if other_info:
                initial_status['other_info'] = other_info
            if action == 'echo_contents':
                self.echo_contents(parameters['text1'], parameters['text2'])
            if action == 'server_create_test':
                self.server_create_test(parameters['subnet'],
                                        parameters['profile'],
                                        parameters['node_name'])
            initial_status = self.check_build_status(action)
            initial_status['action_state'] = 'action_initiated'
            return initial_status
        except Exception as exp_object:
            return initial_status

    def action_state(self, action):
        """
        Check the status of builds.
        """
        action_state = self.check_build_status(action)
        return action_state

    def parameter_values(self, action, parameter, environment=None):
        """
        Return parameter values for each build to be displayed as options to user.
        """
        if action == 'server_create_test':
            if parameter == 'subnet':
                return self.aws_obj.get_information(environment,
                                                    env_subnet_list='true')
            if parameter == 'profile':
                return self.aws_obj.get_information(environment,
                                                    profiles='true')
            if parameter == 'name':
                return ""
        if action == 'echo_contents':
            if parameter == 'text1':
                return ""
            if parameter == 'text2':
                return ""

    def action_parameters(self, action_type, environment=None):
        """
        Get parameters for each action.
        """
        action_parameters_dict = {}
        if (action_type == 'vpc_actions'):
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module,
                                                  'vpc_actions')[1],
                environment)
        elif action_type == 'instance_actions':
            pass
        elif action_type == 'instance_group_actions':
            pass
        elif action_type == 'stack_actions':
            action_parameters_dict = self.unpack_action_parameters(
                self.ah_obj.get_atlas_config_data(self.module,
                                                  'stack_actions')[1],
                environment)
        return action_parameters_dict

    def unpack_action_parameters(self,
                                 action_parameters_dict,
                                 environment=None):
        parameter_dict = {}
        for key, values in action_parameters_dict.iteritems():
            parameter_list = values['parameters']
            parameter_dict[key] = {}
            for parameter in parameter_list:
                temp_list = []
                temp_parameter = parameter.split(',')
                temp_list.append(temp_parameter[1])
                temp_list.append(
                    self.parameter_values(key, temp_parameter[0], environment))
                parameter_dict[key][temp_parameter[0]] = temp_list
        return parameter_dict
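A minimal driver sketch for the JenkinsActions class above, assuming the JENKINS_URL, JENKINS_USERNAME, and JENKINS_PASSWORD environment variables and the atlas config are already in place; the polling loop and the 30-second interval are illustrative additions, not part of the original module:

import time

jenkins_actions = JenkinsActions()
# Kick off the sample echo_contents job defined in the action methods above.
initial_status = jenkins_actions.initiate_actions(
    'echo_contents', {'text1': 'hello', 'text2': 'world'})
# Poll check_build_status() (via action_state) until Jenkins reports the build finished.
state = jenkins_actions.action_state('echo_contents')
while state.get('action_state') == 'action_in_progress':
    time.sleep(30)
    state = jenkins_actions.action_state('echo_contents')
print(state.get('exit_status'))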
Example No. 4
0
class GraphiteHelper():
    def __init__(self, request=None, environment=None):
        self.module = 'graphite_module'
        self.ah_obj = AtlasHelper()
        self.aws_helperobj = AwsHelper()
        self.module_config_data = self.ah_obj.get_atlas_configuration_data(
            self.module)
        self.graphite_url = " "
        self.framework = ""
        self.parameters_list = []
        self.time_interval = 0.0
        self.servers_monitored = []
        self.format = ""
        self.from_time = None
        self.to_time = None
        self.memcache_var = memcache.Client([
            self.ah_obj.get_atlas_config_data("global_config_data",
                                              'memcache_server_location')
        ],
                                            debug=0)
        if environment is not None:
            self.aws_moduleobj = AwsModule(request=request,
                                           environment=environment)

    def get_subnet_list(self, environment):
        """
        Get the subnets for the environment that have instances, and decide whether an attribute should be displayed on a subnet.
        """
        if environment != 'uncategorized':
            subnets_with_instances = self.aws_moduleobj.get_information(
                environment, subnets_with_instances='true')
            subnet_list = []
            for subnet, stack_list in subnets_with_instances.iteritems():
                for attribute, attr_details in self.module_config_data[
                        'stack_attributes'].iteritems():
                    if attr_details['stack'] == 'all' or set(
                            attr_details['stack']).issubset(set(stack_list)):
                        if subnet not in subnet_list:
                            subnet_list.append(subnet)
            return subnet_list

    def get_query_parameters(self):
        """Get the query parameters from atlas config yaml"""
        self.graphite_url = self.module_config_data['others'][
            'graphite_url'] + "render/?"
        self.framework = self.module_config_data['others']['framework']
        self.servers_monitored = self.module_config_data['others'][
            'server_name']
        self.database = self.module_config_data['others']['database']
        self.time_interval = self.module_config_data['others']['time_duration']
        if 'from' in self.time_interval:
            self.from_time = self.time_interval['from']
        if 'to' in self.time_interval: self.to_time = self.time_interval['to']
        if self.to_time is not None and self.from_time is not None:
            self.time_string = "&from=" + str(self.from_time) + "&to=" + str(
                self.to_time)
        if self.from_time is None:
            self.time_string = "&to=" + str(self.to_time)
        if self.to_time is None:
            self.time_string = "&from=" + str(self.from_time)
        self.parameters_list = self.module_config_data['others']['parameters']
        self.format = self.module_config_data['others']['format']

    def queries_for_graphite(self, subnet_list):
        """Construct queries for grahite"""
        query_dict = collections.defaultdict(dict)
        self.get_query_parameters()
        for subnet in subnet_list:
            for server in self.servers_monitored:
                for parameter in self.parameters_list:
                    target = self.framework + "." + subnet + ".ms." + server + "." + self.database + "." + parameter
                    query_dict[subnet][
                        parameter] = self.graphite_url + "target=" + target + self.time_string + "&format=" + self.format
        return dict(query_dict)

    def generate_report(self, query):
        """Retrieve query results from the graphite server."""
        try:
            report_json = {}
            response = requests.get(query)
            if response.status_code == 200:
                report_json = json.loads(
                    response.text)  #convert the json into a python dictionary
                return report_json
        except ConnectionError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py",
                                        "generate_report()", exp_object,
                                        exc_type, exc_obj, exc_tb)
        except HTTPError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py",
                                        "generate_report()", exp_object,
                                        exc_type, exc_obj, exc_tb)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py",
                                        "generate_report()", exp_object,
                                        exc_type, exc_obj, exc_tb)
            return {}

    def get_stack_attributes(self, environment):
        """Get all stack attributes."""
        stack_attribute_list, stack_attribute_dict = [], {}
        for attribute, details in self.module_config_data[
                'stack_attributes'].iteritems():
            stack_attribute_list.append(
                (details['display_name'], details['editable']))
            stack_attribute_dict[details['display_name']] = details
        return (stack_attribute_list, stack_attribute_dict)

    def get_stack_attribute_values(self, environment):
        """Get stack attribute values from cache. If it does not exists get it from the the global cache."""
        stack_attribute_values = self.memcache_var.get(
            str(environment + "graphite_stack_attributes"))
        if not stack_attribute_values:
            stack_attribute_values = self.memcache_var.get(
                str(environment + "global_graphite_stack_attributes"))
            if stack_attribute_values is not None:
                self.memcache_var.set(
                    str(environment + "graphite_stack_attributes"),
                    stack_attribute_values, 10 * 60)
            with threading.Lock():
                thread = threading.Thread(
                    target=self.cache_stack_attribute_values,
                    args=[environment])
                thread.start()
        return stack_attribute_values

    def cache_stack_attribute_values(self, environment):
        """Cache stack attribute values."""
        try:
            stack_attribute_values = self.stack_attribute_values(environment)
            self.memcache_var.set(
                str(environment + "graphite_stack_attributes"),
                stack_attribute_values, 10 * 60)
            if stack_attribute_values is None:
                raise Exception(
                    "The graphite attribute values for environment " +
                    environment +
                    " has not been fetched. Please make sure the cache is populated !!!"
                )
            if stack_attribute_values is not None:
                self.memcache_var.set(
                    str(environment + "global_graphite_stack_attributes"),
                    stack_attribute_values, 15 * 60)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py",
                                        "cache_stack_attribute_values()",
                                        exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def stack_attribute_values(self, environment):
        """get stack attribute values from graphite server and parse it."""
        if environment != 'uncategorized':
            stack_attribute_dict = self.ah_obj.create_nested_defaultdict()
            organization_list = self.aws_helperobj.get_organizations()
            region_list = self.aws_helperobj.get_regions()
            stack_attributes_from_config = self.module_config_data[
                'stack_attributes']
            attributes_list = stack_attributes_from_config.keys()
            subnet_list = self.get_subnet_list(environment)
            graphite_query_dict = self.queries_for_graphite(subnet_list)
            for organization in organization_list:
                for region in region_list:
                    vpc_list = self.aws_helperobj.get_vpc_in_region(region)
                    if vpc_list:
                        for vpc in vpc_list:
                            for subnet in subnet_list:
                                for attribute in stack_attributes_from_config:
                                    stack_list = stack_attributes_from_config[
                                        attribute]['stack']
                                    attribute_value = ""
                                    suffix = ""
                                    if 'suffix' in stack_attributes_from_config[
                                            attribute]:
                                        suffix = stack_attributes_from_config[
                                            attribute]['suffix']
                                        display_name = ""
                                    if 'display_name' in stack_attributes_from_config[
                                            attribute]:
                                        display_name = stack_attributes_from_config[
                                            attribute]['display_name']
                                        report = self.generate_report(
                                            graphite_query_dict[subnet]
                                            [attribute])
                                        if report:
                                            target = self.ah_obj.split_string(
                                                report[0]['target'], ('.'))
                                            if subnet in target and attribute in target:
                                                for index in range(
                                                        len(report[0]
                                                            ['datapoints']) -
                                                        1, 0, -1):
                                                    if report and report[0][
                                                            'datapoints'][index][
                                                                0] is not None:
                                                        attribute_value = str(
                                                            int(report[0]
                                                                ['datapoints']
                                                                [index][0])
                                                        ) + " " + suffix
                                                        break
                                                    else:
                                                        attribute_value = "null"
                                        else:
                                            attribute_value = "null"
                                    for stack in stack_list:
                                        stack_attribute_dict[region][vpc][
                                            subnet][stack][
                                                display_name] = attribute_value
            return self.ah_obj.defaultdict_to_dict(stack_attribute_dict)
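For reference, a self-contained sketch of the render URL that queries_for_graphite() assembles; the framework, server, database, and metric names below are made-up placeholders rather than values from the atlas config:

def build_render_url(graphite_url, framework, subnet, server, database,
                     parameter, time_string, fmt):
    # One metric path per framework.subnet.ms.server.database.parameter
    # combination, matching the target string built above.
    target = ".".join([framework, subnet, "ms", server, database, parameter])
    return graphite_url + "target=" + target + time_string + "&format=" + fmt

# Example with placeholder values:
url = build_render_url("http://graphite.example.com/render/?", "collectd",
                       "subnet-a", "web01", "postgres", "cpu_idle",
                       "&from=-1h", "json")
# -> http://graphite.example.com/render/?target=collectd.subnet-a.ms.web01.postgres.cpu_idle&from=-1h&format=json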
Example No. 5
0
class GraphiteHelper():
    

    def __init__(self, request=None, environment=None):
        self.module = 'graphite_module'
        self.ah_obj = AtlasHelper() 
        self.aws_helperobj = AwsHelper()
        self.module_config_data = self.ah_obj.get_atlas_configuration_data(self.module)
        self.graphite_url = " "
        self.framework = ""
        self.parameters_list = []
        self.time_interval = 0.0
        self.servers_monitored = []
        self.format = ""
        self.from_time = None
        self.to_time = None
        self.memcache_var = memcache.Client([self.ah_obj.get_atlas_config_data("global_config_data",'memcache_server_location')], debug=0)
        if environment is not None:
            self.aws_moduleobj = AwsModule(request=request,environment=environment)

    def get_subnet_list(self, environment):
        """
        Get the subnets for the environment that have instances, and decide whether an attribute should be displayed on a subnet.
        """
        if environment != 'uncategorized':
            subnets_with_instances = self.aws_moduleobj.get_information(environment, subnets_with_instances='true')
            subnet_list = []
            for subnet, stack_list in subnets_with_instances.iteritems():
                for attribute, attr_details in self.module_config_data['stack_attributes'].iteritems():
                    if attr_details['stack'] == 'all' or set(attr_details['stack']).issubset(set(stack_list)):
                        if subnet not in subnet_list: subnet_list.append(subnet)
            return subnet_list
        
    def get_query_parameters(self):
        """Get the query parameters from atlas config yaml"""
        self.graphite_url = self.module_config_data['others']['graphite_url']+"render/?"
        self.framework = self.module_config_data['others']['framework']
        self.servers_monitored =  self.module_config_data['others']['server_name']
        self.database = self.module_config_data['others']['database']
        self.time_interval = self.module_config_data['others']['time_duration']
        if 'from' in self.time_interval: self.from_time = self.time_interval['from']
        if 'to' in self.time_interval: self.to_time = self.time_interval['to']
        if self.to_time is not None and self.from_time is not None:
            self.time_string = "&from="+str(self.from_time)+"&to="+str(self.to_time)
        if self.from_time is None:
            self.time_string = "&to="+str(self.to_time)
        if self.to_time is None:
            self.time_string = "&from="+str(self.from_time)
        self.parameters_list = self.module_config_data['others']['parameters']
        self.format = self.module_config_data['others']['format']

      
    def queries_for_graphite(self, subnet_list):
        """Construct queries for grahite"""
        query_dict = collections.defaultdict(dict)
        self.get_query_parameters()
        for subnet in subnet_list:
            for server in self.servers_monitored:
                for parameter in self.parameters_list:
                    target = self.framework+"."+subnet+".ms."+server+"."+self.database+"."+parameter   
                    query_dict[subnet][parameter] = self.graphite_url+"target="+target+self.time_string+"&format="+self.format      
        return dict(query_dict)                             


    def generate_report(self, query):
        """Retrieve query results from the graphite server."""
        try:
            report_json = {}
            response = requests.get(query)
            if response.status_code == 200:
                report_json = json.loads(response.text) #convert the json into a python dictionary
                return report_json
        except ConnectionError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb)   
        except HTTPError as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb)   
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "generate_report()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    
    def get_stack_attributes(self, environment):
        """Get all stack attributes."""
        stack_attribute_list, stack_attribute_dict = [], {}
        for attribute, details in self.module_config_data['stack_attributes'].iteritems():
            stack_attribute_list.append((details['display_name'], details['editable']))
            stack_attribute_dict[details['display_name']] = details
        return(stack_attribute_list, stack_attribute_dict)


    def get_stack_attribute_values(self, environment):
        """Get stack attribute values from cache. If it does not exists get it from the the global cache."""
        stack_attribute_values = self.memcache_var.get(str(environment+"graphite_stack_attributes"))
        if not stack_attribute_values:
            stack_attribute_values = self.memcache_var.get(str(environment+"global_graphite_stack_attributes"))
            if stack_attribute_values is not None:
                self.memcache_var.set(str(environment+"graphite_stack_attributes"), stack_attribute_values, 10*60)
            with threading.Lock():
                thread = threading.Thread(target=self.cache_stack_attribute_values, args=[environment])
                thread.start()
        return stack_attribute_values

    def cache_stack_attribute_values(self, environment):
        """Cache stack attribute values."""
        try:
            stack_attribute_values =  self.stack_attribute_values(environment)    
            self.memcache_var.set(str(environment+"graphite_stack_attributes"), stack_attribute_values, 10*60)
            if stack_attribute_values is None:
                raise Exception("The graphite attribute values for environment "+environment+" has not been fetched. Please make sure the cache is populated !!!")
            if stack_attribute_values is not None:     
                self.memcache_var.set(str(environment+"global_graphite_stack_attributes"),stack_attribute_values, 15*60)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("graphite_helper.py", "cache_stack_attribute_values()", exp_object, exc_type, exc_obj, exc_tb)
            return {}


    def stack_attribute_values(self, environment):
        """get stack attribute values from graphite server and parse it."""
        if environment != 'uncategorized':
            stack_attribute_dict = self.ah_obj.create_nested_defaultdict()
            organization_list = self.aws_helperobj.get_organizations()
            region_list = self.aws_helperobj.get_regions()
            stack_attributes_from_config = self.module_config_data['stack_attributes']
            attributes_list = stack_attributes_from_config.keys()
            subnet_list = self.get_subnet_list(environment)
            graphite_query_dict = self.queries_for_graphite(subnet_list)
            for organization in organization_list:
                for region in region_list:
                    vpc_list = self.aws_helperobj.get_vpc_in_region(region)
                    if vpc_list:
                        for vpc in vpc_list:
                            for subnet in subnet_list:
                                for attribute in stack_attributes_from_config:
                                    stack_list = stack_attributes_from_config[attribute]['stack']
                                    attribute_value=""
                                    suffix=""
                                    if 'suffix' in stack_attributes_from_config[attribute]: 
                                        suffix = stack_attributes_from_config[attribute]['suffix'] 
                                        display_name= ""
                                    if 'display_name' in stack_attributes_from_config[attribute]:
                                        display_name = stack_attributes_from_config[attribute]['display_name']
                                        report = self.generate_report(graphite_query_dict[subnet][attribute])
                                        if report:
                                            target = self.ah_obj.split_string(report[0]['target'], ('.'))
                                            if subnet in target and attribute in target:
                                                for index in range(len(report[0]['datapoints'])-1, 0, -1):
                                                    if report and report[0]['datapoints'][index][0] is not None:
                                                        attribute_value = str(int(report[0]['datapoints'][index][0]))+" "+suffix
                                                        break
                                                    else: attribute_value = "null"
                                        else:attribute_value = "null"
                                    for stack in stack_list:
                                        stack_attribute_dict[region][vpc][subnet][stack][display_name] = attribute_value               
            return self.ah_obj.defaultdict_to_dict(stack_attribute_dict)                                               
Example No. 6
0
class JenkinsActions:

    def __init__(self, request=None, environment=None):

        self.ah_obj = AtlasHelper()
        self.module="jenkins_module"
        self.python_jenkinsurl = self.ah_obj.get_atlas_config_data(self.module, "python_jenkins_url")
        self.build_record_count = self.ah_obj.get_atlas_config_data(self.module, "build_record_count")
        self.jenkins_password = os.environ.get('JENKINS_PASSWORD')
        self.jenkins_username = os.environ.get('JENKINS_USERNAME')
        self.jenkinsurl = os.environ.get('JENKINS_URL')
        self.python_jenkinsurl = self.jenkinsurl+"/job/"
        self.memcache_var = memcache.Client([self.ah_obj.get_atlas_config_data("global_config_data",
                                                                    'memcache_server_location')
                                        ], debug=0)
        if environment:
            self.aws_obj = AwsModule(request, environment)

    """
    helper methods
    """

    def get_jenkins_job_folder(self, jobname):
        job_folder_information = self.ah_obj.get_atlas_config_data(self.module, "folders")[1]
        for folder, job_list in job_folder_information.iteritems():
            if jobname in job_list:
                return folder

    def cache_jenkins_build_userinfo(self):
        try:
            jobname = 'AWS-Build-Dev-Deploy-Dev'
            build_userinfo_dict = self.jenkins_build_userinfo(jobname)
            self.memcache_var.set(jobname+'_build_userinfo', build_userinfo_dict,15*60)
            if build_userinfo_dict is None:
                raise Exception("Source data from Jenkins server is unavailable. Please ensure data is available and populate the cache.")
            if build_userinfo_dict is not None:
                self.memcache_var.set('global_'+jobname+'_build_userinfo', build_userinfo_dict,86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("cloudability.py", "construct_cost_query()", exp_object, exc_type, exc_obj, exc_tb)
            return

    def jenkins_build_userinfo(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        jenkins_url = self.python_jenkinsurl+self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password)
        build_user_info_dict = collections.defaultdict(dict)
        try:
            if jenkins_obj.job_exists(jobname):
                job_info = jenkins_obj.get_job_info(jobname)
                build_information_list = job_info['builds']
                latest_build_number = build_information_list[0]['number']
                build_info = jenkins_obj.get_build_info(jobname, latest_build_number)
                for build_number in range(latest_build_number-self.build_record_count, latest_build_number+1):
                    try:
                        build_info_dict = jenkins_obj.get_build_info(jobname, build_number)
                        build_user_info_dict[build_number] = {'deployed_by':"", 'branch':"", 'last_deployed': "", 'subnet':"", 'commit_hash':""}
                        branch= ""
                        if 'actions' in build_info_dict:
                            if 'parameters' in build_info_dict['actions'][0]:
                                for parameter_dict in build_info_dict['actions'][0]['parameters']:
                                    if parameter_dict['name'] == 'subnet':
                                        build_user_info_dict[build_number]['subnet'] = parameter_dict['value']   
                                    if parameter_dict['name'] == 'branch':
                                        build_user_info_dict[build_number]['branch'] = parameter_dict['value']
                                        branch = parameter_dict['value']
                            if 'causes' in build_info_dict['actions'][1]:
                                actions = build_info_dict['actions'][1]
                                if 'userName' in actions['causes'][0]:
                                    build_user_info_dict[build_number]['deployed_by'] = build_info_dict['actions'][1]['causes'][0]['userName']
                            if 'buildsByBranchName' in build_info_dict['actions'][2]:
                                commit_hash =  build_info_dict['actions'][2]['buildsByBranchName']['origin/develop']['revision']['SHA1'][:7]
                                build_user_info_dict[build_number]['commit_hash'] = commit_hash
                        if 'timestamp' in build_info_dict:
                            timestamp = str(datetime.datetime.now() - datetime.datetime.fromtimestamp(build_info_dict['timestamp']/1000))
                            deployed_before = ""
                            if isinstance(timestamp, list):
                                hours_minutes = timestamp[1].split(":")[:2]
                                deployed_before = timestamp[0] + " "+hours_minutes[0]+"hrs "+hours_minutes[1]+"mins"
                            else:
                                hours_minutes = timestamp.split(":")[:2]
                                deployed_before = hours_minutes[0]+" hrs "+hours_minutes[1]+" mins"
                            build_user_info_dict[build_number]['last_deployed'] = deployed_before
                    except:
                        continue  
            return self.ah_obj.defaultdict_to_dict(build_user_info_dict)   
        except Exception as exp_object:
            return {}
        
    def get_jenkins_build_userinfo(self, jobname):
        build_userinfo_dict = self.memcache_var.get(jobname+'_build_userinfo')
        if not build_userinfo_dict:
            build_userinfo_dict = self.memcache_var.get('global_'+jobname+'_build_userinfo')
            if build_userinfo_dict is not None:
                self.memcache_var.set(jobname+'_build_userinfo', build_userinfo_dict, 3*60*60)
                with threading.Lock():
                    thread = threading.Thread(target=self.cache_jenkins_build_userinfo)
                    thread.start()
        return build_userinfo_dict


    def get_jenkins_job_info(self, jobname):
        job_info_dict, job_info = {}, {}
        jenkins_url = self.python_jenkinsurl+self.get_jenkins_job_folder(jobname)
        jenkins_obj = jenkins.Jenkins(jenkins_url, username=self.jenkins_username, password=self.jenkins_password)
        try:
            if jenkins_obj.job_exists(jobname):
               job_info = jenkins_obj.get_job_info(jobname)
            job_info_dict= {'last_successful_build_number':job_info['lastSuccessfulBuild']['number'],
                            'last_successful_build_url': job_info['lastSuccessfulBuild']['url'],
                            'last_unsuccessful_build_number': job_info['lastUnsuccessfulBuild']['number'],
                            'last_unsuccessful_build_url': job_info['lastUnsuccessfulBuild']['url'],
                            'last_completed_build_number':job_info['lastCompletedBuild']['number'],
                            'last_completed_build_url':job_info['lastCompletedBuild']['url'],
                            'last_unstable_build_number':job_info['lastUnstableBuild']['number'] if job_info['lastUnstableBuild'] else None,
                            'last_unstable_build_url':job_info['lastUnstableBuild']['url'] if job_info['lastUnstableBuild'] else None,
                            'last_stable_build_number':job_info['lastStableBuild']['number'],
                            'last_stable_build_url':job_info['lastStableBuild']['url'],
                            'last_build': job_info['lastBuild']['url'],
                            'last_build_number': job_info['lastBuild']['number'],
                            'nextBuildNumber':job_info['nextBuildNumber']
                        }
            return job_info_dict
        except Exception as exp_object:
            return {}

   
    def get_console_output(self,build):
        console_output = build.get_console()
        if console_output:
            return console_output

    def check_build_status(self, job_name):
        status_dict = {}
        try:
            jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
            job = jenkinsapi_obj.get_job(job_name)
            build = job.get_last_build()
            other_info = self.get_jenkins_job_info(job_name)
            if other_info:
                status_dict['other_info'] = other_info
            status_dict['console_output'] = self.get_console_output(build)
            if build.is_running():
                status_dict['exit_status'] = "Build not complete"
                status_dict['action_state'] = "action_in_progress"
            else:
                if build.is_good():
                    status_dict['exit_status'] = "Build Successful"
                    status_dict['action_state'] = "action_completed"
            return status_dict
        except Exception as exp_object:
            status_dict['action_state'] = 'action_failed'
            return status_dict

    """
    action methods    
    """

    def server_create_test(self, subnet, profile, node_name):
        """
        Create a server on aws_obj.
        """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        if profile == "ops-general":
            jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile, 'name':node_name})
        else:
            jenkinsapi_obj.build_job('server_create_test', {'subnet': subnet, 'profile': profile})
        

    def echo_contents(self, text1, text2):
        """
        Echo contents sample jenkins job.
        """
        jenkinsapi_obj = api.Jenkins(self.jenkinsurl, username=self.jenkins_username, password=self.jenkins_password)
        jenkinsapi_obj.build_job('echo_contents', {'text1': text1, 'text2': text2})

   

    def initiate_actions(self, action, parameters):
        """
        Initiate jenkins actions. 
        """
        initial_status = {}
        try:
            if parameters is None or parameters =='':
                return
            other_info = self.get_jenkins_job_info(action)
            if other_info:
                initial_status['other_info'] = other_info
            if action =='echo_contents':
                self.echo_contents(parameters['text1'], parameters['text2'])
            if action == 'server_create_test':
                self.server_create_test(parameters['subnet'], parameters['profile'], parameters['node_name'])
            initial_status = self.check_build_status(action)
            initial_status['action_state'] = 'action_initiated'
            return initial_status
        except Exception as exp_object:
            return initial_status

    def action_state(self, action):
        """
        Check the status of builds.
        """
        action_state = self.check_build_status(action)
        return action_state

    def parameter_values(self, action, parameter, environment=None):
        """
        Return parameter values for each build to be displayed as options to user.
        """
        if action == 'server_create_test':
            if parameter == 'subnet':
                return self.aws_obj.get_information(environment, env_subnet_list='true')
            if parameter == 'profile':
                return self.aws_obj.get_information(environment, profiles='true')
            if parameter == 'name':
                return ""
        if action == 'echo_contents':
            if parameter == 'text1':
                return ""
            if parameter == 'text2':
                return ""

    def action_parameters(self, action_type, environment=None):
        """
        Get parameters for each action.
        """
        action_parameters_dict={}
        if (action_type=='vpc_actions'):
            action_parameters_dict = self.unpack_action_parameters(self.ah_obj.get_atlas_config_data(self.module, 'vpc_actions')[1], environment)
        elif action_type == 'instance_actions':
            pass
        elif action_type == 'instance_group_actions':
            pass
        elif action_type == 'stack_actions':
            action_parameters_dict = self.unpack_action_parameters(self.ah_obj.get_atlas_config_data(self.module, 'stack_actions')[1], environment)
        return action_parameters_dict

    def unpack_action_parameters(self, action_parameters_dict, environment=None):
        parameter_dict = {}
        for key, values in action_parameters_dict.iteritems():
            parameter_list = values['parameters']
            parameter_dict[key] = {}
            for parameter in parameter_list:
                temp_list = []
                temp_parameter = parameter.split(',')
                temp_list.append(temp_parameter[1])
                temp_list.append(self.parameter_values(key, temp_parameter[0], environment))
                parameter_dict[key][temp_parameter[0]] = temp_list
        return parameter_dict
Example No. 7
0
class ChefHelper:

    def __init__(self):
        self.ah_obj = AtlasHelper()
        self.awshelper_obj = aws_helper.AwsHelper()
        self.module = "chef_module"
        self.db_obj = DatabaseHelper()
        self.environment_groups = self.ah_obj.get_atlas_config_data("global_config_data", "environment_groups")
        self.memcache_var = memcache.Client([self.ah_obj.get_atlas_config_data("global_config_data",
                                                                    'memcache_server_location')
                                        ], debug=0)
        self.environment_subnets_details = self.awshelper_obj.get_environment_subnets_details()

        try:
            base_path = self.ah_obj.get_atlas_config_data("chef_module", 'chef-base-path')
            self.api = chef.autoconfigure(base_path)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "__init__()", exp_object, exc_type, exc_obj, exc_tb)

    def get_databag_list(self, databag=''):
        #returns a list of all available data bags on the chef_server
        data_bag_list = []
        try:
            data_bags = DataBag(databag,self.api)
            for items in data_bags.list(self.api):
                data_bag_list.append(items)
            if not data_bag_list:
                raise Exception("No data bag items found")
            else:
                return data_bag_list
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "get_databag_list()", exp_object, exc_type, exc_obj, exc_tb)
            return []


    def cache_databag_attributes_foritem(self, databag, item):
        """
        Fetch databag attributes from chef server using databag name and item name and cache it locally.
        """
        try:
            databag_attributes_foritem = self.get_databag_attribute_foritem(databag, item)
            self.memcache_var.set("cpdeployment_databag_attrs", databag_attributes_foritem,600) 
            if databag_attributes_foritem is None:
                raise Exception("Databag attributes cannot be obtained from Chef server. Please make sure data is obtained and populate the cache !!!")
            if databag_attributes_foritem is not None:
                self.memcache_var.set("global_cpdeployment_databag_attributes", databag_attributes_foritem ,86400)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "cache_databag_attributes_foritem()", exp_object, exc_type, exc_obj, exc_tb)
            self.memcache_var.disconnect_all()
            return

      
    def get_chefdbag_attributes_foritem(self, databag, item):
        """
        Check the short-term cache first; if the value is missing, fetch it from the global cache.
        """
        db_attribute_dict = self.memcache_var.get('cpdeployment_databag_attrs')
        if db_attribute_dict is None:
            db_attribute_dict = self.memcache_var.get('global_cpdeployment_databag_attrs')
            if db_attribute_dict is not None:
                self.memcache_var.set("cpdeployment_databag_attrs", db_attribute_dict, 600)
                self.memcache_var.disconnect_all()
                with threading.Lock():
                    thread = threading.Thread(target=self.cache_databag_attributes_foritem, args=(databag, item))
                    thread.start()
        return db_attribute_dict
       

    def get_databag_attribute_foritem(self, databag, item):
        try:
            data_bag = DataBag(databag,self.api)
            data_bag_item = data_bag[item]
            chef_databags = self.ah_obj.get_atlas_config_data("chef_module", 'chef_databags')[0]
            chef_databags_info = self.ah_obj.get_atlas_config_data("chef_module", 'chef_databags')[1]
            key_list = []
            for index in chef_databags:
                if databag in chef_databags_info.keys():
                    for item_index in chef_databags_info[databag]['items'].keys():
                        if item == item_index:
                            key_list = chef_databags_info[databag]['items'][item]['keys']

            data_bag_attr = {}
            for d_item_key, d_item_values in data_bag_item.iteritems():

                if isinstance(d_item_values, unicode):
                    if d_item_key in key_list:
                        data_bag_attr[d_item_key] = d_item_values
                        break
                elif isinstance(d_item_values, dict):
                    data_bag_attr[d_item_key] = {}

                    for key in key_list:
                        data_bag_attr[d_item_key][key] = self.ah_obj.get_nested_attribute_values(d_item_values, key)

            return data_bag_attr
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "get_databag_attributes()", exp_object, exc_type, exc_obj, exc_tb)
            return []

    def cache_chef_node_attributes(self):
        """
        Fetch node attributes from chef server and cache it locally.
        """
        try:
            chef_node_attributes_dict = self.get_node_attrs_from_chef()
            if chef_node_attributes_dict is None:
                raise Exception("Chef node attributes not available from the Chef server. Please make sure the data is available and populate the cache.")
            if chef_node_attributes_dict is not None:
                self.memcache_var.set("global_chef_node_attributes_cache", chef_node_attributes_dict,86400)
            self.memcache_var.disconnect_all()
            self.memcache_var.set("chef_node_attr_caches", chef_node_attributes_dict,2*60*60)
            self.memcache_var.disconnect_all()
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "cache_chef_node_attributes()", exp_object, exc_type, exc_obj, exc_tb)
            self.memcache_var.disconnect_all()
            return
    
    def get_node_attributes(self):
        node_attribute_dict = self.memcache_var.get('chef_node_attr_caches')
        if node_attribute_dict is None:
            node_attribute_dict = self.memcache_var.get('global_chef_node_attributes_cache')
            self.memcache_var.disconnect_all()
            if node_attribute_dict is not None:
                self.memcache_var.set('chef_node_attr_caches', node_attribute_dict, 600)
                with threading.Lock():
                    thread = threading.Thread(target=self.cache_chef_node_attributes)
                    thread.start()
        return node_attribute_dict


    def get_node_attrs_from_chef(self):
        try:
            env_subnets_dict = {}
            node_attribute_dict = {}
            for organization in self.awshelper_obj.get_organizations():
                node_attribute_dict = defaultdict(dict)
                node_list = Node.list(self.api)
                for environment in self.awshelper_obj.get_environments(organization):
                    for region in self.awshelper_obj.get_regions():
                        vpc_list = self.awshelper_obj.get_vpc_in_region(region)
                        if vpc_list:
                            for vpc in self.awshelper_obj.get_vpc_in_region(region):
                                env_subnets_dict = self.awshelper_obj.get_env_subnets(organization, region, vpc)
                for node in node_list:
                    node_obj = Node(node, api=self.api)
                    node_split = self.ah_obj.split_string(node, ["."])
                    if node_split is None or len(node_split)<=1:
                        pass
                    else:
                        node_subnet = self.ah_obj.split_string(node, ['.'])[1]
                        for key_tuple, environment in env_subnets_dict.iteritems():
                            if node_subnet in key_tuple:
                                environment = env_subnets_dict[key_tuple]
                                attribute_list = node_obj.attributes
                                if 'ec2' in attribute_list:
                                    if 'instance_id' in node_obj.attributes.get_dotted('ec2'):
                                        instance_id = node_obj.attributes.get_dotted('ec2.instance_id')
                                        node_attribute_dict[instance_id]['node'] = node
                                if 'os' in attribute_list:
                                    node_attribute_dict[instance_id]['os']=node_obj['os']
                                if 'os_version' in attribute_list:
                                    node_attribute_dict[instance_id]['os_version'] = node_obj['os_version']
                                if 'platform' in attribute_list:
                                    node_attribute_dict[instance_id]['platform'] = node_obj['platform']
                                if 'platform_version' in attribute_list:
                                    node_attribute_dict[instance_id]['platform_version'] = node_obj['platform_version']
                                if 'uptime' in attribute_list:
                                    node_attribute_dict[instance_id]['uptime'] = node_obj['uptime']
                                if 'idletime' in attribute_list:
                                    node_attribute_dict[instance_id]['idletime'] = node_obj['idletime']
            return dict(node_attribute_dict)
        except Exception as exp_object:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.ah_obj.print_exception("chef_helper.py", "get_node_attrs_from_chef1()", exp_object, exc_type, exc_obj, exc_tb)
            return {}

    def map_node_databag_attributes(self, node_attributes, databag_attributes, environment):
        tabs_info_dict = {}
        for instance_id, details in node_attributes.iteritems():
            tabs_info_dict[instance_id] = {'chef_information':{}}
            tabs_info_dict[instance_id]['chef_information'] = details
            for node_attr, node_attr_value in details.iteritems():
                tabs_info_dict[instance_id]['chef_information'][node_attr]=node_attr_value
            if details['node'] in databag_attributes:
                for key, value in databag_attributes.iteritems():
                    if details['node'] == key:
                        for attribute, attribute_value in value.iteritems():
                            tabs_info_dict[instance_id]['chef_information'][attribute] = attribute_value           
        return tabs_info_dict

    def get_values_for_attribute(self, environment, region, vpc, subnet, stack, attribute, details):
        attribute_value_dict = {}
        if attribute == "owner":
            ownership_records = self.db_obj.get_stack_ownership_details(environment, region, vpc, subnet, stack)
            if ownership_records is not None:
                attribute_value_dict["owner"] = ownership_records.owner
                attribute_value_dict["start_time"] = int(ownership_records.start_time.strftime("%s")) * 1000
            else:
                attribute_value_dict["owner"] = "none"
        if attribute == "dbhost" or attribute == "email_override" or attribute == "branch":
            attribute_record = self.db_obj.get_stack_attribute_value(environment, region, vpc, subnet, stack, attribute)
            if attribute_record is not None:
                attribute_value_dict[attribute] = attribute_record.attribute_value
            else:
                custportal_dbag_attrs = self.memcache_var.get('cpdeployment_databag_attrs')
                if custportal_dbag_attrs is not None:
                    for keys, values in custportal_dbag_attrs.iteritems():
                        custportal_dbag_attrs_subnet = keys.split(".")[1]
                        if custportal_dbag_attrs_subnet == subnet and stack in details['stack']:
                            if attribute == "dbhost":
                                if values['AWS_DB_HOST']:
                                    attribute_value_dict["dbhost"] = values['AWS_DB_HOST']
                                else:
                                    attribute_value_dict["dbhost"] = "none"
                            if attribute == "email_override":
                                if values['EMAIL_OVERRIDE'] and values['EMAIL_OVERRIDE'] != "none":
                                    attribute_value_dict["email_override"] = values['EMAIL_OVERRIDE']
                            if attribute == "branch":
                                if values["branch"]:
                                    attribute_value_dict["branch"] = values['branch']
                                else:
                                  "none"
        return attribute_value_dict


    def stack_attribute_values(self, request, environment, region_vpc_dict):
        """
        Get attributes and values for each stack.
        """
        stack_attribute_dict = collections.defaultdict(dict)
        awsmodule_obj = AwsModule(request, environment)
        (stack_attr_list, stack_attr_details) = self.ah_obj.get_atlas_config_data(self.module, 'stack_attributes')
        application_subnets = awsmodule_obj.get_information(environment, application_subnets='true')
        apps_in_environment = awsmodule_obj.get_information(environment, apps_in_environment='true')
        if application_subnets is not None and apps_in_environment is not None:
            for region, vpc_list in region_vpc_dict.iteritems():
                stack_attribute_dict[region] = {}
                if vpc_list is not None:
                    for vpc in vpc_list:
                        stack_attribute_dict[region][vpc] = {}
                        for subnet in application_subnets:
                            stack_attribute_dict[region][vpc][subnet] = {}
                            for stack in apps_in_environment:
                                stack_attribute_dict[region][vpc][subnet][stack] = {}
                                for attribute in stack_attr_list:
                                    details = stack_attr_details[attribute]
                                    stack_attribute_dict[region][vpc][subnet][stack].update(self.get_values_for_attribute(environment, region, vpc, subnet, stack, attribute, details))                
        return dict(stack_attribute_dict)

    def get_stack_attribute_values(self, request, environment, region_vpc_dict):
        """
        Get stack attribute values for environment or environment groups.
        """
        stack_attribute_dict = {}
        stack_attribute_dict = self.stack_attribute_values(request, environment, region_vpc_dict)

        return stack_attribute_dict

    def get_stack_attributes(self, environment):
        """
        Get stack attributes from config file.
        """
        stack_attribute_list = []
        stack_attributes_dict = self.ah_obj.get_atlas_config_data('chef_module', 'stack_attributes')[1]
        for attribute, details in stack_attributes_dict.iteritems():
            stack_attribute_list.append((attribute, details['editable']))
        return(stack_attribute_list, stack_attributes_dict)
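The same two-tier caching pattern (a short-lived per-environment key backed by a long-lived global key, refreshed by a background thread) recurs in GraphiteHelper.get_stack_attribute_values(), JenkinsActions.get_jenkins_build_userinfo(), and ChefHelper.get_node_attributes() above. A condensed sketch of that pattern, with made-up key names and a placeholder refresh callable:

import threading

def get_with_fallback(cache, local_key, global_key, refresh, local_ttl=600):
    # Try the short-lived local entry first.
    value = cache.get(local_key)
    if value is None:
        # Fall back to the long-lived global entry and repopulate the local one.
        value = cache.get(global_key)
        if value is not None:
            cache.set(local_key, value, local_ttl)
        # Refresh both tiers in the background so a later caller sees fresh data.
        thread = threading.Thread(target=refresh)
        thread.start()
    return value

# Usage with a memcache.Client instance and any of the cache_* methods above, e.g.:
# get_with_fallback(self.memcache_var, 'chef_node_attr_caches',
#                   'global_chef_node_attributes_cache', self.cache_chef_node_attributes)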