def project_list_per_center(center):
    """Return the projects belonging to *center*.

    Returns a dict with keys:
      'list'   - project display names for the center
      'ids'    - the matching project ids
      'data'   - same list as 'list' (kept for backward compatibility)
      'status' - 200 on success, 500 on failure
    """
    log.info(' IN PROJECT LIST PER CENTER __ {0}'.format(center))
    project_list_center_arr = []
    project_list_center_arr_id = []
    project_list_local = []
    status = 200
    try:
        cost_center_list = set_global_cost_center_list()
        for project in cost_center_list:
            if project['cost_center'] == center:
                # Projects in the catch-all 'other' center that never got a
                # real name are displayed by their id instead.
                if center.lower() == 'other' and project['project_name'] == project['project_id']:
                    project_list_center_arr.append(project['project_id'])
                else:
                    project_list_center_arr.append(project['project_name'])
                project_list_center_arr_id.append(project['project_id'])
        project_list_local = project_list_center_arr
    except Exception as e:
        # The original swallowed the error silently; log it so failures are
        # diagnosable. (Also dropped two accumulators that were never read.)
        log.error('Error building project list for center {0} -- {1}'.format(center, e))
        status = 500
    return {'list': project_list_local,
            'ids': project_list_center_arr_id,
            'data': project_list_center_arr,
            'status': status}
def resource_list_per_project(center, project):
    """Return the resource types used by *project*, or by every project in
    *center* when *project* is None.

    *project* is a display name; it is translated to the project id before
    querying.
    """
    resource_list = []
    project_list_local = {}
    log.info('IN RESOURCE_LIST_PER_PROJECT -- {0} -- {1}'.format(center, project))
    cost_center_list = get_center_list(False)
    log.info('COST_CENTER_LIST == {0}'.format(cost_center_list))
    for project_info in cost_center_list:
        project_list_local[project_info['project_id']] = project_info['project_name']
    # Guard: previously query_data stayed undefined (NameError) when no
    # project name matched.
    query_data = []
    if project is not None:
        for project_id in project_list_local:
            if project_list_local[project_id] == project:
                project = str(project_id)
                query_data = get_resource_list_per_project([project])
                log_output(query_data)
                break  # ids are unique; no need to keep scanning
    else:
        project_ids = project_list_per_center(center)['ids']
        query_data = get_resource_list_per_project(project_ids)
        log_output(query_data)
    log_output('In resource List')
    log_output(query_data)
    for resource in query_data:
        resource_list.append(resource[0])
    log_output('local list resource')
    log_output(resource_list)
    return resource_list
def get_regions_list(project):
    """Resolve *project* (a display name) to its numeric id and return the
    regions list as a JSON response."""
    data = dict(data=[], status=200)
    name_by_id = dict()
    try:
        for info in get_center_list(False):
            name_by_id[info['project_id']] = info['project_name']
        for pid, name in name_by_id.items():
            if name == project:
                if 'ID' in pid:
                    # ids look like 'ID-<number>'; keep the numeric part
                    project = pid.split('-')[1]
                else:
                    log.info('No ID')
                    log.info(pid)
                    project = ''
        data = regions_list(project)
    except Exception as e:
        data['status'] = 500
    return Response(response=json.dumps(data['data']),
                    status=data['status'],
                    mimetype="application/json")
def get_project_list_data():
    """Build the list of project display names from the DB plus the cost
    center catalog.

    Returns dict(data=<list of names>, status=200|500).
    """
    project_list = dict()
    status = 200
    project_names = []
    try:
        log.info('In Project List Data ----')
        projects = get_distinct_projects()
        cost_center_projects = set_global_cost_center_list()
        for project in projects:
            for center in cost_center_projects:
                center_data = json.loads(json.dumps(center, cls=AlchemyEncoder))
                if project[0] == center_data['project_id']:
                    project_list[project[0]] = center_data['project_name']
                elif project[0] not in project_list:
                    # no catalog entry: fall back to the raw id
                    project_list[project[0]] = project[0]
        # items() works on Python 2 and 3; iteritems() was Python-2 only
        for (_id, name) in project_list.items():
            project_names.append(name)
    except Exception as e:
        # e.args is safe on both Python versions; e[0] is not
        log_error(e.args[0] if e.args else e)
        status = 500
        project_list['message'] = str(e.args[0] if e.args else e)
    return dict(data=project_names, status=status)
def project_list_per_center(center):
    """Return {'list': display names, 'ids': project ids} for *center*."""
    log.info(' IN PROJECT LIST PER CENTER __ {0}'.format(center))
    cost_center_list = get_center_list(False)
    log.info('COST_CENTER_LIST == {0}'.format(cost_center_list))
    quoted_ids = ''
    names = []
    ids = []
    all_ids = []
    for entry in cost_center_list:
        all_ids.append(entry['project_id'])
        if entry['cost_center'] != center:
            continue
        quoted_ids += ",'" + entry['project_id'] + "'"
        # 'other'-center projects without a real name display their id
        use_id = center.lower() == 'other' and entry['project_name'] == entry['project_id']
        names.append(entry['project_id'] if use_id else entry['project_name'])
        ids.append(entry['project_id'])
    log_output('Cost Center List')
    log_output(names)
    log_output('local list project')
    log_output(names)
    return {'list': names, 'ids': ids}
def get_costs_per_resource_all_project_per_day_week(year, week, center, resource, output_type):
    """Cost series for one resource across every project of *center*, for a
    given week, grouped by 'day' or 'week'.

    Returns dict(data=<payload>, status=200|500).
    """
    log.info('get_costs_per_resource_all_project_per_day_week == {0}--{1} --{2} --{3}--{4} '.format(year, week, center, resource, output_type))
    resource_json = dict()
    day_data = dict()
    status = 200
    try:
        # Fetch the center's projects once; the original queried twice.
        center_projects = project_list_per_center(center)
        project_list_local = center_projects['list']
        project_list_ids = center_projects['ids']
        resource_list_local = resource_list_per_project(center, None)
        query_data = get_billing_data_per_resource_all_project_per_day_week(
            str(year), str(week), project_list_ids, str(resource), output_type)
        log_output(query_data)
        if output_type == 'day':
            day_data = get_per_day_data(query_data)
        elif output_type == 'week':
            day_data['usage_data'] = get_week_data(query_data, year, None)
            day_data['d3_json'] = []
        resource_json = {'usage_data': day_data['usage_data'],
                         'd3_json': day_data['d3_json'],
                         'project_list': project_list_local,
                         'resource_list': resource_list_local}
    except Exception as e:
        log_error(e.args[0] if e.args else e)
        status = 500
        resource_json['message'] = str(e.args[0] if e.args else e)
    return dict(data=resource_json, status=status)
def insert_data(usage_date, cost, project_id, resource_type, account_id, usage_value, measurement_unit):
    """Insert one Billing row; on a duplicate key, update the existing row.

    Returns True when the row was stored/updated, False otherwise.
    """
    done = False
    log.info('{0}<---->{1}<----->{2}<------>{3}<------>{4}'.format(
        usage_date, cost, project_id, resource_type, usage_value))
    try:
        usage = Billing(usage_date, cost, project_id, resource_type,
                        account_id, usage_value, measurement_unit)
        db_session.add(usage)
        db_session.commit()
        done = True
    except IntegrityError:
        # Row already exists: roll back the failed insert and update it.
        db_session.rollback()
        usage = Billing.query.filter_by(project_id=project_id,
                                        usage_date=usage_date,
                                        resource_type=resource_type).first()
        if usage is not None:  # guard: previously an AttributeError escaped here
            usage.cost = cost
            usage.usage_value = usage_value
            usage.measurement_unit = measurement_unit
            db_session.commit()
            done = True
    except Exception as e:
        # format the exception itself; e[0] fails on args-less exceptions
        log.error(' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'.format(e))
    return done
def insert_data(usage_date, resource_type, resource_id, resource_uri, location, usage_value, measurement_unit):
    """Insert one Usage row; on a duplicate key, update the existing row.

    Returns True when the row was stored/updated, False otherwise.
    """
    done = False
    log.info('---- starting to add info to DB ----')
    try:
        usage = Usage(usage_date=usage_date,
                      resource_type=resource_type,
                      resource_id=resource_id,
                      resource_uri=resource_uri,
                      location=location,
                      usage_value=usage_value,
                      measurement_unit=measurement_unit)
        db_session.add(usage)
        db_session.commit()
        done = True
    except IntegrityError:
        # Row already exists: roll back the failed insert and update it.
        db_session.rollback()
        usage = Usage.query.filter_by(usage_date=usage_date,
                                      resource_type=resource_type,
                                      resource_id=resource_id,
                                      location=location).first()
        if usage is not None:  # guard: previously an AttributeError escaped here
            usage.usage_value = usage_value
            usage.measurement_unit = measurement_unit
            db_session.commit()
            done = True
    except Exception as e:
        # format the exception itself; e[0] fails on args-less exceptions
        log.error(' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'.format(e))
    return done
def insert_project__table_data(data):
    """Upsert a row in the Project table from an export record.

    Uses 'ID-<projectNumber>' as the project id, the record's projectName as
    the display name when present (otherwise the id itself), and a generic
    'support' entry when no projectNumber is available.

    Returns dict(message=...) on success, empty dict on failure.
    """
    project = dict()
    try:
        if 'projectNumber' in data:
            project_id = 'ID-' + str(data['projectNumber'])
            if 'projectName' in data:
                insert_done = insert_project_data(project_id, data['projectName'])
            else:
                insert_done = insert_project_data(project_id, project_id)
        else:
            project_id = 'Not Available'
            insert_done = insert_project_data(project_id, 'support')
        if not insert_done:
            log.info(data)
            raise Exception("DB Error: Information not stored")
        project = dict(message=' data has been added to db')
    except Exception as e:
        # format the exception itself; e[0] raises IndexError on args-less
        # exceptions (and always fails on Python 3)
        log.error('Error in inserting data into the DB -- {0}'.format(e))
        db_session.rollback()
        traceback.print_exc()
    return project
def get_per_month_cost(query_data):
    """Map (month, cost) rows onto a fixed January..December series.

    Months absent from *query_data* keep a cost of 0.0.
    """
    log.info('get_per_month_cost == {0}'.format(query_data))
    month_names = ['January', 'February', 'March', 'April', 'May', 'June',
                   'July', 'August', 'September', 'October', 'November', 'December']
    per_month_data = [{'cost': 0.0, 'name': name, 'month': str(idx + 1)}
                      for idx, name in enumerate(month_names)]
    # index by month number for O(1) lookup instead of the inner scan
    by_month = dict((entry['month'], entry) for entry in per_month_data)
    for (month, cost) in query_data:
        entry = by_month.get(str(month))
        if entry is not None:
            entry['cost'] = float(cost)
    log.info('per_month_data == {0}'.format(per_month_data))
    return per_month_data
def get_costs_year(year, output_type):
    """Yearly billing data aggregated by 'month', 'week'/'quarter', or 'day'.

    Returns dict(data=<payload>, status=200|500).
    """
    data = dict()
    usage_data = []
    status = 200
    log.info(' In get_costs_year == {0} -- {1}'.format(year, output_type))
    try:
        query_data = get_billing_data_per_year(str(year), output_type)
        log_output(query_data)
        data = {'usage_data': usage_data}
        if output_type == 'month':
            data['usage_data'] = get_per_month_cost(query_data, None, year)
        elif output_type in ('week', 'quarter'):
            data['usage_data'] = get_usage_data(query_data)
        elif output_type == 'day':
            day_data = get_per_day_data(query_data)
            data['usage_data'] = day_data['usage_data']
            data['d3_json'] = day_data['d3_json']
    except Exception as e:
        log_error(e[0])
        status = 500
        data['message'] = str(e[0])
    return dict(data=data, status=status)
def get_regions_list(project):
    """Translate a project display name into its numeric id and return the
    regions for it as a JSON response."""
    data = dict(data=[], status=200)
    lookup = dict()
    try:
        for entry in get_center_list(False):
            lookup[entry['project_id']] = entry['project_name']
        for pid in lookup:
            if lookup[pid] != project:
                continue
            if 'ID' in pid:
                # ids look like 'ID-<number>'
                project = pid.split('-')[1]
            else:
                log.info('No ID')
                log.info(pid)
                project = ''
        data = regions_list(project)
    except Exception as e:
        data['status'] = 500
    return Response(response=json.dumps(data['data']),
                    status=data['status'],
                    mimetype="application/json")
def copy_file_to_archive(filename, service, main_bucket, dest_bucket):
    """Copy *filename* from *main_bucket* into *dest_bucket* via the storage
    objects API; returns the API response (empty dict on failure)."""
    resp = dict()
    try:
        log.info('Starting to move the file to {0} ---- {1}'.format(dest_bucket, filename))
        request = service.objects().copy(sourceBucket=main_bucket,
                                         sourceObject=filename,
                                         destinationBucket=dest_bucket,
                                         destinationObject=filename,
                                         body={})
        resp = request.execute()
        log.info('DONE Moving of file - {0} to Archive -{1} '.format(filename, dest_bucket))
        # delete_moved_file(filename, service)
    except Exception as e:
        log.error('Error in Copying the object to archive folder - {0}'.format(e[0]))
    return resp
def resource_list_per_project(center, project):
    """Return resource types for *project* (a display name), or for all of
    *center*'s projects when *project* is None."""
    resource_list = []
    project_list_local = {}
    log.info('IN RESOURCE_LIST_PER_PROJECT -- {0} -- {1}'.format(center, project))
    cost_center_list = set_global_cost_center_list()
    for project_info in cost_center_list:
        project_list_local[project_info['project_id']] = project_info['project_name']
    # Guard: previously query_data stayed undefined (NameError) when no
    # project name matched.
    query_data = []
    if project is not None:
        for project_id in project_list_local:
            if project_list_local[project_id] == project:
                project = str(project_id)
                query_data = get_resource_list_per_project([project])
                break  # ids are unique; no need to keep scanning
    else:
        project_ids = project_list_per_center(center)['ids']
        query_data = get_resource_list_per_project(project_ids)
    log_output('In resource List')
    log_output(query_data)
    for resource in query_data:
        resource_list.append(resource[0])
    return resource_list
def load_instance_table():
    """Return DB instances filtered by optional JSON body fields.

    Body keys (all optional): machine_type, tags (AND-matched), project,
    month, year. Responds with the filtered list as JSON.
    """
    body = request.get_json()
    log.info("------- body: {0} -------".format(body))
    machine_type = body.get('machine_type', None)
    tags = body.get('tags', None)
    project = body.get('project', None)
    month = body.get('month', None)
    year = body.get('year', None)
    data = db_session.query(Instance).order_by(Instance.instanceId).all()
    data = [x.__dict__ for x in data]
    for x in data:
        del x['_sa_instance_state']
    # (removed a dead 'instance_obj_list = data' that was immediately
    # overwritten by build_objs)
    instance_obj_list = build_objs(data)
    for x in instance_obj_list:
        if 'machineType' in x:
            # keep only the last URI segment
            x['machineType'] = x['machineType'].split('/')[-1]
    # project = project name
    if project is not None:
        project = project.lower()
        instance_obj_list = list(filter(lambda x: x['project'] == project,
                                        instance_obj_list))
    # machine_type = just the machine type bit
    if machine_type is not None:
        instance_obj_list = list(filter(lambda x: x['machineType'] == machine_type,
                                        instance_obj_list))
    # tags should be filtered as AND
    if tags is not None:
        tags = set(x.encode('UTF8') for x in tags)
        instance_obj_list = list(filter(
            lambda x: 'tags.items' in x and tags.issubset(set(x['tags.items'])),
            instance_obj_list))
    if year is not None:
        year = int(year)
        instance_obj_list = list(filter(
            lambda x: int(x['creationTimestamp'].split('-')[0]) == year,
            instance_obj_list))
    if month is not None:
        month = int(month)
        instance_obj_list = list(filter(
            lambda x: int(x['creationTimestamp'].split('-')[1]) == month,
            instance_obj_list))
    # list(filter(...)) is a no-op on Python 2 and required on Python 3,
    # where a bare filter object is not JSON-serializable
    return Response(response=simplejson.dumps(instance_obj_list),
                    status=200,
                    mimetype="application/json")
def loadData():
    """Run an immediate data-processing pass and return its payload as JSON."""
    msg = data_processor('now')
    log.info(msg)
    return Response(response=msg['data'], status=200, mimetype="application/json")
def insert_usage_data(data_list, filename, service):
    """Parse a list of billing-export records and upsert them into Billing.

    Zero-cost rows are skipped. Returns dict(message=...) on success, empty
    dict on failure.
    """
    usage = dict()
    try:
        data_count = 0
        total_count = 0
        for data in data_list:
            total_count += 1
            date = data['startTime']
            resource_type = str(data['lineItemId']).replace(
                "com.google.cloud/services", "").replace("com.googleapis/services", "")
            account_id = str(data['accountId'])
            # startTime carries a trailing timezone offset; keeping only the
            # first three dash-separated fields strips it before parsing
            usage_date = datetime.datetime.strptime(
                date.split("-")[0] + '-' + date.split("-")[1] + '-' + date.split("-")[2],
                "%Y-%m-%dT%H:%M:%S")
            # check if there is projectnumber else add it as Not available
            if 'projectNumber' in data:
                project_id = 'ID-' + str(data['projectNumber'])
            else:
                project_id = 'Not Available'
            if len(data['measurements']) != 0:
                usage_value = float(data['measurements'][0]['sum'])
                measurement_unit = str(data['measurements'][0]['unit'])
            else:
                usage_value = float(0)
                measurement_unit = str('none')
            # credits (adjustments) are folded into the cost; the original
            # computed the base cost twice (dead store removed)
            cost = float(data['cost']['amount'])
            if 'credits' in data:
                for credit in data['credits']:
                    cost += float(credit['amount'])
            if cost == 0:
                continue  # zero-cost rows are not stored
            insert_done = insert_data(usage_date, cost, project_id, resource_type,
                                      account_id, usage_value, measurement_unit)
            if not insert_done:
                log.info(data)
                continue
            data_count += 1
        usage = dict(message=' data has been added to db')
        log.info('DONE adding {0} items out of {1} for file -- {2} into the db '.format(
            data_count, total_count, filename))
    except Exception as e:
        log.error('Error in inserting data into the DB -- {0}'.format(e))
        db_session.rollback()
    return usage
def get_instance_metadata():
    """Run the data processor immediately and return its output as JSON."""
    result = data_processor('now')
    log.info(result['data'])
    return Response(response=result['data'], status=200, mimetype="application/json")
def set_scheduler_initial():
    """Create and start the global background scheduler, then configure its
    run time from SCHEDULER_HOUR / SCHEDULER_MIN environment variables."""
    print(' -------------- SETTING INITiAL SCHEDULER ---------------------')
    global scheduler
    scheduler = BackgroundScheduler()
    scheduler.start()
    hour = os.environ.get('SCHEDULER_HOUR')
    minute = os.environ.get('SCHEDULER_MIN')
    log.info(hour)
    log.info(minute)
    set_scheduler(hour, minute)
def run_scheduler():
    """Clear all scheduled jobs and register a single data_processor job.

    Returns the global scheduler so the caller can inspect or manage it.
    """
    global scheduler
    log.info('---- In run_scheduler ----')
    scheduler.remove_all_jobs()
    scheduler.print_jobs()
    # replace_existing avoids a duplicate-id error; max_instances=1 stops
    # overlapping runs if a previous one is still going
    scheduler.add_job(data_processor, id='data_processor', replace_existing=True, args=['now'], max_instances=1)
    log.info('------ Jobs List -----')
    scheduler.print_jobs()
    return scheduler
def run_scheduler():
    """Clear all scheduled jobs, then register both the billing and usage
    processors for an immediate ('now') run.

    NOTE(review): this file defines run_scheduler twice; the later definition
    wins at import time.
    """
    global scheduler
    log.info('---- In run_scheduler ----')
    scheduler.remove_all_jobs()
    scheduler.print_jobs()
    scheduler.add_job(data_processor, id='data_processor', args=['now'])
    scheduler.add_job(usage_processor, id='usage_data_processor', args=['now'])
    log.info('------ Jobs List -----')
    scheduler.print_jobs()
    return scheduler
def get_costs_per_resource_quarter_center(year, quarter, center, project, output_type):
    """Quarterly cost series for *project* within *center*, grouped by
    'month', 'week' or 'day'.

    Returns dict(data=<payload>, status=200|500).
    """
    # the log label previously named the wrong function ('month_center')
    log.info('get_costs_per_resource_quarter_center == {0}--{1} --{2} --{3} -- {4}'.format(
        year, quarter, center, project, output_type))
    project_json = dict()
    usage_data = []
    status = 200
    try:
        project_list_local = project_list_per_center(center)['list']
        resource_list_local = resource_list_per_project(center, project)
        project_json = {'usage_data': usage_data,
                        'resource_list': resource_list_local,
                        'project_list': project_list_local}
        if output_type in ('month', 'week', 'day'):
            # the original issued this identical query in all three branches;
            # issue it once
            query_data = get_billing_data_per_year_quarter_week_day(
                str(year), str(quarter), str(project), output_type)
            log_output(query_data)
            if output_type == 'month':
                project_json['usage_data'] = get_per_month_cost(query_data, quarter, year)
            elif output_type == 'week':
                project_json['usage_data'] = get_week_data(query_data, year, None)
            else:
                day_data = get_per_day_data(query_data)
                project_json['usage_data'] = day_data['usage_data']
                project_json['d3_json'] = day_data['d3_json']
    except Exception as e:
        log_error(e.args[0] if e.args else e)
        status = 500
        project_json['message'] = str(e.args[0] if e.args else e)
    return dict(data=project_json, status=status)
def get_costs_per_cost_month(year, value_to_match, output_type):
    """Aggregate one month's per-project costs up to cost centers.

    Each output entry is dict(name=<owner>, cost=<total>, id=<cost center>).
    Returns dict(data={'usage_data': [...]}, status=200|500).
    """
    log.info('get_costs_per_cost_month == {0}--{1} --{2}'.format(year, value_to_match, output_type))
    month_json = dict()
    month_data = []
    status = 200
    try:
        cost_center_list = set_global_cost_center_list()
        new_dict = dict()
        month_json = {'usage_data': month_data}
        query_data = get_billing_data_per_year_month(str(year), str(value_to_match), str(output_type))
        log_output(query_data)
        if query_data is not None:
            for (project, cost) in query_data:
                for project_info in cost_center_list:
                    cost_center = str(project_info['cost_center'])
                    project_id = str(project_info['project_id'])
                    owner = str(project_info['owner'])
                    if project == project_id:
                        # accumulate this project's cost under its center
                        bucket = new_dict.setdefault(cost_center, {})
                        bucket['owner'] = owner
                        bucket['cost'] = bucket.get('cost', 0.0) + cost
                        bucket.setdefault('project', []).append(str(project))
            for key, value in new_dict.items():
                month_data.append(dict(name=value['owner'], cost=value['cost'], id=key))
        # (removed an unused projects_list accumulator and a redundant
        # double reassignment of month_json)
        month_json['usage_data'] = month_data
    except Exception as e:
        log_error(e.args[0] if e.args else e)
        status = 500
        month_json['message'] = str(e.args[0] if e.args else e)
    return dict(data=month_json, status=status)
def get_costs_per_cost_month(year, value_to_match, output_type):
    """Aggregate one month's per-project costs up to cost centers.

    Each output entry is dict(name=<director>, cost=<total>, id=<cost center>).
    Returns dict(data={'usage_data': [...]}, status=200|500).
    """
    log.info('get_costs_per_cost_month == {0}--{1} --{2}'.format(year, value_to_match, output_type))
    month_json = dict()
    month_data = []
    status = 200
    try:
        cost_center_list = set_global_cost_center_list()
        new_dict = dict()
        month_json = {'usage_data': month_data}
        query_data = get_billing_data_per_year_month(str(year), str(value_to_match), str(output_type))
        log_output(query_data)
        if query_data is not None:
            for (project, cost) in query_data:
                for project_info in cost_center_list:
                    cost_center = str(project_info['cost_center'])
                    project_id = str(project_info['project_id'])
                    director = str(project_info['director'])
                    if project == project_id:
                        # accumulate this project's cost under its center
                        bucket = new_dict.setdefault(cost_center, {})
                        bucket['director'] = director
                        bucket['cost'] = bucket.get('cost', 0.0) + cost
                        bucket.setdefault('project', []).append(str(project))
            for key, value in new_dict.items():
                month_data.append(dict(name=value['director'], cost=value['cost'], id=key))
        # (removed an unused projects_list accumulator and a redundant
        # double reassignment of month_json)
        month_json['usage_data'] = month_data
    except Exception as e:
        log_error(e.args[0] if e.args else e)
        status = 500
        month_json['message'] = str(e.args[0] if e.args else e)
    return dict(data=month_json, status=status)
def insert_usage_data(data_list, filename, service):
    """Parse a CSV usage export (one string) and upsert each row into Usage."""
    usage = dict()
    # data_list is a string in csv format
    reader = csv.DictReader(data_list.splitlines(), delimiter=',')
    try:
        added = 0
        seen = 0
        for row in reader:
            seen += 1
            report_date = datetime.datetime.strptime(row['Report Date'], '%Y-%m-%d')
            if len(row['Quantity']) != 0:
                quantity = int(row['Quantity'])
                unit = str(row['Unit'])
            else:
                quantity = 0
                unit = "none"
            uri = str(row['Resource URI'].replace(
                "https://www.googleapis.com/compute/v1/projects", ""))
            where = str(row['Location'])
            rid = str(row['ResourceId'])
            rtype = str(row['MeasurementId']).replace("com.google.cloud/services", "")
            if insert_data(usage_date=report_date, resource_type=rtype,
                           resource_id=rid, resource_uri=uri, location=where,
                           usage_value=quantity, measurement_unit=unit):
                log.debug('row added')
                added += 1
            else:
                log.info(row)
                log.debug('row not added!!!')
        usage = dict(message=' data has been added to db')
        log.info('DONE adding {0} items out of {1} for file -- {2} into the db '.format(
            added, seen, filename))
    except Exception as e:
        log.error('Error in inserting data into the DB -- {0}'.format(e[0]))
        db_session.rollback()
    return usage
def delete_file(filename, service):
    """Delete *filename* from the main bucket; returns the API response
    (empty dict on failure)."""
    resp = dict()
    try:
        log.info('Starting to Delete the file {0} from {1}'.format(filename, BUCKET_NAME))
        request = service.objects().delete(bucket=BUCKET_NAME, object=filename)
        resp = request.execute()
        log.info('DONE Deleting file - {0} from - {1} '.format(filename, BUCKET_NAME))
    except Exception as e:
        log.error('Error in deleting the old file - {0}'.format(e[0]))
    # add code to add metadata or rename the file
    return resp
def get_project_list_data():
    """Resolve display names for every distinct project id.

    For each id, fetches an access token from the metadata server and looks
    the project up via get_project_data. Returns a dict mapping project id ->
    display name; an id maps to itself when the lookup fails.
    """
    log.info('In Project List Data ----')
    data = dict()
    project_list = dict()
    try:
        projects = get_distinct_projects()
        for (project) in projects:
            # seed every id with itself so a failed lookup still yields a name
            project_list[project[0]] = project[0]
        log_output('PROJECT LIST')
        log_output(project_list)
        for (project) in project_list:
            log.info('INSIDE LOOP')
            # Request an access token from the metadata server.
            # NOTE(review): a fresh token is fetched per project; could likely
            # be hoisted out of the loop — confirm before changing.
            token_data = get_access_token()
            resp_access = token_data['resp_access']
            content_access = token_data['content_access']
            if resp_access.status == 200:
                # Extract the access token from the response.
                d = json.loads(content_access)
                access_token = d['access_token']
                # Construct the request to Google Cloud Storage
                if project != 'Not Available':
                    # ids look like 'ID-<number>'; the numeric part is the
                    # real project identifier
                    project_id = project.split('-')[1]
                else:
                    project_id = 'Not Available'
                project_data = get_project_data(project_id, access_token)
                resp = project_data['resp']
                content = project_data['content']
                if resp.status == 200:
                    data = json.loads(content)
                    project_list[project] = data['name']
                else:
                    log.error('Project_data Error {0} -- {1}'.format(project_id, resp.status))
            else:
                log.error('Access Token Error {0}'.format(resp_access.status))
    except Exception as e:
        log_error(e)
    return project_list
def get_costs_per_center_week(year, week, center, output_type):
    """Weekly or daily costs for every project in *center*.

    Returns dict(data=<payload>, status=200|500).
    """
    log.info('get_costs_per_center_week == {0}--{1} --{2} --{3}'.format(year, week, center, output_type))
    center_json = dict()
    status = 200
    try:
        cost_center_list = set_global_cost_center_list()
        # single lookup; the original called project_list_per_center twice
        center_projects = project_list_per_center(center)
        project_list_local = center_projects['list']
        project_id_local = center_projects['ids']
        resource_list_local = resource_list_per_project(center, None)
        cost_center_projects_id = []
        cost_center_projects_name = []
        for project_info in cost_center_list:
            if project_info['cost_center'] == center:
                cost_center_projects_id.append(project_info['project_id'])
                cost_center_projects_name.append(project_info['project_name'])
        week_output = []
        center_json = {'usage_data': week_output,
                       'project_list': project_list_local,
                       'resource_list': resource_list_local}
        if output_type == 'week':
            query_data = get_billing_data_per_year_week_day(
                str(year), str(week), str(output_type), project_id_local)
            log_output(query_data)
            center_json['usage_data'] = get_week_data(query_data, year, cost_center_projects_id)
        elif output_type == 'day':
            query_data = get_billing_data_per_year_week_day(
                str(year), str(week), str(output_type), project_id_local)
            log_output(query_data)
            day_data = get_per_day_data(query_data)
            center_json['usage_data'] = day_data['usage_data']
            center_json['d3_json'] = day_data['d3_json']
    except Exception as e:
        log_error(e.args[0] if e.args else e)
        status = 500
        center_json['message'] = str(e.args[0] if e.args else e)
    return dict(data=center_json, status=status)
def delete_file(filename, service):
    """Remove *filename* from BUCKET_NAME via the storage API; return the
    raw API response (empty dict on failure)."""
    resp = dict()
    try:
        log.info('Starting to Delete the file {0} from {1}'.format(filename, BUCKET_NAME))
        resp = service.objects().delete(bucket=BUCKET_NAME, object=filename).execute()
        log.info('DONE Deleting file - {0} from - {1} '.format(filename, BUCKET_NAME))
    except Exception as e:
        log.error('Error in deleting the old file - {0}'.format(e[0]))
    # add code to add metadata or rename the file
    return resp
def get_costs_per_month(year):
    """Return {'usage_data': per-month cost series} for *year*; empty dict on
    failure."""
    data = {}
    log.info(' In get_costs_per_month == {0}'.format(year))
    try:
        query_data = get_billing_data_per_year(str(year))
        log_output(query_data)
        usage_data = get_per_month_cost(query_data)
        log.info(' get_costs_per_month DATA == {0}'.format(usage_data))
        data = dict(usage_data=usage_data)
    except Exception as e:
        log_error(e)
    return data
def get_costs_per_center_month(year, month, center):
    """Per-project cost breakdown for *center* in the given month.

    Returns {'usage_data': [...], 'project_list': [...]} or {} on failure.
    """
    center_json = {}
    cost_center_list = get_center_list(False)
    try:
        cost_center_projects_id = []
        cost_center_projects_name = []
        # (dropped the *_all_* accumulators, which were built but never read)
        for project_info in cost_center_list:
            if project_info['cost_center'] == center:
                cost_center_projects_id.append(project_info['project_id'])
                cost_center_projects_name.append(project_info['project_name'])
        month_data = []
        query_data = get_billing_data_per_year_per_month(str(year), str(month))
        log_output(query_data)
        for (project, cost) in query_data:
            if project in cost_center_projects_id:
                # look the index up once instead of three times per row
                idx = cost_center_projects_id.index(project)
                name = cost_center_projects_name[idx]
                if name.lower() == 'none':
                    name = project  # fall back to the id when no real name
                month_data.append({'name': name, 'cost': float(cost)})
        project_list_local = project_list_per_center(center)['list']
        log.info('MonthData {0}'.format(month_data))
        log.info('Project List {0}'.format(project_list_local))
        center_json = {'usage_data': month_data,
                       'project_list': project_list_local}
    except Exception as e:
        log_error(e)
    return center_json
def insert_usage_data(data_list, filename, service):
    """Load a CSV usage export (passed as one string) into the Usage table
    row by row."""
    usage = dict()
    # data_list is a string in csv format
    rows = csv.DictReader(data_list.splitlines(), delimiter=',')
    try:
        data_count, total_count = 0, 0
        for data in rows:
            total_count += 1
            usage_date = datetime.datetime.strptime(data['Report Date'], '%Y-%m-%d')
            has_quantity = len(data['Quantity']) != 0
            usage_value = int(data['Quantity']) if has_quantity else 0
            measurement_unit = str(data['Unit']) if has_quantity else "none"
            resource_uri = str(data['Resource URI'].replace(
                "https://www.googleapis.com/compute/v1/projects", ""))
            location = str(data['Location'])
            resource_id = str(data['ResourceId'])
            resource_type = str(data['MeasurementId']).replace("com.google.cloud/services", "")
            insert_done = insert_data(usage_date=usage_date,
                                      resource_type=resource_type,
                                      resource_id=resource_id,
                                      resource_uri=resource_uri,
                                      location=location,
                                      usage_value=usage_value,
                                      measurement_unit=measurement_unit)
            if not insert_done:
                log.info(data)
                log.debug('row not added!!!')
            else:
                log.debug('row added')
                data_count += 1
        usage = dict(message=' data has been added to db')
        log.info('DONE adding {0} items out of {1} for file -- {2} into the db '.format(
            data_count, total_count, filename))
    except Exception as e:
        log.error('Error in inserting data into the DB -- {0}'.format(e[0]))
        db_session.rollback()
    return usage
def get_per_month_cost(query_data, quarter, year):
    """Map (month, cost) rows onto a month series labelled 'YY-Mon'.

    When *quarter* is given ('1'..'4'), only that quarter's three months are
    returned; otherwise all twelve months of *year*.
    """
    log.info('get_per_month_cost == {0}'.format(query_data))
    year = str(year)
    abbrevs = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
               'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    months = [{'cost': 0.0, 'name': year[-2:] + '-' + abbrev, 'month': str(i + 1)}
              for i, abbrev in enumerate(abbrevs)]
    if quarter is not None:
        # slice the full series into quarters; an unknown quarter key raises
        # KeyError just like the original lookup table did
        per_quarter = dict((str(q), months[(q - 1) * 3:q * 3]) for q in range(1, 5))
        months = per_quarter[str(quarter)]
    for (month, cost) in query_data:
        for entry in months:
            if entry['month'] == str(month):
                entry['cost'] = float(cost)
    return months
def insert_project_data(project_id, project_name):
    """Insert a Project row, or update its name if the id already exists.

    Returns True when the row was stored/updated, False otherwise.
    """
    done = False
    log.info('{0}<---->{1}'.format(project_id, project_name))
    try:
        project = Project('other', project_id, project_name, 'other', '', '', '', 0)
        db_session.add(project)
        db_session.commit()
        done = True
    except IntegrityError:
        # Duplicate id: roll back and update the existing row's name instead.
        db_session.rollback()
        project = Project.query.filter_by(project_id=project_id).first()
        if project is not None:  # guard: previously an AttributeError escaped here
            project.project_name = project_name
            db_session.commit()
            done = True
    except Exception as e:
        # format the exception itself; e[0] fails on args-less exceptions
        log.error(' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'.format(e))
    return done
def get_costs_per_resource_per_project_per_day_quarter(year, value_to_match, center, project, resource, output_type): log.info('get_costs_per_resource_per_project_per_day_quarter == {0}--{1} --{2} --{3}--{4}--{5} '.format(year, value_to_match, center, project, resource, output_type)) resource_json = dict() day_data = dict() status = 200 try: project_list_local = project_list_per_center(center)['list'] resource_list_local = resource_list_per_project(center, project) query_data = get_billing_data_per_resource_per_project_per_quarter(str(year), str(value_to_match), str(project), str(resource), output_type) log_output(query_data) if output_type == 'month': day_data['usage_data'] = get_per_month_cost(query_data, value_to_match, year) day_data['d3_json'] = [] elif output_type == 'day': day_data = get_per_day_data(query_data) else: day_data['usage_data'] = get_week_data(query_data, year, None) day_data['d3_json'] = [] resource_json = {'usage_data': day_data['usage_data'], 'd3_json': day_data['d3_json'], 'project_list': project_list_local, 'resource_list': resource_list_local} except Exception as e: log_error(e[0]) status = 500 resource_json['message'] = str(e[0]) response = dict(data=resource_json, status=status) return response
def process_file(filename, file_content, service): insert_resp = dict() try: log.info('Processing file -- {0} -- STARTING'.format(filename)) data_list = json.loads(file_content) ''' parse the json and load the data to db once loading done move the file to archive folder todo : attach timestamp and tmp to file name while processing ''' insert_resp = insert_usage_data(data_list, filename, service) log.info('Processing file -- {0} -- ENDING'.format(filename)) except Exception as e: log.error('Error in processing the file -- {0}'.format(e[0])) return insert_resp
def copy_file_to_archive(filename, service, main_bucket, dest_bucket): resp = dict() try: log.info('Starting to move the file to {0} ---- {1}'.format(dest_bucket, filename)) copy_object = service.objects().copy(sourceBucket=main_bucket, sourceObject=filename, destinationBucket=dest_bucket, destinationObject=filename, body={}) resp = copy_object.execute() log.info('DONE Moving of file - {0} to Archive -{1} '.format(filename, dest_bucket)) # delete_moved_file(filename, service) except Exception as e: log.error('Error in Copying the object to archive folder - {0}'.format(e[0])) return resp
def insert_data(instanceId, key, value): done = False log.info('---- starting to add info to DB {0}, {1}, {2} ----'.format( instanceId, key, value)) try: log.info( '--------------------- ADDED INFO TO DB ---------------------') instance = Instance(instanceId=instanceId, key=key, value=value) db_session.add(instance) db_session.commit() done = True except IntegrityError as e: log.info('---- DATA ALREADY IN DB --- UPDATE ------') # log.info('instanceId = {0}<----> key = {1}<-----> value = {2}'.format(instanceId, key, value)) db_session.rollback() instance = Instance.query.filter_by(instanceId=instanceId, key=key).first() instance.value = value db_session.commit() done = True except Exception as e: log.error( ' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'. format(e[0])) return done
def get_costs_per_center_year(year, center): data = {} log.info('get_costs_per_CENTER_YEAR_month == {0} --{1}'.format(year,center)) log_output(year) log_output(center) try: project_list_local = project_list_per_center(center)['list'] project_ids = project_list_per_center(center)['ids'] log_output('Project_list_local') log_output(project_list_local) query_data = get_billing_data_per_year_per_center(str(year), project_ids) log_output(query_data) log.info('get_billing_data_per_year_per_center == {0}'.format(query_data)) usage_data = get_per_month_cost(query_data) data = { 'usage_data': usage_data, 'project_list': project_list_local } except Exception as e: log_error(e) log.info(e) return data
def insert_project_data(project_id, project_name): done = False log.info('{0}<---->{1}'.format(project_id, project_name)) try: project = Project('other', project_id, project_name, 'other', '', '', '', 0) db_session.add(project) db_session.commit() done = True except IntegrityError as e: # log.info('---- Project DATA ALREADY IN DB --- UPDATE ------') db_session.rollback() project = Project.query.filter_by(project_id=project_id).first() project.project_name = project_name db_session.commit() done = True except Exception as e: log.error( ' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'. format(e[0])) return done
def insert_data(usage_date, resource_type, resource_id, resource_uri, location, usage_value, measurement_unit): done = False log.info('---- starting to add info to DB ----') try: #log.info('--------------------- ADDED INFO TO DB ---------------------') usage = Usage(usage_date=usage_date, resource_type=resource_type, resource_id=resource_id, resource_uri=resource_uri, location=location, usage_value=usage_value, measurement_unit=measurement_unit) db_session.add(usage) db_session.commit() done = True except IntegrityError as e: #log.info('---- DATA ALREADY IN DB --- UPDATE ------') # log.info('{0}<---->{1}<----->{2}<------>{3}<------>{4}'.format(usage_date, cost, project_id, resource_type,usage_value)) db_session.rollback() usage = Usage.query.filter_by( usage_date=usage_date, resource_type=resource_type, resource_id=resource_id, location=location).first() usage.usage_value = usage_value usage.measurement_unit = measurement_unit db_session.commit() done = True except Exception as e: log.error(' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'.format(e[0])) return done
def insert_data(usage_date, cost, project_id, resource_type, account_id, usage_value, measurement_unit): done = False log.info('{0}<---->{1}<----->{2}<------>{3}<------>{4}'.format(usage_date, cost, project_id, resource_type,usage_value)) try: usage = Billing(usage_date, cost, project_id, resource_type, account_id, usage_value, measurement_unit) db_session.add(usage) db_session.commit() done = True except IntegrityError as e: # log.info('---- DATA ALREADY IN DB --- UPDATE ------') # log.info('{0}<---->{1}<----->{2}<------>{3}<------>{4}'.format(usage_date, cost, project_id, resource_type,usage_value)) db_session.rollback() usage = Billing.query.filter_by(project_id=project_id, usage_date=usage_date, resource_type=resource_type).first() usage.cost = cost usage.usage_value = usage_value usage.measurement_unit = measurement_unit db_session.commit() done = True except Exception as e: log.error(' ------------- ERROR IN ADDING DATA TO THE DB ------------- {0}'.format(e[0])) return done
def set_scheduler(hour, min): global scheduler scheduler.remove_all_jobs() scheduler.print_jobs() log.info(' ----- IN SET SCHEDULER -----') scheduler.add_job(data_processor, 'cron', hour=get_time(hour, min)['hour'], minute=get_time(hour, min)['mins'], second=get_time(hour, min)['sec'], replace_existing=True, max_instances=1, id='data_processor', args=['cron']) log.info('------ SCHEDULER INIT -----') log.info('------ Jobs List -----') scheduler.print_jobs() return scheduler
def insert_project__table_data(data_list, filename, service): project = dict() try: data_count = 0 total_count = 0 log.info('---- Starting to Add/Update Project Name -----') for data in data_list: total_count += 1 ''' update the Project table with project_name with data['projectName'] if there else use data['projectId'] if there else add as support ''' if 'projectNumber' in data: project_id = 'ID-' + str(data['projectNumber']) if 'projectName' in data: insert_done = insert_project_data(project_id, data['projectName']) else: insert_done = insert_project_data(project_id, project_id) else: project_id = 'Not Available' insert_done = insert_project_data(project_id, 'support') if not insert_done: log.info(data) continue else: data_count += 1 project = dict(message=' data has been added to db') log.info( 'DONE adding {0} items out of {1} for file -- {2} into the db '. format(data_count, total_count, filename)) except Exception as e: log.error('Error in inserting data into the DB -- {0}'.format(e[0])) db_session.rollback() return project
def set_scheduler(hour, min): global scheduler scheduler.remove_all_jobs() scheduler.print_jobs() log.info(' ----- IN SET SCHEDULER -----') scheduler.add_job(data_processor, 'cron', hour=get_time(hour, min)['hour'], minute=get_time(hour, min)['mins'], second=get_time(hour, min)['sec'], id='data_processor', args=['cron']) if USAGE_VIEW == True: scheduler.add_job(usage_processor, 'cron', hour=get_time(hour, min)['hour'], minute=get_time(hour, min)['mins'], second=get_time(hour, min)['sec'], id='usage_data_processor', args=['cron']) log.info('------ SCHEDULER INIT -----') log.info('------ Jobs List -----') scheduler.print_jobs() return scheduler
def data_processor(job_type):
    """List every object in BUCKET_NAME and hand the names to get_filenames.

    When *job_type* is 'now' the daily scheduler is re-armed afterwards from
    SCHEDULER_HOUR/SCHEDULER_MIN. Returns {'data': json message,
    'status': 200|500, 'time': elapsed-time summary}.
    """
    status = 200
    # BUG FIX: typo 'Prcoess' in the returned message corrected.
    message = 'Process Complete '
    startTime = datetime.datetime.now()
    try:
        bucket = BUCKET_NAME
        random_number = binascii.hexlify(os.urandom(32)).decode()
        log.info(' RANDOM NUMBER --- {0}'.format(random_number))
        # Application-default credentials: available after `gcloud init`
        # locally, or from the environment on Compute Engine.
        credentials = GoogleCredentials.get_application_default()
        # Cloud Storage JSON API, version 'v1'. Other services/versions:
        # https://developers.google.com/api-client-library/python/apis/
        service = discovery.build('storage', 'v1', credentials=credentials)
        # buckets.get verifies the bucket is reachable before listing.
        req = service.buckets().get(bucket=bucket)
        resp = req.execute()
        # objects.list with a restricted field set.
        fields_to_return = \
            'nextPageToken,items(name,size,contentType,metadata(my-key))'
        req = service.objects().list(bucket=bucket, fields=fields_to_return)
        log.info('Process {0} Start time --- {1}'.format(bucket, startTime))
        # list_next() transparently handles paging via pageToken.
        while req:
            resp = req.execute()
            if len(resp) == 0:
                log.info(
                    '############################################################################################'
                )
                log.info('--------- THE BUCKET LIST IS EMPTY --------------')
                log.info('--------- NO FILES TO PROCESS --------------')
                log.info(resp)
                log.info(
                    '############################################################################################'
                )
            else:
                get_filenames(resp, service, random_number)
            req = service.objects().list_next(req, resp)
    except Exception as e:
        # BUG FIX: e[0] raises TypeError on Python 3 (exceptions are not
        # subscriptable); log and report the exception text instead.
        log.error(' Error in getting Bucket Details - {0}'.format(e))
        message = str(e)
        status = 500
    endTime = datetime.datetime.now()
    log.info('Process End time --- {0}'.format(endTime))
    elapsedTime = endTime - startTime
    time = 'Total Time to Process all the files -- {0}'.format(
        divmod(elapsedTime.total_seconds(), 60))
    log.info(time)
    log.info(' ARGS PASSED --- {0}'.format(job_type))
    if job_type == 'now':
        set_scheduler(os.environ.get('SCHEDULER_HOUR'), os.environ.get('SCHEDULER_MIN'))
    response = dict(data=json.dumps(message), status=status, time=time)
    return response