def describe_elb(accounts):
    now = datetime.utcnow()
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=30)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    ares = []
    for account in accounts:
        res = []
        elb_usage = AWSDetailedLineitem.get_elb_usage_a_day(account.get_aws_user_id(),
                                                            date_from=date_from,
                                                            date_to=date_to)
        for elb in AWSELBInfo.get_elb_info(account.get_aws_user_id()):
            for usage in elb_usage:
                if usage['rid'].endswith(elb['name']) or usage['rid'].split('/')[-2] == elb['name']:
                    usage['region'] = elb['region']
                    usage['name'] = elb['name']
                    usage['instances'] = elb['instances']
                    res.append(usage)
        for d in res:
            if d not in ares:
                ares.append(d)
    if not len(ares):
        if AWSDetailedLineitem.keys_has_data([account.get_aws_user_id() for account in accounts]):
            return jsonify(message="You do not have ELB set up in your environment")
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(elbs=ares)
def aws_accounts_m_stats_dailycostbyproduct(accounts, nb_days):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get daily costs summed by product
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        days:
                            type: array
                            items:
                                properties:
                                    day:
                                        type: string
                                    products:
                                        type: array
                                        items:
                                            properties:
                                                cost:
                                                    type: number
                                                product:
                                                    type: string
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow().replace(hour=23, minute=59, second=59, microsecond=999999)
    now = AWSDetailedLineitem.get_last_date([account.get_aws_user_id() for account in accounts], limit=now)
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=nb_days)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999) - relativedelta(days=1)
    data = AWSDetailedLineitem.get_daily_cost_by_product(keys=[account.get_aws_user_id() for account in accounts],
                                                         date_from=date_from,
                                                         date_to=date_to)['days']
    for d in data:
        d['products'] = cut_cost_by_product(sorted(d['products'], key=lambda x: x['cost'], reverse=True),
                                            int(request.args['show']) - 1 if 'show' in request.args else 9)
    if not len(data):
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(days=data)
def get_aws_accounts(user):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get AWS accounts
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        accounts:
                            type: array
                            items:
                                $ref: "#/definitions/AWSAccount"
            403:
                description: Not logged in
    """
    if user:
        now = datetime.utcnow()
        date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        date_to = now.replace(day=calendar.monthrange(date_from.year, date_from.month)[1],
                              hour=23, minute=59, second=59, microsecond=999999)
        if user.admin:
            res = []
            keys = AWSKey.query.all()
            for key in keys:
                user_id = key.get_aws_user_id()
                key_infos = aws_key_schema.dump(key)[0]
                full = False if not user_id else AWSDetailedLineitem.keys_has_data(key.get_aws_user_id())
                month = False if not full else AWSDetailedLineitem.keys_has_data(key.get_aws_user_id(),
                                                                                 date_from=date_from,
                                                                                 date_to=date_to)
                key_infos['has_data_full'] = full
                key_infos['has_data_month'] = month
                if key.id_user != user.id:
                    if not key_infos['pretty']:
                        key_infos['pretty'] = key.user.email
                    else:
                        key_infos['pretty'] = key_infos['pretty'] + ' (' + key.user.email + ')'
                res.append(key_infos)
            return jsonify(accounts=res), 200
        keys = []
        for key in user.aws_keys:
            user_id = key.get_aws_user_id()
            key_infos = aws_key_schema.dump(key)[0]
            full = False if not user_id else AWSDetailedLineitem.keys_has_data(key.get_aws_user_id())
            month = False if not full else AWSDetailedLineitem.keys_has_data(key.get_aws_user_id(),
                                                                             date_from=date_from,
                                                                             date_to=date_to)
            key_infos['has_data_full'] = full
            key_infos['has_data_month'] = month
            keys.append(key_infos)
        return jsonify(accounts=keys), 200
    else:
        return jsonify(error="Forbidden"), 403
def aws_accounts_m_stats_monthlycostbyproductbyaccount(accounts, nb_months):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get monthly costs summed by product for each account
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    month:
                                        type: string
                                    products:
                                        type: array
                                        items:
                                            properties:
                                                cost:
                                                    type: number
                                                product:
                                                    type: string
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    month = nb_months - 1
    date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=month)
    date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
                          hour=23, minute=59, second=59, microsecond=999999)
    res = [{
        'account_id': account.get_aws_user_id(),
        'account_name': account.pretty,
        'months': AWSDetailedLineitem.get_monthly_cost_by_product(keys=account.get_aws_user_id(),
                                                                  date_from=date_from,
                                                                  date_to=date_to)['months'],
    } for account in accounts]
    if 'csv' in request.args:
        return Response(generate_csv(res, 'products', 'product', account=True), mimetype='text/csv')
    return jsonify(accounts=res)
def aws_cost_by_resource_months(accounts):
    raw_data = AWSDetailedLineitem.get_first_to_last_date([account.get_aws_user_id() for account in accounts])
    if not raw_data:
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(months=[data.strftime("%Y-%m-01") for data in raw_data])
def _get_bandwidth_info(account, bucket_names):
    bucket_ids = [
        bucket
        for bucket in (bucket_names if isinstance(bucket_names, list) else [bucket_names])
    ]
    bandwidth_cost = AWSDetailedLineitem.get_s3_bandwidth_info_and_cost_per_name(account.get_aws_user_id(),
                                                                                 bucket_ids)
    return bandwidth_cost
def aws_cost_by_resource_m(accounts, month, category):
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
        assert category[0] in ['<', '>']
        cat = int(category[1:])
    except:
        return jsonify(error='Not found.'), 404
    raw_data = AWSDetailedLineitem.get_cost_by_resource([account.get_aws_user_id() for account in accounts],
                                                        date_from=date_from)

    def transform(r):
        r['resource_name'] = r['resource']
        return r

    minus = category[0] == '<'
    data = [
        transform(r)
        for r in raw_data
        if (minus and cat > r['cost'] >= cat / 10) or (not minus and r['cost'] > cat)
    ]
    if len(data) <= 0:
        return jsonify(error='Not found.'), 404
    return jsonify(category=dict(resources=data, total=sum([x['cost'] for x in data])))
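# A minimal sketch (not part of the original module) of how the category
# filter in aws_cost_by_resource_m behaves, assuming the category string is
# one of the labels produced by aws_cost_by_resource_month_categories_m
# below: '<100' keeps resources costing at least 10 and under 100, while
# '>1000' keeps anything above 1000. The sample costs are made up.
def _example_category_filter():
    sample = [{'resource': 'i-1234abcd', 'cost': 42.0},
              {'resource': 'i-5678efgh', 'cost': 7.5},
              {'resource': 'vol-0123', 'cost': 1500.0}]
    category = '<100'
    minus = category[0] == '<'
    cat = int(category[1:])
    kept = [r for r in sample
            if (minus and cat > r['cost'] >= cat / 10) or (not minus and r['cost'] > cat)]
    return kept  # only the 42.0 resource falls in the [10, 100) bucket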
def aws_get_resource_tags(accounts):
    tags = AWSDetailedLineitem.get_available_tags([account.get_aws_user_id() for account in accounts])['tags']
    if not len(tags):
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(tags=sorted(tags, key=unicode.lower))
def aws_accounts_m_stats_monthlycostbyregion(accounts, nb_months):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get monthly costs summed by region
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    month:
                                        type: string
                                    products:
                                        type: array
                                        items:
                                            properties:
                                                cost:
                                                    type: number
                                                region:
                                                    type: string
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
                          hour=23, minute=59, second=59, microsecond=999999)
    raw_data = AWSDetailedLineitem.get_cost_by_region(keys=[account.get_aws_user_id() for account in accounts],
                                                      date_from=date_from,
                                                      date_to=date_to)['intervals']['buckets']
    res = [
        {
            'month': data['key_as_string'].split('T')[0],
            'regions': [
                {
                    'region': region['key'],
                    'cost': region['cost']['value'],
                }
                for region in data['regions']['buckets']
            ],
        }
        for data in raw_data
    ]
    if 'csv' in request.args:
        return Response(generate_csv(res, 'regions', 'region'), mimetype='text/csv')
    return jsonify(months=res)
def aws_get_resource_tags_with_data(accounts):
    tags = list(set(itertools.chain.from_iterable(
        AWSDetailedLineitem.get_available_tags(account.get_aws_user_id(),
                                               only_with_data=account.key)['tags']
        for account in accounts
    )))
    if not len(tags):
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(tags=sorted(tags, key=unicode.lower))
def aws_accounts_m_stats_totalcost(accounts, time_arg):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get total cost
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    total_cost:
                                        type: number
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    this_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
    this_month = this_day.replace(day=1)
    time_val = {
        'ever': AWSDetailedLineitem.get_first_date([account.get_aws_user_id() for account in accounts]),
        'currentyear': this_month - relativedelta(months=this_month.month),
        'currentmonth': this_month,
    }
    date_from = time_val.get(time_arg, now)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    raw_data = AWSDetailedLineitem.get_cost(keys=[account.get_aws_user_id() for account in accounts],
                                            date_from=date_from,
                                            date_to=date_to)
    return jsonify(raw_data)
def get_account_data(account):
    for date, cpu_usage in dict(AWSMetric.daily_cpu_utilization(account.key)).iteritems():
        yield (date, cpu_usage, None)
    for date, cost in dict(AWSDetailedLineitem.get_ec2_daily_cost(account.get_aws_user_id())).iteritems():
        yield (date, None, cost)
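# A minimal sketch (not part of the original module) showing how a caller
# could merge the mixed tuples yielded by get_account_data into one record
# per day; the function name and output shape are assumptions, the point is
# simply that each tuple carries either a CPU value or a cost, never both.
def merge_account_data_example(rows):
    merged = {}
    for date, cpu, cost in rows:
        entry = merged.setdefault(date, {'cpu': None, 'cost': None})
        if cpu is not None:
            entry['cpu'] = cpu
        if cost is not None:
            entry['cost'] = cost
    return merged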
def aws_get_resource_tags_for_s3(accounts):
    tags = list(set(itertools.chain.from_iterable(
        AWSDetailedLineitem.get_available_tags(
            account.get_aws_user_id(),
            product_name='Simple Storage Service',
        )['tags']
        for account in accounts
    )))
    return jsonify(tags=sorted(tags, key=unicode.lower))
def aws_cost_by_tags_months(accounts, nb_months, tag):
    date_to = datetime.now()
    date_from = date_to.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    return jsonify(AWSDetailedLineitem.get_monthly_cost_by_tag([account.get_aws_user_id() for account in accounts],
                                                               tag,
                                                               date_from=date_from,
                                                               date_to=date_to))
def aws_underutilized_resources_reduced_cost(accounts):
    now = datetime.utcnow()
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=6)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    resources = AWSMetric.underutilized_resources(account.key for account in accounts)
    resource_ids = set(r['id'] for r in resources['resources'])
    months = AWSDetailedLineitem.get_monthly_cost_by_resource(resource_ids,
                                                              date_from=date_from,
                                                              date_to=date_to)
    res = {
        # Simply multiply every cost by 20% as all instances usage is
        # less than 20%. TODO: intelligently find the best type
        k: v * 0.2
        for k, v in months.iteritems()
    }
    return jsonify(res)
def get_on_demand_to_reserved_suggestion(session, key):
    existing_instances = []
    for region, instance in get_all_instances(session):
        if instance.state != 'terminated':
            existing_instances.append(instance.id)
    now = datetime.utcnow()
    id_name_mapping = AWSIdNameMapping.get_id_name_mapping(key.key)
    instances_to_switch = AWSDetailedLineitem.get_instance_hour(key.get_aws_user_id(),
                                                                now - timedelta(days=30),
                                                                now,
                                                                15)
    res = []
    for instance in instances_to_switch:
        if instance['id'] in existing_instances:
            instance['name'] = id_name_mapping[instance['id']] if instance['id'] in id_name_mapping else instance['id']
            res.append(instance)
    return dict(total=len(res), on_demand_instances=res)
def aws_cost_by_resource_month_categories_m(accounts, month):
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    except:
        return jsonify(error='Not found.'), 404
    raw_data = AWSDetailedLineitem.get_cost_by_resource([account.get_aws_user_id() for account in accounts],
                                                        date_from=date_from)
    cat = []
    max_cat = 0
    for new in raw_data:
        x = 1
        while new['cost'] > x:
            x *= 10
        if x >= max_cat:
            max_cat = x
        elif '<{}'.format(x) not in cat:
            cat.append('<{}'.format(x))
    cat.append('>{}'.format(max_cat / 10))
    return jsonify(categories=cat)
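# A minimal sketch (not part of the original module) of the bucketing done in
# aws_cost_by_resource_month_categories_m, with made-up costs. Note that a
# '<x' label is only emitted for an order of magnitude seen after a larger
# one, so the output depends on the order in which get_cost_by_resource
# returns resources: costs 1500, 42 and 3, in that order, give the result
# below.
def _example_cost_categories():
    costs = [1500, 42, 3]
    cat = []
    max_cat = 0
    for cost in costs:
        x = 1
        while cost > x:
            x *= 10
        if x >= max_cat:
            max_cat = x
        elif '<{}'.format(x) not in cat:
            cat.append('<{}'.format(x))
    cat.append('>{}'.format(max_cat / 10))
    return cat  # ['<100', '<10', '>1000']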
def aws_accounts_m_stats_monthlycost(accounts, nb_months):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get monthly costs
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    month:
                                        type: string
                                    total_cost:
                                        type: number
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
                          hour=23, minute=59, second=59, microsecond=999999)
    data = AWSDetailedLineitem.get_monthly_cost(keys=[account.get_aws_user_id() for account in accounts],
                                                date_from=date_from,
                                                date_to=date_to)
    return jsonify(data)
def aws_cost_by_resource_search_m(accounts, month, search):
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    except:
        return jsonify(error='Not found.'), 404
    raw_data = [
        AWSDetailedLineitem.get_cost_by_resource(account.get_aws_user_id(),
                                                 date_from=date_from,
                                                 search=search)
        for account in accounts
    ]

    def transform(r):
        r['resource_name'] = r['resource']
        return r

    data = [transform(r) for raw in raw_data for r in raw]
    if not len(data):
        return jsonify(error='Not found.'), 404
    return jsonify(search_result=data)
def aws_cost_by_resource_month_chart_m(accounts, month):
    # TODO: Use ES agg to categorize
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    except:
        return jsonify(error='Not found.'), 404
    raw_data = [
        AWSDetailedLineitem.get_cost_by_resource(account.get_aws_user_id(), date_from=date_from)
        for account in accounts
    ]
    data = []

    def get_cat_with_cost(cost):
        x = 1
        while cost > x:
            x *= 10
        return x

    def add_resource_in_data(new):
        new_cat = get_cat_with_cost(new['cost'])
        for cat in data:
            if cat['category'] == '<{}'.format(new_cat):
                cat['total'] += new['cost']
                return
        data.append(dict(category='<{}'.format(new_cat), total=new['cost']))

    for one in raw_data:
        for new in one:
            add_resource_in_data(new)
    if not len(data):
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    max_cat = 0
    for i in range(len(data)):
        if len(data[i]['category']) > len(data[max_cat]['category']):
            max_cat = i
    data[max_cat]['category'] = data[max_cat]['category'][:-1]
    data[max_cat]['category'] = data[max_cat]['category'].replace('<', '>', 1)
    return jsonify(categories=data)
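# A minimal sketch (not part of the original module) of the relabelling at the
# end of aws_cost_by_resource_month_chart_m, with made-up buckets: the widest
# '<' category (here '<10000') becomes the open-ended '>1000' category so the
# chart's last bucket has no upper bound.
def _example_relabel_top_category():
    data = [{'category': '<10', 'total': 4.2},
            {'category': '<1000', 'total': 321.0},
            {'category': '<10000', 'total': 5000.0}]
    max_cat = 0
    for i in range(len(data)):
        if len(data[i]['category']) > len(data[max_cat]['category']):
            max_cat = i
    data[max_cat]['category'] = data[max_cat]['category'][:-1].replace('<', '>', 1)
    return data  # the last entry becomes {'category': '>1000', 'total': 5000.0}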
def get_lambda_usage(accounts):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get average Lambda usage
        summary: *desc
        responses:
            200:
                description: List of lambda resources
                schema:
                    properties:
                        lambdas:
                            type: array
                            items:
                                properties:
                                    rid:
                                        type: string
                                    name:
                                        type: string
                                    gb_seconds:
                                        type: number
                                    requests:
                                        type: number
                                    cost:
                                        type: number
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    res = AWSDetailedLineitem.get_lambda_usage([account.get_aws_user_id() for account in accounts])
    if not len(res):
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(lambdas=res)
def compute_reservation_forecast(keys):
    if isinstance(keys, models.AWSKey):
        keys = [keys]
    elif not isinstance(keys, list):
        keys = list(keys)
    if not all(isinstance(k, models.AWSKey) for k in keys):
        raise TypeError('All keys must be AWSKey.')
    now = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
    range_end = now.replace(hour=0, minute=0, second=0, microsecond=0)
    range_end -= timedelta(days=1)
    range_start = range_end - timedelta(days=120)
    range_start = range_start.replace(day=1)
    s = AWSDetailedLineitem.get_instance_type([k.get_aws_user_id() for k in keys],
                                              date_from=range_start,
                                              date_to=range_end)
    instance_type_hours = defaultdict(list)
    first_hour = datetime(2099, 1, 1)
    for r in s:
        rhour = datetime.strptime(r['hour'], "%Y-%m-%dT%H:%M:%S")
        if r['region'] != 'unknown':  # Some EC2 instances have no region, sometimes...
            instance_type_hours[(r['region'], r['instance'])].append((rhour, r['ridCount']))
            first_hour = min(first_hour, rhour)
    hours_ahead = 120 * 24
    total_hours = (range_end - first_hour).total_seconds() / 3600 - 1 + hours_ahead
    instance_types = []
    lookup = get_instance_lookup()
    for (region, instance_type), hours in instance_type_hours.iteritems():
        hours = count_forecast(hours, range_start, now, hours_ahead)
        prices = lookup[region, instance_type]
        price_results = get_monthly_prices(total_hours,
                                           hours,
                                           [p['amortized'] for p in prices['reserved']],
                                           prices['ondemand']['amortized'])
        ps = []
        for pricing, (_, count, months) in zip(prices['reserved'] + [prices['ondemand']],
                                               price_results):
            pricing = dict(pricing)
            if count is not None:
                pricing['count'] = count
            pricing['months'] = [
                dict(month=m.strftime('%Y-%m'), cost=c)
                for m, c in months[:-1]
            ]
            ps.append(pricing)
        instance_types.append(dict(region=region, type=instance_type, pricing_options=ps))
    available_volumes = AWSStat.latest_available_volumes([k.key for k in keys])
    now = datetime.utcnow()
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=6)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    volume_monthly_costs = AWSDetailedLineitem.get_monthly_cost_by_resource(
        available_volumes['volumes'] if 'volumes' in available_volumes else (),
        date_from=date_from,
        date_to=date_to)
    resources = AWSMetric.underutilized_resources([k.key for k in keys])
    rids = set(r['id'] for r in resources['resources'])
    months = AWSDetailedLineitem.get_monthly_cost_by_resource(rids,
                                                              date_from=date_from,
                                                              date_to=date_to)
    reduced_instance_costs = {k: v * 0.2 for k, v in months.iteritems()}
    return dict(
        instances=instance_types,
        volume_monthly_costs=volume_monthly_costs,
        reduced_instance_costs=reduced_instance_costs,
    )
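# A minimal sketch (not part of the original module) of the grouping step in
# compute_reservation_forecast, with made-up line items: hourly usage is
# bucketed per (region, instance type) so each bucket can be priced and
# forecast independently. defaultdict and datetime are assumed to be the same
# imports the module already uses.
def _example_instance_type_grouping():
    rows = [
        {'hour': '2017-01-01T00:00:00', 'region': 'us-east-1', 'instance': 'm4.large', 'ridCount': 3},
        {'hour': '2017-01-01T01:00:00', 'region': 'us-east-1', 'instance': 'm4.large', 'ridCount': 2},
        {'hour': '2017-01-01T00:00:00', 'region': 'eu-west-1', 'instance': 't2.micro', 'ridCount': 1},
    ]
    grouped = defaultdict(list)
    for r in rows:
        rhour = datetime.strptime(r['hour'], "%Y-%m-%dT%H:%M:%S")
        grouped[(r['region'], r['instance'])].append((rhour, r['ridCount']))
    return grouped  # two entries for ('us-east-1', 'm4.large'), one for ('eu-west-1', 't2.micro')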
def aws_accounts_m_stats_s3bucketsizepername(accounts):
    """---
    get:
        tags:
            - aws
        produces:
            - application/csv
        description: &desc Stats about cost and usage of bandwidth and storage on s3 buckets, organised by name
        summary: *desc
        responses:
            200:
                description: Stats about cost and usage of bandwidth and storage on s3 buckets, organised by name
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """

    def _create_bandwith_breakdown(transfer_types_list, csv_row, bucket_bandwith_stat):
        for elem in transfer_types_list:
            _current_transfer_type = _check_if_in_list(bucket_bandwith_stat['transfer_stats'], elem, 'type')
            if _current_transfer_type is not None:
                csv_row[elem] = _current_transfer_type['data'] * 1024 * 1024 * 1024  # The data is by default given in GB
        return csv_row

    def _create_csv_rows(bucket_list, account, bandwith_cost, csv_row_all):
        for bucket in bucket_list['buckets']:
            csv_row = {
                'account_id': account.get_aws_user_id(),
                'used_space': bucket['used_space'],
                'name': bucket['name'],
                'storage_cost': _check_if_in_list(bucket['prices'], bucket['provider'], 'provider')['cost'],
            }
            bucket_bandwith_stat = _check_if_in_list(bandwith_cost, bucket['name'], 'bucket_name')
            if bucket_bandwith_stat is not None:
                csv_row = _create_bandwith_breakdown(transfer_types_list, csv_row, bucket_bandwith_stat)
            csv_row['bandwith_cost'] = bucket_bandwith_stat['cost'] if bucket_bandwith_stat is not None else 0
            csv_row['total_cost'] = csv_row['storage_cost'] + csv_row['bandwith_cost']
            csv_row_all.append(csv_row)
        return csv_row_all

    assert len(accounts) > 0
    csv_header = ['account_id', 'name', 'used_space', 'storage_cost', 'bandwith_cost', 'total_cost']
    csv_row_all = []
    for account in accounts:
        bucket_list = AWSStat.latest_s3_space_usage(account)
        bucket_ids = [
            bucket['name']
            for bucket in (bucket_list['buckets'] if bucket_list is not None else [])
        ]
        bandwith_cost = AWSDetailedLineitem.get_s3_bandwith_info_and_cost_per_name(account.get_aws_user_id(),
                                                                                   bucket_ids)
        transfer_types_list = _build_list_used_transfer_types(bandwith_cost)
        csv_header = _append_to_header_list(csv_header, transfer_types_list)
        csv_row_all = _create_csv_rows(bucket_list, account, bandwith_cost, csv_row_all)
    if len(csv_row_all) > 0 and csv_row_all[0] is None:
        csv_row_all = []
    if 'csv' in request.args:
        return Response(generate_csv_clean(csv_row_all, csv_header))
    return jsonify(accounts=csv_row_all)
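# _check_if_in_list is used above and below but not defined in this section.
# A plausible implementation, judging only by how it is called (it returns
# either a matching dict or None), might look like the sketch below; this is
# an assumption, not the actual helper.
def _check_if_in_list_sketch(dict_list, value, key):
    for item in dict_list:
        if item[key] == value:
            return item
    return None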
def aws_accounts_m_stats_monthlycostbyregionbytagbyaccount(accounts, nb_months):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get monthly costs summed by region for each account
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    month:
                                        type: string
                                    products:
                                        type: array
                                        items:
                                            properties:
                                                cost:
                                                    type: number
                                                region:
                                                    type: string
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
                          hour=23, minute=59, second=59, microsecond=999999)
    raw_data = AWSDetailedLineitem.get_cost_by_region(keys=[account.get_aws_user_id() for account in accounts],
                                                      tagged=True,
                                                      byaccount=True,
                                                      date_from=date_from,
                                                      date_to=date_to)['accounts']['buckets']

    def tagged_cost(bucket, total):
        total_tag = 0.0
        for tag in bucket:
            total_tag += tag['cost']['value']
            yield (tag['key'], tag['cost']['value'])
        if total != total_tag:
            yield ('untagged', total - total_tag)

    res = [{
        'account_id': account['key'],
        'account_name': [a.pretty for a in accounts if a.get_aws_user_id() == account['key']][0],
        'months': [{
            'month': data['key_as_string'].split('T')[0],
            'regions': [{
                'region': region['key'],
                'tags': [{
                    'name': tag[0],
                    'cost': tag[1],
                } for tag in tagged_cost(region['tags']['buckets'], region['cost']['value'])],
            } for region in data['regions']['buckets']],
        } for data in account['intervals']['buckets']],
    } for account in raw_data]
    if 'csv' in request.args:
        return Response(generate_csv(res, 'regions', 'region', account=True, tagged=True), mimetype='text/csv')
    return jsonify(accounts=res)
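# A minimal sketch (not part of the original module) of tagged_cost in
# aws_accounts_m_stats_monthlycostbyregionbytagbyaccount, with made-up
# numbers: per-tag costs are yielded as-is and whatever part of the region
# total is not covered by tags shows up as 'untagged'.
def _example_tagged_cost():
    buckets = [{'key': 'env:prod', 'cost': {'value': 70.0}},
               {'key': 'env:dev', 'cost': {'value': 20.0}}]
    total = 100.0
    out = []
    total_tag = 0.0
    for tag in buckets:
        total_tag += tag['cost']['value']
        out.append((tag['key'], tag['cost']['value']))
    if total != total_tag:
        out.append(('untagged', total - total_tag))
    return out  # [('env:prod', 70.0), ('env:dev', 20.0), ('untagged', 10.0)]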
def aws_accounts_m_stats_s3bucketsizepertag(accounts, tag):
    """---
    get:
        tags:
            - aws
        produces:
            - application/csv
        description: &desc Stats about cost and usage of bandwidth and storage on s3 buckets, organised by tag
        summary: *desc
        responses:
            200:
                description: Stats about cost and usage of bandwidth and storage on s3 buckets, organised by tag
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0

    def _get_total_sizes_cost_and_names(bucket_names_list, bucket_list):
        total_size = 0
        total_cost = 0
        names = ""
        for bucket in bucket_list['buckets']:
            if _check_if_in_list(bucket_names_list, bucket['name'], 'bucket_name') is not None:
                total_size += float(bucket['used_space'])
                total_cost += _check_if_in_list(bucket['prices'], bucket['provider'], 'provider')['cost']
                names += bucket['name'] + ", "
        return total_size, names[:-2], total_cost

    def _get_bandwith_info(account, bucket_names):
        bucket_ids = [
            bucket
            for bucket in (bucket_names if isinstance(bucket_names, list) else [bucket_names])
        ]
        bandwith_cost = AWSDetailedLineitem.get_s3_bandwith_info_and_cost_per_name(account.get_aws_user_id(),
                                                                                   bucket_ids)
        return bandwith_cost

    def _iterate_over_buckets_in_tag_for_total(bucket_bandwith_stat):
        total_cost = 0
        for bucket in (bucket_bandwith_stat if bucket_bandwith_stat is not None else []):
            total_cost += bucket['cost']
        return total_cost

    def _iterate_over_buckets_and_make_breakdown_bandwith_stat(bucket_bandwith_stat, buff_row_csv, tag_value):
        bandwith_cost = 0
        for bucket in bucket_bandwith_stat:
            bandwith_cost += bucket['cost']
            for elem in bucket['transfer_stats']:
                if elem['type'] in buff_row_csv:
                    buff_row_csv[elem['type']] += (elem['data'] * 1024 * 1024 * 1024)
                else:
                    buff_row_csv[elem['type']] = (elem['data'] * 1024 * 1024 * 1024)
        buff_row_csv['bandwith_cost'] = bandwith_cost
        return buff_row_csv

    def _build_csv_row_and_add_header(bucket_list_tagged, bucket_list, account, csv_header, csv_row_all):
        for tag_value in bucket_list_tagged['tag_value']:
            bucket_info = _get_total_sizes_cost_and_names(tag_value['s3_buckets'], bucket_list)
            bucket_bandwith_stat = _get_bandwith_info(account, bucket_info[1])
            csv_header = _append_to_header_list(csv_header,
                                                _build_list_used_transfer_types(bucket_bandwith_stat))
            csv_row = {
                "tag_key": bucket_list_tagged['tag_key'].split(':')[1],
                "tag_value": tag_value['tag_value'],
                "account_id": tag_value['s3_buckets'][0]["account_id"],
                "total_size": bucket_info[0],
                "bucket_names": bucket_info[1],
                "storage_cost": bucket_info[2],
            }
            csv_row = _iterate_over_buckets_and_make_breakdown_bandwith_stat(bucket_bandwith_stat,
                                                                             csv_row,
                                                                             tag_value)
            csv_row['total_cost'] = csv_row['storage_cost'] + csv_row['bandwith_cost']
            csv_row_all.append(csv_row)
        return csv_header, csv_row_all

    def _select_bucket_list_tag(bucket_list_per_tag, tag):
        for bucket_list_tagged in bucket_list_per_tag:
            if tag in bucket_list_tagged['tag_key'].split(':')[1]:
                return bucket_list_tagged

    csv_header = [
        "account_id", "tag_key", "tag_value", "total_size",
        "bucket_names", "bandwith_cost", "storage_cost", "total_cost"
    ]
    csv_data = []
    for account in accounts:
        bucket_list_per_tag = AWSDetailedLineitem.get_s3_buckets_per_tag(account.get_aws_user_id())
        bucket_list_tagged = _select_bucket_list_tag(bucket_list_per_tag, tag)
        bucket_list = AWSStat.latest_s3_space_usage(account)
        csv_header, csv_data = _build_csv_row_and_add_header(bucket_list_tagged, bucket_list, account,
                                                             csv_header, csv_data)
    if 'csv' in request.args:
        return Response(generate_csv_clean(csv_data, csv_header))
    return jsonify(res=csv_data)