def get_billing_data_per_year(year, output_type):
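    """Return a query of cost totals for the given year: for 'day' output, cost per
    usage date keyed by UNIX timestamp (with usage value and unit); otherwise cost
    per extracted date part (e.g. 'month' or 'quarter')."""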
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost), Usage.usage_value,
                                        Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year).group_by(func.unix_timestamp(Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year).group_by(func.extract(output_type, Usage.usage_date))
    return billing_data
def get_billing_data_per_year_month(year, value_to_match, output_type):
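    """Sum cost per project for the given year; when value_to_match equals the year
    itself no extra filter is applied, otherwise the extracted date part named by
    output_type must equal value_to_match."""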
    if year == value_to_match:
        billing_data = db_session.query(Usage.project_id, func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year).group_by(Usage.project_id)
    else:
        billing_data = db_session.query(Usage.project_id, func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   func.extract(output_type, Usage.usage_date) == value_to_match).group_by(Usage.project_id)

    return billing_data
def get_billing_data_per_year_per_center(year, project_ids, output_type):
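    """Sum cost for the projects in project_ids (a cost center) in the given year,
    grouped by the date part named by output_type."""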
    if output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids)). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids)). \
            group_by(func.extract(output_type, Usage.usage_date))

    return billing_data
def get_billing_data_per_resource_all_project_per_day_week(year, value_to_match, project_ids, resource, output_type):
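    """Cost for one resource type across the given projects, limited to week
    value_to_match of the given year, grouped per day ('day') or per week ('week')."""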
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost), Usage.usage_value,
                                        Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   Usage.resource_type == resource, func.extract('week', Usage.usage_date) == value_to_match). \
            group_by(func.unix_timestamp(Usage.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   Usage.resource_type == resource, func.extract('week', Usage.usage_date) == value_to_match). \
            group_by(func.extract(output_type, Usage.usage_date))

    return billing_data
def load_usage_table():
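    """Flask view: return all Usage rows as JSON, optionally filtered by the
    'year', 'month' and 'project' query-string arguments; the project filter
    matches the second path segment of resource_uri."""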
    year = request.args.get('year', None)
    month = request.args.get('month', None)
    project = request.args.get('project', None)

    data = db_session.query(Usage).all()
    data = [x.__dict__ for x in data]

    # year = 4 digit year
    if year is not None:
        data = filter(lambda x: x['usage_date'].year == int(year), data)

    # month = month num
    if month is not None:
        data = filter(lambda x: x['usage_date'].month == int(month), data)

    # project = project_name
    if project is not None:
        data = filter(
            lambda x: x['resource_uri'].split('/')[1] == str(project), data)

    # Materialise before mutating: in Python 3, filter() returns a one-shot
    # iterator, so converting to a list only after the loop below would leave
    # an empty result.
    data = list(data)

    for x in data:
        x['usage_date'] = x['usage_date'].strftime('%Y-%m-%d')
        del x['_sa_instance_state']

    resp = Response(response=simplejson.dumps(data),
                    status=200,
                    mimetype='application/json')

    return resp
def get_billing_data_per_year_week_day(year, value_to_match, output_type, project_ids):
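    """Cost for the given projects in week value_to_match of the given year,
    grouped per day ('day') or per week and project ('week')."""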
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Billing.usage_date), func.sum(Billing.cost),
                                        Billing.usage_value,
                                        Billing.measurement_unit). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id.in_(project_ids),
                   func.extract('week', Billing.usage_date) == value_to_match). \
            group_by(func.extract(output_type, Billing.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Billing.project_id, func.extract(output_type, Billing.usage_date),
                                        func.sum(Billing.cost)). \
            filter(func.extract('year', Billing.usage_date) == year,
                   func.extract('week', Billing.usage_date) == value_to_match).group_by(
            func.extract(output_type, Billing.usage_date), Billing.project_id)

    return billing_data
def get_billing_data_per_resource_month_week_day_center(year, value_to_match, project_id, output_type):
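    """Cost for one project in month value_to_match of the given year: per day for
    'day' output, otherwise per resource type and the date part named by output_type."""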
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date),
                                        func.sum(Usage.cost), Usage.usage_value,
                                        Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year,
                   func.extract('month', Usage.usage_date) == value_to_match, Usage.project_id == project_id). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(Usage.resource_type, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   func.extract('month', Usage.usage_date) == value_to_match).group_by(
            func.extract(output_type, Usage.usage_date), Usage.resource_type)

    return billing_data
def load_instance_table():
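    """Flask view: return Instance records as JSON, filtered by the optional
    'machine_type', 'tags' (AND semantics), 'project', 'month' and 'year' fields
    of the JSON request body."""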
    body = request.get_json()
    log.info("------- body: {0} -------".format(body))
    machine_type = body.get('machine_type', None)
    tags = body.get('tags', None)
    project = body.get('project', None)
    month = body.get('month', None)
    year = body.get('year', None)

    data = db_session.query(Instance).order_by(Instance.instanceId).all()

    data = [x.__dict__ for x in data]

    for x in data:
        del x['_sa_instance_state']

    instance_obj_list = build_objs(data)

    for x in instance_obj_list:
        if 'machineType' in x:
            x['machineType'] = x['machineType'].split('/')[-1]

    # project = project name
    if project is not None:
        project = project.lower()
        instance_obj_list = filter(lambda x: x['project'] == project,
                                   instance_obj_list)

    # machine_type = just the machine type bit
    if machine_type is not None:
        instance_obj_list = filter(lambda x: x['machineType'] == machine_type,
                                   instance_obj_list)

    # tags should be filtered as AND
    if tags is not None:
        # Compare tags as plain strings; encoding them to bytes would never
        # match the str values stored in 'tags.items' under Python 3.
        tags = set(tags)
        instance_obj_list = filter(
            lambda x: 'tags.items' in x and tags.issubset(set(x['tags.items'])),
            instance_obj_list)

    if year is not None:
        year = int(year)
        instance_obj_list = filter(
            lambda x: int(x['creationTimestamp'].split('-')[0]) == year,
            instance_obj_list)

    if month is not None:
        month = int(month)
        instance_obj_list = filter(
            lambda x: int(x['creationTimestamp'].split('-')[1]) == month,
            instance_obj_list)

    # simplejson cannot serialise a filter iterator, so materialise it first.
    instance_obj_list = list(instance_obj_list)

    resp = Response(response=simplejson.dumps(instance_obj_list),
                    status=200,
                    mimetype="application/json")

    return resp
def get_billing_data_per_resource_month_center(year, value_to_match, project_id, output_type):
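    """Sum cost per resource type for one project, restricted to the given year and
    to the date part (output_type) value_to_match."""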
    billing_data = db_session.query(Usage.resource_type, func.sum(Usage.cost)). \
        filter(func.extract('year', Usage.usage_date) == year,
               func.extract(output_type, Usage.usage_date) == value_to_match,
               Usage.project_id == project_id). \
        group_by(Usage.resource_type)

    return billing_data
def get_billing_data_per_resource_per_project_per_week(year, value_to_match, project_id, resource, output_type):
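    """Cost for one resource type of one project in week value_to_match of the given
    year, grouped per day ('day') or per week ('week')."""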
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Billing.usage_date), func.sum(Billing.cost),
                                        Billing.usage_value,
                                        Billing.measurement_unit). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id == project_id,
                   Billing.resource_type == resource, func.extract('week', Billing.usage_date) == value_to_match). \
            group_by(func.unix_timestamp(Billing.usage_date))

    elif output_type == 'week':
        billing_data = db_session.query(Billing.project_id, func.extract('week', Billing.usage_date),
                                        func.sum(Billing.cost)). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id == project_id,
                   Billing.resource_type == resource, func.extract('week', Billing.usage_date) == value_to_match). \
            group_by(func.extract('week', Billing.usage_date))

    return billing_data
def get_billing_data_per_year_per_center_days(year, project_ids):
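    """Daily cost (keyed by UNIX timestamp of usage_date) for the given projects in
    the given year, with usage value and measurement unit."""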
    billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                    Usage.usage_value,
                                    Usage.measurement_unit). \
        filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids)). \
        group_by(func.unix_timestamp(Usage.usage_date))

    return billing_data
def getProjectNames():
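    """Flask view: return the distinct project names as a JSON array."""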
    project_list = db_session.query(Project.project_name).distinct()
    project_names = [item[0] for item in project_list]

    resp = Response(response=simplejson.dumps(project_names),
                    status=200,
                    mimetype='application/json')
    return resp
def get_billing_data_per_project_year(year, project_id, output_type):
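    """Cost for a single project in the given year, grouped per day, per week or by
    another extracted date part depending on output_type."""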
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Billing.usage_date), func.sum(Billing.cost),
                                        Billing.usage_value,
                                        Billing.measurement_unit). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id == project_id). \
            group_by(func.unix_timestamp(Billing.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Billing.project_id, func.extract(output_type, Billing.usage_date),
                                        func.sum(Billing.cost)). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id == project_id). \
            group_by(func.extract(output_type, Billing.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Billing.usage_date), func.sum(Billing.cost)). \
            filter(func.extract('year', Billing.usage_date) == year, Billing.project_id == project_id). \
            group_by(func.extract(output_type, Billing.usage_date))

    return billing_data
def get_billing_data_per_year_per_center_quarter(year, project_ids, quarter, output_type):
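    """Cost for the given projects in one quarter of the given year, grouped by the
    date part named by output_type."""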
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost), Usage.usage_value,
                                        Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))

    return billing_data
def get_billing_data_per_resource_per_project(year, project_id, resource, output_type):
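    """Cost for one resource type of a single project in the given year, grouped by
    the date part named by output_type."""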
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost), Usage.usage_value,
                                        Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.unix_timestamp(Usage.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.extract(output_type, Usage.usage_date))

    return billing_data
def getAllProjectId():
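    """Flask view: return the distinct Usage.resource_id values as a JSON array."""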
    print("blah")
    data = db_session.query(Usage.resource_id).distinct()

    # data = [x.__dict__ for x in data]
    new_data = []
    for (item) in data:
        new_data.append(item[0])

    resp = Response(response=simplejson.dumps(new_data),
                    status=200,
                    mimetype="application/json")

    return resp
def getAllUsage():
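    """Flask view: return every Usage row as JSON with usage_date formatted as
    YYYY-MM-DD."""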
    print("blah")
    data = db_session.query(Usage).all()
    data = [x.__dict__ for x in data]

    for x in data:
        x['usage_date'] = x['usage_date'].strftime('%Y-%m-%d')
        del x['_sa_instance_state']

    resp = Response(response=simplejson.dumps(data),
                    status=200,
                    mimetype="application/json")

    return resp
def get_machine_types():
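    """Flask view: return the distinct machine types (last path segment of the
    stored machineType value) as a JSON array."""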

    data = db_session.query(Instance).filter(
        Instance.key == 'machineType').all()
    data = (x.__dict__ for x in data)

    machine_types_list = [y['value'].split('/')[-1] for y in data]

    # remove duplicate machine types
    machine_types_list = list(set(machine_types_list))

    resp = Response(response=simplejson.dumps(machine_types_list),
                    status=200,
                    mimetype="application/json")

    return resp
def get_tags():
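    """Flask view: return the distinct instance tag values ('tags.items') as a
    JSON array."""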

    data = db_session.query(
        Instance.value).filter(Instance.key == "tags.items").distinct()

    tmp = set()
    for x in data:
        x = tuple(x)
        tmp.add(x[0])

    data = list(tmp)
    resp = Response(response=simplejson.dumps(data),
                    status=200,
                    mimetype="application/json")

    return resp
def data_processor(job_type):
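    """Walk the '<project>-usage-export' bucket of every known project, list its
    objects through the Cloud Storage JSON API and pass each page of results to
    get_filenames(); returns a dict summarising per-bucket success and failure."""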

    #  get list of buckets; for each project, do try except

    status = 200
    message = dict(success=[], fail=[])
    startTime = datetime.datetime.now()
    lock_file = True

    # get list of project names
    project_list = db_session.query(Project.project_name).distinct()
    project_names = (item[0] for item in project_list)

    for b in project_names:
        try:
            bucket = b + "-usage-export"
            archive_bucket = bucket + '-archive'
            log.info('--------- THE BUCKET : {0} --------------'.format(bucket))
            random_number = binascii.hexlify(os.urandom(32)).decode()
            log.info(' RANDOM NUMBER --- {0}'.format(random_number))

            # Get the application default credentials. When running locally, these are
            # available after running `gcloud init`. When running on compute
            # engine, these are available from the environment.
            credentials = GoogleCredentials.get_application_default()

            # Construct the service object for interacting with the Cloud Storage API -
            # the 'storage' service, at version 'v1'.
            # You can browse other available api services and versions here:
            # https://developers.google.com/api-client-library/python/apis/
            service = discovery.build('storage', 'v1', credentials=credentials)

            # Make a request to buckets.get to retrieve a list of objects in the
            # specified bucket.
            req = service.buckets().get(bucket=bucket)
            resp = req.execute()

            # print(json.dumps(resp, indent=2))

            # Create a request to objects.list to retrieve a list of objects.
            fields_to_return = \
                'nextPageToken,items(name,size,contentType,metadata(my-key))'
            req = service.objects().list(bucket=bucket, fields=fields_to_return)
            file_count = 0
            log.info('Process {0} Start time --- {1}'.format(bucket, startTime))
            # If you have too many items to list in one request, list_next() will
            # automatically handle paging with the pageToken.
            while req:
                resp = req.execute()
                # print(json.dumps(resp, indent=2))
                if len(resp) == 0:
                    log.info('############################################################################################')
                    log.info('--------- THE BUCKET LIST IS EMPTY --------------')
                    log.info('--------- NO FILES TO PROCESS  --------------')
                    log.info(resp)
                    log.info('############################################################################################')

                else:
                    get_filenames(resp, service, random_number, bucket)

                req = service.objects().list_next(req, resp)
            message['success'].append(bucket)

        except Exception as e:
            log.error(' Error in getting Bucket Details - {0}'.format(e))
            message['fail'].append({'bucket':bucket, 'error':str(e)})
            status = 500
            pass

    endTime = datetime.datetime.now()
    log.info('Process End time --- {0}'.format(endTime))

    elapsedTime = endTime - startTime

    time = 'Total Time to Process all the files -- {0}'.format(divmod(elapsedTime.total_seconds(), 60))
    log.info(time)

    log.info(' ARGS PASSED --- {0}'.format(job_type))
    # if job_type == 'now':
    #     set_scheduler(os.environ.get('SCHEDULER_HOUR'), os.environ.get('SCHEDULER_MIN'))

    response = dict(data=json.dumps(message), status=status, time=time)
    return response
def get_cost_centers(unique):
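    """Return the distinct cost centers when unique is truthy, otherwise all
    Projects rows."""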
    if unique:
        center_list = db_session.query(Projects.cost_center).distinct()
    else:
        center_list = db_session.query(Projects).all()
    return center_list
def get_resource_list_per_project(project_ids):
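    """Distinct resource types used by the given projects."""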
    resource_list = db_session.query(Usage.resource_type). \
        filter(Usage.project_id.in_(project_ids)). \
        distinct()
    return resource_list
def get_distinct_projects():
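    """Distinct project ids present in the Usage table."""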
    project_list = db_session.query(Usage.project_id).distinct()

    return project_list