Example #1
def process_aws_resources_names(key):
    try:
        key = AWSKey.query.get(key.id)
        session = boto3.Session(aws_access_key_id=key.key,
                                aws_secret_access_key=key.secret)
        id_name_mapping = get_instances_id_name_mapping(session)
        id_name_mapping.update(get_volumes_id_name_mapping(session))

        def get_id_name_mappings():
            now = datetime.utcnow()
            for rid, name in id_name_mapping.items():
                yield AWSIdNameMapping(meta={'id': rid},
                                       key=key.key,
                                       date=now,
                                       rid=rid,
                                       name=name)

        bulk_save(get_id_name_mappings())
    except botocore.exceptions.ClientError as e:
        logging.error("[user={}][key={}] {}".format(key.user.email, key.pretty
                                                    or key.key, str(e)))
        aws_credentials_error(key, traceback.format_exc())
        key.error_status = u"bad_key"
        db.session.commit()
    except Exception:
        aws_key_processing_generic_error_email(key, traceback.format_exc())
        key.error_status = u"processing_error"
        db.session.commit()
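
All of these examples hand a generator of documents to a bulk_save helper that is not shown in the excerpts. The meta={'id': ...} keyword and document classes such as AWSIdNameMapping look like elasticsearch-dsl documents, so a minimal sketch of such a helper (an assumption, not the project's actual implementation) might be:

from elasticsearch.helpers import bulk
from elasticsearch_dsl.connections import connections


def bulk_save(documents):
    # Hypothetical helper: index all generated documents in a single bulk
    # request instead of one round-trip per document. Assumes a default
    # elasticsearch-dsl connection has been configured elsewhere.
    bulk(connections.get_connection(),
         (doc.to_dict(include_meta=True) for doc in documents))
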
Example #2
def process_google_identity(identity_ids):
    if not isinstance(identity_ids, list):
        identity_ids = [identity_ids]
    identities = list(
        GoogleCloudIdentity.query.filter(
            GoogleCloudIdentity.id.in_(identity_ids)))
    if not identities:
        return
    # Work on the identity that was fetched least recently; skip the whole
    # batch if its data is still fresh enough.
    identity = min(identities,
                   key=lambda i: i.last_fetched or datetime(1970, 1, 1))
    since = None
    if identity.last_fetched:
        # Refetch with a small overlap so late-arriving data is not missed.
        since = identity.last_fetched - google_data_grace
        if identity.last_fetched > datetime.utcnow() - google_data_fetch_freq:
            return

    for i in identities:
        i.last_fetched = datetime.utcnow()
    db.session.commit()

    res_by_day = GoogleResourceByDayAccumulator()

    try:

        def get_instance_metric_records(identity, project, since):
            for metric in gcloud_get_instance_metrics(identity, project,
                                                      since):
                id = checksum(str(metric['time']), metric['metric'],
                              metric['resource'], identity.email)
                yield GoogleMetric(meta={'id': id},
                                   identity=identity.email,
                                   **metric)

        for project in get_identity_projects_from_gcloud_api(identity):
            bulk_save(get_instance_metric_records(identity, project, since))
            for bucket in get_project_buckets_from_gcloud_api(
                    identity, project):
                for l in gcloud_get_all_line_items(identity, bucket, since):
                    res_by_day(l)

        def get_daily_resource_usage_records():
            for usage in res_by_day.results():
                yield GoogleDailyResource(
                    meta={'id': checksum(usage.pop('id'), identity.email)},
                    identity=identity.email,
                    **usage)

        bulk_save(get_daily_resource_usage_records())
    except Exception as e:
        identity.last_errored = datetime.utcnow()
        db.session.commit()
        logging.error("[user={}][identity={}] {}".format(
            identity.user.email, identity.email, str(e)))
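
The deterministic ids passed to meta={'id': ...} come from a checksum helper that is not part of the excerpt. It appears to hash the identifying fields of each record so that re-fetching the same period overwrites existing documents instead of duplicating them; a plausible sketch (hypothetical, not the project's code) would be:

import hashlib


def checksum(*parts):
    # Hypothetical helper: build a stable document id from the record's
    # identifying fields so that re-processed data updates the existing
    # document rather than creating a duplicate.
    return hashlib.sha256('|'.join(parts).encode('utf-8')).hexdigest()
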
Example #3
def process_aws_key(key_ids):
    # TODO: remove this
    if not isinstance(key_ids, list):
        key_ids = [key_ids]
    keys = list(AWSKey.query.filter(AWSKey.id.in_(key_ids)))
    if not keys:
        return
    # Work on the key that was fetched least recently; skip the whole batch
    # if its data is still fresh enough.
    key = min(keys, key=lambda k: k.last_fetched or datetime(1970, 1, 1))
    since = None
    if key.last_fetched:
        # Refetch with a small overlap so late-arriving data is not missed.
        since = key.last_fetched - aws_data_grace
        if key.last_fetched > datetime.utcnow() - aws_data_fetch_freq:
            return
    for k in keys:
        k.last_fetched = datetime.utcnow()
    db.session.commit()
    try:
        processing_start = datetime.utcnow()
        session = boto3.Session(aws_access_key_id=key.key,
                                aws_secret_access_key=key.secret)
        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='instances',
                data=get_instance_stats(session)).save()

        def get_instance_metric_records():
            for metric in get_instance_metrics(session, since):
                id = checksum(str(metric['time']), metric['metric'],
                              metric['resource'])
                yield AWSMetric(meta={'id': id}, key=key.key, **metric)

        bulk_save(get_instance_metric_records())

        def get_volume_metric_records():
            for metric in get_volume_metrics(session, since):
                id = checksum(str(metric['time']), metric['metric'],
                              metric['resource'])
                yield AWSMetric(meta={'id': id}, key=key.key, **metric)

        bulk_save(get_volume_metric_records())

        def get_bucket_metrics_records():
            for metric in get_bucket_metrics(session, since):
                id = checksum(str(metric['time']), metric['metric'],
                              metric['resource'])
                yield AWSMetric(meta={'id': id}, key=key.key, **metric)

        bulk_save(get_bucket_metrics_records())

        def get_bucket_object_access_records():
            for log in get_object_access_logs(session, since):
                id = checksum(str(log['time']), log['bucket'], log['object'])
                yield AWSAccessLog(meta={'id': id}, key=key.key, **log)

        bulk_save(get_bucket_object_access_records())

        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='ondemandtoreserved',
                data=get_on_demand_to_reserved_suggestion(session,
                                                          key)).save()

        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='s3spaceusage',
                data=get_s3_space_usage(key)).save()

        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='detachedvolumes',
                data=get_available_volumes(session)).save()

        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='hourlycpubytag',
                data=get_hourly_cpu_usage_by_tag(session, key.key)).save()

        AWSStat(key=key.key,
                time=datetime.utcnow(),
                stat='dailycpubytag',
                data=get_daily_cpu_usage_by_tag(session, key.key)).save()

        my_resources_record = MyResourcesAWS.query.filter(
            MyResourcesAWS.key == key.key).order_by(desc(
                MyResourcesAWS.date)).first()
        if not my_resources_record:
            res = get_providers_comparison_aws(key)
            if res is not None:
                my_resources_record = MyResourcesAWS(key=key.key,
                                                     date=datetime.utcnow())
                my_resources_record.set_json(res)
                db.session.add(my_resources_record)
                db.session.commit()

        my_db_resources_record = MyDBResourcesAWS.query.filter(
            MyDBResourcesAWS.key == key.key).order_by(
                desc(MyDBResourcesAWS.date)).first()
        if not my_db_resources_record:
            res = compare_rds_instances(key)
            my_db_resources_record = MyDBResourcesAWS(key=key.key,
                                                      date=datetime.utcnow())
            my_db_resources_record.set_json(res)
            db.session.add(my_db_resources_record)
            db.session.commit()

        key.error_status = None
        key.last_duration = (datetime.utcnow() -
                             processing_start).total_seconds()
        db.session.commit()
    except Exception as e:
        key.last_duration = (datetime.utcnow() -
                             processing_start).total_seconds()
        except_handler(e, key)
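
Example #3 delegates its error handling to an except_handler(e, key) that the excerpt does not include. Judging from the handling in Example #1, a sketch of such a dispatcher might look like the following (assumed, not the actual implementation):

def except_handler(e, key):
    # Hypothetical dispatcher mirroring Example #1: credential problems mark
    # the key as bad, anything else as a generic processing error, and the
    # status is persisted on the key record either way.
    if isinstance(e, botocore.exceptions.ClientError):
        logging.error("[user={}][key={}] {}".format(
            key.user.email, key.pretty or key.key, str(e)))
        aws_credentials_error(key, traceback.format_exc())
        key.error_status = u"bad_key"
    else:
        aws_key_processing_generic_error_email(key, traceback.format_exc())
        key.error_status = u"processing_error"
    db.session.commit()
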
Example #4
        logging.error("[user={}][key={}] {}".format(key.user.email, key.pretty
                                                    or key.key, str(e)))
        aws_credentials_error(key, traceback.format_exc())
        key.error_status = u"bad_key"
        db.session.commit()
        return
    now = datetime.utcnow()

    def get_instances_state():
        for region, instance in get_all_instances(session):
            yield AWSStat(key=key.key,
                          time=now,
                          stat='instancestate/' + instance.id,
                          data=dict(state=instance.state['Name']))

    bulk_save(get_instances_state())


@runner.task
def generate_stopped_instances_report(key):
    try:
        session = boto3.Session(aws_access_key_id=key.key,
                                aws_secret_access_key=key.secret)
    except Exception as e:
        logging.error("[user={}][key={}] {}".format(key.user.email, key.pretty
                                                    or key.key, str(e)))
        aws_credentials_error(key, traceback.format_exc())
        key.error_status = u"bad_key"
        db.session.commit()
        return
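
The @runner.task decorator suggests these functions run through a Celery-style task queue. Assuming that is the case, the report task above would typically be enqueued asynchronously for each stored key, for example:

# Hypothetical caller; assumes `runner` is a Celery application and that the
# task arguments can be serialized by the configured task broker.
for key in AWSKey.query.all():
    generate_stopped_instances_report.delay(key)
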