Example #1
def elbdata_to_db():
    now = datetime.datetime.now()
    elb_data = elb_collect()
    if elb_data is None:
        logger.error("elb aws api receive data is none")
        return False
    for item in elb_data:
        res = aws_model.DBsession.query(
            aws_model.Elb).filter_by(dns_name=item["DNSName"]).first()
        vpc = aws_model.DBsession.query(
            aws_model.Vpc).filter_by(vpc_id=item["VPCId"]).first()
        if not res:
            ec2_list = []
            for ec2_ins in item["Instances"]:
                ec2_query = aws_model.DBsession.query(aws_model.Ec2).filter_by(
                    instance_id=ec2_ins["InstanceId"]).first()
                ec2_list.append(ec2_query)
            elb_ins = aws_model.Elb(
                loadbalancer_name=item["LoadBalancerName"],
                loadbalancer_port=item["LoadBalancerPort"],
                instance_port=item["InstancePort"],
                aws_create_time=item["CreatedTime"],
                # vpc_id=vpc.id,
                dns_name=item["DNSName"],
                data_create_time=now,
                data_update_time=now,
                data_status=True,
                ec2=ec2_list,
            )
            aws_model.DBsession.add(elb_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            elb_to_db = DataToDB(item, res, Elb_res_keys, Elb_db_keys, now,
                                 aws_model.Elb, elb_data, "DNSName")
            elb_to_db.update()

    diff_elb = update_status(aws_model.Elb, elb_data, "DNSName", "dns_name")

    if diff_elb:
        for item in diff_elb:
            res = aws_model.DBsession.query(
                aws_model.Elb).filter_by(dns_name=item).first()
            if res.data_status is True:
                randstr = "".join(
                    random.sample(
                        string.ascii_lowercase + string.ascii_letters, 5))
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "loadbalancer_name",
                        res.loadbalancer_name + "_" + randstr)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
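
The examples in this listing all lean on an update_status(model, api_data, api_key, db_key) helper that is not shown. A minimal sketch of what it presumably does, under the assumption that it simply returns the identifiers stored in the database that no longer appear in the latest AWS API response (the name and signature come from the call sites; the body here is a guess, and the real helper may also perform updates itself):

def update_status(model, api_data, api_key, db_key):
    """Hypothetical sketch: DB identifiers missing from the latest API data."""
    # Identifiers reported by the AWS API in this run.
    api_ids = {item[api_key] for item in api_data}
    # Identifiers currently stored for this model.
    db_ids = {getattr(row, db_key)
              for row in aws_model.DBsession.query(model).all()}
    # Rows present in the DB but absent from the API are candidates for
    # being marked data_status=False by the caller.
    return list(db_ids - api_ids)
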
Example #2
def voldata_to_db():
    now = datetime.datetime.now()
    vol_data = vol_collect()
    if vol_data is None:
        logger.error("volume aws api receive data is none")
        return False
    for item in vol_data:
        res = aws_model.DBsession.query(
            aws_model.Volume).filter_by(volume_id=item["VolumeId"]).first()
        if not res:
            ec2_obj = aws_model.DBsession.query(aws_model.Ec2).filter_by(
                instance_id=item["InstanceId"]).first()
            if not ec2_obj:
                ec2_instance_name = "None"
                ec2_instance_id = "None"
            else:
                ec2_instance_name = ec2_obj.name
                ec2_instance_id = ec2_obj.instance_id
            vol_ins = aws_model.Volume(volume_id=item["VolumeId"],
                                       size=item["Size"],
                                       state=item["State"],
                                       aws_create_time=item["CreateTime"],
                                       iops=item["Iops"],
                                       device=item["Device"],
                                       ec2_instance_name=ec2_instance_name,
                                       ec2_instance_id=ec2_instance_id,
                                       volume_type=item["VolumeType"],
                                       data_create_time=now,
                                       data_update_time=now,
                                       data_status=True)
            aws_model.DBsession.add(vol_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            vol_to_db = DataToDB(item, res, Vol_res_keys, Vol_db_keys, now,
                                 aws_model.Volume, vol_data, "VolumeId")
            vol_to_db.update()

    diff_vol = update_status(aws_model.Volume, vol_data, "VolumeId",
                             "volume_id")

    if diff_vol:
        for item in diff_vol:
            res = aws_model.DBsession.query(
                aws_model.Volume).filter_by(volume_id=item).first()
            if res.data_status is True:
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
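
DataToDB is the other shared helper and is likewise not included in this listing. A rough sketch, assuming it walks paired lists of API field names (e.g. Vol_res_keys) and DB column names (e.g. Vol_db_keys) and writes any changed values back onto the existing row; the constructor signature is copied from the call sites, the body is an assumption:

class DataToDB(object):
    """Hypothetical sketch of the DataToDB helper used in these examples."""

    def __init__(self, item, res, res_keys, db_keys, now, model, data, key):
        self.item = item          # one record from the AWS API
        self.res = res            # the matching SQLAlchemy row
        self.res_keys = res_keys  # API field names, e.g. ["Size", "State"]
        self.db_keys = db_keys    # DB column names, e.g. ["size", "state"]
        self.now = now
        self.model = model
        self.data = data
        self.key = key

    def update(self):
        changed = False
        for api_key, db_key in zip(self.res_keys, self.db_keys):
            new_value = self.item.get(api_key)
            if getattr(self.res, db_key) != new_value:
                setattr(self.res, db_key, new_value)
                changed = True
        if changed:
            setattr(self.res, "data_update_time", self.now)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
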
Example #3
def ecdata_to_db():
    now = datetime.datetime.now()
    ec_data = ec_collect()
    if ec_data is None:
        logger.error("elasticache aws api receive data is none")
        return False
    for item in ec_data:
        res = aws_model.DBsession.query(aws_model.Elasticache).filter_by(cachecluster_id=item["CacheClusterId"]).first()
        if not res:
            ec_type_query = aws_model.DBsession.query(aws_model.Ectype)
            ec_type_query = ec_type_query.filter_by(type=item["CacheNodeType"])
            ec_type_obj = ec_type_query.first()
            if not ec_type_obj:
                logger.error("Ectype table don't have the elasticache_type of %s" %item["InstanceType"])
                log_data=aws_model.LogCrontab(content="new instance %s of Elasticache don't have the Elasticache_type of %s"
                                                     %(item["CacheClusterId"], item["InstanceType"]), data_create_time=now)
                aws_model.DBsession.add(log_data)
                aws_model.DBsession.commit()
                continue
            ec_ins = aws_model.Elasticache(
                cachecluster_id=item["CacheClusterId"],
                aws_create_time=item["CacheClusterCreateTime"],
                status=item["CacheClusterStatus"],
                engine=item["Engine"],
                cachenode_type=ec_type_obj.type,
                data_create_time=now,
                data_update_time=now,
                data_status=True)
            aws_model.DBsession.add(ec_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            ec_to_db = DataToDB(item, res, Ec_res_keys, Ec_db_keys, now, aws_model.Elasticache, ec_data, "CacheClusterId")
            ec_to_db.update()

    diff_ec = update_status(aws_model.Elasticache, ec_data, "CacheClusterId", "cachecluster_id")

    if diff_ec:
        for item in diff_ec:
            res = aws_model.DBsession.query(aws_model.Elasticache).filter_by(cachecluster_id=item).first()
            if res.data_status is True:
                randstr = "".join(random.sample(string.ascii_lowercase+string.ascii_letters, 5))
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "cachecluster_id", res.cachecluster_id+"_"+randstr)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
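
ec_collect() is the data source for this function and is not shown. A minimal sketch of one way it could look with boto3's describe_cache_clusters call (client configuration, pagination and error handling are assumptions; the field names match the keys consumed above):

import boto3
from botocore.exceptions import ClientError

def ec_collect():
    """Hypothetical collector: list ElastiCache clusters via boto3."""
    client = boto3.client("elasticache")
    try:
        # Each cluster dict carries CacheClusterId, CacheClusterCreateTime,
        # CacheClusterStatus, Engine and CacheNodeType.
        return client.describe_cache_clusters()["CacheClusters"]
    except ClientError:
        logger.exception("Exception Logged")
        return None
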
Example #4
def iamdata_to_db():
    now = datetime.datetime.now()
    iam_data = iam_collect()
    if iam_data is None:
        logger.error("iam aws api receive data is none")
        return False
    for item in iam_data:
        res = aws_model.DBsession.query(
            aws_model.Iam).filter_by(user_id=item["UserId"]).first()
        if not res:
            iam_ins = aws_model.Iam(user_id=item["UserId"],
                                    user_name=item["UserName"],
                                    password_lastused=item["PasswordLastUsed"],
                                    aws_create_time=item["CreateDate"],
                                    data_create_time=now,
                                    data_update_time=now,
                                    data_status=True)
            aws_model.DBsession.add(iam_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            iam_to_db = DataToDB(item, res, Iam_res_keys, Iam_db_keys, now,
                                 aws_model.Iam, iam_data, "UserId")
            iam_to_db.update()

    diff_iam = update_status(aws_model.Iam, iam_data, "UserId", "user_id")

    if diff_iam:
        for item in diff_iam:
            res = aws_model.DBsession.query(
                aws_model.Iam).filter_by(user_id=item).first()
            if res.data_status is True:
                randstr = "".join(
                    random.sample(
                        string.ascii_lowercase + string.ascii_letters, 5))
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "user_name", res.user_name + "_" + randstr)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
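
A similarly hedged sketch of iam_collect() using boto3's list_users paginator. Note that PasswordLastUsed is only present for users who have signed in with a password, so the sketch fills it with None; the original collector may handle this differently:

import boto3
from botocore.exceptions import ClientError

def iam_collect():
    """Hypothetical collector: list IAM users via boto3."""
    client = boto3.client("iam")
    users = []
    try:
        for page in client.get_paginator("list_users").paginate():
            for user in page["Users"]:
                # PasswordLastUsed is optional in the API response.
                user.setdefault("PasswordLastUsed", None)
                users.append(user)
    except ClientError:
        logger.exception("Exception Logged")
        return None
    return users
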
Example #5
def s3data_to_db():
    now = datetime.datetime.now()
    s3_data = s3_collect()
    if s3_data is None:
        logger.error("s3 aws api receive data is none")
        return False
    for item in s3_data:
        res = aws_model.DBsession.query(
            aws_model.S3).filter_by(name=item["Name"]).first()
        if not res:
            s3_ins = aws_model.S3(aws_create_time=item["CreationDate"],
                                  name=item["Name"],
                                  size=item["Size"],
                                  data_create_time=now,
                                  data_update_time=now,
                                  data_status=True)
            aws_model.DBsession.add(s3_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            s3_to_db = DataToDB(item, res, S3_res_keys, S3_db_keys, now,
                                aws_model.S3, s3_data, "Name")
            s3_to_db.update()

    diff_s3 = update_status(aws_model.S3, s3_data, "Name", "name")

    if diff_s3:
        for item in diff_s3:
            res = aws_model.DBsession.query(
                aws_model.S3).filter_by(name=item).first()
            if res.data_status is True:
                randstr = "".join(
                    random.sample(
                        string.ascii_lowercase + string.ascii_letters, 5))
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "name", res.name + "_" + randstr)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
Example #6
def vpcdata_to_db():
    now = datetime.datetime.now()
    vpc_data = vpc_collect()
    if vpc_data is None:
        logger.error("vpc aws api receive data is none")
        return False
    for item in vpc_data:
        res = aws_model.DBsession.query(
            aws_model.Vpc).filter_by(vpc_id=item["VpcId"]).first()
        if not res:
            vpc_ins = aws_model.Vpc(vpc_id=item["VpcId"],
                                    cidrblock=item["CidrBlock"],
                                    status=item["State"],
                                    data_create_time=now,
                                    data_update_time=now,
                                    data_status=True)
            aws_model.DBsession.add(vpc_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            vpc_to_db = DataToDB(item, res, Vpc_res_keys, Vpc_db_keys, now,
                                 aws_model.Vpc, vpc_data, "VpcId")
            vpc_to_db.update()

    diff_vpc = update_status(aws_model.Vpc, vpc_data, "VpcId", "vpc_id")

    if diff_vpc:
        for item in diff_vpc:
            res = aws_model.DBsession.query(
                aws_model.Vpc).filter_by(vpc_id=item).first()
            if res.data_status is True:
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "status", "delete")
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
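
vpc_collect() follows the same pattern; a minimal sketch with describe_vpcs (assumed implementation, the returned dicts carry the VpcId, CidrBlock and State keys used above):

import boto3
from botocore.exceptions import ClientError

def vpc_collect():
    """Hypothetical collector: list VPCs via boto3."""
    client = boto3.client("ec2")
    try:
        return client.describe_vpcs()["Vpcs"]
    except ClientError:
        logger.exception("Exception Logged")
        return None
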
Example #7
def rdsdata_to_db():
    now = datetime.datetime.now()
    rds_data = rds_collect()
    if rds_data is None:
        logger.error("rds aws api receive data is none")
        return False
    for item in rds_data:
        res = aws_model.DBsession.query(aws_model.Rds).filter_by(
            resource_id=item["DbiResourceId"]).first()
        if not res:
            rds_type_query = aws_model.DBsession.query(aws_model.Rdstype)
            rds_type_query = rds_type_query.filter_by(
                type=item["DBInstanceClass"])
            rds_type_obj = rds_type_query.first()
            vpc = aws_model.DBsession.query(
                aws_model.Vpc).filter_by(vpc_id=item["VpcId"]).first()
            if not rds_type_obj:
                logger.error("Rdstype table does not have the rds_type %s" %
                             item["DBInstanceClass"])
                log_data = aws_model.LogCrontab(
                    content=
                    "new instance %s of rds does not have the rds_type %s" %
                    (item["DBInstanceIdentifier"], item["DBInstanceClass"]),
                    data_create_time=now)
                aws_model.DBsession.add(log_data)
                aws_model.DBsession.commit()
                continue
            rds_ins = aws_model.Rds(db_identifier=item["DBInstanceIdentifier"],
                                    status=item["DBInstanceStatus"],
                                    engine=item["Engine"],
                                    resource_id=item["DbiResourceId"],
                                    aws_create_time=item["InstanceCreateTime"],
                                    instance_type=rds_type_obj.type,
                                    vpc_id=vpc.id,
                                    storage_type=item["StorageType"],
                                    data_create_time=now,
                                    data_update_time=now,
                                    data_status=True)
            aws_model.DBsession.add(rds_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            rds_to_db = DataToDB(item, res, Rds_res_keys, Rds_db_keys, now,
                                 aws_model.Rds, rds_data, "DbiResourceId")
            rds_to_db.update()

    diff_rds = update_status(aws_model.Rds, rds_data, "DbiResourceId",
                             "resource_id")

    if diff_rds:
        for item in diff_rds:
            res = aws_model.DBsession.query(
                aws_model.Rds).filter_by(resource_id=item).first()
            if res.data_status is True:
                randstr = "".join(
                    random.sample(
                        string.ascii_lowercase + string.ascii_letters, 5))
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                setattr(res, "db_identifier",
                        res.db_identifier + "_" + randstr)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
    return True
Example #8
def ec2data_to_db():
    now = datetime.datetime.now()
    ec2_data = ec2_collect()
    aws_add_ec2 = []
    if ec2_data is None:
        logger.error("ec2 aws api receive data is none")
        return False
    for item in ec2_data:
        res = aws_model.DBsession.query(
            aws_model.Ec2).filter_by(instance_id=item["InstanceId"]).first()
        if not res:
            ec2_type_query = aws_model.DBsession.query(aws_model.Ec2type)
            ec2_type_query = ec2_type_query.filter_by(
                type=item["InstanceType"])
            ec2_type_obj = ec2_type_query.first()
            vpc = aws_model.DBsession.query(
                aws_model.Vpc).filter_by(vpc_id=item["VpcId"]).first()
            if not ec2_type_obj:
                logger.error("Ec2type table don't have the ec2_type of %s" %
                             item["InstanceType"])
                log_data = aws_model.LogCrontab(
                    content=
                    "new instance %s of ec2 don't have the ec2_type of %s" %
                    (item["Name"], item["InstanceType"]),
                    data_create_time=now)
                aws_model.DBsession.add(log_data)
                aws_model.DBsession.commit()
                continue
            ec2_ins = aws_model.Ec2(instance_id=item["InstanceId"],
                                    private_ip=item["PrivateIpAddress"],
                                    public_ip=item["PublicIpAddress"],
                                    run_state=item["State"],
                                    keyname=item["KeyName"],
                                    aws_create_time=item["LaunchTime"],
                                    vpc_id=vpc.id,
                                    name=item.get("Name"),
                                    instance_type=ec2_type_obj.type,
                                    data_create_time=now,
                                    data_update_time=now,
                                    data_status=True)
            aws_add_ec2.append(item["PrivateIpAddress"])
            aws_model.DBsession.add(ec2_ins)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        elif item["State"] == "terminated":
            setattr(res, "data_update_time", now)
            setattr(res, "run_state", "terminated")
            setattr(res, "data_status", False)
            try:
                aws_model.DBsession.commit()
            except exc.SQLAlchemyError:
                logger.exception("Exception Logged")
                aws_model.DBsession.rollback()
        else:
            ec2_to_db = DataToDB(item, res, Ec2_res_keys, Ec2_db_keys, now,
                                 aws_model.Ec2, ec2_data, "InstanceId")
            ec2_to_db.update()

    # Email notification for newly added EC2 instances
    aws_add_ec2 = " ".join(aws_add_ec2)
    if aws_add_ec2:
        res = mail(aws_add_ec2, "AWS added EC2 instances; compare with the ansible hosts file to confirm the additions")
        if res is False:
            log_data = aws_model.LogCrontab(content="add ec2 send mail failed",
                                            data_create_time=now)
            aws_model.DBsession.add(log_data)
            aws_model.DBsession.commit()

    # Soft-delete released EC2 instances and send an email notification
    diff_ec2 = update_status(aws_model.Ec2, ec2_data, "InstanceId",
                             "instance_id")
    if diff_ec2:
        aws_delete_ec2 = []
        for item in diff_ec2:
            res = aws_model.DBsession.query(
                aws_model.Ec2).filter_by(instance_id=item).first()
            if res.data_status is True:
                aws_delete_ec2.append(res.private_ip)
                setattr(res, "data_status", False)
                setattr(res, "data_update_time", now)
                try:
                    aws_model.DBsession.commit()
                except exc.SQLAlchemyError:
                    logger.exception("Exception Logged")
                    aws_model.DBsession.rollback()
        aws_delete_ec2 = " ".join(aws_delete_ec2)
        if aws_delete_ec2:
            res = mail(aws_delete_ec2, "AWS removed EC2 instances; compare with the ansible hosts file to confirm the deletions")
            if res is False:
                log_data = aws_model.LogCrontab(
                    content="delete ec2 send mail failed",
                    data_create_time=now)
                aws_model.DBsession.add(log_data)
                aws_model.DBsession.commit()

    # Capacity (utilization) calculation
    ec2_all = aws_model.DBsession.query(
        aws_model.Ec2).filter(aws_model.Ec2.data_status == True).all()
    for ec2 in ec2_all:
        utilization = ec2_utilizat(ec2.instance_id)
        ec2.utilization = utilization
        try:
            aws_model.DBsession.commit()
        except exc.SQLAlchemyError:
            logger.exception("Exception Logged")
            aws_model.DBsession.rollback()

    return True
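
The capacity calculation at the end depends on ec2_utilizat(instance_id), which is not shown. One plausible implementation is the average CloudWatch CPUUtilization over the last 24 hours; the metric, period and statistic here are assumptions, not the original logic:

import datetime
import boto3
from botocore.exceptions import ClientError

def ec2_utilizat(instance_id):
    """Hypothetical helper: average CPUUtilization over the last 24 hours."""
    cloudwatch = boto3.client("cloudwatch")
    end = datetime.datetime.utcnow()
    start = end - datetime.timedelta(days=1)
    try:
        stats = cloudwatch.get_metric_statistics(
            Namespace="AWS/EC2",
            MetricName="CPUUtilization",
            Dimensions=[{"Name": "InstanceId", "Value": instance_id}],
            StartTime=start,
            EndTime=end,
            Period=3600,
            Statistics=["Average"],
        )
    except ClientError:
        logger.exception("Exception Logged")
        return None
    datapoints = stats.get("Datapoints", [])
    if not datapoints:
        return 0
    return sum(dp["Average"] for dp in datapoints) / len(datapoints)
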