Code Example #1
def create_rms_service_dict(
        data: Iterable[Dict[str, str]],
        values: Iterable[str]) -> Dict[str, Dict[str, Iterable[str]]]:
    logger = get_logger()
    return_dict = {}
    logger.debug(
        f"Creating services dictionary from \n{data}\nwith values {values}")
    for item in data:
        if "service" in item:
            service = item["service"]
        else:
            logger.info(f"Object {item} has no associated service\n")
            continue
        # Skip services that are not internal
        if not rip.get_internal_services([service]):
            continue
        for value in values:
            if service not in return_dict:
                return_dict[service] = {value: [item[value]]}
            else:
                if value in return_dict[service]:
                    return_dict[service][value].append(item[value])
                else:
                    return_dict[service][value] = [item[value]]

    logger.info(f"Created dictionary:\n{return_dict}")
    return return_dict
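
The grouping idiom above can also be exercised in isolation. The following is a minimal, dependency-free sketch (illustrative names only; the real function additionally filters with rip.get_internal_services and logs via get_logger):

# Dependency-free sketch of the same grouping idiom as create_rms_service_dict.
def group_by_service(data, values):
    grouped = {}
    for item in data:
        service = item.get("service")
        if service is None:
            continue
        for value in values:
            grouped.setdefault(service, {}).setdefault(value, []).append(item[value])
    return grouped

# group_by_service([{"service": "s1", "arn": "a1"}, {"service": "s1", "arn": "a2"}], ["arn"])
# returns {"s1": {"arn": ["a1", "a2"]}}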
Code Example #2
def get_all_edges():
    logger = get_logger()
    logger.info(f"Fetching all of the RMS edges...")
    rmsv2_auth = get_aws_auth(AWS_HOST_RMSV2, AWS_REGION_EAST)
    fetched_data = get_rms_data(AWS_HOST_RMSV2, rmsv2_auth,
                                RmsEndpoint.EDGES.value)
    logger.info(f"Edges retrieved:\n{fetched_data}")
    return fetched_data
Code Example #3
def get_services_with_plan():
    logger = get_logger()
    recon_auth = get_aws_auth(AWS_HOST_RECON, AWS_REGION_EAST)

    logger.info(f"Fetching all services with a plan categorization...")
    data = get_recon_data(AWS_HOST_RECON, recon_auth, "services")
    logger.info(f"Data retrieved:\n{data}")
    return data
Code Example #4
def get_recon_data(aws_host: str, auth: AWSRequestsAuth, endpoint: str,
                   **kwargs):
    logger = get_logger()
    query_params = urllib.parse.urlencode(kwargs)
    url = urllib.parse.urlunparse(
        ("https", aws_host, f"api/{endpoint}", None, query_params, None))
    logger.info(f'Making recon request with:\n{url}')
    json_object = get_recon_response(auth, url)
    logger.info(f'Response received:\n{json_object}')
    return json_object
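
For reference, a stdlib-only illustration of how urlencode and urlunparse assemble the request URL above (the host and query values here are placeholders, not the real Recon endpoint):

# Stdlib-only illustration of the URL construction used in get_recon_data.
import urllib.parse

query = urllib.parse.urlencode({"plan": "Mandatory"})
url = urllib.parse.urlunparse(
    ("https", "recon.example.com", "api/services", None, query, None))
# url == "https://recon.example.com/api/services?plan=Mandatory"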
Code Example #5
def get_recon_response(recon_auth: AWSRequestsAuth, url: str):
    logger = get_logger()
    json_object = {}
    try:
        response = retry_get_requests(url, auth=recon_auth)
        response.raise_for_status()
        json_object = response.json()
        logger.info(f"Recon response received:\n{json_object}")
    except requests.exceptions.HTTPError as e:
        logger.error(e)

    return json_object
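
retry_get_requests is a project helper that is not shown in these examples. Purely as an assumption about its shape, a wrapper along the following lines (requests plus simple exponential backoff on connection errors) would satisfy the call sites above, which expect a Response back and handle HTTP errors themselves via raise_for_status:

# Hypothetical sketch only; the real retry_get_requests implementation is not shown here.
import time
import requests

def retry_get_requests(url, auth=None, retries=3, backoff_seconds=1.0):
    for attempt in range(retries):
        try:
            return requests.get(url, auth=auth, timeout=30)
        except requests.exceptions.ConnectionError:
            if attempt == retries - 1:
                raise
            time.sleep(backoff_seconds * (2 ** attempt))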
Code Example #6
def update_service_plan(service: str, plan: ServicePlan):
    logger = get_logger()
    plan_str = "" if plan == ServicePlan.UNCATEGORIZED else plan.value
    recon_auth = get_aws_auth(AWS_HOST_RECON_BETA, AWS_REGION_EAST)
    patch_params = {"op": "replace", "path": "/plan", "value": plan_str}
    url = urllib.parse.urlunparse(
        ("https", AWS_HOST_RECON_BETA, f"api/services/{service}", None, None,
         None))
    logger.info(
        f"Sending patch request to update the plan of {service} with new value \"{plan_str}\"..."
    )
    with requests.patch(url, data=patch_params, auth=recon_auth) as response:
        logger.info(f"Response received:\n{response}")
Code Example #7
def get_aws_auth(aws_host, aws_region):
    logger = get_logger()
    session = boto3.Session()
    credentials = session.get_credentials()

    current_credentials = credentials.get_frozen_credentials()

    return AWSRequestsAuth(
        aws_access_key=current_credentials.access_key,
        aws_secret_access_key=current_credentials.secret_key,
        aws_token=current_credentials.token,
        aws_host=aws_host,
        aws_region=aws_region,
        aws_service="execute-api")
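
The returned AWSRequestsAuth object plugs directly into requests and signs each call with SigV4 for the execute-api (API Gateway) service. A minimal usage sketch, assuming boto3 can resolve credentials and using placeholder host and region values:

# Placeholder host and region; requires resolvable boto3 credentials.
import requests

auth = get_aws_auth("abc123.execute-api.us-east-1.amazonaws.com", "us-east-1")
response = requests.get(
    "https://abc123.execute-api.us-east-1.amazonaws.com/prod/services", auth=auth)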
Code Example #8
def get_rms_response(rmsauth: AWSRequestsAuth, url: str, next_token=None):
    url = url_token_checker(url, next_token)
    logger = get_logger()
    response = retry_get_requests(url, auth=rmsauth)
    json_object = None
    try:
        response.raise_for_status()
        json_object = response.json()
        next_token = json_object.get("nextToken")
        logger.info(f"RMS response received:\n{json_object}")
    except requests.exceptions.HTTPError as e:
        logger.error(e)
        next_token = None

    return json_object, next_token
Code Example #9
def get_cpt_data(endpoints: Iterable[str]) -> Iterable[Iterable[Dict[str, str]]]:
    logger = get_logger()
    rmsv2_auth = get_aws_auth(AWS_HOST_RMSV2, AWS_REGION_EAST)
    fetched_data = []
    for endpoint in endpoints:
        kwargs = {
            "label": "COMMERCIAL_PARTITION_TEMPLATE"
        } if endpoint == RmsEndpoint.EDGES.value else {
            "dimension": "COMMERCIAL_PARTITION_TEMPLATE"
        }
        fetched_data.append(
            get_rms_data(AWS_HOST_RMSV2, rmsv2_auth, endpoint, **kwargs))

    logger.info(f"Data retrieved:\n{fetched_data}")
    return fetched_data
Code Example #10
def get_internal_services(service_identifiers: Iterable[str] = ()):
    logger = get_logger()
    helper = RIPHelperLocal(metapackage="RIPDataAllSQLite-1.0")
    services = []

    # Check in case future use involves grabbing all internal services
    if service_identifiers:
        for identifier in service_identifiers:
            try:
                if helper.service(identifier).visibility == Visibility.INTERNAL:
                    services.append(identifier)
            except ServiceNotFoundError as e:
                logger.warning(
                    f"The service identifier \"{identifier}\" could not be found: {e}")

    return services
Code Example #11
def find_plan(service: str):
    logger = get_logger()
    recon_auth = get_aws_auth(AWS_HOST_RECON, AWS_REGION_EAST)

    logger.info(f"Fetching the plan categorization for {service}...")
    data = get_recon_data(AWS_HOST_RECON, recon_auth,
                          f"services/{service}/plans")
    logger.info(f"Data retrieved:\n{data}")
    plan = data.get("service", {}).get("plan")
    if plan == ServicePlan.LAUNCH_BLOCKING.value:
        return ServicePlan.LAUNCH_BLOCKING
    elif plan == ServicePlan.MANDATORY.value:
        return ServicePlan.MANDATORY
    else:
        return ServicePlan.NON_GLOBAL
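
If preferred, the if/elif chain can be collapsed into a lookup table without changing behavior (plan_from_string is a hypothetical helper name, not part of the original module):

# Behavior-preserving alternative to the if/elif chain in find_plan.
_PLAN_LOOKUP = {
    ServicePlan.LAUNCH_BLOCKING.value: ServicePlan.LAUNCH_BLOCKING,
    ServicePlan.MANDATORY.value: ServicePlan.MANDATORY,
}

def plan_from_string(plan):
    return _PLAN_LOOKUP.get(plan, ServicePlan.NON_GLOBAL)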
Code Example #12
def get_rms_data(aws_host: str, auth: AWSRequestsAuth, endpoint: str,
                 **kwargs):
    data = []
    logger = get_logger()
    query_params = urllib.parse.urlencode(kwargs)
    url = urllib.parse.urlunparse(
        ("https", aws_host, f"prod/{endpoint}", None, query_params, None))
    # get_rms_response returns (None, None) on HTTP errors, so guard before extending
    json_object, next_token = get_rms_response(auth, url)
    if json_object:
        data.extend(json_object[endpoint])
    while next_token:
        logger.info(
            f"Sending request to fetch more {endpoint} data from RMSV2 with token {next_token}..."
        )
        json_object, next_token = get_rms_response(auth, url, next_token)
        if json_object:
            data.extend(json_object[endpoint])
    return data
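
The nextToken handling above is a standard cursor-pagination loop. The dependency-free sketch below (fake page data and illustrative names) shows how the pages accumulate into a single list:

# Dependency-free sketch of the nextToken pagination pattern used in get_rms_data.
_FAKE_PAGES = {
    None: {"milestones": [{"arn": "m1"}], "nextToken": "t1"},
    "t1": {"milestones": [{"arn": "m2"}], "nextToken": None},
}

def fetch_page(token=None):
    page = _FAKE_PAGES[token]
    return page, page.get("nextToken")

data = []
page, next_token = fetch_page()
data.extend(page["milestones"])
while next_token:
    page, next_token = fetch_page(next_token)
    data.extend(page["milestones"])
# data == [{"arn": "m1"}, {"arn": "m2"}]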
Code Example #13
def get_milestones_tasks_successors(arn: str, option: RmsEndpoint):
    logger = get_logger()
    rmsv2_auth = get_aws_auth(AWS_HOST_RMSV2, AWS_REGION_EAST)
    if option in (RmsEndpoint.MILESTONES, RmsEndpoint.TASKS):
        endpoints = [option.value]
    elif option == RmsEndpoint.MT:
        endpoints = [RmsEndpoint.MILESTONES.value, RmsEndpoint.TASKS.value]
    else:
        raise ValueError(f"{option} is not a supported option")

    fetched_data = [
        get_rms_data(AWS_HOST_RMSV2, rmsv2_auth, endpoint, predecessor=arn)
        for endpoint in endpoints
    ]
    logger.info(
        f"Grabbed the {option.value} successors for ARN {arn}:\n{fetched_data}"
    )
    return fetched_data
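
A call with RmsEndpoint.MT returns a two-element list (milestone successors first, then task successors), so it can be unpacked directly. Illustrative call with a placeholder ARN:

# Placeholder ARN; unpacks the two per-endpoint result lists.
milestones, tasks = get_milestones_tasks_successors(
    "arn:aws:rmsv2:::milestones/001", RmsEndpoint.MT)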
Code Example #14
from typing import Iterator, List

from regions_recon_python_common.buildables_dao_models.service_metadata import SERVICE_METADATA_LATEST_ARTIFACT, \
    ServiceMetadataLatest
from regions_recon_python_common.buildables_dao_models.service_plan import PLAN_BY_SERVICE_ARTIFACT, \
    PlanByService
from regions_recon_python_common.query_utils.region_metadata_query_utils import get_region_metadata
from regions_recon_python_common.utils.log import get_logger
from regions_recon_python_common.utils.rms_managed_regions import get_regions_within_one_day_post_launch, \
    get_regions_within_ninety_business_days_post_launch

logger = get_logger()

LAUNCH_BLOCKING_SERVICE_PLAN = "Globally Expanding - Launch Blocking"
MANDATORY_SERVICE_PLAN = "Globally Expanding - Mandatory"


def get_launch_blocking_services() -> Iterator[ServiceMetadataLatest]:
    return ServiceMetadataLatest.artifact_plan_index.query(
        SERVICE_METADATA_LATEST_ARTIFACT,
        range_key_condition=ServiceMetadataLatest.plan == LAUNCH_BLOCKING_SERVICE_PLAN
    )


def get_mandatory_services() -> Iterator[ServiceMetadataLatest]:
    return ServiceMetadataLatest.artifact_plan_index.query(
        SERVICE_METADATA_LATEST_ARTIFACT,
        range_key_condition=ServiceMetadataLatest.plan == MANDATORY_SERVICE_PLAN
    )
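
Both helpers return lazy query iterators over the artifact_plan_index; a minimal consumption sketch (materializing them only to count the results):

# Minimal consumption sketch of the query helpers above.
launch_blocking = list(get_launch_blocking_services())
mandatory = list(get_mandatory_services())
logger.info(f"{len(launch_blocking)} launch blocking and {len(mandatory)} mandatory services")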

Code Example #15
def test_format_csvrow():
    logger = get_logger()
    fake_services_seen_categories = {
        "ecytu": ServicePlan.MANDATORY,
        "stree": ServicePlan.MANDATORY
    }
    fake_rms_jsons = [{
        "arn": "arn:aws:rmsv2:::milestones/001",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "stree3",
        "name": "Milestone 1"
    }, {
        "arn": "arn:aws:rmsv2:::milestones/002",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "ecytu",
        "name": "Milestone 2"
    }, {
        "arn": "arn:aws:rmsv2:::milestones/003",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "llamada",
        "name": "Milestone 3"
    }, {
        "arn": "arn:aws:rmsv2:::milestones/004",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "cloudobserve",
        "name": "Milestone 4"
    }, {
        "arn": "arn:aws:rmsv2:::milestones/005",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "blueshift",
        "name": "Milestone 5"
    }, {
        "arn": "arn:aws:rmsv2:::milestones/006",
        "dimension": "COMMERCIAL_PARTITION_TEMPLATE",
        "service": "menervesah",
        "name": "Milestone 6"
    }]
    fake_service_to_arn = {
        "stree": {
            "arn": [
                "arn:aws:rmsv2:::milestones/007",
                "arn:aws:rmsv2:::milestones/008"
            ]
        }
    }
    fake_successors = [
        RmsMilestoneOrTaskData(
            rms_data=fake_rms_jsons[0],
            service_plan=ServicePlan.MANDATORY,
            on_path_predecessor=fake_service_to_arn["stree"]["arn"][0]),
        RmsMilestoneOrTaskData(
            rms_data=fake_rms_jsons[1],
            service_plan=ServicePlan.NON_GLOBAL,
            on_path_predecessor=fake_service_to_arn["stree"]["arn"][1]),
        RmsMilestoneOrTaskData(rms_data=fake_rms_jsons[2],
                               service_plan=ServicePlan.MANDATORY,
                               on_path_predecessor=fake_rms_jsons[0]["arn"]),
        RmsMilestoneOrTaskData(rms_data=fake_rms_jsons[3],
                               service_plan=ServicePlan.MANDATORY,
                               on_path_predecessor=fake_rms_jsons[1]["arn"]),
        RmsMilestoneOrTaskData(rms_data=fake_rms_jsons[4],
                               service_plan=ServicePlan.NON_GLOBAL,
                               on_path_predecessor=fake_rms_jsons[1]["arn"]),
        RmsMilestoneOrTaskData(rms_data=fake_rms_jsons[5],
                               service_plan=ServicePlan.MANDATORY,
                               on_path_predecessor=fake_rms_jsons[1]["arn"])
    ]

    expected_basic_output = [
        "stree", ServicePlan.MANDATORY.value,
        "stree3, ecytu, llamada, cloudobserve, blueshift, menervesah"
    ]

    expected_detailed_output = [
        "stree", ServicePlan.MANDATORY.value,
        f"{fake_service_to_arn['stree']['arn'][0]}\n{fake_service_to_arn['stree']['arn'][1]}",
        f"({fake_rms_jsons[0]['name']}, arn:aws:rmsv2:::milestones/001, stree3, {ServicePlan.MANDATORY.value}, {fake_service_to_arn['stree']['arn'][0]})\n\n"
        +
        f"({fake_rms_jsons[1]['name']}, arn:aws:rmsv2:::milestones/002, ecytu, {ServicePlan.NON_GLOBAL.value}, {fake_service_to_arn['stree']['arn'][1]})\n\n"
        +
        f"({fake_rms_jsons[2]['name']}, arn:aws:rmsv2:::milestones/003, llamada, {ServicePlan.MANDATORY.value}, {fake_rms_jsons[0]['arn']})\n\n"
        +
        f"({fake_rms_jsons[3]['name']}, arn:aws:rmsv2:::milestones/004, cloudobserve, {ServicePlan.MANDATORY.value}, {fake_rms_jsons[1]['arn']})\n\n"
        +
        f"({fake_rms_jsons[4]['name']}, arn:aws:rmsv2:::milestones/005, blueshift, {ServicePlan.NON_GLOBAL.value}, {fake_rms_jsons[1]['arn']})\n\n"
        +
        f"({fake_rms_jsons[5]['name']}, arn:aws:rmsv2:::milestones/006, menervesah, {ServicePlan.MANDATORY.value}, {fake_rms_jsons[1]['arn']})"
    ]

    basic_output, detailed_output = format_csvrow(
        "stree", fake_successors, fake_services_seen_categories,
        fake_service_to_arn)

    assert expected_basic_output == basic_output
    assert expected_detailed_output == detailed_output
Code Example #16
import logging

from dateutil import parser
import pytz
from regions_recon_python_common.utils.cloudwatch_metrics_utils import submit_cloudwatch_metrics, increment_metric
from regions_recon_python_common.message_multiplexer_helper import MessageMultiplexerHelper, get_mm_endpoint
from regions_recon_python_common.utils.constants import BUILDABLES_TABLE_NAME
from regions_recon_python_common.utils.log import get_logger
from regions_recon_python_common.utils.misc import get_rip_name_from_instance_field
from regions_recon_lambda.utils.dynamo_query import DynamoQuery
from regions_recon_lambda.slips_email.slips_email_dao import SlipsEmailDAO

MESSAGE_GROUP_NAME = "region-slips"
CONDENSED_DATE_FORMAT = "%Y-%m-%d"
SLIP_DELTA = 30
NORMALIZED_DATE_FORMAT = "%Y-%m-%d %H:%M %Z"

logger = get_logger(logging.INFO)
mmlogger = logging.getLogger('messagemultiplexer')
mmlogger.setLevel(logging.WARN)


def send_slips_mail(event, context):
    try:
        if 'debug' in event:
            logger.setLevel(logging.DEBUG)
        if 'debug' in event or 'logmm' in event:
            mmlogger.setLevel(logging.INFO)
        dryrun = 'dryrun' in event

        metrics = {}
        metrics["message_send_success"] = 0
        metrics["message_send_failure"] = 0