def send_slips_mail(event, context):
    # Compute these before the try block so the except/finally handlers can
    # always reference them, even if setup fails early.
    dryrun = 'dryrun' in event
    metrics = {"message_send_success": 0, "message_send_failure": 0}

    try:
        if 'debug' in event:
            logger.setLevel(logging.DEBUG)
        if 'debug' in event or 'logmm' in event:
            mmlogger.setLevel(logging.INFO)

        dao = SlipsEmailDAO()

        results = send_slips_mail_internal(dao, dryrun, metrics)
        metrics = increment_metric(metrics, "message_send_success")
        return results

    except Exception:
        logger.exception("Lambda function failed.")
        metrics = increment_metric(metrics, "message_send_failure")
        raise

    finally:
        if dryrun:
            logger.info(
                "CloudWatch metrics (not sent because dryrun): {}".format(
                    metrics))
        else:
            submit_cloudwatch_metrics(metrics_dict=metrics,
                                      service_name='ServiceSlipsMailer')
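
All of these snippets share the same pattern: a plain dict of counters is threaded through increment_metric and finally published with submit_cloudwatch_metrics. Those helpers are not included on this page; the following is only a minimal sketch of what dict-based implementations with those names might look like (assumed, not the actual library code).

import boto3


def increment_metric(metrics, name, value=1):
    """Bump counter `name` in the metrics dict by `value`, creating it if missing."""
    metrics[name] = metrics.get(name, 0) + value
    return metrics


def submit_cloudwatch_metrics(metrics_dict, service_name, namespace="Recon"):
    """Publish every counter in `metrics_dict` as a CloudWatch metric datum."""
    # Namespace "Recon" is a placeholder; the real namespace is not shown on this page.
    cloudwatch = boto3.client("cloudwatch")
    cloudwatch.put_metric_data(
        Namespace=namespace,
        MetricData=[{
            "MetricName": name,
            "Dimensions": [{"Name": "Service", "Value": service_name}],
            "Value": value,
            "Unit": "Count",
        } for name, value in metrics_dict.items()])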
Example #2
    def cleanup_sqs(self, record):
        if record.receipt_handle:
            self.delete_sqs_message(handle=record.receipt_handle)
        elif not is_in_list_of_ignored_services(record.rip_name):
            logger.error(
                f"lambda was not invoked by sqs and service/region {record.rip_name} not in ignored allowlist"
            )
            self.metrics = increment_metric(
                self.metrics, "lambda_not_invoked_by_sqs_or_tests")
        else:
            self.metrics = increment_metric(self.metrics,
                                            "integ_or_canary_execution")
Example #3
    def process_rip_message(self, message):
        change_status = message["status"]
        logger.debug("change_status is {}".format(change_status))
        if change_status != "APPROVED":
            self.metrics = increment_metric(self.metrics,
                                            "change_not_approved")
            logger.info("Ignoring unapproved message: {}".format(message))
            return

        dimension_type = message["dimension"]["type"]
        if dimension_type == "SERVICE":
            buildable_item = self.get_buildable_service(message=message)
        elif dimension_type == "REGION":
            buildable_item = self.get_buildable_region(message=message)
        elif dimension_type == "TITAN":
            logger.info("Ignoring TITAN {}".format(
                message["dimension"]["name"]))
            return
        else:
            self.metrics = increment_metric(self.metrics,
                                            "unknown_dimension_type")
            logger.error(
                "Unknown dimension type for message {}".format(message))
            return

        if buildable_item is None:
            return

        buildable_item.backfill_item_with_ddb_data()
        buildable_item.local_item.update(self.get_updating_agent_value())

        if dimension_type == "REGION":
            buildable_item = validate_region_item(buildable_item)

        self.metrics = merge_metrics_dicts(merge_to=self.metrics,
                                           merge_from=buildable_item.metrics)

        if dimension_type == "REGION":
            self.region_items_to_write.append(
                convert_buildable_region_to_model(buildable_item))

        elif is_service_metadata(buildable_item):
            self.service_metadata_items_to_write.append(
                convert_buildable_service_metadata_to_model(buildable_item))

        elif is_service_plan(buildable_item):
            self.service_plan_items_to_write.append(
                convert_buildable_service_plan_to_model(buildable_item))

        else:
            self.ddb_items_to_write += buildable_item.items_pending_write
def get_rms_and_recon_combined_object(all_milestones, service):
    global metrics
    service_name = service["instance"].split(":")[0]
    service_region = service["instance"].split(":")[2]

    # gets the launch date and trims off the time portion (YYYY-MM-DD)
    service_launch_date_long = service.get("date", "-")
    service_launch_date = get_short_date(service_launch_date_long)

    # iterate over all milestones returned by RMS to find the one we care about: GA
    for milestone in all_milestones:

        # make sure the data is as expected before we start reaching into it.
        if validate_rms_milestone(milestone, logger):
            namespace_milestone = milestone["namespace"]

            # checks if namespace for this milestone matches
            # * GA milestone specific namespace format
            # * Service name matches
            # * Service region matches
            GA_matched_namespace = f"instance/service/{service_name}/{service_region}/GA" in namespace_milestone

            if GA_matched_namespace:
                # logger.info(json.dumps(milestone, indent=2))

                milestone_status = milestone["status"]
                if milestone_status == "COMPLETED":
                    rms_date = "COMPLETED"
                    slack = None
                else:
                    early_finish = milestone[
                        "early_finish"]  # one we care about
                    late_finish = milestone[
                        "late_finish"]  # might care about range
                    slack = milestone["slack"]
                    rms_date = get_short_date(early_finish)

                if (valid_date(service_launch_date) and valid_date(rms_date)):
                    date_diff = days_between(service_launch_date, rms_date)
                else:
                    date_diff = None

                return {
                    "service": service_name,
                    "region": service_region,
                    "service_launch_date": service_launch_date,
                    "rms_early_finish_date": rms_date,
                    "slack": slack,
                    "date_diff": date_diff
                }

    # If we fall through the loop, no matching GA milestone was found.
    logger.error(
        "Was not able to find GA milestone for service: {} in region: {}".format(
            service_name, service_region))
    metrics = increment_metric(metrics, "no_ga_milestone_found")
def find_service_and_update_dynamo(data, all_services_plans):
    global metrics

    composedData = "{}:v0:{}".format(data["service"], data["region"])
    found = False

    # find the ddb data that matches this piece of data
    for service in all_services_plans:
        service_instance = service["instance"]

        if service_instance == composedData:
            found = True
            if service.get(
                    "rms_early_finish_date") == data["rms_early_finish_date"]:
                logger.debug(
                    "🏃‍♂️ No need to write. No data changed for service {} in region {}, date: {}"
                    .format(data["service"], data["region"],
                            data["rms_early_finish_date"]))
                metrics = increment_metric(metrics,
                                           "ddb_item_already_uptodate")
            else:
                original_date = service.get("rms_early_finish_date",
                                            "non-existent")
                logger.info(
                    "✅ Found new date value from RMS for service {} in region {}, writing to DB. Was {}, is now {}"
                    .format(data["service"], data["region"], original_date,
                            data["rms_early_finish_date"]))

                plan = ServicePlan.create_and_backfill(
                    artifact="SERVICE",
                    instance=service_instance,
                    rms_early_finish_date=data["rms_early_finish_date"],
                    rms_early_finish_date_updated=datetime.now().isoformat(),
                )

                plan.save()

                logger.debug(f"UPDATED: {plan}")
                metrics = increment_metric(metrics, "ddb_updated_item")

            break

    return found
Example #6
    def get_buildable_region(self, message):
        """
        Process a message about a change to a RIP region.  Return a BuildableRegion item.
        Sample message - see region_non_ga.json
        """
        item_attrs = self.get_attributes_from_new_value(
            map_of_attr_names=get_region_attributes_to_buildables_map(),
            new_value=message["newValue"])

        region_ac = message["dimension"]["name"]
        if is_in_list_of_ignored_regions(region_ac):
            logger.info(
                f"Ignoring REGION '{region_ac}' - it's an ignored region (TEST/RETAIL/CLOSED)"
            )
            return None

        item_attrs["airport_code"] = region_ac
        item_attrs["name_sortable"] = region_ac
        item_attrs[
            "updated"] = self.get_date_iso_string_from_epoch_milliseconds_timestamp(
                timestamp=message["approvedDate"])
        item_attrs["updater"] = message["registrant"]
        accessibility_attrs = message.get("newValue",
                                          {}).get("accessibilityAttributes")
        unwanted_attrs = ["TEST", "RETAIL", "CLOSING"]

        if accessibility_attrs is not None and any(
                item in unwanted_attrs for item in accessibility_attrs):
            logger.warning(
                f"Region is test, retail, or closing and is not tracked in the database. Accessibility attributes for region '{region_ac}' are '{accessibility_attrs}'"
            )
            return None
        if "status" in item_attrs.keys() and item_attrs["status"] == "GA":
            date_from_message = message.get("newValue",
                                            {}).get("statusDates",
                                                    {}).get("GA")
        else:
            date_from_message = message.get("newValue",
                                            {}).get("estimatedLaunchDate")

        try:
            item_attrs[
                "date"] = self.get_date_iso_string_from_epoch_milliseconds_timestamp(
                    timestamp=date_from_message)
        except TypeError:
            logger.warning(
                "region contains an unreadable date: {}".format(message))

        instance = self.get_region_instance_value(region_ac=region_ac,
                                                  version=0)
        buildable_item = BuildableRegion(instance=instance,
                                         table=self.get_table(),
                                         **item_attrs)
        self.metrics = increment_metric(self.metrics, "region_change")
        return buildable_item
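
get_buildable_region above and get_buildable_service (Example #12 below) both convert RIP's epoch-millisecond timestamps via self.get_date_iso_string_from_epoch_milliseconds_timestamp, which is not shown on this page. A minimal sketch of what a helper with that name might do, written here as a free function under that assumption:

from datetime import datetime, timezone


def get_date_iso_string_from_epoch_milliseconds_timestamp(timestamp):
    """Convert an epoch-milliseconds value (e.g. a RIP approvedDate) to an ISO-8601 string.

    A None timestamp raises TypeError, which is why callers wrap this in try/except TypeError.
    """
    return datetime.fromtimestamp(timestamp / 1000, tz=timezone.utc).isoformat()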
def _send_mail(ddb, notifications, service, state, contact, template, endpoint_params, mm_helper, metrics):
    msg_group_name = create_msg_group_name(contact)
    message_group_arn = mm_helper.get_own_message_group_arn(message_group_name=msg_group_name)

    _prepare_mm_target(message_group_arn, contact, endpoint_params, msg_group_name, template, mm_helper)

    regions = sorted(state["regions"].values(), key=lambda item: item["region"])
    gm, vp = get_contact_names(service["contacts"])

    params = {
        "message_group_name": msg_group_name,
        "params": json.dumps({
            "name_rip": service["rip"],
            "name_long": service["name"],
            "l8": gm,
            "l10": vp,
            "regions": regions
        })
    }

    try:
        logger.info("sending message to {} stage with payload: {} ".format(contact, params))
        response = mm_helper.perform_operation(operation="send_message", params=params, convert_message_group_name=True)

        logger.info("sent mail! {}".format(response))
        metrics = increment_metric(metrics, "message_send_success")

        for region in state["regions"]:
            _update_notification(ddb, service, state, state["regions"][region], 0)

    except Exception:
        logger.exception("unable to send message: '{}': ".format(params))
        metrics = increment_metric(metrics, "message_send_failure")
        fallback_sent = False
        for region in state["regions"]:
            notification = notifications["delivery-date-{}-{}".format(service["rip"], region)]
            # Escalate to the fallback path once the retry budget is exhausted,
            # but send that fallback at most once per failed message.
            if not fallback_sent and notification["retries"] + 1 >= 3:
                metrics = _send_fallback(service, state, contact, template, mm_helper, metrics)
                fallback_sent = True
            _update_notification(ddb, service, state, state["regions"][region], notification["retries"] + 1)
    return metrics
def _send_fallback(service, state, contact, template, mm_helper, metrics):
    msg_group_name = GROUP_PREFIX+"-"+CC_TEAM
    message_group_arn = mm_helper.get_own_message_group_arn(message_group_name=msg_group_name)
    endpoint_params = dict(
            EMAIL_SUBJECT="Please Forward: Action Requested for Scheduled Launch".format(state["value"]),
            EMAIL_SHORT_NAME="Recon"
        )
    template = get_fallback_header() + template
    _prepare_mm_target(message_group_arn, CC_TEAM, endpoint_params, msg_group_name, template, mm_helper)
    gm, vp = get_contact_names(service["contacts"])

    params = {
        "message_group_name": msg_group_name,
        "params": json.dumps({
            "name_rip": service["rip"],
            "name_long": service["name"],
            "l8": gm,
            "l10": vp,
            "regions": state["regions"],
            "contact": contact
        })
    }

    try:
        logger.info("sending message to {} stage with payload: {} ".format(CC_TEAM, params))
        response = mm_helper.perform_operation(operation="send_message", params=params, convert_message_group_name=True)

        logger.info("sent mail! {}".format(response))
        metrics = increment_metric(metrics, "message_send_success")

    except Exception:
        logger.exception("unable to send message: '{}': ".format(params))
        metrics = increment_metric(metrics, "message_send_failure")
    
    logger.debug("metrics in fallback: {}".format(metrics))
    return metrics
Example #9
    def run_workflow(self):
        logger.debug("Running workflow")

        logger.debug("Processing {} records".format(len(self.records)))
        for record in self.records:
            self.metrics = {"records_processed": 1}

            if record.change_type in PROCESS_CHANGE_TYPES:
                logger.debug("message is {}".format(record.message))
                self.process_rip_message(message=record.message)
                self.write_to_dynamo()
            else:
                logger.debug("ignoring change type {}".format(
                    record.change_type))
                self.metrics = increment_metric(self.metrics,
                                                "ignored_change_type")

            self.cleanup_sqs(record)
            submit_cloudwatch_metrics(metrics_dict=self.metrics,
                                      service_name=self.metrics_service_name)
def call_rms_and_create_diff_object(auth, service):
    global metrics

    service_name = service["instance"].split(":")[0]
    service_region = service["instance"].split(":")[2]
    if (validate_service_name(service_name, logger)
            and validate_airport_code(service_region, logger)):
        service_name = service_name.replace("/", "+")
        encoded_service_name = urllib.parse.quote(service_name)
        encoded_service_region = urllib.parse.quote(service_region)

        logger.debug("calling rms for data for {} in {}".format(
            service_name, service_region))

        url = "https://{}/dates?region={}&service={}".format(
            AWS_RMS_ANALYTICS, encoded_service_region, encoded_service_name)
        response = requests.get(url, auth=auth)

        json_object = response.json()

        all_milestones = json_object.get("dates", None)
        if all_milestones is not None:
            return get_rms_and_recon_combined_object(all_milestones, service)
        else:
            logger.error(
                "RMS Response did not come back as expected for service {} in region {}"
                .format(service_name, service_region))
            ### Sometimes you will get this error if you are not allowlisted - to do so
            # Cut a ticket to "AWS / Regions Management Service / Contact Us"
            # Title: "[Analytics] Dates Allowlisting Request"
            # Include your use case, and accounts/roles/users to allowlist

            metrics = increment_metric(metrics, "rms_response_invalid")
    else:
        logger.error(
            "There was an issue validating the service coming back from Recon: {} , {}"
            .format(service_name, service_region))
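
The two RMS helpers above are presumably driven by a loop over the Recon service plans; the actual entry point is not included on this page. A hypothetical driver, with placeholder names called out in the comments, might look like this:

# Hypothetical driver tying the RMS helpers together; `get_rms_auth`,
# `get_all_service_plans`, and the "service_not_found_in_ddb" metric name
# are placeholders, not part of the snippets above.
def sync_rms_dates(event, context):
    global metrics
    metrics = {}

    auth = get_rms_auth()  # placeholder: however RMS credentials are obtained
    all_services_plans = get_all_service_plans()  # placeholder: query of SERVICE plan items

    for service in all_services_plans:
        diff = call_rms_and_create_diff_object(auth, service)
        if diff is None:
            continue
        if not find_service_and_update_dynamo(diff, all_services_plans):
            metrics = increment_metric(metrics, "service_not_found_in_ddb")

    submit_cloudwatch_metrics(metrics_dict=metrics,
                              service_name=context.function_name)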
def send_updates_mail(event, context):
    # In days.
    PAST_SYSTEM_EVENT_THRESHOLD = int(
        os.environ["PAST_SYSTEM_EVENT_THRESHOLD"])

    logger.info("event: {}".format(event))
    logger.info("context: {}".format(context.__dict__))

    metrics = {}
    metrics["message_send_success"] = 0
    metrics["message_send_failure"] = 0

    ddb = boto3.client("dynamodb", region_name='us-east-1')

    now = datetime.now(timezone.utc)
    notifications = {}
    update_limit = now - timedelta(hours=TIME_HORIZON)
    oldest_update = now
    table = os.environ["BUILDABLES_TABLE_NAME"]
    items = []
    services = {}

    try:
        response = ddb.query(TableName=table,
                             KeyConditions={
                                 "artifact": {
                                     "ComparisonOperator": "EQ",
                                     "AttributeValueList": [{
                                         "S": "NOTIFICATION"
                                     }]
                                 }
                             },
                             QueryFilter={
                                 "type": {
                                     "ComparisonOperator": "EQ",
                                     "AttributeValueList": [{
                                         "S": "update"
                                     }]
                                 }
                             })

        if "Items" not in response.keys():
            logger.error(
                "no NOTIFICATION artifacts are configured in the database, bozo"
            )
        else:
            items = response["Items"]

        logger.info(
            "STEP 1: regain our memory about the state of notifications")
        for item in items:
            # Protect ourselves from poorly formatted date values.
            updated = update_limit
            try:
                updated = (parser.parse(
                    item["updated"]["S"])).replace(tzinfo=pytz.UTC)
                if updated < update_limit:
                    updated = update_limit
            except Exception as e:
                logger.error(
                    "could not parse date, IGNORE THIS IF WE ARE BOOTSTRAPPING: '{}': {}"
                    .format(item["updated"]["S"], e))
                updated = update_limit

            notification = {
                "instance": item["instance"]["S"],
                "updated": updated,
                "region": item["region"]["S"],
                "type": item["type"]["S"]
            }

            notifications[notification["region"]] = notification
            logger.info("NOTIFICATION: {}".format(notification))

            # Figure out how far back in time we need to look to satisfy whichever notification is the MOST out-of-date.
            # We don't want to have to make separate queries for every notification.
            if notification["updated"] < oldest_update:
                oldest_update = notification["updated"]

        logger.info("oldest_update: {}, update_limit {}".format(
            oldest_update, update_limit))

        logger.info("STEP 2: get list of services that have categories")
        try:
            total = 0
            args = dict(TableName=table,
                        IndexName="artifact-plan-index",
                        KeyConditions={
                            "artifact": {
                                "ComparisonOperator": "EQ",
                                "AttributeValueList": [{
                                    "S": "SERVICE"
                                }]
                            },
                        },
                        QueryFilter={
                            "version_instance": {
                                "ComparisonOperator": "EQ",
                                "AttributeValueList": [{
                                    "N": "0"
                                }]
                            }
                        })

            while True:
                response = ddb.query(**args)

                if "Items" not in response:
                    logger.error("no SERVICE instances returned")
                else:
                    items = response["Items"]
                    for item in items:
                        rip = (item["instance"]["S"].split(":"))[0]
                        if rip not in services:
                            services[rip] = dict(
                                # Raw items about each region for which we have data about this service.
                                regions={},

                                # Raw items about this service's non-regional data.
                                instances={})

                        if belongs_to_region(item):
                            # This is a region-specific item.
                            region = item["belongs_to_instance"]["S"]
                            if region in notifications:
                                if region not in services[rip]["regions"]:
                                    services[rip]["regions"][region] = dict(
                                        history={0: item},
                                        notification=notifications[region])
                                services[rip]["regions"][region]["history"][
                                    int(item["version_instance"]["N"])] = item
                        else:
                            services[rip]["instances"][item["version_instance"]
                                                       ["N"]] = item

                    total += len(items)

                    if "LastEvaluatedKey" in response:
                        args["ExclusiveStartKey"] = response[
                            "LastEvaluatedKey"]
                    else:
                        break

            # DATA CLEANUP
            # There are some services without categories set on their non-regional instances. These are typically deprecated.
            # for now, we need to remove these as they will wreck the code later on and it's unclear how to "patch" the data
            # in an automated way that is helpful to the customer.
            delete = [
                key for key in services if len(services[key]["instances"]) == 0
            ]
            logger.warning(
                "CLEANUP: eliding services which lack proper category settings: {}"
                .format(delete))
            for key in delete:
                del services[key]

            logger.info("{} SERVICES: {}".format(len(services),
                                                 sorted(services.keys())))

            try:
                args = dict(TableName=table,
                            IndexName="artifact-updated-index",
                            KeyConditions={
                                "artifact": {
                                    "ComparisonOperator": "EQ",
                                    "AttributeValueList": [{
                                        "S": "SERVICE"
                                    }]
                                },
                                "updated": {
                                    "ComparisonOperator":
                                    "BETWEEN",
                                    "AttributeValueList": [{
                                        "S":
                                        oldest_update.isoformat()
                                    }, {
                                        "S": now.isoformat()
                                    }]
                                }
                            },
                            QueryFilter={
                                "version_instance": {
                                    "ComparisonOperator": "EQ",
                                    "AttributeValueList": [{
                                        "N": "0"
                                    }]
                                },
                            })

                no_cat = 0
                service_specific = 0
                unconfig_region = 0
                unconfig_regions = []
                possibly_relevant = 0
                while True:
                    response = ddb.query(**args)

                    if "Items" not in response:
                        logger.info(
                            "STEP 3: no service updates since {}".format(
                                oldest_update.strftime(
                                    NORMALIZED_DATE_FORMAT_WITH_SEC)))
                    else:
                        items = response["Items"]
                        logger.info(
                            "STEP 3: reviewing a batch of {} service updates since {}"
                            .format(
                                len(items),
                                oldest_update.strftime(
                                    NORMALIZED_DATE_FORMAT)))

                        for item in items:
                            # Skip everything that's not categorized.
                            rip = (item["instance"]["S"].split(":"))[0]
                            if rip not in services:
                                no_cat += 1
                            else:
                                # Service-specific row.
                                if "belongs_to_instance" not in item:
                                    services[rip]["instances"][
                                        item["version_instance"]["N"]] = item
                                    service_specific += 1
                                else:
                                    # Service-in-region row.
                                    region = item["belongs_to_instance"]["S"]
                                    if region not in notifications:
                                        unconfig_region += 1
                                        if region not in unconfig_regions:
                                            unconfig_regions.append(region)
                                            logger.warning(
                                                "no NOTIFICATION configured for region {}"
                                                .format(
                                                    item["belongs_to_instance"]
                                                    ["S"]))
                                            logger.warning(
                                                "EXAMPLE of WEIRD instance: {}"
                                                .format(item))
                                    else:
                                        possibly_relevant += 1
                                        logger.info(
                                            "adding history for {} in {}".
                                            format(rip, region))
                                        if region not in services[rip][
                                                "regions"]:
                                            logger.info(
                                                "adding history for {} in {}".
                                                format(rip, region))
                                            services[rip]["regions"][
                                                region] = dict(
                                                    history={0: item},
                                                    notification=notifications[
                                                        region])
                                        else:
                                            services[rip]["regions"][region][
                                                "history"][int(
                                                    item["version_instance"]
                                                    ["N"])] = item

                    if "LastEvaluatedKey" in response:
                        args["ExclusiveStartKey"] = response[
                            "LastEvaluatedKey"]
                    else:
                        break

                logger.info(
                    "SKIPPED UPDATES: {} were for services that are not categorized, {} were for regions for which we are not configured to send updates, {} were service-specific (non-regional) updates"
                    .format(no_cat, unconfig_region, service_specific))
                logger.info("{} updates are possibly relevant".format(
                    possibly_relevant))

                # Now we're ready to start matching up the service information we have with the messages that need sending.
                messages = {}
                for service_key in services:
                    service = services[service_key]
                    for region_key in service["regions"]:

                        # # # # HACK # # # #
                        # remove_system_updates(service, region_key)
                        # # # # HACK # # # #

                        service_in_region = service["regions"][region_key]
                        if (region_key in notifications) and (
                                "history" in service_in_region) and (len(
                                    service_in_region["history"]) > 0):
                            notification = notifications[region_key]
                            if 'updated' in service_in_region["history"][
                                    0] and 'S' in service_in_region["history"][
                                        0]['updated']:
                                region_updated_date = parser.parse(
                                    service_in_region["history"][0]["updated"]
                                    ["S"]).replace(tzinfo=pytz.UTC)
                                # logger.info("STEP 4 (loop): compare dates: {} last updated on {}, region {} last notification was {}".format(service_key, region_updated_date, region_key, notification["updated"]))
                                if region_updated_date >= notification[
                                        "updated"]:
                                    logger.info(
                                        "STEP 4.a: TRIGGERED {} in {} last updated on {}, last notification sent on {}"
                                        .format(service_key, region_key,
                                                region_updated_date,
                                                notification["updated"]))

                                    previous_dates = []
                                    try:
                                        response = ddb.query(
                                            TableName=table,
                                            KeyConditions={
                                                "artifact": {
                                                    "ComparisonOperator":
                                                    "EQ",
                                                    "AttributeValueList": [{
                                                        "S":
                                                        "SERVICE"
                                                    }]
                                                },
                                                "instance": {
                                                    "ComparisonOperator":
                                                    "BEGINS_WITH",
                                                    "AttributeValueList": [{
                                                        "S":
                                                        service_key
                                                    }]
                                                }
                                            },
                                            QueryFilter={
                                                "belongs_to_instance": {
                                                    "ComparisonOperator":
                                                    "EQ",
                                                    "AttributeValueList": [{
                                                        "S":
                                                        region_key
                                                    }]
                                                }
                                            })

                                        if "Items" not in response.keys():
                                            logger.info(
                                                "no service history items.  wtf."
                                            )
                                        else:
                                            items = response["Items"]
                                            logger.info(
                                                "STEP 4.a.1: {} service history in {} response contains {} items"
                                                .format(
                                                    service_key, region_key,
                                                    len(items)))

                                            for item in items:
                                                index = int(
                                                    item["version_instance"]
                                                    ["N"])
                                                service_in_region["history"][
                                                    index] = item

                                            # # # # HACK # # # #
                                            # remove_system_updates(service, region_key)
                                            # # # # HACK # # # #

                                            history = service_in_region[
                                                "history"]
                                            logger.info(
                                                "{} in {} has {} versions".
                                                format(service_key, region_key,
                                                       len(history)))
                                            logger.info(
                                                "most recent instance is: {}".
                                                format(history[0]))

                                            new_confidence = "({})".format(
                                                get_confidence(history[0]))

                                            new_date = "See Note"
                                            if "date" in history[0]:
                                                new_date = condense_date_text(
                                                    history[0]["date"]["S"])
                                            else:
                                                if "updated" in history[0]:
                                                    new_date = condense_date_text(
                                                        history[0]["updated"]
                                                        ["S"])
                                                else:
                                                    # Give up. Uncertain how to report on this if we don't know when it happened.
                                                    logger.error(
                                                        "NO UPDATED DATE: {}".
                                                        format(history))
                                                    break

                                            # The bare minimum requirements to generate an email are a change to date, confidence, note, or status.
                                            # Most specifically, this will exclude changes to only the category since those aren't noted in the email
                                            # (an email based solely on a category change would, confusingly, contain no changed information).
                                            # See https://sim.amazon.com/issues/RECON-4560.

                                            if not has_relevant_changes(
                                                    history):
                                                # Log this so we can verify we are accurately identifying which services were updated.
                                                logger.warning(
                                                    f"IGNORING update with no relevant changes to date, note, or confidence and no status change to GA: {history}"
                                                )
                                                break

                                            if "updater" in history[0]:
                                                if ((history[0]["updater"]["S"]
                                                     == "system") or
                                                    (history[0]["updater"]["S"]
                                                     == "RIPScorecardListener")
                                                    ):
                                                    logger.warning(
                                                        "IGNORING SYSTEM UPDATE: {}"
                                                        .format(history[0]))

                                            # Sometimes we get system updates for things in the past. I don't know why. Sure, I'd like to know.
                                            # But figuring things like that out takes time and I have to finish this report RIGHT NOW. Maybe
                                            # there will be time in the future to look into this.
                                            #
                                            # So, let's not send mail about past updates so we don't get questions about it from users.
                                            if ("date" in history[0]) and (
                                                    "updated" in history[0]):
                                                difference = (
                                                    parser.parse(
                                                        history[0]["updated"]
                                                        ["S"]).replace(
                                                            tzinfo=pytz.UTC) -
                                                    parser.parse(
                                                        history[0]["date"]["S"]
                                                    ).replace(tzinfo=pytz.UTC)
                                                ).days
                                                logger.warning(
                                                    "POTENTIAL OLD event for {} in {}, difference: {}"
                                                    .format(
                                                        service_key,
                                                        region_key,
                                                        difference))
                                                if difference > PAST_SYSTEM_EVENT_THRESHOLD:
                                                    logger.error(
                                                        "IGNORING an update about a past event for {} in {} {} days ago: {}"
                                                        .format(
                                                            service_key,
                                                            region_key,
                                                            difference,
                                                            history))
                                                    break

                                            updater = ""
                                            if "updater" in history[0]:
                                                updater = history[0][
                                                    "updater"]["S"]

                                            note = ""
                                            if "note" in history[0]:
                                                note = history[0]["note"]["S"]

                                            if ("date" not in history[0]) and (
                                                    "confidence"
                                                    not in history[0]):
                                                if ("status" not in history[0]
                                                    ) or (len(history[0]
                                                              ["status"]["S"])
                                                          == 0):
                                                    # This is another case where we have nothing to report if there's no date, status, or confidence change.
                                                    # Just abort.
                                                    logger.error(
                                                        "IGNORING an update with no date, status, or confidence change for {} in {}: {}"
                                                        .format(
                                                            service_key,
                                                            region_key,
                                                            history))
                                                    break
                                                if (("status" in history[0])
                                                        and
                                                    (history[0]["status"]["S"]
                                                     == "GA")):
                                                    # Only send these on GA/Complete. Otherwise, we send redundant updates about build status changes which RMS
                                                    # (presumably) is already sending.
                                                    pass
                                                else:
                                                    break

                                            # De-dupe.
                                            i = 1
                                            previous_dates = []
                                            end = len(history) - 1
                                            tup = (("date", new_date),
                                                   ("confidence",
                                                    new_confidence))
                                            seen = set()
                                            seen.add(tup)
                                            while i < end:
                                                previous = {}
                                                if "date" in history[i]:
                                                    previous = {
                                                        "date":
                                                        condense_date_text(
                                                            history[i]["date"]
                                                            ["S"]),
                                                        "confidence":
                                                        ""
                                                    }
                                                    if "confidence" in history[
                                                            i]:
                                                        previous[
                                                            "confidence"] = "(" + history[
                                                                i]["confidence"][
                                                                    "S"] + ")"
                                                    tup = (
                                                        ("date",
                                                         previous["date"]),
                                                        ("confidence",
                                                         previous["confidence"]
                                                         ))

                                                    logger.warning(
                                                        "TUP COMPARE: look for '{}' in '{}'"
                                                        .format(tup, seen))

                                                    if tup not in seen:
                                                        seen.add(tup)
                                                        previous_dates.append(
                                                            previous)
                                                i += 1

                                            # Sort.
                                            previous_dates = sorted(
                                                previous_dates,
                                                key=lambda item: item["date"],
                                                reverse=True)

                                            # Add visual separators.
                                            i = 0
                                            end = len(previous_dates)
                                            while i < end:
                                                separator = ""
                                                if i + 1 < end:
                                                    separator = ","
                                                previous_dates[i][
                                                    "separator"] = separator
                                                i += 1

                                            name = get_service_name(
                                                service["instances"]["0"])

                                            if region_key not in messages:
                                                messages[region_key] = {
                                                    "services": []
                                                }

                                            if (updater == "") and (note
                                                                    == ""):
                                                note = "automated update"

                                            messages[region_key][
                                                "services"].append(
                                                    dict(
                                                        rip=service_key,
                                                        service_name=name,
                                                        new_date=new_date,
                                                        new_confidence=
                                                        new_confidence,
                                                        previous=previous_dates,
                                                        note=note,
                                                        changed_on_date=
                                                        region_updated_date.
                                                        strftime(
                                                            NORMALIZED_DATE_FORMAT
                                                        ),
                                                        actor_username=updater
                                                        if updater != "system"
                                                        else ""))
                                            messages[region_key][
                                                "last_sent_date"] = notification[
                                                    "updated"].strftime(
                                                        NORMALIZED_DATE_FORMAT)
                                            messages[region_key][
                                                "airport_code"] = notification[
                                                    "region"]

                                    except Exception as e:
                                        logger.error(
                                            "unable to fetch history in region for {} because {}\n{}"
                                            .format(service_key, repr(e),
                                                    format_exc()))
                                else:
                                    pass
                                    # logger.info("STEP 4.b: we've already sent mail about {} in {}".format(service_key, notification["region"]))

                logger.info("STEP 5: preparing to send {} messages".format(
                    len(messages)))
                for message_key in messages.keys():
                    service_list = sorted(
                        messages[message_key]["services"],
                        key=lambda item: item["service_name"])

                    params = {
                        "message_group_name":
                        notifications[message_key]["instance"],
                        "params":
                        json.dumps({
                            "last_sent_date":
                            messages[message_key]["last_sent_date"],
                            "airport_code":
                            messages[message_key]["airport_code"],
                            "services":
                            service_list
                        })
                    }

                    try:
                        logger.info(
                            "sending message to {} stage with payload: {} ".
                            format(MM_STAGE, params))
                        mm_helper = MessageMultiplexerHelper(
                            endpoint=get_mm_endpoint(MM_STAGE),
                            own_account_number=os.environ["ACCOUNT_ID"])
                        response = mm_helper.perform_operation(
                            operation="send_message",
                            params=params,
                            convert_message_group_name=True)

                        logger.info("sent mail! {}".format(response))
                        metrics = increment_metric(metrics,
                                                   "message_send_success")

                        try:
                            params = dict(
                                TableName=table,
                                Key={
                                    "artifact": {
                                        "S": "NOTIFICATION"
                                    },
                                    "instance": {
                                        "S":
                                        notifications[message_key]["instance"]
                                    }
                                },
                                AttributeUpdates={
                                    "updated": {
                                        "Action": "PUT",
                                        "Value": {
                                            "S":
                                            now.strftime(
                                                NORMALIZED_DATE_FORMAT_WITH_SEC
                                            )
                                        }
                                    }
                                })
                            response = ddb.update_item(**params)
                            logger.info(
                                "STEP 5: successfully updated NOTIFICATION entry with {}: {}"
                                .format(params, response))

                        except Exception as e:
                            logger.error(
                                "unable to update notifications with '{}' because {} \n{}"
                                .format(params, repr(e), format_exc()))

                    except Exception as e:
                        logger.error(
                            "unable to send message: {}, because {} \n{}".
                            format(params, repr(e), format_exc()))
                        metrics = increment_metric(metrics,
                                                   "message_send_failure")

            except Exception as e:
                logger.error("unable to query for updates: {} \n{}".format(
                    repr(e), format_exc()))

        except Exception as e:
            logger.error(
                "unable to get list of categorized services: {} \n{}, bozo".
                format(repr(e), format_exc()))

    except Exception as e:
        logger.error("unable to read in notifications: {} \n{}".format(
            repr(e), format_exc()))

    submit_cloudwatch_metrics(metrics_dict=metrics,
                              service_name=context.function_name)
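
send_updates_mail pages through its DynamoDB queries by re-issuing ddb.query with ExclusiveStartKey until the response no longer carries a LastEvaluatedKey. Factored out of the handler, the pattern is roughly the following sketch (the real code above keeps these loops inline):

def query_all_pages(ddb, **query_args):
    """Yield every item of a DynamoDB query, following LastEvaluatedKey pagination."""
    while True:
        response = ddb.query(**query_args)
        for item in response.get("Items", []):
            yield item
        if "LastEvaluatedKey" in response:
            query_args["ExclusiveStartKey"] = response["LastEvaluatedKey"]
        else:
            break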
Example #12
    def get_buildable_service(self, message):
        """
            Process a message about a change to a RIP service.  Return a BuildableService item.
            Sample message - see service_instance.json or new_service.json or new_component_service.json
        """

        rip_name = message["dimension"]["name"]
        parent_dimension_key = message.get("newValue", {}).get(
            "parentDimensionKey")

        if not parent_dimension_key:
            logger.info(f"Ingesting new Service '{rip_name}'!!!")
            item_attrs = self.get_attributes_from_new_value(
                map_of_attr_names=get_service_attributes_to_buildables_map(),
                new_value=message["newValue"])
            instance = self.get_service_instance_value(rip_name=rip_name,
                                                       version=0)
        else:
            item_attrs = {}
            # parent_dimension_type can be: REGION, CELL, or SERVICE
            parent_dimension_type = parent_dimension_key["type"].upper()
            item_attrs["belongs_to_artifact"] = parent_dimension_type

            if parent_dimension_type == "REGION":
                parent_dimension_name = parent_dimension_key["name"].upper(
                )  # might not want this in SERVICE example - recon-integ
                item_attrs["belongs_to_instance"] = parent_dimension_name
                logger.info(
                    f"Ingesting new service in region combo -> '{rip_name}' in '{parent_dimension_name}'"
                )

                parent_exists = parent_object_exists(parent_dimension_type,
                                                     parent_dimension_name)

                if not parent_exists:
                    logger.warning(
                        f"Service in region is in a test, retail, or closed region and is not tracked. Parent region '{parent_dimension_name}' does not exist in database."
                    )
                    return None

                instance = self.get_serviceinstance_instance_value(
                    rip_name=rip_name,
                    version=0,
                    dimension_name=parent_dimension_name)
                item_attrs.update(
                    self.get_attributes_from_new_value(
                        map_of_attr_names=
                        get_service_attributes_to_buildables_map(),
                        new_value=message["newValue"]))

                # we must guarantee all new service_instance objects have the same "plan" aka category as their parent MD object
                service_metadata_object = self.get_service_metadata(rip_name)
                if service_metadata_object:
                    parent_plan = service_metadata_object.get("plan")
                    if parent_plan:
                        logger.info(
                            f"Found MD object for service '{rip_name}' with plan '{parent_plan}' - setting new instance '{parent_dimension_name}' to parent plan"
                        )
                        self.metrics = increment_metric(
                            self.metrics, "pulled_plan_from_parent")
                        item_attrs["plan"] = parent_plan

                # we must save the date a service goes GA in a region as a separate field https://sim.amazon.com/issues/RECON-6126
                is_turning_ga = message.get('newValue',
                                            {}).get('status') == 'GA'
                old_value_isnt_ga = message.get(
                    'previousValue') is not None and message.get(
                        'previousValue', {}).get('status') != 'GA'
                should_save_launch_date = is_turning_ga and old_value_isnt_ga

                if should_save_launch_date:
                    logger.info(
                        f"Service '{rip_name}' went GA in '{parent_dimension_name}' - saving its launch date"
                    )
                    self.metrics = increment_metric(self.metrics,
                                                    "save_ga_launched_date")
                    item_attrs[
                        'ga_launched_date'] = self.get_date_iso_string_from_epoch_milliseconds_timestamp(
                            timestamp=message["approvedDate"])
                    # If a service goes GA, we don't want to keep its confidence field. See https://issues.amazon.com/issues/RECON-6775.
                    item_attrs['confidence'] = None
                    self.metrics = increment_metric(self.metrics,
                                                    'remove_ga_confidence')

            elif parent_dimension_type == "SERVICE":
                parent_dimension_name = parent_dimension_key["name"]
                item_attrs["belongs_to_instance"] = parent_dimension_name
                logger.info(
                    f"Ingesting new component service '{rip_name}' for parent '{parent_dimension_name}'"
                )

                parent_exists = parent_object_exists(parent_dimension_type,
                                                     parent_dimension_name)
                if not parent_exists:
                    logger.error(
                        f"Component Services parent was not found in ddb. Parent name '{parent_dimension_name}'."
                    )
                    return None

                instance = self.get_service_instance_value(rip_name=rip_name,
                                                           version=0)
                item_attrs.update(
                    self.get_attributes_from_new_value(
                        map_of_attr_names=
                        get_service_attributes_to_buildables_map(),
                        new_value=message["newValue"]))

            else:
                # we only handle services in REGIONS and SERVICES now, but may handle AZ/CELL/... in the future.
                self.metrics = increment_metric(
                    self.metrics, "unsupported_parent_dimension")
                logger.warning(
                    "parent_dimension_type is {} - not doing anything!".format(
                        parent_dimension_type))
                return None

        item_attrs[
            "updated"] = self.get_date_iso_string_from_epoch_milliseconds_timestamp(
                timestamp=message["approvedDate"])
        item_attrs["updater"] = message["registrant"]
        buildable_item = BuildableService(instance=instance,
                                          table=self.get_table(),
                                          **item_attrs)
        self.metrics = increment_metric(self.metrics, "service_change")
        return buildable_item