def check_available_detection(event, stream_id, start_time_in_program_loop,
                              start_datetime, duration_sec):
    """
    Look up a reusable past detection for the same looping video segment.

    When the playlist loops and detection reuse is enabled, a segment at the
    same offset within the loop may already have finished detection results
    that can be reused instead of re-running detection.

    :param event: lambda event (unused here; kept for a uniform signature)
    :param stream_id: stream identifier
    :param start_time_in_program_loop: segment start offset within the loop
    :param start_datetime: absolute start datetime of the current segment
    :param duration_sec: segment duration in seconds
    :return: the absolute start datetime of the most recent reusable past
        detection, or None when nothing matches.
    """
    # Match on (Stream_ID, Start_Time_Sec_In_Loop) via the GSI; keep only
    # finished detections of the same duration, excluding the current segment.
    key_condition = (Key('Stream_ID').eq(stream_id)
                     & Key('Start_Time_Sec_In_Loop').eq(
                         convert_float_to_dec(start_time_in_program_loop)))
    filter_condition = (
        Attr('Finished').eq(True)
        & Attr('Start_DateTime').ne(start_datetime)
        & Attr('Duration_Sec').eq(convert_float_to_dec(duration_sec)))
    query_params = {
        'IndexName': 'Stream_ID_Start_In_Loop',
        # descending sort, so index 0 is the most recent detection
        'ScanIndexForward': False,
        'KeyConditionExpression': key_condition,
        'FilterExpression': filter_condition
    }
    logger.info(f'{query_params}')
    result = query_item_ddb(DDB_FRAGMENT_TABLE, **query_params)
    if not result:
        logger.info('Did not find existing detections to reuse.')
        return None
    logger.info(f'Found existing detections: {len(result)}')
    # reuse the most recent past detection
    return result[0]['Start_DateTime']
def find_expected_program_for_looping_input(stream_id,
                                            segment_start_time,
                                            duration_sec,
                                            ddb_client=None):
    """
    Find the expected program for a video segment when the input is looping.

    (True live events use a different approach, comparing absolute date time
    with program date time.)

    :param stream_id: stream identifier
    :param segment_start_time: relative start timestamp of the video segment
    :param duration_sec: duration of the video segment in seconds
    :param ddb_client: optional DDB client overriding the default client
    :return: a dictionary of metadata about the expected program, e.g.
        {
          "Team_Info": "MAN V TOT",
          "Station_Logo": "NBC",
          "Stream_ID": "test_1",
          "Event_Title": "MAN V TOT",
          "Event_ID": "SIM-EPL-002",
          "Event_Type": "Sports",
          "End_Time": 90.0,
          "Start_Time": 0.0,
          "languageCode": "en-en"
        }
    """
    loop_end_time = get_loop_end_time(stream_id, ddb_client)
    # The schedule repeats every loop_end_time seconds, so reduce the
    # absolute segment start to its offset within one loop. Doing the math
    # in Decimal avoids floating point precision problems.
    segment_start_time_in_loop = (convert_float_to_dec(segment_start_time) %
                                  convert_float_to_dec(loop_end_time))
    logger.info(
        f'Loop input end time: {loop_end_time}, start time in loop: {segment_start_time_in_loop}'
    )

    # A program matches when it starts before the segment ends and ends
    # after the segment starts.
    segment_end_in_loop = (segment_start_time_in_loop +
                           convert_float_to_dec(duration_sec))
    query_params = {
        'ScanIndexForward': True,
        'KeyConditionExpression':
        Key('Stream_ID').eq(stream_id)
        & Key('Start_Time').lte(segment_end_in_loop),
        'FilterExpression': Attr('End_Time').gt(segment_start_time_in_loop)
    }

    # Technically one segment can straddle two programs, but segments are
    # only 6-10 seconds long, so we simply take the first matching program.
    items = query_item_ddb(DDB_SCHEDULE_TABLE, ddb_client, **query_params)
    expected_program = items[0]
    # Convert Decimal attributes back to floats for downstream consumers.
    for attr_name in ('Start_Time', 'End_Time'):
        expected_program[attr_name] = float(expected_program[attr_name])
    expected_program['Segment_Start_Time_In_Loop'] = float(
        segment_start_time_in_loop)
    logger.info(
        f'Found program title={expected_program["Event_Title"]} '
        f'({expected_program["Start_Time"]} - {expected_program["End_Time"]}) for video segment.'
    )

    return expected_program
# --- Example #3 (scraped separator; original vote count: 0) ---
def process_audio_check(event, ddb_update_builder, segment_duration):
    """
    Evaluate the audio detection results for a segment and queue DDB updates.

    :param event: workflow event containing the raw detection results
    :param ddb_update_builder: builder collecting DDB attribute updates
    :param segment_duration: segment duration in seconds
    :return: the computed audio-on status, or None when the audio detection
        step reported an error.
    """
    audio = event['detections'][AUDIO_RESULT]

    # An upstream detection error short-circuits the check: record it and bail.
    if "Error" in audio:
        ddb_update_builder.update_attr('Audio_Check_Error', audio["Error"])
        return None

    # Derive the overall audio status from the detected silence chunks.
    audio_on_status, silence_duration, silence_confidence = eval_audio_status(
        audio, segment_duration)

    # Volume statistics are optional in the detection payload.
    if 'volume' in audio:
        ddb_update_builder.update_attr(
            'Volume', convert_dict_float_to_dec(audio['volume']))

    for attr_name, attr_value in (
            ('Silence', json.dumps(audio['silence_chunks'])),
            ('Audio_Status', audio_on_status),
            ('Silence_Duration', convert_float_to_dec(silence_duration)),
            ('Silence_Confidence', convert_float_to_dec(silence_confidence))):
        ddb_update_builder.update_attr(attr_name, attr_value)

    logger.info(f'Audio on status: {audio_on_status}')
    return audio_on_status
# --- Example #4 (scraped separator; original vote count: 0) ---
def lambda_handler(event, context):
    """
    Download a playlist manifest from S3 and classify it.

    For a master manifest nothing else is done. For a child manifest, the
    latest segment is located, its starting date time is computed from the
    manifest metadata, and the segment is recorded in the fragment DDB table.

    :param event: example
    {
        "Execution" : "arn:aws:states:us-east-1:12312312312:execution:statemachine-name:exc-id
        "Input": {
          "s3Bucket": "aws-rnd-broadcast-maas-video-processing-dev",
          "s3Key": "live/test_video_single_pipeline/test_1.m3u8",
          "s3VersionId": "T.Lfm.fslzaZa5lkV_bJrI.MmrQG7mE_"
        }
    }
    :param context: lambda context object https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html
    :return: whether this is a master manifest; for a child manifest also the
        latest segment and its starting date time, e.g.
    {
        "isMasterManifest": false,
        "streamId": "test_1",
        "lastSegment": { # only if isMasterManifest = false
            "s3Key": "live/test_video_single_pipeline/test_1_00039.ts",
            "versionId": "...",
            "startDateTime": "2020-01-23T21:36:35.290000Z",
            "durationSec": 6
        }
    }
    """
    logger.info('Received event: %s', json.dumps(event, indent=2))
    manifest_input = event['Input']
    s3_bucket = manifest_input['s3Bucket']
    manifest_s3_key = manifest_input['s3Key']

    manifest_content = read_file_from_s3_w_versionid(
        s3_bucket, manifest_s3_key, manifest_input['s3VersionId'])

    if is_master_manifest(manifest_content):
        # Nothing to track for the master manifest itself.
        logger.info('Is master manifest. Skip processing')
        return {'isMasterManifest': True}

    last_segment, starting_time, duration_sec = get_last_segment_and_start_timestamp(
        manifest_content)
    # The segment lives next to its manifest in the same S3 "directory".
    segment_s3_key = os.path.join(os.path.dirname(manifest_s3_key),
                                  last_segment)
    segment_s3_version_id = get_s3_object_latest_version_id(
        s3_bucket, segment_s3_key)
    # The stream id is the manifest filename without its extension.
    stream_id = os.path.splitext(os.path.basename(manifest_s3_key))[0]
    starting_time_str = parse_date_time_to_str(starting_time)

    # Record the segment so later workflow steps can attach results to it.
    put_item_ddb(
        DDB_FRAGMENT_TABLE, {
            'SFNArn': event['Execution'],
            'Stream_ID': stream_id,
            'Start_DateTime': starting_time_str,
            'S3_Key': segment_s3_key,
            'S3_VersionID': segment_s3_version_id,
            'Duration_Sec': convert_float_to_dec(duration_sec)
        })

    result = {
        'isMasterManifest': False,
        'streamId': stream_id,
        'lastSegment': {
            's3Key': segment_s3_key,
            'versionId': segment_s3_version_id,
            'durationSec': duration_sec,
            'startDateTime': starting_time_str
        }
    }
    logger.info('Response : %s', json.dumps(result, indent=2))
    return result
# --- Example #5 (scraped separator; original vote count: 0) ---
def lambda_handler(event, context):
    """
    Compute a status for each check performed by preceding workflow steps and
    persist both the computed status and the raw data into DDB.

    :param event: workflow state containing:
        - "s3Bucket" / "s3Key" / "s3VersionId": the manifest object
        - "config": per-check enable flags (audio, station logo, language,
          team detection, appsync notification)
        - "parsed": segment metadata ("streamId", "lastSegment" with
          "startDateTime" / "startTimeRelative" / "durationSec", and
          "expectedProgram" with "Segment_Start_Time_In_Loop")
        - "detections": raw per-check results (audio volume and silence
          chunks, language detection, extracted frames, ...)
    :param context: lambda context object (unused)
    :return: the input event augmented with "thumbnailKey" and
        "statusSummary" keys.
    """
    logger.info('Received event: %s', json.dumps(event, indent=2))
    parsed = event['parsed']
    last_segment = parsed['lastSegment']
    segment_start_dt = last_segment['startDateTime']
    stream_id = parsed['streamId']
    segment_relative_start_time = last_segment['startTimeRelative']
    segment_start_time_in_loop = parsed['expectedProgram'][
        'Segment_Start_Time_In_Loop']
    segment_duration = last_segment['durationSec']

    segment_table_key = {
        'Start_DateTime': segment_start_dt,
        'Stream_ID': stream_id
    }
    # Attribute updates accumulated here are written to DDB when the
    # builder's context exits.
    with DDBUpdateBuilder(key=segment_table_key,
                          table_name=DDB_FRAGMENT_TABLE) as ddb_update_builder:
        ddb_update_builder.update_attr(
            'Start_Time_Sec',
            convert_float_to_dec(segment_relative_start_time))
        ddb_update_builder.update_attr(
            'Start_Time_Sec_In_Loop',
            convert_float_to_dec(segment_start_time_in_loop))
        ddb_update_builder.update_attr('Finished', True)

        audio_on_status = process_audio_check(event, ddb_update_builder,
                                              segment_duration)
        station_status = get_station_logo_status(event, segment_table_key)
        team_status = get_team_status(event, segment_table_key)
        sports_status = get_sports_status(event, segment_table_key)

    # The first extracted frame doubles as the segment thumbnail.
    frames = event['detections'][FRAME_RESULT]
    event['thumbnailKey'] = frames[0]['S3_Key']
    event['statusSummary'] = {
        'Audio_Status': audio_on_status,
        'Station_Status': station_status,
        'Team_Status': team_status,
        'Sports_Status': sports_status
    }
    return event