def test_rtc9724_tc_arc_001_unique(stream):
    """
    Create a recording with copy type as UNIQUE and verify whether it gets archived
    """
    recording = None
    web_service_obj = None

    try:
        start_time = utils.get_formatted_time(constants.SECONDS * 30, TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60, TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(StartTime=start_time, EndTime=end_time, StreamId=stream)
        recording_id = recording.get_entry(0).RecordingId
        web_service_obj = notification_utils.get_web_service_object(recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(response, requests.codes.no_content)

        assert is_valid, error

        is_valid, error = validate_recordings.validate_recording(recording_id, web_service_obj)

        assert is_valid, error

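        # Wait for archival to finish, then confirm the copy has moved out of active storage
        # and into archive storage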
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE, Archive.COMPLETE)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ACTIVE_STORAGE,
                                                                         Cos.RECORDING_NOT_STORED)

        assert is_valid, error

        is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ARCHIVE_STORAGE,
                                                                         Cos.RECORDING_STORED)

        assert is_valid, error

        is_valid, error = validate_storage.validate_copy_count(response, Cos.ARCHIVE_STORAGE)

        assert is_valid, error

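        # The archived recording should still be playable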
        is_valid, error = validate_recordings.validate_playback(recording_id)

        assert is_valid, error
    finally:
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_tc_rec_033_015_(total_recording, stream, name, copy_type):
    """
    Create multiple recordings with the given copy type (COMMON or UNIQUE) and verify storage,
    archival, and playback
    """

    web_service_objects = []
    recording_pool = None
    recording = None
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=total_recording,
                                              StartTime=start_time,
                                              EndTime=end_time,
                                              copyType=copy_type,
                                              StreamId=stream)
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

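        # Validate all recordings in parallel; each worker puts its (is_valid, error) result on the queue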
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objects[i]),
                callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)

            assert is_valid, error

            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)

                assert is_valid, error

        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            archive_helper.wait_for_archival(
                stream,
                recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)

                assert is_valid, error

        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()

        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
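# --- Illustrative sketch (not part of the original test suite) ------------------------------------
# The tests in this module repeatedly use the same fan-out/fan-in pattern: a ThreadPool runs one
# validation per recording via apply_async, each worker's (is_valid, error) tuple is delivered to a
# Queue by the callback, and the main thread drains the queue and asserts on every result. The
# hypothetical helper below condenses that pattern, assuming only the module-level Queue and
# mp_pool imports the tests already rely on.
def _parallel_validate_sketch(recording_ids, validate_fn):
    """Run validate_fn(recording_id) for every id in parallel and assert on each result."""
    results = Queue.Queue()
    pool = mp_pool.ThreadPool(processes=len(recording_ids))
    try:
        # Fan out: one asynchronous validation per recording; the callback collects the result
        for recording_id in recording_ids:
            pool.apply_async(validate_fn, (recording_id,), callback=results.put)
        # Fan in: exactly one (is_valid, error) tuple per submitted task
        for _ in recording_ids:
            is_valid, error = results.get()
            assert is_valid, error
    finally:
        pool.close()
        pool.join()
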
def test_rtc9763_tc_rec_027_multiple_delete_playback_unique(stream):
    """
    Create multiple recordings with copy type as UNIQUE, delete one and playback the rest
    """
    total_recordings = 3
    web_service_objs = []
    playback_pool = None
    recording_pool = None
    recording = None

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=total_recordings,
            StartTime=start_time,
            EndTime=end_time,
            StreamId=stream)

        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        recording_id_0 = recording.get_entry(0).RecordingId

        recording_pool = mp_pool.ThreadPool(processes=total_recordings)
        for i in range(total_recordings):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objs[i]),
                callback=queue.put)

        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error

        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ACTIVE_STORAGE, total_recordings)

            assert is_valid, error

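        # Delete only the first recording: a deep copy keeps just entry 0 for the delete request,
        # while the original keeps the remaining entries for playback and cleanup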
        recording_to_delete = copy.deepcopy(recording)
        del recording_to_delete.get_entries()[1:]
        del recording.get_entries()[:1]  # to clean up recordings later
        response = a8.delete_recording(recording_to_delete)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        is_valid, error = validate_recordings.validate_recording_deletion(
            recording_id_0)

        assert is_valid, error

        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id_0)

        assert not is_valid, ValidationError.DELETED_RECORDING_PLAYED_BACK.format(
            recording_id_0)

        playback_pool = mp_pool.ThreadPool(
            processes=len(recording.get_entries()))
        for recording_entry in recording.get_entries():
            playback_pool.apply_async(
                validate_recordings.validate_playback_using_vle,
                (recording_entry.RecordingId, ),
                callback=queue.put)

        for i in range(len(recording.get_entries())):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        if playback_pool:
            playback_pool.close()
            playback_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9736_tc_er_012_hybrid_archival_batch_size_multiple(stream):
    """
    Hybrid copy archiving: schedule unique and common copy recordings (20 total: 10 with the same
    start/end time, 10 with different start/end times), with manifest agent batch size 4.
    """

    web_service_objects = []
    recording_pool = None
    recording = None
    total_recording = 20
    diff_start_time_recordings = 10
    same_start_time_recordings = 10
    service_name = V2pc.MANIFEST_AGENT
    namespace = Component.VMR

    try:
        # Take a backup of the v2pc pod config, edit the config, and restart the service
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error

        is_valid, error = v2pc_edit_manifest_config(V2pc.MANIFEST_AGENT, batch_size='4')
        assert is_valid, error

        is_valid, error = verify_batch_size_update(service_name, namespace, "4")
        assert is_valid, error
    
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30, TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80, TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=same_start_time_recordings, StartTime=start_time,
                                              EndTime=end_time, copyType=RecordingAttribute.COPY_TYPE_COMMON,
                                              StreamId=stream)

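        # Append ten more UNIQUE-copy entries, each shifted by i seconds, so the batch mixes
        # common and unique copies with different start/end times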
        for i in range(diff_start_time_recordings, same_start_time_recordings+diff_start_time_recordings):
            start_time = utils.get_formatted_time((constants.SECONDS * 30) + i, TimeFormat.TIME_FORMAT_MS, stream)
            end_time = utils.get_formatted_time((constants.SECONDS * 80) + i, TimeFormat.TIME_FORMAT_MS, stream)
            rec_with_diff_time = recording_model.Recording(total_recordings=1, StartTime=start_time, EndTime=end_time,
                                                           copyType=RecordingAttribute.COPY_TYPE_UNIQUE,
                                                           StreamId=stream)
            rec_with_diff_time.Entries[0].RecordingId = RecordingAttribute.RECORDING_ID_PREFIX + \
                                                        rec_with_diff_time.RequestId + '_' + str(i)
            recording.Entries.append(rec_with_diff_time.get_entry(0))
        last_recording_id = rec_with_diff_time.Entries[0].RecordingId

        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())

        # Send the recording creation request
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(response, requests.codes.no_content)

        assert is_valid, error

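        # Use the last scheduled recording to work out how long to wait for the STARTED notification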
        response = rio.find_recording(last_recording_id).json()
        assert response, ValidationError.RECORDING_RESPONSE_EMPTY.format(last_recording_id)
        start_time = utils.get_parsed_time(response[0][RecordingAttribute.START_TIME][:-1])

        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time), constants.SECONDS)
        if wait_time < 0:
            wait_time = 0

        # Verify that each recording has started
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_notification,
                                       (web_service_objects[i], constants.RecordingStatus.STARTED, wait_time),
                                       callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        end_time = utils.get_parsed_time(response[0][RecordingAttribute.END_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time), constants.SECONDS)
        if wait_time < 0:
            wait_time = 0

        # Verify that each recording has completed
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_notification,
                                       (web_service_objects[i], constants.RecordingStatus.COMPLETE, wait_time),
                                       callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error


        # Verify archive storage after the archival window
        archive_helper.wait_for_archival(stream, recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                                         Archive.COMPLETE)

        for i in range(total_recording):
            response = rio.find_recording(recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ARCHIVE_STORAGE,
                                                                              Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(response, Cos.ARCHIVE_STORAGE)

            assert is_valid, error
        
        # Verify playback after the recordings complete
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId,), callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

    finally:
        # Revert the v2pc config changes
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error

        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()

        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_tc_arc_(name, stream, event):
    """
    test_tc_arc_[009_multiple_playback_one_unique-ARCHIVE -->    Create multiple recordings with copy type as UNIQUE, wait for them to get archived, playback one of the them and
    verify if the recording is present both in the ARCHIVE_STORAGE AND RECON_STORAGE
    test_tc_arc_[011_multiple_playback_one_re_archive_unique-RE_ARCHIVE -->  Create multiple recordings with copy type as UNIQUE, wait for them to get archived, playback one, and wait till it
    gets re-archived
    """
    LOGGER.info("#####################################################################")
    LOGGER.info("Starting test_tc_arc_%s", name)

    total_recordings = 3
    recording = None
    web_service_objs = []
    recording_pool = None

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30, TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60, TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=total_recordings, StartTime=start_time, EndTime=end_time,
                                              StreamId=stream)
        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(response, requests.codes.no_content)

        assert is_valid, error

        recording_id = recording.get_entry(0).RecordingId

        recording_pool = mp_pool.ThreadPool(processes=total_recordings)
        for i in range(total_recordings):
            recording_pool.apply_async(validate_recordings.validate_recording,
                                       (recording.get_entry(i).RecordingId, web_service_objs[i]), callback=queue.put)

        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error

        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE, Archive.COMPLETE)

        for i in range(total_recordings):
            response = rio.find_recording(recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ACTIVE_STORAGE,
                                                                             Cos.RECORDING_NOT_STORED, i)

            assert is_valid, error

            is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ARCHIVE_STORAGE,
                                                                             Cos.RECORDING_STORED)

            assert is_valid, error

            is_valid, error = validate_storage.validate_copy_count(response, Cos.ARCHIVE_STORAGE)

            assert is_valid, error

        is_valid, error = validate_recordings.validate_playback_using_vle(recording_id)

        assert is_valid, error
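        # After playback, the recording should be present in both archive and recon storage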
        response = rio.find_recording(recording.get_entry(0).RecordingId).json()
        is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ARCHIVE_STORAGE,
                                                                         Cos.RECORDING_STORED)

        assert is_valid, error

        is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.RECON_STORAGE,
                                                                         Cos.RECORDING_STORED)

        assert is_valid, error
        if event == Archive.RE_ARCHIVE:
            archive_helper.wait_for_archival(stream, recording_id, Archive.RE_ARCHIVE, Archive.COMPLETE)
            # Re-fetch the recording metadata after re-archival; the recon copy should now be gone
            response = rio.find_recording(recording_id).json()
            is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.RECON_STORAGE,
                                                                             Cos.RECORDING_NOT_STORED)

            assert is_valid, error
            is_valid, error = validate_storage.validate_recording_in_storage(response, Cos.ARCHIVE_STORAGE,
                                                                             Cos.RECORDING_STORED)

            assert is_valid, error

        for i in range(total_recordings):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId,), callback=queue.put)

        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error

    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()

        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_tc_arc(name, stream, archive_playback):
    """
    Schedule recordings with the same start and end time as unique copies, play back and delete a single
    recording, then play back the remaining recordings.
    """
    total_recordings = 3
    recording = None
    web_service_objs = []
    recording_pool = None

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        copy_type = RecordingAttribute.COPY_TYPE_UNIQUE
        recording = recording_model.Recording(
            total_recordings=total_recordings,
            StartTime=start_time,
            EndTime=end_time,
            copyType=copy_type,
            StreamId=stream)
        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()
            LOGGER.debug("Recording=%s, UpdateURL=%s", recording_id,
                         web_service_objs[i].get_url())

        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        recording_pool = mp_pool.ThreadPool(processes=total_recordings)
        for i in range(total_recordings):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objs[i]),
                callback=queue.put)

        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error
        # Validating the copy count of unique copy recording in active storage
        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ACTIVE_STORAGE, total_recordings)
            assert is_valid, error

        # Wait till archival completes
        recording_id_0 = recording.get_entry(0).RecordingId
        archive_helper.wait_for_archival(stream, recording_id_0,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        # Validating copy count in archive storage after archival duration
        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED)

            assert is_valid, error

            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)

            assert is_valid, error

            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE, 1)
            assert is_valid, error

        if archive_playback:
            response = rio.find_recording(recording_id_0).json()
            # Validating the first recording for playback
            is_valid, error = validate_recordings.validate_playback_using_vle(
                recording_id_0)
            assert is_valid, error

            # Validating the copy of segments in the archive folder after playback
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error

            # Validating the segments in recon folder after playback
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.RECON_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error

        response = a8.delete_recording(recording,
                                       recording.get_entry(0).RecordingId)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        is_valid, error = validate_recordings.validate_recording_deletion(
            recording_id_0)

        assert is_valid, error

        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id_0)

        assert not is_valid, ValidationError.DELETED_RECORDING_PLAYED_BACK.format(
            recording_id_0)

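        # Drop the deleted entry so only the remaining recordings are validated and played back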
        del recording.get_entries()[:1]
        # Check remaining records still in archive
        if archive_playback:
            for recording_entry in recording.get_entries():
                response = rio.find_recording(
                    recording_entry.RecordingId).json()
                recording_pool.apply_async(
                    validate_storage.validate_recording_in_storage,
                    (response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED),
                    callback=queue.put)

            for i in range(len(recording.get_entries())):
                is_valid, error = queue.get()
                assert is_valid, error

        for recording_entry in recording.get_entries():
            recording_pool.apply_async(
                validate_recordings.validate_playback_using_vle,
                (recording_entry.RecordingId, ),
                callback=queue.put)

        for i in range(len(recording.get_entries())):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_rtc9735_er_011_hybrid_recording_recovery_manifest_restart(stream):
    """
    Hybrid (UNIQUE and COMMON) copy recording recovery across a manifest agent restart
    """

    web_service_objects = []
    recording_pool = None
    recording = None
    total_recording = 10
    common_copy_recordings = 5
    unique_copy_recordings = 5

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 120,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=common_copy_recordings,
            StartTime=start_time,
            EndTime=end_time,
            copyType=RecordingAttribute.COPY_TYPE_COMMON,
            StreamId=stream)
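        # Append five more UNIQUE-copy entries so the batch mixes common and unique copies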
        for i in range(unique_copy_recordings, total_recording):
            rec_uniq = recording_model.Recording(
                total_recordings=unique_copy_recordings,
                StartTime=start_time,
                EndTime=end_time,
                copyType=RecordingAttribute.COPY_TYPE_UNIQUE,
                StreamId=stream)
            rec_uniq.Entries[
                0].RecordingId = RecordingAttribute.RECORDING_ID_PREFIX + rec_uniq.RequestId + '_' + str(
                    i)
            recording.Entries.append(rec_uniq.get_entry(0))
        last_recording_id = rec_uniq.Entries[0].RecordingId

        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()

        LOGGER.debug("Recording instance created=%s", recording.serialize())

        # Sending recording request to create recording
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        response = rio.find_recording(last_recording_id).json()
        assert response, ValidationError.RECORDING_RESPONSE_EMPTY.format(last_recording_id)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])

        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0

        # Verify that each recording has started
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.STARTED,
                 wait_time),
                callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        # Restarting manifest agent
        is_valid, error = delete_vmr_pods(V2pc.MANIFEST_AGENT)
        assert is_valid, error

        end_time = utils.get_parsed_time(
            response[0][RecordingAttribute.END_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0

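        # Verify that each recording completes even after the manifest agent restart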
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.COMPLETE,
                 wait_time),
                callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        # Verifying recording in storage
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error

        # Verify archive storage after the archival window
        archive_helper.wait_for_archival(stream,
                                         recording.get_entry(0).RecordingId,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE)

            assert is_valid, error

    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()

        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_rec(name, stream, copy_type):
    """
    Update the end time of the shorter recording (unique or common copy) after it completes,
    while the longer recording is still in progress.
    """

    total_recordings = 2
    validate_rec_wait_time = 240
    recording = None
    web_service_objs = []
    recording_pool = None
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=total_recordings,
            StartTime=start_time,
            EndTime=end_time,
            copyType=copy_type,
            StreamId=stream)
        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()
            LOGGER.debug("Recording=%s, UpdateURL=%s", recording_id,
                         web_service_objs[i].get_url())

        recording.get_entry(1).EndTime = utils.get_formatted_time(
            constants.SECONDS * 80, TimeFormat.TIME_FORMAT_MS, stream)
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        # Validate the longer recording in parallel
        recording_id_1 = recording.get_entry(1).RecordingId

        # recording_pool = mp_pool.ThreadPool(processes=2)
        recording_pool = mp_pool.ThreadPool()
        recording_pool.apply_async(validate_recordings.validate_recording,
                                   (recording_id_1, web_service_objs[1]),
                                   callback=queue.put)

        # Validating shorter recording to complete
        recording_id_0 = recording.get_entry(0).RecordingId
        is_valid, error = validate_recordings.validate_recording(
            recording_id_0, web_service_objs[0])

        assert is_valid, error

        shorter_recording_res = rio.find_recording(recording_id_0).json()
        # Validate the copy count for unique/common copy
        if RecordingAttribute.COPY_TYPE_UNIQUE == copy_type:
            is_valid, error = validate_storage.validate_copy_count(
                shorter_recording_res, Cos.ACTIVE_STORAGE, 2)
        elif RecordingAttribute.COPY_TYPE_COMMON == copy_type:
            is_valid, error = validate_storage.validate_copy_count(
                shorter_recording_res, Cos.ACTIVE_STORAGE, 1)

        assert is_valid, error
        LOGGER.debug("%s copy validation success", copy_type)

        # Try to update the end time after shorter recording completes
        response = rio.find_recording(recording_id_0).json()
        print "[INFO: ] response json ", response

        updated_end_time = utils.get_formatted_time(constants.SECONDS * 30,
                                                    TimeFormat.TIME_FORMAT_MS,
                                                    stream)
        recording.get_entry(0).EndTime = updated_end_time
        shorter_recording_res = a8.create_recording(recording, recording_id_0)
        response = rio.find_recording(recording_id_0).json()
        print "[INFO: ] response json ", response

        is_valid, error = validate_common.validate_http_response_status_code(
            shorter_recording_res, requests.codes.bad_request)
        print "[INFO: ] update end time after recording complete message ", error
        # assert is_valid, "CSCvc21524: - "+error

        # Validating the updated end time in response, it should not be equal
        response = rio.find_recording(recording_id_0).json()
        is_valid, error = validate_recordings.validate_time(
            response, updated_end_time, RecordingAttribute.END_TIME)

        assert not is_valid, error
        print "[INFO: ] Validating the updated end time in response, it should not be equal "
        print "[INFO: ] async validate recording wait time ", time.sleep(
            validate_rec_wait_time)
        print "[INFO: ] queue list empty ", queue.empty()

        # Validating the longer recording
        is_valid, error = queue.get(timeout=7)
        LOGGER.debug("Longer recording validation result=%s", is_valid)
        assert is_valid, error

        # Playback all recordings
        for i in range(total_recordings):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)
        print "[INFO: ] Playback all recordings "

        # Validate playback recordings
        for i in range(total_recordings):
            is_valid, error = queue.get()
            print "[INFO: ] queue value 2 ", is_valid
            assert is_valid, error
        print "[INFO: ] Validate playback recordings"

    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_rtc9769_tc_rec_037_(total_recording, stream, name, copy_type):
    """
    Update the end time of a recording after it starts
    """
    recording = None
    web_service_objects = []
    recording_pool = None
    recording_id_list = []

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 70,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=total_recording,
                                              StartTime=start_time,
                                              EndTime=end_time,
                                              StreamId=stream,
                                              copyType=copy_type)

        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_id_list.append(recording.get_entry(i).RecordingId)
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
            LOGGER.debug("Recording instance created=%s",
                         recording.serialize())

        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error

        # wait till the STARTED notification
        response = rio.find_recording(recording_id).json()
        LOGGER.debug("Response=%s", response)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])

        current_time = datetime.datetime.utcnow()
        if current_time < start_time:
            utils.wait(start_time - current_time, constants.TIME_DELTA)
        for i in range(total_recording):
            is_valid, error = validate_recordings.validate_notification(
                web_service_objects[i], constants.RecordingStatus.STARTED)
            assert is_valid, error

        # Updating the end time
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            end_time = response[0][RecordingAttribute.END_TIME][:-1]
            LOGGER.debug("Scheduled end time of recording=%s is %s",
                         recording_id, end_time)
            recording.get_entry(i).EndTime = utils.get_formatted_time(
                constants.SECONDS * 60, TimeFormat.TIME_FORMAT_MS, stream)
            new_end_time = recording.get_entry(i).EndTime[:-1]
            LOGGER.debug("Updated end time of recording=%s to %s",
                         recording_id,
                         recording.get_entry(i).EndTime)

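        # Submit the update so the new end times take effect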
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error

        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        end_time = utils.get_parsed_time(new_end_time)
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time),
                                           constants.SECONDS)

        if wait_time < 0:
            wait_time = 0

        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.COMPLETE,
                 wait_time),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)

            assert is_valid, error

            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)

                assert is_valid, error

        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            archive_helper.wait_for_archival(
                stream,
                recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)

                assert is_valid, error

        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            is_valid, error = validate_recordings.validate_actual_time(
                response,
                recording.get_entry(i).EndTime,
                RecordingAttribute.ACTUAL_END_TIME)
            assert is_valid, error

    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_tc_rec_008_034_(total_recording, stream, name, copy_type):
    """
    Update the start time of a recording before it starts, for both unique and common copy types
    """
    recording = None
    web_service_objects = []
    recording_pool = None
    recording_id_list = []

    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 20,
                                              TimeFormat.TIME_FORMAT_MS,
                                              stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=total_recording,
                                              StartTime=start_time,
                                              EndTime=end_time,
                                              StreamId=stream,
                                              copyType=copy_type)

        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_id_list.append(recording_id)
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
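        # Move each recording's start time further out (to now + 40 seconds) before it begins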
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            start_time = response[0][RecordingAttribute.START_TIME][:-1]
            LOGGER.debug("Scheduled start time of recording=%s is %s",
                         recording_id_list[i], start_time)
            recording.get_entry(i).StartTime = utils.get_formatted_time(
                constants.SECONDS * 40, TimeFormat.TIME_FORMAT_MS, stream)
            LOGGER.debug("Updated start time of recording=%s to %s",
                         recording_id_list[i],
                         recording.get_entry(i).StartTime)

        # Update the recording with the updated start time
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)

        assert is_valid, error

        # Validating whether the updated start time was populated or not
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            is_valid, error = validate_recordings.validate_time(
                response,
                recording.get_entry(i).StartTime,
                RecordingAttribute.START_TIME)

            assert is_valid, error

        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objects[i]),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)

            assert is_valid, error

            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)

                assert is_valid, error

        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            archive_helper.wait_for_archival(
                stream,
                recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)

                assert is_valid, error

        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)

        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error

    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s", response.status_code)