def test_rtc9734_tc_er_010_playback_redirect_mode(stream, name, copy_type):
    """
    Playback of Active and Archive recording(UNIQUE and COMMON) with
    DO(Dash Origin) redirect enabled.

    Schedules a short recording, verifies it completes and lands in active
    storage, plays it back, and for UNIQUE copies additionally plays back
    while archival is in progress.
    """
    rec = None
    notify_server = None
    try:
        # Schedule a recording running from +30s to +60s on the stream clock
        rec_start = utils.get_formatted_time(constants.SECONDS * 30,
                                             TimeFormat.TIME_FORMAT_MS, stream)
        rec_end = utils.get_formatted_time(constants.SECONDS * 60,
                                           TimeFormat.TIME_FORMAT_MS, stream)
        rec = recording_model.Recording(StartTime=rec_start, EndTime=rec_end,
                                        StreamId=stream, copyType=copy_type)
        rec_id = rec.get_entry(0).RecordingId

        # Stand up a notification endpoint so status updates can be observed
        notify_server = notification_utils.get_web_service_object(rec_id)
        rec.get_entry(0).UpdateUrl = notify_server.get_url()
        LOGGER.debug("Recording instance created=%s", rec.serialize())

        # Create the recording and wait for it to complete successfully
        resp = a8.create_recording(rec)
        ok, err = validate_common.validate_http_response_status_code(
            resp, requests.codes.no_content)
        assert ok, err
        ok, err = validate_recordings.validate_recording(rec_id, notify_server)
        assert ok, err

        # The finished recording must be present in active storage
        resp = rio.find_recording(rec_id).json()
        ok, err = validate_storage.validate_recording_in_storage(
            resp, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
        assert ok, err

        # Playback from active storage
        ok, err = validate_recordings.validate_playback(rec_id)
        assert ok, err

        # UNIQUE copies get archived; also verify playback mid-archival
        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            archive_helper.wait_for_archival(stream, rec_id, Archive.ARCHIVE,
                                             Archive.IN_PROGRESS)
            resp = rio.find_recording(rec_id).json()
            ok, err = validate_storage.validate_recording_in_storage(
                resp, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert ok, err
            ok, err = validate_recordings.validate_playback(rec_id)
            assert ok, err
    finally:
        if notify_server:
            notify_server.stop_server()
        if rec:
            resp = a8.delete_recording(rec)
            LOGGER.debug("Recording clean up status code=%s", resp.status_code)
def test_rtc9777_tc_arc_007_archive_playback_re_archive_unique(stream):
    """
    Create a recording with copy type as UNIQUE, wait till archival completes,
    playback the recording and validate re-archival.

    Flow:
      - schedule a 30s recording and wait for it to complete
      - wait for ARCHIVE to COMPLETE; verify segments are in archive storage
      - play back via VLE (pulls segments into recon storage)
      - wait for RE_ARCHIVE to COMPLETE; verify segments left recon storage
    """
    recording = None
    web_service_obj = None
    try:
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        # NOTE(review): no copyType passed — presumably the model defaults to
        # UNIQUE as the test name implies; confirm against recording_model
        recording = recording_model.Recording(StartTime=start_time,
                                              EndTime=end_time, StreamId=stream)
        recording_id = recording.get_entry(0).RecordingId
        web_service_obj = notification_utils.get_web_service_object(recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording(
            recording_id, web_service_obj)
        assert is_valid, error

        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.COMPLETE)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error

        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id)
        assert is_valid, error

        # BUG FIX: re-fetch recording state after playback — the pre-playback
        # response cannot reflect the segments playback copied into recon
        # storage (matches the pattern used in test_tc_arc_)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.RECON_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error

        archive_helper.wait_for_archival(stream, recording_id,
                                         Archive.RE_ARCHIVE, Archive.COMPLETE)
        # BUG FIX: refresh state again after re-archival before asserting
        # that recon storage has been emptied
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.RECON_STORAGE, Cos.RECORDING_NOT_STORED)
        assert is_valid, error
    finally:
        # BUG FIX: guard cleanup — both objects are None when setup fails
        # before they are created; an AttributeError here would mask the
        # original test failure
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9744_tc_er_018_private_copy(stream, name, copy_type):
    """
    Private copy - Archive/Pre-gen.

    Schedules a 50s recording on the private-copy stream, waits for it to
    complete, waits for archival to COMPLETE and verifies the recording is
    present in archive storage.

    NOTE(review): `name` and `copy_type` are unused and the recording is
    created without a copyType — presumably the parametrization supplies
    them for reporting only and the model's default copy type applies;
    confirm against the test parametrization.
    """
    web_service_obj = None
    recording = None
    # NOTE(review): the incoming `stream` fixture is deliberately overridden
    # with the module-level `private_copy_stream` — TODO confirm this global
    # is always defined when this test runs
    stream = str(private_copy_stream)
    try:
        # Recording window: +30s to +80s on the stream clock
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(StartTime=start_time,
                                              EndTime=end_time, StreamId=stream)
        recording_id = recording.get_entry(0).RecordingId
        # Notification endpoint for recording status callbacks
        web_service_obj = notification_utils.get_web_service_object(
            recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording(
            recording_id, web_service_obj)
        assert is_valid, error
        # wait for garbage collection / archival to complete
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.COMPLETE)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
    finally:
        # Cleanup is guarded: either object may be None if setup failed early
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9773_tc_arc_003_common(stream):
    """
    Create a recording with copy type as COMMON and check whether archival
    is happening or not.

    Verifies the recording completes, that archival reaches IN_PROGRESS and
    the recording appears in archive storage, and that playback still works.
    """
    recording = None
    web_service_obj = None
    try:
        # Recording window: +30s to +60s on the stream clock
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        copy_type = RecordingAttribute.COPY_TYPE_COMMON
        recording = recording_model.Recording(StartTime=start_time,
                                              EndTime=end_time,
                                              StreamId=stream,
                                              copyType=copy_type)
        recording_id = recording.get_entry(0).RecordingId
        # Notification endpoint for recording status callbacks
        web_service_obj = notification_utils.get_web_service_object(
            recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording(
            recording_id, web_service_obj)
        assert is_valid, error
        # Archival of the COMMON copy should start; wait until in progress
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.IN_PROGRESS)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_playback(recording_id)
        assert is_valid, error
    finally:
        # BUG FIX: guard cleanup — these are None if setup failed before
        # creation, and an AttributeError here would mask the real failure
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9775_tc_arc_005_playback_archive_in_progress(stream):
    """
    Create a recording with copy type as UNIQUE and play it back when
    archival is in progress.

    Verifies the recording completes, that mid-archival the segments are
    STORED or PARTIALLY_STORED in archive storage, and that playback during
    archival moves segments into recon storage.
    """
    recording = None
    web_service_obj = None
    try:
        # Recording window: +30s to +90s on the stream clock
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 90,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        # NOTE(review): no copyType passed — presumably the model defaults
        # to UNIQUE as the docstring implies; confirm against recording_model
        recording = recording_model.Recording(StartTime=start_time,
                                              EndTime=end_time, StreamId=stream)
        recording_id = recording.get_entry(0).RecordingId
        web_service_obj = notification_utils.get_web_service_object(recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording(
            recording_id, web_service_obj)
        assert is_valid, error
        # Wait only until archival has started, not until it completes
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.IN_PROGRESS)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE,
            [Cos.RECORDING_STORED, Cos.RECORDING_PARTIALLY_STORED])
        assert is_valid, error
        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id)
        assert is_valid, error
        # If we playback recording when archival is INPROGRESS, there is a
        # chance of few segments being played from Active storage. This means
        # that only part of the segments would move from archive to recon.
        # The rest will move from active to archive. So both PARTIALLY_STORED
        # and STORED are valid
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.RECON_STORAGE,
            [Cos.RECORDING_STORED, Cos.RECORDING_PARTIALLY_STORED])
        assert is_valid, ValidationError.SEGMENT_NOT_MOVED.format(
            Cos.RECON_STORAGE, recording_id)
    finally:
        # BUG FIX: guard cleanup — these are None if setup failed before
        # creation, and an AttributeError here would mask the real failure
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_sanity_hybrid_copy(stream, name, copy_type):
    """
    Schedule UNIQUE and COMMON copy on same channel and verify the recording
    and playback.

    Waits for archival to COMPLETE, verifies the recording has left active
    storage and is present in archive storage, then plays it back.
    """
    recording = None
    web_service_obj = None
    try:
        # Recording window: +30s to +80s on the stream clock
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(StartTime=start_time,
                                              EndTime=end_time,
                                              StreamId=stream,
                                              copyType=copy_type)
        recording_id = recording.get_entry(0).RecordingId
        web_service_obj = notification_utils.get_web_service_object(
            recording_id)
        recording.get_entry(0).UpdateUrl = web_service_obj.get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording(
            recording_id, web_service_obj)
        assert is_valid, error
        # After archival completes the segments must have moved out of
        # active storage into archive storage
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.COMPLETE)
        response = rio.find_recording(recording_id).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED)
        assert is_valid, error
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_playback(recording_id)
        assert is_valid, error
    finally:
        # BUG FIX: guard cleanup — these are None if setup failed before
        # creation, and an AttributeError here would mask the real failure
        if web_service_obj:
            web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_rec_033_015_(total_recording, stream, name, copy_type):
    """
    Create multiple recordings with copy type as COMMON.

    Creates `total_recording` recordings in one request, validates them in
    parallel, checks active-storage placement (COMMON copies share segments,
    so only one physical copy exists), waits for archival for UNIQUE copies,
    and finally plays all of them back in parallel.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    try:
        queue = Queue.Queue()
        # Common recording window for every entry: +30s to +60s
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(total_recordings=total_recording,
                                              StartTime=start_time,
                                              EndTime=end_time,
                                              copyType=copy_type,
                                              StreamId=stream)
        # One notification endpoint per recording entry
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # Validate all recordings concurrently
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objects[i]),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # BUG FIX: the original ran this exact active-storage validation loop
        # twice back to back (copy-paste duplication); merged into one pass
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                # COMMON copies share segments: no second physical copy
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)
                assert is_valid, error
        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            # UNIQUE copies get archived; verify placement and copy count
            archive_helper.wait_for_archival(
                stream, recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)
                assert is_valid, error
        # Play back all recordings concurrently
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        # BUG FIX: guard deletion — recording is None if setup failed early
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_arc_(name, stream, event):
    """
    test_tc_arc_[009_multiple_playback_one_unique-ARCHIVE --> Create multiple
    recordings with copy type as UNIQUE, wait for them to get archived,
    playback one of the them and verify if the recording is present both in
    the ARCHIVE_STORAGE AND RECON_STORAGE

    test_tc_arc_[011_multiple_playback_one_re_archive_unique-RE_ARCHIVE -->
    Create multiple recordings with copy type as UNIQUE, wait for them to get
    archived, playback one, and wait till it gets re-archived
    """
    LOGGER.info("#####################################################################")
    LOGGER.info("Starting test_tc_arc_%s", name)
    total_recordings = 3
    recording = None
    web_service_objs = []
    recording_pool = None
    try:
        queue = Queue.Queue()
        # Shared recording window for all entries: +30s to +60s
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        # NOTE(review): no copyType passed — presumably the model defaults
        # to UNIQUE as the docstring implies; confirm against recording_model
        recording = recording_model.Recording(total_recordings=total_recordings,
                                              StartTime=start_time,
                                              EndTime=end_time, StreamId=stream)
        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # The first entry is the one that gets played back later
        recording_id = recording.get_entry(0).RecordingId
        recording_pool = mp_pool.ThreadPool(processes=total_recordings)
        for i in range(total_recordings):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objs[i]),
                callback=queue.put)
        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error
        archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE,
                                         Archive.COMPLETE)
        # All copies must have left active storage and landed in archive
        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, i)
            assert is_valid, error
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE)
            assert is_valid, error
        # Play back one recording; its segments should appear in recon storage
        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id)
        assert is_valid, error
        response = rio.find_recording(recording.get_entry(0).RecordingId).json()
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        is_valid, error = validate_storage.validate_recording_in_storage(
            response, Cos.RECON_STORAGE, Cos.RECORDING_STORED)
        assert is_valid, error
        if event == Archive.RE_ARCHIVE:
            archive_helper.wait_for_archival(stream, recording_id,
                                             Archive.RE_ARCHIVE,
                                             Archive.COMPLETE)
            # BUG FIX: re-fetch state after re-archival — the previous
            # response predates the RE_ARCHIVE and cannot show recon emptied
            response = rio.find_recording(
                recording.get_entry(0).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.RECON_STORAGE, Cos.RECORDING_NOT_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
        # All recordings must still be playable at the end
        for i in range(total_recordings):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId,),
                                       callback=queue.put)
        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()
        # BUG FIX: guard deletion — recording is None if setup failed early
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_er_006_007_incomplete_delete_playback_US62461(stream, name, copy_type):
    """
    Create multiple recordings with copy type as COMMON.

    Schedules 20 recordings (10 sharing a start/end time, 10 staggered),
    kills the segment recorder mid-recording to force INCOMPLETE state,
    deletes all but the last recording, and verifies the surviving one is
    still playable after archival.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    # BUG FIX: pre-initialize — the finally block referenced this name,
    # which is unbound (NameError) if an exception fires before the loop
    rec_with_diff_time = None
    total_recording = 20
    diff_start_time_recordings = 10
    same_start_time_recordings = 10
    service_name = V2pc.MANIFEST_AGENT
    namespace = Component.VMR
    try:
        # Taking backup of v2pc pod config info, editing the config and then
        # restarting the services
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error
        is_valid, error = v2pc_edit_manifest_config(V2pc.MANIFEST_AGENT,
                                                    batch_size='4')
        assert is_valid, error
        is_valid, error = verify_batch_size_update(service_name, namespace, "4")
        assert is_valid, error
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 160,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=same_start_time_recordings, StartTime=start_time,
            EndTime=end_time, copyType=copy_type, StreamId=stream)
        # Append 10 more entries, each offset by i ms so start times differ
        for i in range(diff_start_time_recordings,
                       same_start_time_recordings + diff_start_time_recordings):
            start_time = utils.get_formatted_time(
                (constants.SECONDS * 30) + i, TimeFormat.TIME_FORMAT_MS, stream)
            end_time = utils.get_formatted_time(
                (constants.SECONDS * 160) + i, TimeFormat.TIME_FORMAT_MS, stream)
            rec_with_diff_time = recording_model.Recording(
                total_recordings=1, StartTime=start_time, EndTime=end_time,
                copyType=copy_type, StreamId=stream)
            rec_with_diff_time.Entries[0].RecordingId = \
                RecordingAttribute.RECORDING_ID_PREFIX + \
                rec_with_diff_time.RequestId + '_' + str(i)
            recording.Entries.append(rec_with_diff_time.get_entry(0))
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        last_recording_id = rec_with_diff_time.Entries[0].RecordingId
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        # Sending recording request to create recording
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        response = rio.find_recording(last_recording_id).json()
        if not response:
            # NOTE(review): returning a tuple from a pytest test silently
            # passes — kept for behavior compatibility, but an assert would
            # be the conventional fix
            return False, ValidationError.RECORDING_RESPONSE_EMPTY.format(
                last_recording_id)
        # Wait until the last recording's start time before expecting STARTED
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.STARTED,
                 wait_time),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # restarting segment recorder to make INCOMPLETE recording
        is_valid, error = delete_vmr_pods(V2pc.SEGMENT_RECORDER)
        assert is_valid, error
        # Verifying recording INCOMPLETE STATE
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_pool.apply_async(
                validate_recordings.validate_recording_end_state,
                (recording_id, [constants.RecordingStatus.INCOMPLETE]),
                dict(web_service_obj=web_service_objects[i], end_time=end_time),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        recording.Entries.pop(-1)
        # Deleting all but one recording
        response = a8.delete_recording(recording)
        # wait for garbage collection / archival of the surviving recording
        archive_helper.wait_for_archival(stream,
                                         recording.get_entry(-1).RecordingId,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        # Verifying playback of recording
        is_valid, error = validate_recordings.validate_playback_using_vle(
            rec_with_diff_time.Entries[0].RecordingId,)
        assert is_valid, error
    finally:
        # Revert back the v2pc config changes
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()
        # BUG FIX: guard on the object actually deleted — the original
        # tested `recording` but deleted `rec_with_diff_time`
        if rec_with_diff_time:
            response = a8.delete_recording(rec_with_diff_time)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9782_tc_arc_012_playback_after_archival_delete_during_playback( stream): """ Create a recording with unique copy, wait until archiving completes, playback the recording and delete the recording during playback """ recording = None web_service_obj = None try: start_time = utils.get_formatted_time(constants.SECONDS * 30, TimeFormat.TIME_FORMAT_MS, stream) end_time = utils.get_formatted_time(constants.SECONDS * 60, TimeFormat.TIME_FORMAT_MS, stream) recording = recording_model.Recording(StartTime=start_time, EndTime=end_time, StreamId=stream) recording_id = recording.get_entry(0).RecordingId web_service_obj = notification_utils.get_web_service_object( recording_id) recording.get_entry(0).UpdateUrl = web_service_obj.get_url() LOGGER.debug("Recording instance created=%s", recording.serialize()) response = a8.create_recording(recording) is_valid, error = validate_common.validate_http_response_status_code( response, requests.codes.no_content) assert is_valid, error is_valid, error = validate_recordings.validate_recording( recording_id, web_service_obj) assert is_valid, error vmr_plybk_url = utils.get_vmr_playback_url(recording_id=recording_id) print "[INFO: ] vmr playback url ", vmr_plybk_url archive_helper.wait_for_archival(stream, recording_id, Archive.ARCHIVE, Archive.COMPLETE) response = rio.find_recording(recording_id).json() print "[INFO: ] recording details ", response is_valid, error = validate_storage.validate_recording_in_storage( response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED) assert is_valid, error queue = Queue.Queue() playback_pool = mp_pool.ThreadPool(processes=1) playback_pool.apply_async( validate_recordings.validate_playback_using_vle, (recording_id, ), callback=queue.put) #time.sleep(10 * constants.SECONDS) is_valid, error = validate_storage.validate_recording_in_storage( response, Cos.RECON_STORAGE, Cos.RECORDING_STORED) assert is_valid, ValidationError.SEGMENT_NOT_MOVED.format( Cos.RECON_STORAGE, response) del_resp = 
a8.delete_recording(recording) is_valid, error = validate_common.validate_http_response_status_code( del_resp, requests.codes.no_content) assert is_valid, error is_valid, error = validate_recordings.validate_recording_deletion( recording_id) assert is_valid, error is_valid, error = queue.get() assert is_valid, error if is_valid: # verifying whether the recording available in VMR vmr_plybk_url = utils.get_vmr_playback_url( recording_id=recording_id) resp = requests.get(vmr_plybk_url) assert not ( resp.status_code == requests.codes.ok ), ValidationError.INCORRECT_HTTP_RESPONSE_STATUS_CODE.format( resp.status_code, resp.reason, resp.url) finally: web_service_obj.stop_server() response = a8.delete_recording(recording) LOGGER.debug("Recording clean up status code=%s", response.status_code)
def test_rtc9736_tc_er_012_hybrid_archival_batch_size_multiple(stream):
    """
    Hybrid copy - archiving: Schedule Unique copy and common copy recording
    (20 - 10 same start/end time, 10 different start/end times), batch size 4.

    Sets the manifest-agent batch size to 4, creates 10 COMMON recordings
    with identical windows plus 10 UNIQUE recordings with staggered windows,
    waits for STARTED and COMPLETE notifications, verifies archive-storage
    placement and copy counts, then plays every recording back.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    total_recording = 20
    diff_start_time_recordings = 10
    same_start_time_recordings = 10
    service_name = V2pc.MANIFEST_AGENT
    namespace = Component.VMR
    try:
        # Taking backup of v2pc pod config info and editing the config and
        # then restarting the services
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error
        is_valid, error = v2pc_edit_manifest_config(V2pc.MANIFEST_AGENT,
                                                    batch_size='4')
        assert is_valid, error
        is_valid, error = verify_batch_size_update(service_name, namespace, "4")
        assert is_valid, error
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        # 10 COMMON recordings sharing one window
        recording = recording_model.Recording(
            total_recordings=same_start_time_recordings, StartTime=start_time,
            EndTime=end_time, copyType=RecordingAttribute.COPY_TYPE_COMMON,
            StreamId=stream)
        # Plus 10 UNIQUE recordings, each offset by i ms
        for i in range(diff_start_time_recordings,
                       same_start_time_recordings + diff_start_time_recordings):
            start_time = utils.get_formatted_time(
                (constants.SECONDS * 30) + i, TimeFormat.TIME_FORMAT_MS, stream)
            end_time = utils.get_formatted_time(
                (constants.SECONDS * 80) + i, TimeFormat.TIME_FORMAT_MS, stream)
            rec_with_diff_time = recording_model.Recording(
                total_recordings=1, StartTime=start_time, EndTime=end_time,
                copyType=RecordingAttribute.COPY_TYPE_UNIQUE, StreamId=stream)
            rec_with_diff_time.Entries[0].RecordingId = \
                RecordingAttribute.RECORDING_ID_PREFIX + \
                rec_with_diff_time.RequestId + '_' + str(i)
            recording.Entries.append(rec_with_diff_time.get_entry(0))
        last_recording_id = rec_with_diff_time.Entries[0].RecordingId
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        # Sending recording request to create recording
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        response = rio.find_recording(last_recording_id).json()
        if not response:
            # NOTE(review): returning a tuple from a pytest test silently
            # passes — kept for behavior compatibility
            return False, ValidationError.RECORDING_RESPONSE_EMPTY.format(
                last_recording_id)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        # Verifying recording is started or not
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.STARTED,
                 wait_time),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        end_time = utils.get_parsed_time(
            response[0][RecordingAttribute.END_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        # Verifying recording is completed or not
        # BUG FIX: reuse the existing thread pool — the original created a
        # second ThreadPool here, leaking the first (only the last one was
        # closed in the finally block)
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.COMPLETE,
                 wait_time),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # Verifying archive storage after archival time
        archive_helper.wait_for_archival(stream,
                                         recording.get_entry(0).RecordingId,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE)
            assert is_valid, error
        # Playback verification after recording completed
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId,),
                                       callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        # Revert back the v2pc config changes
        is_valid, error = cleanup(redeploy_config_map, service_name, revert=True)
        assert is_valid, error
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_arc(name, stream, archive_playback):
    """
    Schedule recording with same start time, end time with unique copy,
    playback recording single recording and delete it, finally playback
    remaining records.

    Flow: create 3 UNIQUE-copy recordings -> wait for archival -> optionally
    play back the first one via VLE -> delete it -> verify it is gone and not
    playable -> play back the remaining entries.
    """
    total_recordings = 3
    recording = None
    web_service_objs = []
    recording_pool = None
    try:
        queue = Queue.Queue()
        # Recording window: 30s..60s from now on the given stream
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        copy_type = RecordingAttribute.COPY_TYPE_UNIQUE
        recording = recording_model.Recording(
            total_recordings=total_recordings, StartTime=start_time,
            EndTime=end_time, copyType=copy_type, StreamId=stream)
        # One notification web service per recording entry
        for i in range(total_recordings):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objs.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objs[i].get_url()
            LOGGER.debug("Recording=%s, UpdateURL=%s", recording_id,
                         web_service_objs[i].get_url())
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # Validate all recordings complete, in parallel
        recording_pool = mp_pool.ThreadPool(processes=total_recordings)
        for i in range(total_recordings):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objs[i]),
                callback=queue.put)
        for i in range(total_recordings):
            is_valid, error = queue.get()
            assert is_valid, error
        # Validating the copy count of unique copy recording in active storage
        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ACTIVE_STORAGE, total_recordings)
            assert is_valid, error
        # Wait till archival completes
        recording_id_0 = recording.get_entry(0).RecordingId
        archive_helper.wait_for_archival(stream, recording_id_0,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        # Validating copy count in archive storage after archival duration
        for i in range(total_recordings):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE, 1)
            assert is_valid, error
        if archive_playback:
            response = rio.find_recording(recording_id_0).json()
            # Validating the first recording for playback
            is_valid, error = validate_recordings.validate_playback_using_vle(
                recording_id_0)
            assert is_valid, error
            # Validating the copy of segments in the archive folder after playback
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            # Validating the segments in recon folder after playback
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.RECON_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
        # Delete only the first entry of the recording request
        response = a8.delete_recording(recording,
                                       recording.get_entry(0).RecordingId)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        is_valid, error = validate_recordings.validate_recording_deletion(
            recording_id_0)
        assert is_valid, error
        # Deleted recording must no longer be playable
        is_valid, error = validate_recordings.validate_playback_using_vle(
            recording_id_0)
        assert not is_valid, ValidationError.DELETED_RECORDING_PLAYED_BACK.format(
            recording_id_0)
        # Drop the deleted entry so only the remaining records are checked
        del recording.get_entries()[:1]
        # Check remaining records still in archive
        if archive_playback:
            for recording_entry in recording.get_entries():
                response = rio.find_recording(
                    recording_entry.RecordingId).json()
                recording_pool.apply_async(
                    validate_storage.validate_recording_in_storage,
                    (response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED),
                    callback=queue.put)
            for i in range(len(recording.get_entries())):
                is_valid, error = queue.get()
                assert is_valid, error
        # Finally play back the remaining records
        for recording_entry in recording.get_entries():
            recording_pool.apply_async(
                validate_recordings.validate_playback_using_vle,
                (recording_entry.RecordingId, ), callback=queue.put)
        for i in range(len(recording.get_entries())):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objs:
            web_service_obj.stop_server()
        # BUG FIX: recording may still be None if setup failed before
        # creation; guard the cleanup delete like the sibling tests do.
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9735_er_011_hybrid_recording_recovery_manifest_restart(stream):
    """
    Hybrid (UNIQUE and COMMON) copy Recording recovery.

    Creates 5 COMMON-copy and 5 UNIQUE-copy recordings on one stream,
    restarts the manifest agent while they are in progress, and verifies
    every recording still completes, lands in active storage and is archived.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    total_recording = 10
    common_copy_recordings = 5
    unique_copy_recordings = 5
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 120,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        # First 5 entries: COMMON copy
        recording = recording_model.Recording(
            total_recordings=common_copy_recordings, StartTime=start_time,
            EndTime=end_time, copyType=RecordingAttribute.COPY_TYPE_COMMON,
            StreamId=stream)
        # Append 5 UNIQUE-copy entries with distinct recording ids
        for i in range(unique_copy_recordings, total_recording):
            rec_uniq = recording_model.Recording(
                total_recordings=unique_copy_recordings, StartTime=start_time,
                EndTime=end_time, copyType=RecordingAttribute.COPY_TYPE_UNIQUE,
                StreamId=stream)
            rec_uniq.Entries[0].RecordingId = (
                RecordingAttribute.RECORDING_ID_PREFIX + rec_uniq.RequestId +
                '_' + str(i))
            recording.Entries.append(rec_uniq.get_entry(0))
            last_recording_id = rec_uniq.Entries[0].RecordingId
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        # Sending recording request to create recording
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        response = rio.find_recording(last_recording_id).json()
        # BUG FIX: 'return False, ...' in a pytest test silently passes;
        # assert instead so an empty RIO response fails the test.
        assert response, ValidationError.RECORDING_RESPONSE_EMPTY.format(
            last_recording_id)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        # Verifying recording is started or not
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.STARTED,
                 wait_time), callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # Restarting manifest agent
        is_valid, error = delete_vmr_pods(V2pc.MANIFEST_AGENT)
        assert is_valid, error
        end_time = utils.get_parsed_time(
            response[0][RecordingAttribute.END_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        # Verifying recording completes despite the agent restart
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.COMPLETE,
                 wait_time), callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # Verifying recording in storage
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
        # Verifying Archive storage
        archive_helper.wait_for_archival(stream,
                                         recording.get_entry(0).RecordingId,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_copy_count(
                response, Cos.ARCHIVE_STORAGE)
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_tc_rec_008_034_(total_recording, stream, name, copy_type):
    """
    Update the start time of a recording before it starts for both unique/
    common copy types.

    Creates recordings starting 20s out, pushes their start time to 40s out
    before they begin, verifies the new start time took effect, then checks
    completion, storage placement and playback.
    """
    recording = None
    web_service_objects = []
    recording_pool = None
    recording_id_list = []
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 20,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 80,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=total_recording, StartTime=start_time,
            EndTime=end_time, StreamId=stream, copyType=copy_type)
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_id_list.append(recording_id)
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        # Move each entry's start time out to 40s before the recording begins
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            start_time = response[0][RecordingAttribute.START_TIME][:-1]
            LOGGER.debug("Scheduled start time of recording=%s is %s",
                         recording_id_list[i], start_time)
            recording.get_entry(i).StartTime = utils.get_formatted_time(
                constants.SECONDS * 40, TimeFormat.TIME_FORMAT_MS, stream)
            LOGGER.debug("Updated start time of recording=%s to %s",
                         recording_id_list[i], recording.get_entry(i).StartTime)
        # Update the recording with the updated start time
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # Validating whether the updated start time was populated or not
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            is_valid, error = validate_recordings.validate_time(
                response, recording.get_entry(i).StartTime,
                RecordingAttribute.START_TIME)
            assert is_valid, error
        # Wait for all recordings to complete, in parallel
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objects[i]),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                # COMMON copies share segments, so no per-recording copy
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)
                assert is_valid, error
        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            # UNIQUE copies are archived per recording
            archive_helper.wait_for_archival(
                stream, recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)
                assert is_valid, error
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        # BUG FIX: recording may still be None if setup failed before
        # creation; guard the cleanup delete like the sibling tests do.
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9769_tc_rec_037_(total_recording, stream, name, copy_type):
    """
    Update the end time of a recording after it starts.

    Creates recordings ending 70s out, waits for them to start, shortens the
    end time to 60s out, then verifies completion, storage placement,
    playback and the actual end time reported by RIO.
    """
    recording = None
    web_service_objects = []
    recording_pool = None
    recording_id_list = []
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 70,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        recording = recording_model.Recording(
            total_recordings=total_recording, StartTime=start_time,
            EndTime=end_time, StreamId=stream, copyType=copy_type)
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_id_list.append(recording.get_entry(i).RecordingId)
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # wait till the STARTED notification
        response = rio.find_recording(recording_id).json()
        LOGGER.debug("Response=%s", response)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        if current_time < start_time:
            utils.wait(start_time - current_time, constants.TIME_DELTA)
        for i in range(total_recording):
            is_valid, error = validate_recordings.validate_notification(
                web_service_objects[i], constants.RecordingStatus.STARTED)
            assert is_valid, error
        # Updating the end time
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            LOGGER.debug("Response=%s", response)
            end_time = response[0][RecordingAttribute.END_TIME][:-1]
            # BUG FIX: these two log lines previously passed the stale
            # 'recording_id' (the last id from the creation loop) and so
            # attributed every update to the wrong recording.
            LOGGER.debug("Scheduled end time of recording=%s is %s",
                         recording_id_list[i], end_time)
            recording.get_entry(i).EndTime = utils.get_formatted_time(
                constants.SECONDS * 60, TimeFormat.TIME_FORMAT_MS, stream)
            new_end_time = recording.get_entry(i).EndTime[:-1]
            LOGGER.debug("Updated end time of recording=%s to %s",
                         recording_id_list[i], recording.get_entry(i).EndTime)
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        # Wait until the (shortened) end time passes
        end_time = utils.get_parsed_time(new_end_time)
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((end_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.COMPLETE,
                 wait_time), callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
            if copy_type == RecordingAttribute.COPY_TYPE_COMMON:
                # COMMON copies share segments, so no per-recording copy
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED, 1)
                assert is_valid, error
        if copy_type == RecordingAttribute.COPY_TYPE_UNIQUE:
            archive_helper.wait_for_archival(
                stream, recording.get_entry(0).RecordingId, Archive.ARCHIVE,
                Archive.COMPLETE)
            for i in range(total_recording):
                response = rio.find_recording(
                    recording.get_entry(i).RecordingId).json()
                is_valid, error = validate_storage.validate_recording_in_storage(
                    response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
                assert is_valid, error
                is_valid, error = validate_storage.validate_copy_count(
                    response, Cos.ARCHIVE_STORAGE)
                assert is_valid, error
        for i in range(total_recording):
            recording_pool.apply_async(validate_recordings.validate_playback,
                                       (recording.get_entry(i).RecordingId, ),
                                       callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # Verify the actual end time matches the updated end time
        for i in range(total_recording):
            response = rio.find_recording(recording_id_list[i]).json()
            is_valid, error = validate_recordings.validate_actual_time(
                response, recording.get_entry(i).EndTime,
                RecordingAttribute.ACTUAL_END_TIME)
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        # BUG FIX: recording may still be None if setup failed before
        # creation; guard the cleanup delete like the sibling tests do.
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9790_tc_rec_039_bulk_deletion_archive_assets(
        total_recording, stream):
    """
    Bulk deletion of assets in Archive(20 requests)

    Creates ``total_recording`` UNIQUE-copy recordings on one stream, waits
    for archival to complete, deletes them all in a single bulk request, then
    verifies every recording is gone and can no longer be played back.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    recording_id_list = []
    try:
        queue = Queue.Queue()
        # Recording window: 30s..60s from now on the given stream
        start_time = utils.get_formatted_time(constants.SECONDS * 30,
                                              TimeFormat.TIME_FORMAT_MS, stream)
        end_time = utils.get_formatted_time(constants.SECONDS * 60,
                                            TimeFormat.TIME_FORMAT_MS, stream)
        copy_type = RecordingAttribute.COPY_TYPE_UNIQUE
        recording = recording_model.Recording(
            total_recordings=total_recording, StartTime=start_time,
            EndTime=end_time, copyType=copy_type, StreamId=stream)
        # One notification web service per recording entry
        for i in range(total_recording):
            recording_id = recording.get_entry(i).RecordingId
            recording_id_list.append(recording_id)
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        # Validate all recordings complete, in parallel
        recording_pool = mp_pool.ThreadPool(processes=total_recording)
        for i in range(total_recording):
            recording_pool.apply_async(
                validate_recordings.validate_recording,
                (recording.get_entry(i).RecordingId, web_service_objects[i]),
                callback=queue.put)
        for i in range(total_recording):
            is_valid, error = queue.get()
            assert is_valid, error
        # Wait for archival, then confirm each asset has moved out of active
        # storage and into archive storage
        archive_helper.wait_for_archival(stream, recording_id_list[0],
                                         Archive.ARCHIVE, Archive.COMPLETE)
        for i in range(total_recording):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ACTIVE_STORAGE, Cos.RECORDING_NOT_STORED)
            assert is_valid, error
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED)
            assert is_valid, error
        # Bulk delete: a single request removes every entry of the recording
        response = a8.delete_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        for i in range(total_recording):
            is_valid, error = validate_recordings.validate_recording_deletion(
                recording_id_list[i])
            assert is_valid, error
            # Deleted recordings must not be playable any more
            is_valid, error = validate_recordings.validate_playback_using_vle(
                recording_id_list[i])
            assert not is_valid, ValidationError.DELETED_RECORDING_PLAYED_BACK.format(
                recording_id_list[i])
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            web_service_obj.stop_server()
        if recording:
            # Best-effort cleanup; harmless if the bulk delete above succeeded
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)
def test_rtc9729_er_005_recording_incomplete_us62460(stream):
    """
    Archiving of INCOMPLETE UNIQUE copy recordings and playback.

    Creates several UNIQUE-copy recordings, restarts the segment recorder
    while they are in progress so they end in the INCOMPLETE state, then
    verifies the incomplete assets are still archived and playable via VLE.
    """
    web_service_objects = []
    recording_pool = None
    recording = None
    copy_type = RecordingAttribute.COPY_TYPE_UNIQUE
    total_rec = 3
    recording_duration = 30  # in sec
    try:
        queue = Queue.Queue()
        start_time = utils.get_formatted_time(
            constants.SECONDS * recording_duration, TimeFormat.TIME_FORMAT_MS,
            stream)
        end_time = utils.get_formatted_time(
            constants.SECONDS * recording_duration * 5,
            TimeFormat.TIME_FORMAT_MS, stream)
        # same start time for all recordings
        recording = recording_model.Recording(total_recordings=total_rec,
                                              StartTime=start_time,
                                              EndTime=end_time,
                                              copyType=copy_type,
                                              StreamId=stream)
        last_recording_id = recording.Entries[total_rec - 1].RecordingId
        # get recording id and update url
        for i in range(total_rec):
            recording_id = recording.get_entry(i).RecordingId
            web_service_objects.append(
                notification_utils.get_web_service_object(recording_id))
            recording.get_entry(i).UpdateUrl = web_service_objects[i].get_url()
        # create recording
        LOGGER.debug("Recording instance created=%s", recording.serialize())
        response = a8.create_recording(recording)
        is_valid, error = validate_common.validate_http_response_status_code(
            response, requests.codes.no_content)
        assert is_valid, error
        response = rio.find_recording(last_recording_id).json()
        # BUG FIX: 'return False, ...' in a pytest test silently passes;
        # assert instead so an empty RIO response fails the test.
        assert response, ValidationError.RECORDING_RESPONSE_EMPTY.format(
            last_recording_id)
        start_time = utils.get_parsed_time(
            response[0][RecordingAttribute.START_TIME][:-1])
        current_time = datetime.datetime.utcnow()
        wait_time = utils.add_time_to_secs((start_time - current_time),
                                           constants.SECONDS)
        if wait_time < 0:
            wait_time = 0
        # validate recording is started
        recording_pool = mp_pool.ThreadPool()
        for i in range(total_rec):
            recording_pool.apply_async(
                validate_recordings.validate_notification,
                (web_service_objects[i], constants.RecordingStatus.STARTED,
                 wait_time), callback=queue.put)
        for i in range(total_rec):
            is_valid, error = queue.get()
            assert is_valid, error
        # restarting segment recorder to make INCOMPLETE recording
        is_valid, error = delete_vmr_pods(V2pc.SEGMENT_RECORDER)
        assert is_valid, error
        # Verifying recording INCOMPLETE STATE
        for i in range(total_rec):
            recording_id = recording.get_entry(i).RecordingId
            recording_pool.apply_async(
                validate_recordings.validate_recording_end_state,
                (recording_id, [constants.RecordingStatus.INCOMPLETE]),
                dict(web_service_obj=web_service_objects[i],
                     end_time=end_time),
                callback=queue.put)
        for i in range(total_rec):
            is_valid, error = queue.get()
            assert is_valid, error
        # recording should have been completed by this time
        # Verifying recording in archive storage
        archive_helper.wait_for_archival(stream,
                                         recording.get_entry(0).RecordingId,
                                         Archive.ARCHIVE, Archive.COMPLETE)
        for i in range(total_rec):
            response = rio.find_recording(
                recording.get_entry(i).RecordingId).json()
            is_valid, error = validate_storage.validate_recording_in_storage(
                response, Cos.ARCHIVE_STORAGE, Cos.RECORDING_STORED,
                rec_status='INCOMPLETE')
            assert is_valid, error
        # Playback using VLE
        for i in range(total_rec):
            recording_pool.apply_async(
                validate_recordings.validate_playback_using_vle,
                (recording.get_entry(i).RecordingId, ), callback=queue.put)
        for i in range(total_rec):
            is_valid, error = queue.get()
            assert is_valid, error
    finally:
        if recording_pool:
            recording_pool.close()
            recording_pool.join()
        for web_service_obj in web_service_objects:
            if web_service_obj:
                web_service_obj.stop_server()
        if recording:
            response = a8.delete_recording(recording)
            LOGGER.debug("Recording clean up status code=%s",
                         response.status_code)