def test_get_config_does_not_exist(self):
    """
    Tests that utils.get_config raises IOError if the config file does not exist.
    """
    del os.environ['VIDEO_PIPELINE_CFG']
    with self.assertRaises(IOError):
        utils.get_config(yaml_config_file='does_not_exist')
def test_get_config_with_path(self):
    """
    Tests that utils.get_config works as expected when reading config from environment path.
    """
    with patch('VEDA.utils.STATIC_CONFIG_FILE_PATH', self.static_file_path):
        instance_config = utils.get_config()
        self.assertDictEqual(instance_config, dict(TEST_CONFIG, **TEST_STATIC_CONFIG))
def test_get_config_with_default(self):
    """
    Tests that utils.get_config works as expected when reading default config.
    """
    del os.environ['VIDEO_PIPELINE_CFG']
    instance_config = utils.get_config()
    self.assertNotEqual(instance_config, {})

    # read the default config file
    default_yaml_config_file = os.path.join(
        utils.CONFIG_ROOT_DIR,
        utils.DEFAULT_CONFIG_FILE_NAME
    )
    with open(default_yaml_config_file, 'r') as config:
        config_dict = yaml.safe_load(config)

    # read the default static config file
    with open(utils.STATIC_CONFIG_FILE_PATH, 'r') as config:
        static_config_dict = yaml.safe_load(config)

    self.assertDictEqual(
        instance_config,
        dict(config_dict, **static_config_dict)
    )
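# ---------------------------------------------------------------------------
# Illustrative sketch only (not the shipped implementation): a minimal
# get_config() matching the behaviour the three tests above exercise -- read
# the instance YAML named by VIDEO_PIPELINE_CFG (or the default file), then
# overlay the static YAML on top. The default file name and static path below
# are assumptions; the real values live in VEDA/utils.py.
# ---------------------------------------------------------------------------
import os

import yaml

CONFIG_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_CONFIG_FILE_NAME = 'instance_config.yaml'  # assumed file name
STATIC_CONFIG_FILE_PATH = os.path.join(CONFIG_ROOT_DIR, 'static_config.yaml')  # assumed path


def get_config(yaml_config_file=DEFAULT_CONFIG_FILE_NAME):
    """
    Return the instance config merged with the static config.
    """
    config_file_path = os.environ.get(
        'VIDEO_PIPELINE_CFG',
        os.path.join(CONFIG_ROOT_DIR, yaml_config_file)
    )
    # Raises IOError/OSError when the config file does not exist.
    with open(config_file_path) as config_file:
        instance_config = yaml.safe_load(config_file) or {}
    with open(STATIC_CONFIG_FILE_PATH) as static_file:
        static_config = yaml.safe_load(static_file) or {}
    # Static values win, mirroring dict(config_dict, **static_config_dict) in the tests.
    return dict(instance_config, **static_config)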
def __init__(self, **kwargs):
    self.auth_dict = get_config()
    self.body = kwargs.get('body', None)
    self.subject = kwargs.get('subject', None)
    self.additional_recipients = kwargs.get('additional_recipients', [])
    self.production_environment = self.auth_dict['environment'] == "production"
def __init__(self, course_object, video_proto, **kwargs):
    self.course_object = course_object
    self.video_proto = video_proto
    self.auth_dict = get_config()
    self.node_work_directory = kwargs.get('node_work_directory', WORK_DIRECTORY)
    self.full_filename = kwargs.get('full_filename', None)
    self.complete = False
    self.archived = False
def __init__(self, **kwargs):
    self.current_time = datetime.datetime.utcnow().replace(tzinfo=utc)
    self.auth_dict = get_config()
    # for individuals
    self.video_query = kwargs.get('video_query', None)
    self.freezing_bug = kwargs.get('freezing_bug', False)
    self.val_status = None
    self.retry_barrier_hours = 24
    self.no_audio = kwargs.get('no_audio', False)
def __init__(self, course_object, **kwargs):
    self.course_object = course_object
    self.encode_list = set()
    self.overencode = kwargs.get('overencode', False)
    self.veda_id = kwargs.get('veda_id', None)
    config_data = get_config()
    self.encode_dict = config_data['encode_dict']
    self.sg_server_path = config_data['sg_server_path']
    self.sg_script_name = config_data['sg_script_name']
    self.sg_script_key = config_data['sg_script_key']
def heal_form(request):
    template = loader.get_template("heal.html")
    if request.method == 'POST':
        veda_id = request.POST['veda_id']
        auth_dict = get_config()
        result = celeryapp.web_healer.apply_async(
            args=[veda_id],
            queue=auth_dict['celery_online_heal_queue'],
            connect_timeout=3
        )
        context = {'result': result}
        return HttpResponse(template.render(context=context, request=request))
    return HttpResponse(template.render(request=request))
def __init__(self, **kwargs):
    self.video_info = {}
    self.auth_dict = get_config()
    self.bucket = None
    self.node_work_directory = kwargs.get('node_work_directory', WORK_DIRECTORY)
    # In stage, a course could possibly not exist in the local database
    # but remain referenced by edx-platform.
    # If the course doesn't exist but a course ID and hex are supplied,
    # create the course anyway.
    self.create_course_override = self.auth_dict['environment'] == "stage"
def __init__(self, veda_id, encode_profile, **kwargs):
    self.veda_id = veda_id
    self.encode_profile = encode_profile
    self.auth_dict = kwargs.get('CONFIG_DATA', get_config())
    # Internal Methods
    self.video_query = None
    self.encode_query = None
    self.encoded_file = None
    self.node_work_directory = kwargs.get('node_work_directory', WORK_DIRECTORY)
    self.hotstore_url = None
    self.status = None
    self.endpoint_url = None
    self.video_proto = None
    self.val_status = None
def get_api_url_and_auth_headers():
    """
    Construct the VAL API URL and the request headers for it.
    """
    settings = get_config()
    token = get_auth_token(settings)
    if not token:
        # Token retrieval failed; return the falsy token in place of headers.
        return settings['val_api_url'], token

    # Build and return request headers.
    headers = {
        'Authorization': 'Bearer {token}'.format(token=token),
        'content-type': 'application/json'
    }
    return settings['val_api_url'], headers
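# ---------------------------------------------------------------------------
# Hedged usage sketch: how a caller might consume get_api_url_and_auth_headers()
# with the `requests` library. The '/videos/' endpoint path and the helper name
# are assumptions for illustration; the real callers live elsewhere in this repo.
# ---------------------------------------------------------------------------
import requests


def fetch_val_videos():
    api_url, headers = get_api_url_and_auth_headers()
    if not isinstance(headers, dict):
        # Token retrieval failed: the falsy token was returned instead of headers.
        return None
    response = requests.get(
        '{api_url}/videos/'.format(api_url=api_url.rstrip('/')),  # hypothetical endpoint
        headers=headers,
        timeout=20
    )
    response.raise_for_status()
    return response.json()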
def upload_alpha_1(request):
    """
    TODO:
        Get This to expire in 24h / 1 Time URL
        Generate metadata From Fields
        Auth?
    """
    auth_dict = get_config()
    policy_expiration = datetime.datetime.utcnow() + timedelta(hours=24)
    policy_exp = str(policy_expiration).replace(' ', 'T').split('.')[0] + 'Z'
    policy_document = bytes(' \
        {\"expiration\": \"' + policy_exp + '\", \
        \"conditions\": [ \
        {\"bucket\": \"' + auth_dict['veda_upload_bucket'] + '\"}, \
        [\"starts-with\", \"$key\", \"\"], \
        {\"acl\": \"private\"}, \
        {\"success_action_redirect\": \"../upload_success/\"}, \
        [\"starts-with\", \"$Content-Type\", \"\"], \
        [\"content-length-range\", 0, 500000000000] \
        ] \
        } ', 'utf-8')

    abvid_serial = uuid.uuid1().hex[0:10]
    policy = base64.b64encode(policy_document)
    signature = base64.b64encode(
        hmac.new(
            auth_dict['veda_secret_access_key'].encode('utf-8'),
            policy,
            hashlib.sha1
        ).digest()
    )

    template = loader.get_template('upload_video.html')
    context = {
        'policy': policy,
        'signature': signature,
        'abvid_serial': abvid_serial,
        'access_key': auth_dict['veda_access_key_id'],
        'upload_bucket': auth_dict['veda_upload_bucket'],
    }
    return HttpResponse(template.render(context=context, request=request))
def get_incomplete_encodes(edx_id):
    """
    Get incomplete encodes for the given video.

    Arguments:
        edx_id(unicode): an ID identifying the VEDA video.
    """
    encode_list = []
    try:
        video = Video.objects.filter(edx_id=edx_id).latest()
    except Video.DoesNotExist:
        return encode_list

    course = video.inst_class
    # Pick the encodes map from the settings.
    encodes_map = get_config().get('encode_dict', {})

    # Active encodes according to the course instance.
    for attr, encodes in six.iteritems(encodes_map):
        if getattr(course, attr, False):
            encode_list += [encode.strip() for encode in encodes]

    # Filter active encodes further according to whether their corresponding encode profiles are active.
    for encode in list(encode_list):
        encode_profile = Encode.objects.filter(product_spec=encode).first()
        if not encode_profile or (encode_profile and not encode_profile.profile_active):
            encode_list.remove(encode)

    # Filter out encodes that have already completed successfully for the specified video.
    for encode in list(encode_list):
        completed_encode_profile = URL.objects.filter(
            videoID=video,
            encode_profile__product_spec=encode
        )
        if completed_encode_profile.exists():
            encode_list.remove(encode)

    return encode_list
""" Production environment settings. """ from VEDA.settings.base import * from VEDA.utils import get_config from VEDA.settings.utils import get_logger_config DEBUG = False TEMPLATE_DEBUG = DEBUG DEFATULT_SERVICE_VARIANT_NAME = 'video-pipeline' ALLOWED_HOSTS = ['*'] CONFIG_DATA = get_config() LOGGING = get_logger_config(service_variant=CONFIG_DATA.get('SERVICE_VARIANT_NAME', DEFATULT_SERVICE_VARIANT_NAME)) # Keep track of the names of settings that represent dicts. Instead of overriding the values in base.py, # the values read from disk should UPDATE the pre-configured dicts. DICT_UPDATE_KEYS = ('DATABASES',) # Remove the items that should be used to update dicts, and apply them separately rather # than pumping them into the local vars. dict_updates = {key: CONFIG_DATA.pop(key, None) for key in DICT_UPDATE_KEYS} for key, value in dict_updates.items(): if value: vars()[key].update(value) vars().update(CONFIG_DATA)
import os
import sys

from django.test import TestCase
from boto.s3.connection import S3Connection
from mock import PropertyMock, patch
from moto import mock_s3_deprecated

from VEDA import utils
from control.veda_file_ingest import VideoProto
from control.veda_hotstore import Hotstore

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

CONFIG_DATA = utils.get_config('test_config.yaml')


class TestHotstore(TestCase):

    def setUp(self):
        video_proto = VideoProto()
        video_proto.veda_id = 'XXXXXXXX2014-V00TEST'

        self.upload_filepath = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'test_files',
            'OVTESTFILE_01.mp4'
        )

        with patch.object(Hotstore, '_READ_AUTH', PropertyMock(return_value=lambda: CONFIG_DATA)):
            self.hotstore = Hotstore(
                video_object=video_proto,
                upload_filepath=self.upload_filepath,
                video_proto=video_proto
            )
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'VEDA.settings.local')
django.setup()

from VEDA_OS01.models import Institution
from VEDA_OS01.models import Course
from VEDA_OS01.models import Video
from VEDA_OS01.models import Destination
from VEDA_OS01.models import Encode
from VEDA_OS01.models import URL
from VEDA_OS01.models import VedaUpload

from VEDA.utils import get_config

"""
Central Config
"""
CONFIG = get_config()

DEFAULT_WORK_DIRECTORY = os.path.join(
    os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
    'VEDA_WORKING'
)
WORK_DIRECTORY = CONFIG.get('VEDA_WORKING', DEFAULT_WORK_DIRECTORY)

if not os.path.exists(WORK_DIRECTORY):
    os.mkdir(WORK_DIRECTORY)

"""
Occasionally this throws an error in the env,
but on EC2 with v_videocompile it's no biggie
"""
FFPROBE = "ffprobe"
FFMPEG = "ffmpeg"
from rest_framework.views import APIView

from api import token_finisher
from control.veda_file_discovery import FileDiscovery
from VEDA import utils
from VEDA_OS01.enums import TranscriptionProviderErrorType
from VEDA_OS01.models import (URL, Course, Encode, TranscriptCredentials,
                              TranscriptProvider, Video)
from VEDA_OS01.serializers import (CourseSerializer, EncodeSerializer,
                                   URLSerializer, VideoSerializer)
from VEDA_OS01.transcripts import CIELO24_API_VERSION
from VEDA_OS01.utils import PlainTextParser

LOGGER = logging.getLogger(__name__)

CONFIG = utils.get_config()
CIELO24_LOGIN_URL = utils.build_url(CONFIG['cielo24_api_base_url'], '/account/login')

try:
    boto.config.add_section('Boto')
except:  # The 'Boto' section may already exist.
    pass
boto.config.set('Boto', 'http_socket_timeout', '100')


class CourseViewSet(viewsets.ModelViewSet):
    queryset = Course.objects.all()
    serializer_class = CourseSerializer
    filter_backends = (filters.DjangoFilterBackend, )
def handle(self, *args, **options):
    """
    handle method for command class.
    """
    settings = get_config()
    hls_profile = Encode.objects.get(product_spec='hls')

    LOGGER.info('[Re-encode for HLS] Process started.')

    veda_id = options.get('veda_id')
    if veda_id:
        try:
            video = Video.objects.filter(edx_id=veda_id).latest()
            enqueue_video_for_hls_encode(
                veda_id=video.edx_id,
                encode_queue=settings['celery_worker_queue']
            )
        except Video.DoesNotExist:
            LOGGER.warning('Video "%s" not found.', veda_id)
    else:
        config = EncodeVideosForHlsConfiguration.current()
        all_videos = config.all_videos
        courses = self._validate_course_ids(course_ids=config.course_ids.split())
        commit = config.commit

        if all_videos:
            edx_video_ids = get_videos_wo_hls(batch_size=config.batch_size, offset=config.offset)
        elif courses:
            edx_video_ids = get_videos_wo_hls(courses=courses)
        else:
            LOGGER.info('Missing job configuration.')
            return

        # Result will be None if we are unable to retrieve an edxval token.
        if edx_video_ids is None:
            LOGGER.info('Unable to get edxval Token.')
            return

        veda_videos = Video.objects.filter(studio_id__in=edx_video_ids)
        veda_video_ids = veda_videos.values_list('edx_id', flat=True)
        videos_with_hls_encodes = (URL.objects
                                   .filter(encode_profile=hls_profile, videoID__edx_id__in=veda_video_ids)
                                   .values_list('videoID__edx_id', flat=True)
                                   .distinct())

        # Log stats about VEDA vs VAL videos.
        num_videos_found_in_veda = veda_videos.count()
        num_videos_not_found_in_veda = len(edx_video_ids) - veda_videos.count()
        num_videos_hls_profile_found_in_veda = videos_with_hls_encodes.count()
        num_videos_actually_needing_hls_encode = veda_videos.count() - num_videos_hls_profile_found_in_veda
        LOGGER.info(
            (u"[run=%s] videos(found in VEDA)=%s - "
             u"videos(not found in veda)=%s - "
             u"videos(hls profile present)=%s - "
             u"videos(hls profile not present)=%s."),
            config.command_run,
            num_videos_found_in_veda,
            num_videos_not_found_in_veda,
            num_videos_hls_profile_found_in_veda,
            num_videos_actually_needing_hls_encode,
        )

        # Check if this job is configured for a dry run.
        if commit:
            for veda_id in veda_video_ids:
                video = veda_videos.filter(edx_id=veda_id).latest()
                if veda_id in videos_with_hls_encodes:
                    # Update the URL's value in edxval directly.
                    LOGGER.warning(
                        '[run=%s] HLS encode is present for video=%s in VEDA.',
                        config.command_run, veda_id
                    )
                    try:
                        video_encode = URL.objects.filter(videoID=video, encode_profile=hls_profile).latest()
                    except URL.DoesNotExist:
                        LOGGER.warning(
                            '[run=%s] HLS encode not found for video=%s in VEDA.',
                            config.command_run, veda_id
                        )
                        continue

                    if self._validate_video_encode(video_encode):
                        response = update_hls_profile_in_val(video.studio_id, 'hls', encode_data={
                            'file_size': video_encode.encode_size,
                            'bitrate': int(video_encode.encode_bitdepth.split(' ')[0]),
                            'url': video_encode.encode_url
                        })

                        # Response will be None if we are unable to get an edxval token.
                        if response is None:
                            LOGGER.info('Unable to get edxval Token.')
                            continue

                        if response.status_code == 200:
                            LOGGER.info("[run=%s] Success for video=%s.", config.command_run, veda_id)
                        else:
                            LOGGER.warning(
                                "[run=%s] Failure on VAL update - status_code=%s, video=%s.",
                                config.command_run, response.status_code, veda_id
                            )
                        continue
                    else:
                        # After this clause, veda_id will be re-enqueued for HLS encoding since the
                        # encode data is corrupt.
                        LOGGER.warning(
                            '[run=%s] HLS encode data was corrupt for video=%s in VEDA - Re-enqueueing...',
                            config.command_run, veda_id
                        )

                # Disable transcription.
                video.process_transcription = False
                video.save()

                # Enqueue video for HLS re-encode.
                enqueue_video_for_hls_encode(
                    veda_id=veda_id,
                    encode_queue=settings['celery_worker_queue']
                )

            config.increment_run()
            config.update_offset()
        else:
            LOGGER.info('[run=%s] Dry run is complete.', config.command_run)
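# ---------------------------------------------------------------------------
# Hedged sketch of the _validate_video_encode() helper referenced above; the
# real method is defined on the command class and is not shown here. Judging
# from the fields used in the VAL update (encode_url, encode_size,
# encode_bitdepth), it presumably checks that those values are present and
# parseable before they are sent.
# ---------------------------------------------------------------------------
def _validate_video_encode(self, video_encode):
    try:
        int(video_encode.encode_bitdepth.split(' ')[0])
    except (AttributeError, IndexError, ValueError):
        return False
    return bool(video_encode.encode_url) and bool(video_encode.encode_size)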
def __init__(self, **kwargs):
    self.auth_dict = get_config()
    self.status = kwargs.get('status', None)
    self.upload_serial = kwargs.get('upload_serial', None)
    self.youtube_id = kwargs.get('youtube_id', None)
""" Veda Delivery unit tests """ import os import unittest from django.test import TestCase import responses from control.veda_deliver import VedaDelivery from control.veda_file_ingest import VideoProto from mock import PropertyMock, patch from VEDA.utils import get_config from VEDA_OS01.models import URL, Course, Destination, Encode, Video CONFIG_DATA = get_config('test_config.yaml') class VedaDeliverRunTest(TestCase): """ Deliver Run Tests """ def setUp(self): self.veda_id = 'XXXXXXXX2014-V00TES1' self.encode_profile = 'hls' self.course = Course.objects.create(institution='XXX', edx_classid='XXXXX', course_name=u'Intro to VEDA', local_storedir=u'This/Is/A/testID') self.video = Video.objects.create( inst_class=self.course,
from rest_framework.views import APIView

from .api import token_finisher
from VEDA import utils
from VEDA_OS01.enums import TranscriptionProviderErrorType
from VEDA_OS01.models import (URL, Course, Encode, TranscriptCredentials,
                              TranscriptProvider, Video)
from VEDA_OS01.serializers import (CourseSerializer, EncodeSerializer,
                                   URLSerializer, VideoSerializer)
from VEDA_OS01.transcripts import CIELO24_API_VERSION
from VEDA_OS01.utils import PlainTextParser
from control.http_ingest_celeryapp import ingest_video_and_upload_to_hotstore

LOGGER = logging.getLogger(__name__)

auth_dict = utils.get_config()
CIELO24_LOGIN_URL = utils.build_url(auth_dict['cielo24_api_base_url'], '/account/login')


class CourseViewSet(viewsets.ModelViewSet):
    queryset = Course.objects.all()
    serializer_class = CourseSerializer
    filter_backends = (filters.DjangoFilterBackend, )
    filter_fields = ('institution', 'edx_classid', 'proc_loc', 'course_hold', 'sg_projID')

    @detail_route(renderer_classes=[renderers.StaticHTMLRenderer])
    def highlight(self, request, *args, **kwargs):
        course = self.get_object()
""" This module contains calls to remote celery tasks that are run by the veda encode worker. The code for those tasks lives in the edx-video-worker repository. """ from __future__ import absolute_import from celery import Celery from VEDA.utils import get_config auth_dict = get_config() CEL_BROKER = 'redis://:@{redis_broker}:6379/0'.format( redis_broker=auth_dict['redis_broker']) app = Celery(auth_dict['celery_app_name'], broker=CEL_BROKER, include=['celeryapp']) app.conf.update(BROKER_CONNECTION_TIMEOUT=60, CELERY_IGNORE_RESULT=True, CELERY_TASK_RESULT_EXPIRES=10, CELERYD_PREFETCH_MULTIPLIER=1, CELERY_ACCEPT_CONTENT=['json'], CELERY_TASK_PUBLISH_RETRY=True, CELERY_TASK_PUBLISH_RETRY_POLICY={ "max_retries": 3, "interval_start": 0, "interval_step": 1, "interval_max": 5 })
def _READ_AUTH(self):
    # Indirection point for the config source; tests patch this to inject a canned config.
    return get_config()