def ingest(event):
    '''Ingest a finished recording to the Opencast server.

    Uploads all attached Dublin Core catalogs and recorded tracks of the
    given event, then triggers the ingest, updating the recording and
    service state before and after the upload.

    :param event: Event whose attachments and tracks are uploaded
    '''
    # Update status
    set_service_status(Service.INGEST, ServiceStatus.BUSY)
    recording_state(event.uid, 'uploading')
    update_event_status(event, Status.UPLOADING)

    # Select ingest service
    # The ingest service to use is selected at random from the available
    # ingest services to ensure that not every capture agent uses the same
    # service at the same time
    service = config('service-ingest')
    service = service[randrange(0, len(service))]
    logger.info('Selecting ingest service to use: ' + service)

    # create mediapackage
    logger.info('Creating new mediapackage')
    mediapackage = http_request(service + '/createMediaPackage')

    # extract workflow_def, workflow_config and add DC catalogs.
    # Initialize defaults so the ingest below still works if the event
    # carries no capture agent properties attachment (previously these
    # names could be unbound, raising a NameError).
    workflow_def = None
    workflow_config = []
    prop = 'org.opencastproject.capture.agent.properties'
    dcns = 'http://www.opencastproject.org/xsd/1.0/dublincore/'
    for attachment in event.get_data().get('attach'):
        data = attachment.get('data')
        if attachment.get('x-apple-filename') == prop:
            workflow_def, workflow_config = get_config_params(data)
        # Check for dublincore catalogs
        elif attachment.get('fmttype') == 'application/xml' and dcns in data:
            name = attachment.get('x-apple-filename', '').rsplit('.', 1)[0]
            logger.info('Adding %s DC catalog' % name)
            fields = [('mediaPackage', mediapackage),
                      ('flavor', 'dublincore/%s' % name),
                      ('dublinCore', data.encode('utf-8'))]
            mediapackage = http_request(service + '/addDCCatalog', fields)

    # add track
    for (flavor, track) in event.get_tracks():
        logger.info('Adding track ({0} -> {1})'.format(flavor, track))
        track = track.encode('ascii', 'ignore')
        fields = [('mediaPackage', mediapackage),
                  ('flavor', flavor),
                  ('BODY1', (pycurl.FORM_FILE, track))]
        mediapackage = http_request(service + '/addTrack', fields)

    # ingest
    logger.info('Ingest recording')
    fields = [('mediaPackage', mediapackage)]
    if workflow_def:
        fields.append(('workflowDefinitionId', workflow_def))
    if event.uid:
        fields.append(('workflowInstanceId',
                       event.uid.encode('ascii', 'ignore')))
    fields += workflow_config
    mediapackage = http_request(service + '/ingest', fields)

    # Update status
    recording_state(event.uid, 'upload_finished')
    update_event_status(event, Status.FINISHED_UPLOADING)
    set_service_status_immediate(Service.INGEST, ServiceStatus.IDLE)
def test_http_request(self):
    '''A request to an unreachable address must raise a pycurl error
    with errno 7 (could not connect).'''
    config.config()['server']['insecure'] = True
    config.config()['server']['certificate'] = 'nowhere'
    # assertRaises is clearer and safer than the try/except/assert-False
    # pattern: it cannot accidentally swallow an unrelated failure.
    with self.assertRaises(Exception) as e:
        utils.http_request('http://127.0.0.1:8', [('x', 'y')])
    self.assertEqual(e.exception.args[0], 7)  # connection error
def test_http_request_mocked_curl(self):
    '''With a mocked curl object the HTTP request must succeed.'''
    config.config()['server']['insecure'] = True
    config.config()['server']['certificate'] = 'nowhere'
    utils.pycurl.Curl = CurlMock
    try:
        utils.http_request('http://127.0.0.1:8', [('x', 'y')])
    except Exception:
        self.fail()
    finally:
        # Always restore the real pycurl module so later tests are not
        # affected by the mock — previously the reload was skipped when
        # self.fail() raised.
        reload(utils.pycurl)
def ingest(tracks, recording_dir, recording_id, workflow_def,
           workflow_config):
    '''Ingest a finished recording to the Matterhorn server.

    :param tracks: List of (flavor, path) pairs to upload as tracks
    :param recording_dir: Directory possibly containing episode.xml and
                          series.xml Dublin Core catalogs
    :param recording_id: Identifier of the scheduled workflow instance
    :param workflow_def: Workflow definition to start after ingest
    :param workflow_config: Additional workflow configuration fields
    '''
    # select ingest service
    # The ingest service to use is selected at random from the available
    # ingest services to ensure that not every capture agent uses the same
    # service at the same time
    service = config()['service-ingest']
    service = service[randrange(0, len(service))]
    logger.info('Selecting ingest service to use: ' + service)

    # create mediapackage
    logger.info('Creating new mediapackage')
    mediapackage = http_request(service + '/createMediaPackage')

    # add episode and series DublinCore catalogs if present.
    # One loop instead of two copies of the same upload code.
    for name in ('episode', 'series'):
        catalog = '%s/%s.xml' % (recording_dir, name)
        if os.path.isfile(catalog):
            logger.info('Adding %s DC catalog' % name)
            with open(catalog, 'rb') as catalogfile:
                dublincore = catalogfile.read().decode('utf8')
            fields = [('mediaPackage', mediapackage),
                      ('flavor', 'dublincore/%s' % name),
                      ('dublinCore', dublincore)]
            mediapackage = http_request(service + '/addDCCatalog', fields)

    # add track
    for (flavor, track) in tracks:
        logger.info('Adding track ({0} -> {1})'.format(flavor, track))
        track = track.encode('ascii', 'ignore')
        fields = [('mediaPackage', mediapackage),
                  ('flavor', flavor),
                  ('BODY1', (pycurl.FORM_FILE, track))]
        mediapackage = http_request(service + '/addTrack', fields)

    # ingest
    logger.info('Ingest recording')
    fields = [('mediaPackage', mediapackage)]
    if workflow_def:
        fields.append(('workflowDefinitionId', workflow_def))
    if recording_id:
        fields.append(('workflowInstanceId',
                       recording_id.encode('ascii', 'ignore')))
    fields += workflow_config
    mediapackage = http_request(service + '/ingest', fields)
def test_http_request_mocked_curl(self):
    '''With a mocked curl object the HTTP request must succeed.'''
    config.config()['server']['insecure'] = True
    config.config()['server']['certificate'] = 'nowhere'
    utils.pycurl.Curl = CurlMock
    try:
        utils.http_request('http://127.0.0.1:8', [('x', 'y')])
        # the no-op `assert True` on success was removed
    except Exception:
        # self.fail() reports a proper test failure instead of a bare
        # `assert False`
        self.fail()
    finally:
        # Restore the real pycurl module even if the test failed, so the
        # mock cannot leak into later tests.
        reload(utils.pycurl)
def get_schedule():
    '''Try to load schedule from the Matterhorn core and replace the cached
    upcoming events in the database. On failure the previous schedule is
    left untouched and nothing is returned.
    '''
    uri = '%s/calendars?agentid=%s' % (config()['service-scheduler'][0],
                                       config()['agent']['name'])
    # lookahead is configured in days; convert to seconds
    lookahead = config()['agent']['cal_lookahead'] * 24 * 60 * 60
    if lookahead:
        # Opencast expects the cutoff as Unix time in milliseconds
        uri += '&cutoff=%i' % ((timestamp() + lookahead) * 1000)
    try:
        vcal = http_request(uri)
    except pycurl.error as e:
        # Lazy logging args instead of eager %-formatting
        logger.error('Could not get schedule: %s', e)
        return
    try:
        cal = parse_ical(vcal.decode('utf-8'))
    except Exception:
        # logger.exception logs the traceback along with the message
        logger.exception('Could not parse ical')
        return
    db = get_session()
    db.query(UpcomingEvent).delete()
    for event in cal:
        # Ignore events that have already ended
        if event['dtend'] <= timestamp():
            continue
        e = UpcomingEvent()
        e.start = event['dtstart']
        e.end = event['dtend']
        e.uid = event.get('uid')
        e.title = event.get('summary')
        e.set_data(event)
        db.add(e)
    db.commit()
def get_schedule(db):
    '''Fetch the recording schedule from the Matterhorn core and replace
    the cached upcoming events in the database. The cache is left
    untouched if the request or the iCal parsing fails.

    :param db: Database session used to store the upcoming events
    '''
    query = {'agentid': config('agent', 'name').encode('utf8')}
    # lookahead is configured in days; convert to seconds
    lookahead = config('agent', 'cal_lookahead') * 24 * 60 * 60
    if lookahead:
        # The core expects the cutoff as Unix time in milliseconds
        query['cutoff'] = str((timestamp() + lookahead) * 1000)
    uri = '%s/calendars?%s' % (service('scheduler')[0], urlencode(query))
    try:
        vcal = http_request(uri)
        UpstreamState.update_sync_time(config('server', 'url'))
    except pycurl.error as err:
        logger.error('Could not get schedule: %s', err)
        return
    try:
        events = parse_ical(vcal.decode('utf-8'))
    except Exception:
        logger.exception('Could not parse ical')
        return
    db.query(UpcomingEvent).delete()
    for entry in events:
        # Skip events that have already ended
        if entry['dtend'] <= timestamp():
            continue
        upcoming = UpcomingEvent()
        upcoming.start = entry['dtstart']
        upcoming.end = entry['dtend']
        upcoming.uid = entry.get('uid')
        upcoming.title = entry.get('summary')
        upcoming.set_data(entry)
        db.add(upcoming)
    db.commit()
def schedule(title='pyCA Recording', duration=60, creator=None):
    '''Schedule a recording for this capture agent with the given title,
    creator and duration starting 10 seconds from now.

    :param title: Title of the event to schedule
    :type title: string
    :param creator: Creator of the event to schedule; defaults to the
                    configured UI username
    :type creator: string
    :param duration: Duration of the event to schedule in seconds
    :type duration: int
    '''
    if not creator:
        creator = config('ui', 'username')
    # Select ingest service
    # The ingest service to use is selected at random from the available
    # ingest services to ensure that not every capture agent uses the same
    # service at the same time
    service_url = service('ingest', force_update=True)
    service_url = service_url[random.randrange(0, len(service_url))]
    logger.info('Selecting ingest service for scheduling: ' + service_url)
    # create media package
    logger.info('Creating new media package')
    mediapackage = http_request(service_url + '/createMediaPackage')
    # add dublin core catalog
    # Start 10 seconds from now so the agent has time to pick the event up
    start = datetime.utcnow() + timedelta(seconds=10)
    end = start + timedelta(seconds=duration)
    # Escape all user-supplied values before embedding them into the XML
    dublincore = DUBLINCORE.format(agent_name=xml_escape(
        config('agent', 'name')),
        start=start.strftime('%Y-%m-%dT%H:%M:%SZ'),
        end=end.strftime('%Y-%m-%dT%H:%M:%SZ'),
        title=xml_escape(title),
        creator=xml_escape(creator))
    logger.info('Adding Dublin Core catalog for scheduling')
    fields = [('mediaPackage', mediapackage),
              ('flavor', 'dublincore/episode'),
              ('dublinCore', dublincore)]
    mediapackage = http_request(service_url + '/addDCCatalog', fields)
    # schedule event
    logger.info('Scheduling recording')
    fields = [('mediaPackage', mediapackage)]
    mediapackage = http_request(service_url + '/schedule', fields)
    # Update status
    logger.info('Event successfully scheduled')
def get_schedule():
    '''Try to load schedule from the Matterhorn core. Returns a valid
    schedule or None on failure.
    '''
    try:
        uri = '%s/calendars?agentid=%s' % (config()['service-scheduler'][0],
                                           config()['agent']['name'])
        lookahead = config()['agent']['cal_lookahead'] * 24 * 60 * 60
        if lookahead:
            uri += '&cutoff=%i' % ((timestamp() + lookahead) * 1000)
        vcal = http_request(uri)
    except Exception as e:
        # Silently ignore the error if the capture agent is not yet
        # registered (HTTP 404). Guard against exceptions carrying fewer
        # than two arguments, which previously raised an IndexError here
        # and masked the original error.
        if len(e.args) < 2 or e.args[1] != 404:
            logger.error('Could not get schedule')
            logger.error(traceback.format_exc())
        return
    try:
        cal = parse_ical(vcal.decode('utf-8'))
    except Exception:
        logger.error('Could not parse ical')
        logger.error(traceback.format_exc())
        return
    db = get_session()
    db.query(UpcomingEvent).delete()
    for event in cal:
        # Ignore events that have already ended
        if event['dtend'] <= timestamp():
            continue
        e = UpcomingEvent()
        e.start = event['dtstart']
        e.end = event['dtend']
        e.uid = event.get('uid')
        e.set_data(event)
        db.add(e)
    db.commit()
def ingest(event):
    '''Ingest a finished recording to the Opencast server.

    Uploads configured Dublin Core catalogs and all recorded tracks of
    the given event, triggers the ingest, and optionally removes the
    recording directory afterwards.

    :param event: Event whose attachments and tracks are uploaded
    '''
    # Update status
    set_service_status(Service.INGEST, ServiceStatus.BUSY)
    notify.notify('STATUS=Uploading')
    recording_state(event.uid, 'uploading')
    update_event_status(event, Status.UPLOADING)

    # Select ingest service
    # The ingest service to use is selected at random from the available
    # ingest services to ensure that not every capture agent uses the same
    # service at the same time
    service_url = service('ingest', force_update=True)
    service_url = service_url[random.randrange(0, len(service_url))]
    logger.info('Selecting ingest service to use: ' + service_url)

    # create mediapackage
    logger.info('Creating new mediapackage')
    mediapackage = http_request(service_url + '/createMediaPackage')

    # extract workflow_def, workflow_config and add DC catalogs.
    # Initialize defaults so the ingest below still works if the event
    # carries no capture agent properties attachment (previously these
    # names could be unbound, raising a NameError).
    workflow_def = None
    workflow_config = []
    prop = 'org.opencastproject.capture.agent.properties'
    dcns = 'http://www.opencastproject.org/xsd/1.0/dublincore/'
    for attachment in event.get_data().get('attach'):
        data = attachment.get('data')
        if attachment.get('x-apple-filename') == prop:
            workflow_def, workflow_config = get_config_params(data)
        # dublin core catalogs
        elif attachment.get('fmttype') == 'application/xml' \
                and dcns in data \
                and config('ingest', 'upload_catalogs'):
            name = attachment.get('x-apple-filename', '').rsplit('.', 1)[0]
            logger.info('Adding %s DC catalog', name)
            fields = [('mediaPackage', mediapackage),
                      ('flavor', 'dublincore/%s' % name),
                      ('dublinCore', data.encode('utf-8'))]
            mediapackage = http_request(service_url + '/addDCCatalog', fields)
        else:
            logger.info('Not uploading %s', attachment.get('x-apple-filename'))
            continue

    # add track
    for (flavor, track) in event.get_tracks():
        logger.info('Adding track (%s -> %s)', flavor, track)
        track = track.encode('ascii', 'ignore')
        fields = [('mediaPackage', mediapackage),
                  ('flavor', flavor),
                  ('BODY1', (pycurl.FORM_FILE, track))]
        mediapackage = http_request(service_url + '/addTrack', fields)

    # ingest
    logger.info('Ingest recording')
    fields = [('mediaPackage', mediapackage)]
    if workflow_def:
        fields.append(('workflowDefinitionId', workflow_def))
    if event.uid:
        fields.append(
            ('workflowInstanceId', event.uid.encode('ascii', 'ignore')))
    fields += workflow_config
    mediapackage = http_request(service_url + '/ingest', fields)

    # Update status
    recording_state(event.uid, 'upload_finished')
    update_event_status(event, Status.FINISHED_UPLOADING)
    if config('ingest', 'delete_after_upload'):
        directory = event.directory()
        logger.info("Removing uploaded event directory %s", directory)
        shutil.rmtree(directory)
    notify.notify('STATUS=Running')
    set_service_status_immediate(Service.INGEST, ServiceStatus.IDLE)
    logger.info('Finished ingest')
def test_http_request(self):
    '''An HTTP request to an unreachable endpoint must raise an error
    carrying pycurl errno 7 (could not connect).'''
    server_cfg = config.config()['server']
    server_cfg['insecure'] = True
    server_cfg['certificate'] = 'nowhere'
    with self.assertRaises(Exception) as ctx:
        utils.http_request('http://127.0.0.1:8', [('x', 'y')])
    # pycurl error code 7 means the connection could not be established
    self.assertEqual(ctx.exception.args[0], 7)