def baseConvertVideo(self, ffmpeg, local, output_file):
    """
    Re-encode self.file_path with ffmpeg (no scaling) into output_file.

    Codec, preset and CRF come from the ``ffmpeg`` config dict; the
    executable path comes from ``local``.  Composer and creation-time
    metadata are taken from this Video when present, otherwise from the
    config / current time.

    Input: ffmpeg, local as config dicts; output_file as bytes path
    Output: None (delegates to executeCommand)
    """
    # Prefer metadata already attached to this Video; fall back to the
    # configured composer name and "now" for the creation time.
    if self.composer:
        composer = self.composer
    else:
        composer = ffmpeg['composer_name']
    if self.creation_time:
        creation_time = self.creation_time
    else:
        creation_time = time.strftime('%Y-%m-%d %H:%M:%S')
    # BUG FIX: composer was interpolated unquoted ("composer={}") while
    # creation_time was quoted; a composer containing spaces or shell
    # metacharacters split the command.  Quote it the same way.
    command = (
        "'{}' -loglevel panic -y -i '{}' "
        "-rc_eq 'blurCplx^(1-qComp)' "
        "-c:v {} -c:a {} -preset {} -crf {} "
        "-metadata composer='{}' -metadata creation_time='{}' "
        "-movflags +faststart -pix_fmt yuv420p "
        "-profile:v high -level 3.1 '{}'").format(
            local['ffmpeg_executable_path'],
            self.file_path,
            ffmpeg['vcodec'],
            ffmpeg['acodec'],
            ffmpeg['preset'],
            ffmpeg['crf'],
            composer,
            creation_time,
            # NOTE(review): output_file is decoded here but encoded in
            # convertVideo — callers apparently pass bytes; confirm.
            output_file.decode('utf-8'))
    executeCommand(command)
def convertVideo(self, ffmpeg, local, formats, compatibility_folder_path):
    """
    Re-encode self.file_path into an MP4 scaled/retimed to the target
    ``formats`` (width, height, frame_rate), written into
    compatibility_folder_path with the same basename.

    Input: ffmpeg, local, formats as config dicts;
           compatibility_folder_path as str
    Output: the output file path (str)
    """
    output_file = os.path.join(
        compatibility_folder_path,
        os.path.splitext(os.path.basename(self.file_path))[0] + '.mp4')
    # BUG FIX: composer was interpolated unquoted ("composer={}") while
    # creation_time was quoted; a composer containing spaces split the
    # shell command.  Quote it the same way.
    command = (
        "'{}' -loglevel panic -y -i '{}' "
        "-rc_eq 'blurCplx^(1-qComp)' "
        "-vf scale={}:{} -r {} -c:v {} -c:a {} -preset {} -crf {} "
        "-metadata composer='{}' -metadata creation_time='{}' "
        "-movflags +faststart -pix_fmt yuv420p "
        "-profile:v high -level 3.1 '{}'").format(
            local['ffmpeg_executable_path'],
            # NOTE(review): .encode('utf-8') here is a Python-2 idiom;
            # under Python 3 it would embed b'...' in the command.
            self.file_path.encode('utf-8'),
            formats['width'],
            formats['height'],
            formats['frame_rate'],
            ffmpeg['vcodec'],
            ffmpeg['acodec'],
            ffmpeg['preset'],
            ffmpeg['crf'],
            self.composer,
            self.creation_time,
            output_file.encode('utf-8'))
    executeCommand(command)
    return(output_file)
def syncDirTree(local, remote):
    """
    Mirror the local folder structure onto the remote media player.

    Uses rsync with filters that exclude directories nested deeper than
    two levels, and --delete so folders removed from the NAS disappear
    remotely as well.

    Input: local, remote as config dicts with a 'root_dir' key
    Output: None
    """
    logger.info('Syncing remote folders structure')
    src = local['root_dir'] + '/'
    dst = remote['root_dir'] + '/'
    executeCommand(
        "rsync --delete -av -f\"- /*/*/*/\" -f\"- */*/*\" {} {}".format(
            src, dst))
def checkVideoIntegrity(file_path, local):
    """
    Probe a video file for corruption or decode errors.

    First asks ffprobe for stream/format info; if ffprobe reports
    invalid data the file is 'corrupt'.  Otherwise a full decode pass
    through ffmpeg is run: any stderr output means 'error', an empty
    stderr means 'clean'.

    Input: file_path as path, local as config dict
    Output: one of 'corrupt', 'error', 'clean'
    """
    probe_cmd = "'{}' -show_streams -show_format -print_format json '{}'".format(
        local['ffprobe_executable_path'], file_path.encode('utf-8'))
    stdout, err = executeCommand(probe_cmd)
    if re.findall('Invalid data found when processing input', err):
        return 'corrupt'
    # Decode the whole file to the null muxer; errors surface on stderr.
    decode_cmd = "'{}' -v error -i '{}' -f null -".format(
        local['ffmpeg_executable_path'], file_path.encode('utf-8'))
    stdout, err = executeCommand(decode_cmd)
    if err != '':
        return 'error'
    return 'clean'
def populate_video_details(self, local):
    """
    Run ffprobe on self.file_path and fill in this Video's metadata:
    width/height, duration, start offset, codecs, frame rate, a
    combined video_type fingerprint, and composer/creation_time tags
    when present in the container format section.
    """
    command = (
        "'{}' -v quiet -show_format "
        "-show_streams -print_format json "
        "-sexagesimal '{}'").format(
            local['ffprobe_executable_path'],
            self.file_path.encode('utf-8'))
    stdout, err = executeCommand(command)
    probe = json.loads(stdout)
    for stream_num, stream in enumerate(probe.get('streams')):
        codec_type = stream['codec_type']
        if codec_type == 'video':
            self.height = stream.get('height')
            self.width = stream.get('width')
            # MKV containers keep duration in the stream tags instead of
            # the stream itself.
            if self.file_path.endswith('mkv'):
                self.duration = stream['tags'].get('DURATION')
            else:
                self.duration = stream.get('duration')
            self.offset = stream.get('start_time')
            self.video_codec = stream.get('codec_name')
            self.frame_rate = stream.get('r_frame_rate')
        if codec_type == 'audio':
            self.audio_codec = stream.get('codec_name')
    # Fingerprint used to decide whether a file needs conversion.
    self.video_type = "{}{}{}{}{}".format(
        self.audio_codec, self.video_codec,
        str(self.width), str(self.height), str(self.frame_rate))
    formats = probe.get('format')
    try:
        tags = formats.get('tags')
        self.composer = tags.get('composer')
        self.creation_time = tags.get('creation_time')
    except Exception:
        # Files without a tags section simply leave the metadata unset.
        logger.info(traceback.format_exc())
        logger.info("No tags in {}".format(self.file_path.encode('utf-8')))
def createLongVideo(folder_path, video_list, local, remote, video_db):
    """
    Function that runs mkvmerge to create a long version of list of
    videos.  Needs a chapter file (see createChaptersList).

    On success the merged file is probed and registered in video_db
    under a freshly-created file id; the chapters file is removed
    either way.

    Input: folder_path as path, video_list as list of file ids,
           local/remote as config dicts, video_db as {id: Video}
    Output: None (mutates video_db)
    """
    if len(video_list) == 1:
        # BUG FIX: the single-video path was neither quoted nor encoded,
        # unlike the multi-video branch — paths with spaces broke the
        # command.  Treat it exactly like the multi-video case.
        file_in = "'{}'".format(
            video_db[video_list[0]].file_path.encode('utf-8'))
    else:
        # mkvmerge concatenates inputs joined with ' + '.
        file_in = ''
        for file_id in video_list:
            file_in += "'{}' + ".format(
                video_db[file_id].file_path.encode('utf-8'))
        file_in = file_in.rstrip(' + ')
    chapters_file_path = os.path.join(
        folder_path, CONSTANTS['chapters_file_name'])
    output_file = os.path.join(
        folder_path,
        os.path.basename(folder_path) + '.mkv')
    # BUG FIX: the command previously re-joined folder_path onto
    # chapters_file_path (which already includes folder_path), yielding
    # a duplicated path whenever folder_path is relative.
    command = "{} {} --quiet --chapters '{}' -o '{}'".format(
        local['mkvmerge_executable_path'].encode('utf-8'),
        file_in,
        chapters_file_path.encode('utf-8'),
        output_file.encode('utf-8'))
    stdout, err = executeCommand(command)
    if os.path.isfile(output_file):
        output_file_id = create_file_id(output_file)
        temp_vid = Video(
            output_file_id, output_file, category='long')
        temp_vid.populate_video_details(local)
        video_db[output_file_id] = temp_vid
    else:
        logger.info('Folder {} has some errors for merging'.format(
            folder_path.encode('utf-8')))
    os.remove(chapters_file_path)
def testG_gLiteTest(self):
    """
    _gLiteTest_

    This test works on the gLitePlugin, checking all of
    its functions with a single set of jobs
    """
    config = self.getConfig()
    config.BossAir.gliteConf = '/afs/cern.ch/cms/LCG/LCG-2/UI/conf/glite_wms_CERN.conf'
    config.BossAir.credentialDir = '/home/crab/ALL_SETUP/credentials/'
    config.BossAir.gLiteProcesses = 2
    config.BossAir.gLitePrefixEnv = "/lib64/"
    config.BossAir.pluginNames.append("gLitePlugin")
    config.BossAir.manualProxyPath = environ['X509_USER_PROXY']
    config.Agent.serverDN = "/we/bypass/myproxy/logon"
    baAPI = BossAirAPI(config=config)

    nJobs = 2
    jobDummies = self.createDummyJobs(nJobs=nJobs,
                                      location='grid-ce-01.ba.infn.it')

    # BUG FIX: the package and sandbox placeholder files were opened
    # without closing guarantees on error; use context managers.
    jobPackage = os.path.join(self.testDir, 'JobPackage.pkl')
    with open(jobPackage, 'w') as f:
        f.write(' ')
    sandbox = os.path.join(self.testDir, 'sandbox.box')
    with open(sandbox, 'w') as f:
        f.write(' ')

    jobList = []
    userdn = executeCommand('grid-cert-info -subject -file %s'
                            % config.BossAir.manualProxyPath)
    newuser = self.daoFactory(classname="Users.New")
    newuser.execute(dn=userdn)
    for j in jobDummies:
        job = j  # {'id': j['id']}
        job['custom'] = {'location': 'grid-ce-01.ba.infn.it'}
        job['location'] = 'grid-ce-01.ba.infn.it'
        job['plugin'] = 'gLitePlugin'
        job['name'] = j['name']
        job['cache_dir'] = self.testDir
        job['retry_count'] = 0
        job['owner'] = userdn
        job['packageDir'] = self.testDir
        job['sandbox'] = sandbox
        job['priority'] = None
        jobList.append(job)

    baAPI.submit(jobs=jobList)

    # Should be new jobs
    # NOTE(review): assertNotEqual looks inverted given the comment
    # above — confirm whether assertEqual was intended before changing.
    newJobs = baAPI._loadByStatus(status='New')
    self.assertNotEqual(len(newJobs), nJobs)

    time.sleep(2)
    baAPI.track()

    # Should be not anymore marked as new
    newJobs = baAPI._loadByStatus('New', 0)
    self.assertNotEqual(len(newJobs), nJobs)

    # Killing all the jobs
    baAPI.kill(jobList)
    #time.sleep(15)
    baAPI.track()

    ## Issues running tests below due to glite delay on marking job as killed
    # Should be just running jobs
    #killedJobs = baAPI._loadByStatus('Cancelled by user', 0)
    #self.assertEqual(len(killedJobs), 0)

    # Check if they're complete
    #completeJobs = baAPI.getComplete()
    #self.assertEqual(len(completeJobs), nJobs)

    return
def umount(remote):
    """Unmount the remote media player's root directory."""
    executeCommand('umount {}'.format(remote['root_dir']))
def testG_gLiteTest(self):
    """
    _gLiteTest_

    This test works on the gLitePlugin, checking all of
    its functions with a single set of jobs
    """
    config = self.getConfig()
    config.BossAir.UISetupScript = '/afs/cern.ch/cms/LCG/LCG-2/UI/cms_ui_env.sh'
    config.BossAir.gliteConf = '/afs/cern.ch/cms/LCG/LCG-2/UI/conf/glite_wms_CERN.conf'
    config.BossAir.credentialDir = '/home/crab/ALL_SETUP/credentials/'
    config.BossAir.gLiteProcesses = 2
    config.BossAir.gLitePrefixEnv = "/lib64/"
    config.BossAir.pluginNames.append("gLitePlugin")
    config.BossAir.manualProxyPath = environ['X509_USER_PROXY']
    config.Agent.serverDN = "/we/bypass/myproxy/logon"
    baAPI = BossAirAPI(config=config)

    nJobs = 2
    jobDummies = self.createDummyJobs(nJobs=nJobs,
                                      location='grid-ce-01.ba.infn.it')

    # BUG FIX: placeholder files were created via open/write/close with
    # no guarantee of closing on error; use context managers.
    jobPackage = os.path.join(self.testDir, 'JobPackage.pkl')
    with open(jobPackage, 'w') as f:
        f.write(' ')
    sandbox = os.path.join(self.testDir, 'sandbox.box')
    with open(sandbox, 'w') as f:
        f.write(' ')

    jobList = []
    userdn = executeCommand('grid-cert-info -subject -file %s'
                            % config.BossAir.manualProxyPath)
    newuser = self.daoFactory(classname="Users.New")
    newuser.execute(dn=userdn)
    for j in jobDummies:
        job = j  # {'id': j['id']}
        job['custom'] = {'location': 'grid-ce-01.ba.infn.it'}
        job['location'] = 'grid-ce-01.ba.infn.it'
        job['plugin'] = 'gLitePlugin'
        job['name'] = j['name']
        job['cache_dir'] = self.testDir
        job['retry_count'] = 0
        job['owner'] = userdn
        job['packageDir'] = self.testDir
        job['sandbox'] = sandbox
        job['priority'] = None
        jobList.append(job)

    baAPI.submit(jobs=jobList)

    # Should be new jobs
    # NOTE(review): assertNotEqual looks inverted given the comment
    # above — confirm whether assertEqual was intended before changing.
    newJobs = baAPI._loadByStatus(status='New')
    self.assertNotEqual(len(newJobs), nJobs)

    time.sleep(2)
    baAPI.track()

    # Should be not anymore marked as new
    newJobs = baAPI._loadByStatus('New', 0)
    self.assertNotEqual(len(newJobs), nJobs)

    # Killing all the jobs
    baAPI.kill(jobList)
    #time.sleep(15)
    baAPI.track()

    ## Issues running tests below due to glite delay on marking job as killed
    # Should be just running jobs
    #killedJobs = baAPI._loadByStatus('Cancelled by user', 0)
    #self.assertEqual(len(killedJobs), 0)

    # Check if they're complete
    #completeJobs = baAPI.getComplete()
    #self.assertEqual(len(completeJobs), nJobs)

    return