def ftpget(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() print "video id is: " + str(video.id) if video.hostentry != "mtviestor": ftp = FTP(CREDENTIAL["HOST"], CREDENTIAL["USER"], CREDENTIAL["PASS"]) else: ENTRY = settings.FTP_ENTRY ftp = FTP(ENTRY["mtviestor"]["HOST"], ENTRY["mtviestor"]["USER"], ENTRY["mtviestor"]["PASS"]) local_path_1200 = os.path.join(folder_path, video.uri_1200.split('/')[-1]) local_path_400 = os.path.join(folder_path, video.uri_400.split('/')[-1]) print "Downloading: " + video.uri_1200 + " == >" + local_path_1200 ftp.retrbinary("RETR " + video.uri_1200, open(local_path_1200, 'wb').write) print "Downloading" + video.uri_400 + " == >" + local_path_400 ftp.retrbinary("RETR " + video.uri_400, open(local_path_400, 'wb').write) ftp.quit() video.stateid = STATES['Video Downloaded'] db_session.commit() except all_errors, e: # TODO: Catch FTP errors db_session.add(Logs(videoid, str(e))) video.stateid = STATES['Video Download Failed'] db_session.commit() return
def report_drift():
    """Endpoint: record an ARC drift report for a posterclip workflow.

    Parses the JSON report in the request body, upserts the Reports row for
    the matching video and updates the video's status from the report's
    asset_status.

    :return: the string "report success"
    """
    report_json = json.loads(request.data)['report']
    workflowtype = report_json["workflowtype"]
    if workflowtype == "posterclip":
        posteruuid = report_json["unique_key"]
        showvideouuid = report_json["showvideouuid"]
        status = report_json["asset_status"]
        error_spec = report_json["error_spec"]
        video = db_session.query(Videos).filter_by(showvideouuid=showvideouuid).first()
        videoid = video.id
        if db_session.query(Reports).filter_by(videoid=videoid).count():
            # BUG FIX: the original omitted .first(), so the attributes were
            # set on the Query object and existing reports were never updated.
            report = db_session.query(Reports).filter_by(videoid=videoid).first()
            report.posteruuid = posteruuid
            report.status = status
            report.error = error_spec
        else:
            report = Reports(videoid, posteruuid, status, error_spec)
            db_session.add(report)
        # Update Videos table with status
        # NOTE(review): other workers in this file set video.stateid;
        # confirm 'status' is the intended column here.
        if status == "success":
            video.status = STATES['ARC ID Retrieved']
        else:
            video.status = STATES['ARC ID Error']
        db_session.commit()
    return "report success"
def fix():
    """Roll videos stuck in an in-queue or failed state back to the state
    that precedes that step, and purge logs of fully cleaned-up videos."""
    # Each entry: (in-queue state, failed state, state to roll back to).
    # Order matters only in that it mirrors the pipeline from last step to first.
    rollbacks = [
        ('Xml Generating in Queue', 'Xml Generate Failed', 'Clip Uploaded'),
        ('Clip Uploading in Queue', 'Clip Upload Failed', 'Video Processed Successfully'),
        ('Ffmpeg Generating in Queue', 'Ffmpeg Failed', 'Video Downloaded'),
        ('Video Downloading in Queue', 'Video Download Failed', 'Video Ready To Download'),
    ]
    for queued_state, failed_state, target_state in rollbacks:
        stuck = db_session.query(Videos).filter_by(stateid=STATES[queued_state]).all()
        stuck += db_session.query(Videos).filter_by(stateid=STATES[failed_state]).all()
        for video in stuck:
            video.stateid = STATES[target_state]
    # Clean Logs table if the video had been fully processed
    for log in db_session.query(Logs).all():
        if log.video.stateid == STATES['Cleaned Up']:
            db_session.delete(log)
    db_session.commit()
def query_arc(): """ Query ARC database to get videos need to be processed. :return: None """ with open("template/json_query_new.json", 'r') as json_file: json_string = json_file.read().replace(' ', '').replace('\n', '') whole_query = query_header + json_string + query_tailer print whole_query response = urllib2.urlopen(whole_query) json_object = json.loads(response.read()) # print json.dumps(json_object) docs = json_object['response']['docs'] for doc in docs: if 'Videos' in doc: # print doc['Episode'] videos = doc['Videos'] title = X(videos[0]['Title']) try: namespace = doc['mtvi:namespace'] episodeuuid = doc['Episode']['mtvi:id'] seriesuuid = doc['Series']['mtvi:id'] videoplaylistuuid = doc['mtvi:id'] showvideouuid = videos[0]['mtvi:id'] except: # If any of these uuid or namespace is empty, ignore this video continue lang = doc['Language'] # show video uuid should be unique in our database (each show will only have one poster video) # So if episode video uuid exists, skip this loop, not insert it as a new record if db_session.query(Videos).filter_by(episodeuuid=episodeuuid).count(): continue if 'VideoAssetRefs' not in videos[0]: continue videorefs = videos[0]['VideoAssetRefs'] for ref in videorefs: if ref['BitRate'] == '1200': uri_1200 = ref['URI'] elif ref['BitRate'] == '400': uri_400 = ref['URI'] # Only accepts videos from GSP for now if "mgid:file:gsp:" in uri_1200: try: new_video = Videos(1, title, namespace, showvideouuid, videoplaylistuuid, episodeuuid, seriesuuid, uri_1200, uri_400, lang) db_session.add(new_video) except UnboundLocalError: print "uri_1200 or uri_400 might not be found" print ref except: raise else: print "Not a valid doc" print doc db_session.commit()
def localput(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() to_upload = [] videoassets = db_session.query(Videoassets).filter_by( videoid=video.id).all() imageassets = db_session.query(Imageassets).filter_by( videoid=video.id).all() for videoasset in videoassets: to_upload.append(videoasset.uri) for imageasset in imageassets: to_upload.append(imageasset.uri) if to_upload and len(to_upload) == 4: for filepath in to_upload: filename = filepath.split("/")[-1] local_path = os.path.join(folder_path, filename) print "Uploading: " + local_path + " == >" + filepath shutil.copyfile(local_path, filepath) video.stateid = STATES['Clip Uploaded'] db_session.commit() else: # If not enough videoassets or imageassets, go back to state [Video Downloaded] video.stateid = STATES['Video Downloaded'] db_session.commit() except: video.stateid = STATES['Clip Upload Failed'] db_session.commit() return
def localcopy(videoid, folder_path): try: ENTRY = settings.FTP_ENTRY video = db_session.query(Videos).filter_by(id=videoid).first() print "video id is: " + str(video.id) if video.hostentry == "alias": mount_point = ENTRY['alias']['LOCAL_MOUNT'] if not os.path.isdir(mount_point): error_message = "mount point not visible: " + mount_point + ". Please check." print error_message db_session.add(Logs(videoid, error_message)) db_session.commit() return elif video.hostentry == "scenic": # TODO: Need to implemented later print "not support scenic yet." return else: print "not support", video.hostentry, "yet." return local_path_1200 = os.path.join(folder_path, video.uri_1200.split('/')[-1]) local_path_400 = os.path.join(folder_path, video.uri_400.split('/')[-1]) print "copying: " + video.uri_1200 + " == >" + local_path_1200 shutil.copyfile(video.uri_1200, local_path_1200) print "copying: " + video.uri_400 + " == >" + local_path_400 shutil.copyfile(video.uri_400, local_path_400) video.stateid = STATES['Video Downloaded'] db_session.commit() except Exception, e: # TODO: Catch Other errors db_session.add(Logs(videoid, str(e))) video.stateid = STATES['Video Download Failed'] db_session.commit()
def process_video(): upload_method = DOWNLOAD_UPLOAD_METHOD try: # Generate clips and thumbnails videos = db_session.query(Videos).filter_by(stateid=STATES['Video Downloaded']).all() for video in videos if len(videos) <= MAX_FFMPEG else videos[0:MAX_FFMPEG]: video.stateid = STATES['Ffmpeg Generating in Queue'] db_session.commit() generate_clips.delay(video.id) # Upload clips and thumbnails videos = db_session.query(Videos).filter_by(stateid=STATES['Video Processed Successfully']).all() for video in videos if len(videos) <= MAX_UPLOADS else videos[0:MAX_UPLOADS]: folder_path = "temp/" + video.showvideouuid if not os.path.exists(folder_path): video.stateid = STATES['Clip Upload Failed'] db_session.commit() print "Folder", folder_path, "not found with videoid=", video.id else: video.stateid = STATES['Clip Uploading in Queue'] db_session.commit() if upload_method == "FTP": ftpput.delay(video.id, folder_path) elif upload_method == "MOUNT": localput.delay(video.id, folder_path) else: print "Not a proper way to download videos from GSP" except socket_error: # TODO: rabbitMQ not running or port not right, save to database print "RabbitMQ connection refused"
def localput(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() to_upload = [] videoassets = db_session.query(Videoassets).filter_by(videoid=video.id).all() imageassets = db_session.query(Imageassets).filter_by(videoid=video.id).all() for videoasset in videoassets: to_upload.append(videoasset.uri) for imageasset in imageassets: to_upload.append(imageasset.uri) if to_upload and len(to_upload) == 4: for filepath in to_upload: filename = filepath.split("/")[-1] local_path = os.path.join(folder_path, filename) print "Uploading: " + local_path + " == >" + filepath shutil.copyfile(local_path, filepath) video.stateid = STATES['Clip Uploaded'] db_session.commit() else: # If not enough videoassets or imageassets, go back to state [Video Downloaded] video.stateid = STATES['Video Downloaded'] db_session.commit() except: video.stateid = STATES['Clip Upload Failed'] db_session.commit() return
def process_xml(): try: # Generate Xml to ARC videos = db_session.query(Videos).filter_by(stateid=STATES['Clip Uploaded']).all() for video in videos: print "generate xml for video: ", video.id folder_path = "temp/" + video.showvideouuid video.stateid = STATES['Xml Generating in Queue'] db_session.commit() generate_xml.delay(video.id, folder_path) # Copy Xml to shared storage videos = db_session.query(Videos).filter_by(stateid=STATES['Xml Generated']).all() if videos: time_string = datetime.datetime.now().strftime("%Y-%m-%d_%H_%M") batch_folder = os.path.join(SHARED_STORAGE_ARCMIG, time_string) if not os.path.exists(batch_folder): os.makedirs(batch_folder) for video in videos: xml_path = "temp/" + video.showvideouuid + "/" + video.showvideouuid + ".xml" dest_path = batch_folder + "/" + video.showvideouuid + ".xml" shutil.copy(xml_path, dest_path) video.stateid = STATES['Xml Ready To Send/Pickup'] # Archive the XMLs in batch archive_batch_folder = os.path.join("archive", time_string) shutil.copytree(batch_folder, archive_batch_folder) # Drop all XMLs to ARC process in batch drop_path = XML_DROP_PATH[SEVER_OPTION] drop_batch_folder = os.path.join(drop_path, time_string) shutil.copytree(batch_folder, drop_batch_folder) for video in videos: video.stateid = STATES["Xml Sent To ARC"] except socket_error: # TODO: rabbitMQ not running or port not right, save to database print "RabbitMQ connection refused" except Exception, e: print str(e) video.stateid = STATES['Cleanup Failed'] db_session.add(Logs(video.id, str(e)))
def ftpput(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() ftp = FTP(CREDENTIAL["HOST"], CREDENTIAL["USER"], CREDENTIAL["PASS"]) to_upload = [] videoassets = db_session.query(Videoassets).filter_by( videoid=video.id).all() imageassets = db_session.query(Imageassets).filter_by( videoid=video.id).all() for videoasset in videoassets: to_upload.append(videoasset.uri) for imageasset in imageassets: to_upload.append(imageasset.uri) if to_upload and len(to_upload) == 4: for filepath in to_upload: filename = filepath.split("/")[-1] print "Uploading: " + os.path.join( folder_path, filename) + " == >" + filepath ftp.storbinary('STOR ' + filepath, open(os.path.join(folder_path, filename), 'rb')) video.stateid = STATES['Clip Uploaded'] db_session.commit() else: # If not enough videoassets or imageassets, go back to state [Video Downloaded] video.stateid = STATES['Video Downloaded'] db_session.commit() ftp.quit() except all_errors: # TODO: Catch FTP errors video.stateid = STATES['Clip Upload Failed'] db_session.commit() pass except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except: video.stateid = STATES['Clip Upload Failed'] db_session.commit() return
def ftpput(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() ftp = FTP(CREDENTIAL["HOST"], CREDENTIAL["USER"], CREDENTIAL["PASS"]) to_upload = [] videoassets = db_session.query(Videoassets).filter_by(videoid=video.id).all() imageassets = db_session.query(Imageassets).filter_by(videoid=video.id).all() for videoasset in videoassets: to_upload.append(videoasset.uri) for imageasset in imageassets: to_upload.append(imageasset.uri) if to_upload and len(to_upload) == 4: for filepath in to_upload: filename = filepath.split("/")[-1] print "Uploading: " + os.path.join(folder_path, filename) + " == >" + filepath ftp.storbinary('STOR ' + filepath, open(os.path.join(folder_path, filename), 'rb')) video.stateid = STATES['Clip Uploaded'] db_session.commit() else: # If not enough videoassets or imageassets, go back to state [Video Downloaded] video.stateid = STATES['Video Downloaded'] db_session.commit() ftp.quit() except all_errors: # TODO: Catch FTP errors video.stateid = STATES['Clip Upload Failed'] db_session.commit() pass except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except: video.stateid = STATES['Clip Upload Failed'] db_session.commit() return
def generate_xml(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() videoassets = db_session.query(Videoassets).filter_by( videoid=videoid).all() imageassets = db_session.query(Imageassets).filter_by( videoid=videoid).all() if len(videoassets) != 2 or len(imageassets) != 2: # TODO: Log this error db_session.add( Logs(videoid, "videoassets or imageassets not right")) video.stateid = STATES['Video Processed Successfully'] imageassets_to_delete = db_session.query(Imageassets).filter_by( videoid=videoid).all() videoassets_to_delete = db_session.query(Videoassets).filter_by( videoid=videoid).all() db_session.delete(imageassets_to_delete) db_session.delete(videoassets_to_delete) db_session.commit() exit() arc_tree = ET.parse("template/arc_template.xml") root = arc_tree.getroot() root.find('arcnamespace').text = video.namespace # If no series uuid, delete the node if not video.seriesuuid: root.remove(root.find('seriesuuid')) else: root.find('seriesuuid').text = video.seriesuuid # If no episode uuid, delete the node if not video.episodeuuid: root.remove(root.find('episodeuuid')) else: root.find('episodeuuid').text = video.episodeuuid root.find('videoplaylistuuid').text = video.videoplaylistuuid root.find('showvideouuid').text = video.showvideouuid root.find('type').text = "showvideo" root.find('workflowtype').text = "posterclip" root.find('title').text = video.title root.find('duration').text = "10" root.find('lang').text = video.lang image = root.find('images').find('image') image.find('title').text = video.title image.find('imagetype').text = settings.ARCFormats["thumbnail"] imageassets_objs = image.find('imageassets').findall('imageasset') for i in xrange(2): if video.hostentry: imageassets_objs[i].find('imageuri').text = settings.FTP_ENTRY[video.hostentry]['MGID_PREFIX']+'/'\ +'/'.join(imageassets[i].uri.split('/')[2:]) else: # TODO: other host entry imageassets_objs[i].find('imageuri').text = imageassets[i].uri 
imageassets_objs[i].find('width').text = str(imageassets[i].width) imageassets_objs[i].find('height').text = str( imageassets[i].height) imageassets_objs[i].find( 'format').text = settings.ARCFormats["jpg"] imageassets_objs[i].find( 'aspectratio').text = imageassets[i].aspectratio videoassets_objs = root.find('videoassets').findall('videoasset') for i in xrange(2): videoassets_objs[i].find('lang').text = videoassets[i].lang if video.hostentry: videoassets_objs[i].find('uri').text = settings.FTP_ENTRY[video.hostentry]['MGID_PREFIX']+'/'\ +'/'.join(videoassets[i].uri.split('/')[2:]) else: # TODO: other host entry videoassets_objs[i].find('uri').text = videoassets[i].uri videoassets_objs[i].find('width').text = str(videoassets[i].width) videoassets_objs[i].find('height').text = str( videoassets[i].height) videoassets_objs[i].find('bitrate').text = str( videoassets[i].bitrate) videoassets_objs[i].find( 'format').text = settings.ARCFormats["mp4_h264_main"] videoassets_objs[i].find('duration').text = str( videoassets[i].duration) videoassets_objs[i].find( 'aspectratio').text = videoassets[i].aspectratio # print ET.tostring(root, encoding="utf-8") string_with_CDATA = addCDATA(ET.tostring(root, encoding="utf-8")) with open(folder_path + "/" + video.showvideouuid + ".xml", 'wb') as w: w.write(string_with_CDATA.encode('utf8')) video.stateid = STATES['Xml Generated'] db_session.commit() except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except UnicodeDecodeError, e: video.stateid = STATES['Xml Generate Failed'] db_session.add(Logs(videoid, str(e))) db_session.commit() raise
def generate_xml(videoid, folder_path): try: video = db_session.query(Videos).filter_by(id=videoid).first() videoassets = db_session.query(Videoassets).filter_by(videoid=videoid).all() imageassets = db_session.query(Imageassets).filter_by(videoid=videoid).all() if len(videoassets) != 2 or len(imageassets) != 2: # TODO: Log this error db_session.add(Logs(videoid, "videoassets or imageassets not right")) video.stateid = STATES['Video Processed Successfully'] imageassets_to_delete = db_session.query(Imageassets).filter_by(videoid=videoid).all() videoassets_to_delete = db_session.query(Videoassets).filter_by(videoid=videoid).all() db_session.delete(imageassets_to_delete) db_session.delete(videoassets_to_delete) db_session.commit() exit() arc_tree = ET.parse("template/arc_template.xml") root = arc_tree.getroot() root.find('arcnamespace').text = video.namespace # If no series uuid, delete the node if not video.seriesuuid: root.remove(root.find('seriesuuid')) else: root.find('seriesuuid').text = video.seriesuuid # If no episode uuid, delete the node if not video.episodeuuid: root.remove(root.find('episodeuuid')) else: root.find('episodeuuid').text = video.episodeuuid root.find('videoplaylistuuid').text = video.videoplaylistuuid root.find('showvideouuid').text = video.showvideouuid root.find('type').text = "showvideo" root.find('workflowtype').text = "posterclip" root.find('title').text = video.title root.find('duration').text = "10" root.find('lang').text = video.lang image = root.find('images').find('image') image.find('title').text = video.title image.find('imagetype').text = settings.ARCFormats["thumbnail"] imageassets_objs = image.find('imageassets').findall('imageasset') for i in xrange(2): if video.hostentry: imageassets_objs[i].find('imageuri').text = settings.FTP_ENTRY[video.hostentry]['MGID_PREFIX']+'/'\ +'/'.join(imageassets[i].uri.split('/')[2:]) else: # TODO: other host entry imageassets_objs[i].find('imageuri').text = imageassets[i].uri 
imageassets_objs[i].find('width').text = str(imageassets[i].width) imageassets_objs[i].find('height').text = str(imageassets[i].height) imageassets_objs[i].find('format').text = settings.ARCFormats["jpg"] imageassets_objs[i].find('aspectratio').text = imageassets[i].aspectratio videoassets_objs = root.find('videoassets').findall('videoasset') for i in xrange(2): videoassets_objs[i].find('lang').text = videoassets[i].lang if video.hostentry: videoassets_objs[i].find('uri').text = settings.FTP_ENTRY[video.hostentry]['MGID_PREFIX']+'/'\ +'/'.join(videoassets[i].uri.split('/')[2:]) else: # TODO: other host entry videoassets_objs[i].find('uri').text = videoassets[i].uri videoassets_objs[i].find('width').text = str(videoassets[i].width) videoassets_objs[i].find('height').text = str(videoassets[i].height) videoassets_objs[i].find('bitrate').text = str(videoassets[i].bitrate) videoassets_objs[i].find('format').text = settings.ARCFormats["mp4_h264_main"] videoassets_objs[i].find('duration').text = str(videoassets[i].duration) videoassets_objs[i].find('aspectratio').text = videoassets[i].aspectratio # print ET.tostring(root, encoding="utf-8") string_with_CDATA = addCDATA(ET.tostring(root, encoding="utf-8")) with open(folder_path+"/"+video.showvideouuid+".xml", 'wb') as w: w.write(string_with_CDATA.encode('utf8')) video.stateid = STATES['Xml Generated'] db_session.commit() except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except UnicodeDecodeError, e: video.stateid = STATES['Xml Generate Failed'] db_session.add(Logs(videoid, str(e))) db_session.commit() raise
def generate_clips(videoid): try: video = db_session.query(Videos).filter_by(id=videoid).first() print video.id, server_folder = '/'.join(video.uri_1200.split("/")[:-1]) video_path_1200 = os.path.join(os.getcwd(), "temp", video.showvideouuid, video.uri_1200.split("/")[-1]) video_path_400 = os.path.join(os.getcwd(), "temp", video.showvideouuid, video.uri_400.split("/")[-1]) lang = video.lang # Find the total seconds first command = [ "ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", video_path_400 ] process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) second_string = process.communicate()[0] seconds = int(float(str(second_string).strip())) if seconds < 11: # TODO: Save this error to DB db_session.add(Logs(videoid, "Video duration less than 10 secs.")) return inpoint = seconds - 11 outpoint = seconds - 1 for video_path in [video_path_1200, video_path_400]: width = video_path.split('_')[-3].split('x')[0] height = video_path.split('_')[-3].split('x')[-1] bitrate = video_path.split('_')[-2] if 1.7 < float(width) / float(height) < 1.8: aspectratio = "16:9" elif 1.3 < float(width) / float(height) < 1.4: aspectratio = "4:3" else: # TODO: Hard coded to 1:1, may change later aspectratio = "1:1" # Create Videoasset destpath = video_path.split('.')[0] + "_10secs.mp4" if os.path.exists(destpath): os.remove(destpath) # Generate 10 seconds here video_command = [ "ffmpeg", "-i", video_path, "-ss", str(inpoint), "-to", str(outpoint), "-an", "-maxrate", "600k", "-bufsize", "1200k", "-profile:v", "baseline", "-level", "3.1", "-f", "mp4", "-movflags", "+faststart", destpath ] subprocess.Popen(video_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() print "10 seconds clip generated" clip_uri = os.path.join(server_folder, destpath.split("/")[-1]) if db_session.query(Videoassets).filter( Videoassets.videoid == videoid, Videoassets.width == width, Videoassets.height == height, 
Videoassets.uri == clip_uri).count(): # TODO: should merge new fields with old entry? pass else: videoasset = Videoassets(videoid, clip_uri, width, height, bitrate, aspectratio, lang) db_session.add(videoasset) # Create Imageasset image_path = destpath.replace('mp4', 'jpg') if os.path.exists(image_path): os.remove(image_path) image_command = [ "ffmpeg", "-i", destpath, "-ss", "0", "-vframes", "1", "-vcodec", "mjpeg", "-f", "image2", image_path ] subprocess.Popen(image_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() print "Thumbnail generated" image_uri = os.path.join(server_folder, image_path.split("/")[-1]) # In case duplicates in Imageassets table if db_session.query(Imageassets).filter( Imageassets.videoid == videoid, Imageassets.width == width, Imageassets.height == height, Imageassets.uri == image_uri).count(): # TODO: should merge new fields with old entry? pass else: imageasset = Imageassets(videoid, image_uri, width, height, aspectratio) db_session.add(imageasset) video.stateid = STATES['Video Processed Successfully'] db_session.commit() except subprocess.CalledProcessError: # TODO: Catch Subprocess errors print " Subprocess error " video.stateid = STATES['Ffmpeg Failed'] db_session.commit() pass except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except ValueError, e: print e # Will find "No such file or directory" need re-download # Delete the record and folder on disk, reprocess it from step 1 folder_path = "temp/" + video.showvideouuid if os.path.exists(folder_path): shutil.rmtree(folder_path) logs = db_session.query(Logs).filter_by(videoid=videoid).all() if logs: for log in logs: print log.id db_session.delete(logs) video.stateid = STATES["Video Logged"] db_session.commit() pass
def download(): """ Create dir in temp folder and download video files :return: None """ download_method = DOWNLOAD_UPLOAD_METHOD # Change the URI if needed, create temp folders in local disk videos = db_session.query(Videos).filter_by(stateid=STATES['Video Logged']).all() count = 0 for video in videos: try: video_path_slices_1200 = video.uri_1200.split(':') if len(video_path_slices_1200) != 1: # GSP if video_path_slices_1200[2] == 'gsp': if video_path_slices_1200[3] in [entry for entry in FTP_ENTRY.keys()]: video.hostentry = video_path_slices_1200[3] video.uri_1200 = FTP_ENTRY[video.hostentry]["FTP_PREFIX"] + video_path_slices_1200[4] video.uri_400 = FTP_ENTRY[video.hostentry]["FTP_PREFIX"] + video.uri_400.split(':')[4] else: new_log = Logs(video.id, "host entry not recognized: " + video_path_slices_1200[3]) db_session.add(new_log) db_session.commit() print "host entry not recognized:", video_path_slices_1200[3] # else: # # TODO: New GSP host entry, log to DB # print video.uri_1200 # print "###", video.id, "Not Supported yet." folder_path = "temp/" + video.showvideouuid os.mkdir(folder_path) video.stateid = STATES['Video Ready To Download'] count += 1 if download_method == "FTP" and count >= 100: break except OSError: print "folder " + folder_path + " already exists... 
delete" # the state will stay at 1 shutil.rmtree(folder_path) continue except IndexError: # TODO: something wrong with uri list continue except: raise db_session.commit() # Download all videos with state=2 videos = db_session.query(Videos).filter_by(stateid=STATES['Video Ready To Download']).all() for video in videos if len(videos) <= MAX_DOWNLOADS else videos[0:MAX_DOWNLOADS]: folder_path = "temp/" + video.showvideouuid if not os.path.exists(folder_path): video.stateid = STATES['Video Logged'] db_session.commit() else: try: video.stateid = STATES['Video Downloading in Queue'] db_session.commit() print video.id if download_method == "FTP": ftpget.delay(video.id, folder_path) elif download_method == "MOUNT": localcopy.delay(video.id, folder_path) else: print "Not a proper way to download videos from GSP" except socket_error: # TODO: rabbitMQ not running or port not right, save to database print "RabbitMQ connection refused"
from db_drift import Videos, db_session __author__ = 'Hao Lin' with open("template/p_mtv.csv") as csv: lines = csv.readlines() for one_line in lines[1:]: line = one_line.split(',') # print line title = line[3] namespace = line[4] showvideouuid = line[5] videoplaylistuuid = line[6] episodeuuid = line[7] seriesuuid = line[8] uri_1200 = line[9] uri_400 = line[10] lang = line[11] new_video = Videos(1, title, namespace, showvideouuid, videoplaylistuuid, episodeuuid, seriesuuid, uri_1200, uri_400, lang) # print new_video.__dict__ db_session.add(new_video) db_session.commit() print len(lines)
def generate_clips(videoid): try: video = db_session.query(Videos).filter_by(id=videoid).first() print video.id, server_folder = '/'.join(video.uri_1200.split("/")[:-1]) video_path_1200 = os.path.join(os.getcwd(), "temp", video.showvideouuid, video.uri_1200.split("/")[-1]) video_path_400 = os.path.join(os.getcwd(), "temp", video.showvideouuid, video.uri_400.split("/")[-1]) lang = video.lang # Find the total seconds first command = ["ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", video_path_400] process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) second_string = process.communicate()[0] seconds = int(float(str(second_string).strip())) if seconds < 11: # TODO: Save this error to DB db_session.add(Logs(videoid, "Video duration less than 10 secs.")) return inpoint = seconds - 11 outpoint = seconds - 1 for video_path in [video_path_1200, video_path_400]: width = video_path.split('_')[-3].split('x')[0] height = video_path.split('_')[-3].split('x')[-1] bitrate = video_path.split('_')[-2] if 1.7 < float(width)/float(height) < 1.8: aspectratio = "16:9" elif 1.3 < float(width)/float(height) < 1.4: aspectratio = "4:3" else: # TODO: Hard coded to 1:1, may change later aspectratio = "1:1" # Create Videoasset destpath = video_path.split('.')[0] + "_10secs.mp4" if os.path.exists(destpath): os.remove(destpath) # Generate 10 seconds here video_command = ["ffmpeg", "-i", video_path, "-ss", str(inpoint), "-to", str(outpoint), "-an", "-maxrate", "600k", "-bufsize", "1200k", "-profile:v", "baseline", "-level", "3.1", "-f", "mp4", "-movflags", "+faststart", destpath] subprocess.Popen(video_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() print "10 seconds clip generated" clip_uri = os.path.join(server_folder, destpath.split("/")[-1]) if db_session.query(Videoassets).filter(Videoassets.videoid == videoid, Videoassets.width == width, Videoassets.height == height, 
Videoassets.uri == clip_uri).count(): # TODO: should merge new fields with old entry? pass else: videoasset = Videoassets(videoid, clip_uri, width, height, bitrate, aspectratio, lang) db_session.add(videoasset) # Create Imageasset image_path = destpath.replace('mp4', 'jpg') if os.path.exists(image_path): os.remove(image_path) image_command = ["ffmpeg", "-i", destpath, "-ss", "0", "-vframes", "1", "-vcodec", "mjpeg", "-f", "image2", image_path] subprocess.Popen(image_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() print "Thumbnail generated" image_uri = os.path.join(server_folder, image_path.split("/")[-1]) # In case duplicates in Imageassets table if db_session.query(Imageassets).filter(Imageassets.videoid == videoid, Imageassets.width == width, Imageassets.height == height, Imageassets.uri == image_uri).count(): # TODO: should merge new fields with old entry? pass else: imageasset = Imageassets(videoid, image_uri, width, height, aspectratio) db_session.add(imageasset) video.stateid = STATES['Video Processed Successfully'] db_session.commit() except subprocess.CalledProcessError: # TODO: Catch Subprocess errors print " Subprocess error " video.stateid = STATES['Ffmpeg Failed'] db_session.commit() pass except SQLAlchemyError: # TODO: Catch SQLAlchemy errors pass except ValueError, e: print e # Will find "No such file or directory" need re-download # Delete the record and folder on disk, reprocess it from step 1 folder_path = "temp/" + video.showvideouuid if os.path.exists(folder_path): shutil.rmtree(folder_path) logs = db_session.query(Logs).filter_by(videoid=videoid).all() if logs: for log in logs: print log.id db_session.delete(logs) video.stateid = STATES["Video Logged"] db_session.commit() pass