self.uploadData("480p")
if self.VideoFrameHeight > 720:
    #上传360p段和480p段到HDFS
    self.uploadData("360p")
    self.uploadData("480p")
"""
        # NOTE(review): everything above the closing quotes appears to be the
        # tail of a commented-out (triple-quoted) block whose opening quotes
        # are earlier in the file — kept byte-for-byte; confirm against the
        # full source.  The live code path uploads unconditionally:
        self.uploadData()
        # Return the accumulated hadoop command list built by this method.
        return self.HadoopCmdList


#######################################
if __name__ == "__main__":
    # Mapper entry point: restore the pickled job state produced upstream,
    # then read the video segment list from stdin (Hadoop streaming style).
    logger = baseclass.getlog("Mapper")
    try:
        logger.info("Start mapper.....")
        # video metadata (pickled dict written by the job driver)
        info_dict = pickle.load(open('video.info', 'r'))
        # hadoop cluster information (pickled dict)
        hadoopinfo_dict = pickle.load(open('hadoop.info', 'r'))
        # video segment chunks, one per stdin line
        seg_list = [line.strip() for line in sys.stdin]
    except Exception, e:
# Resolve parsed command-line options into module-level settings.
if options.hadinsdir == None:  # --hadinsdir: fall back to the default install dir
    HadoopInsDir = "/opt/hadoop/hadoop"
else:
    HadoopInsDir = options.hadinsdir.strip()
HadoopNNAddr = options.nnport.strip()            # --nnport
TaskTrackerNums = int(options.tasknums.strip())  # --tasknums
BlockSize = int(options.blocksize.strip())       # --blocksize
HadoopVersion = options.hadversion.strip()       # --hadversion
LogLevel = options.loglevel.strip()              # --loglevel
##############################################
# Logger honours the requested log level.
logger = baseclass.getlog("handlevideo", loglevel=LogLevel)
logger.debug("handle video parameter info : \n \
VideoMetaFile: %s \n \
HadoopInsDir : %s \n \
HadoopNNAddr : %s \n \
TaskTrackerNums : %s \n \
BlockSize : %s \n \
HadoopVersion : %s \n \
LogLevel : %s \n \
" % (VideoMetaFile, HadoopInsDir, HadoopNNAddr, TaskTrackerNums, BlockSize, HadoopVersion, LogLevel))
###############################################
# Parse the video metadata file to obtain the video information.
# (Fragment is truncated here — the if-body continues past this chunk.)
if not os.path.isfile(VideoMetaFile):
############################################### # --/ # 常量 # --/ FFPROBE="/usr/local/bin/ffprobe" ############################################### # --/ # 初始化日志对象 # --/ logger = baseclass.getlog("handlevideo") ############################################### # --/ # # 获取命令行参数 # '-m' : 视频文件信息,xml文件. # '-d' : hadoop安装目录,在所有相关主机hadoop安装目录需一致. # '-n' : mapreducer集群namenode+port # '-z' : hdfs集群namenode+port # '-t' : tasktracker节点数 # '-b' : 指定块尺寸 # '-v' : hadoop版本 # '-l' : 指定日志级别,info或debug #
#!/usr/bin/env python
# -*- encoding:utf-8 -*-
'''
Created on 20131101
@author: wye
Copyright @ 2011 - 2012 Cloudiya Tech . Inc

Analyse a video file and compute the optimal split points.

Usage:
    <prog> <videoname> <videomvpath> <duration> <blocksize_seconds> <partnums>
'''
import sys
import baseclass

# Absolute path of the ffprobe binary used for key-frame inspection.
ffprobe = "/usr/local/bin/ffprobe"

# Positional CLI arguments.
videoname = sys.argv[1]
videomvpath = sys.argv[2]
VideoDuration = sys.argv[3]
BlockSizeSecondNum = int(sys.argv[4])
PartNums = int(sys.argv[5])

logger = baseclass.getlog(videoname)
KFTSobj = baseclass.getKFSplitPoint(
    logger, ffprobe, videoname, videomvpath,
    VideoDuration, BlockSizeSecondNum, PartNums)
print(KFTSobj.main())
        # Map each logical pipeline stage name to its prepared command list,
        # then hand the whole dispatch dict back to the caller.
        self.CmdDict["ts"] = self.FFmpegTsCmdList
        self.CmdDict["mv"] = self.FFmpegMvCmdList
        self.CmdDict["video"] = self.FFmpegVideoCmdList
        self.CmdDict["screenshot"] = self.FFmpegScreenshotCmdList
        self.CmdDict["playlist"] = self.GenPlaylistCmdList
        self.CmdDict["prev"] = self.GenPrevFileCmdList
        self.CmdDict["rename"] = self.RenameCmdList
        self.CmdDict["upload"] = self.HadoopUploadCmdList
        return self.CmdDict

#################################
if __name__ == "__main__":
    # Reducer entry point: restore the pickled job state produced upstream.
    logger = baseclass.getlog("reducer")
    # Date stamp used downstream, e.g. "20131101".
    datadate = datetime.datetime.now().strftime("%Y%m%d")
    # --/
    # Initialization stage
    # --/
    try:
        logger.info("Start reducer.....")
        # video metadata (pickled dict written by the job driver)
        info_dict = pickle.load(open('video.info','r'))
        # hadoop cluster information (pickled dict)
        hadoopinfo_dict = pickle.load(open('hadoop.info','r'))
############################################### # --/ # 常量 # --/ FFPROBE = "/usr/local/bin/ffprobe" ############################################### # --/ # 初始化日志对象 # --/ logger = baseclass.getlog("handlevideo") ############################################### # --/ # # 获取命令行参数 # '-m' : 视频文件信息,xml文件. # '-d' : hadoop安装目录,在所有相关主机hadoop安装目录需一致. # '-n' : mapreducer集群namenode+port # '-z' : hdfs集群namenode+port # '-t' : tasktracker节点数 # '-b' : 指定块尺寸 # '-v' : hadoop版本 # '-l' : 指定日志级别,info或debug #
@author: wye
Copyright @ 2011 - 2012 Cloudiya Tech . Inc
'''
import sys
import baseclass

# Absolute path of the ffprobe binary.
FFPROBE="/usr/local/bin/ffprobe"
# Stream file path
StreamFilepath = sys.argv[1]
# Video file Name Alias
VideoAliasName = sys.argv[2]
# Stream Property name
PropertyFlag = sys.argv[3]

logger = baseclass.getlog(VideoAliasName)
# Run ffprobe and collect its stdout for parsing.
RunCmdObj = baseclass.runCmd(logger,VideoAliasName,False,False)
RunCmdObj.run(["%s -show_format %s"%(FFPROBE,StreamFilepath)])
# Extract the [FORMAT] section; the first entry holds the container metadata.
VFormatList = baseclass.getVideoMetaDList(RunCmdObj.stdout,"FORMAT")
VFormatDict = VFormatList[0]
# Print the requested property; "ab" = audio/container bit rate.
# NOTE(review): fragment ends here — additional PropertyFlag branches
# presumably follow in the full file.
if PropertyFlag == "ab":print(VFormatDict["bit_rate"])
import baseclass
logger = baseclass.getlog("handlevideo")
'''
MapWebHdfs = baseclass.WebHadoop("10.2.10.14","50071","cloudiyadatauser",logger)
vid = "dirtest"
TmpDir_Path = "/tmp/%s" % vid
TmpHdfs_Path = "/%s" % vid
if MapWebHdfs.put_dir(TmpDir_Path,TmpHdfs_Path,overwrite="true"):
    print("success")
else:
    print("fail")
'''
import MySQLdb
import sys

def getStatus():
    # Fetch the status column for one hard-coded video id from MySQL.
    # NOTE(review): credentials are hard-coded in source — move to config.
    # Fragment is truncated mid-try; the except/return follow past this chunk.
    try:
        dbconn = MySQLdb.connect(host="10.2.10.12",user="******",passwd="c10udiya")
        dbcursor = dbconn.cursor()
        dbconn.select_db('video1')
        # Parameterized query — driver escapes the vid value.
        sql = "select status from video where vid=%s"
        sql_result = dbcursor.execute(sql,"vXUekH2")
        status = dbcursor.fetchone()
        dbconn.commit()
# Resolve parsed command-line options into module-level settings.
# Fix: compare against None with `is None` (identity), not `== None` (PEP 8).
if options.hadinsdir is None:  # --hadinsdir: fall back to the default install dir
    HadoopInsDir = "/opt/hadoop/hadoop"
else:
    HadoopInsDir = options.hadinsdir.strip()
HadoopNNAddr = options.nnport.strip()            # --nnport
TaskTrackerNums = int(options.tasknums.strip())  # --tasknums
BlockSize = int(options.blocksize.strip())       # --blocksize
HadoopVersion = options.hadversion.strip()       # --hadversion
LogLevel = options.loglevel.strip()              # --loglevel
##############################################
# Logger honours the requested log level.
logger = baseclass.getlog("handlevideo",loglevel=LogLevel)
logger.debug("handle video parameter info : \n \
VideoMetaFile: %s \n \
HadoopInsDir : %s \n \
HadoopNNAddr : %s \n \
TaskTrackerNums : %s \n \
BlockSize : %s \n \
HadoopVersion : %s \n \
LogLevel : %s \n \
"%(VideoMetaFile,HadoopInsDir,HadoopNNAddr,TaskTrackerNums,BlockSize,HadoopVersion,LogLevel))
###############################################
# Parse the video metadata file to obtain the video information.
if not os.path.isfile(VideoMetaFile):
    logger.error("Don't find video metadata file : %s"%VideoMetaFile)
import baseclass
logger = baseclass.getlog("handlevideo")
'''
MapWebHdfs = baseclass.WebHadoop("10.2.10.14","50071","cloudiyadatauser",logger)
vid = "dirtest"
TmpDir_Path = "/tmp/%s" % vid
TmpHdfs_Path = "/%s" % vid
if MapWebHdfs.put_dir(TmpDir_Path,TmpHdfs_Path,overwrite="true"):
    print("success")
else:
    print("fail")
'''
import MySQLdb
import sys

def getStatus():
    # Fetch the status column for one hard-coded video id from MySQL.
    # NOTE(review): credentials are hard-coded in source — move to config.
    # Fragment is truncated mid-try; the rest follows past this chunk.
    try:
        dbconn = MySQLdb.connect(host="10.2.10.12", user="******", passwd="c10udiya")
        dbcursor = dbconn.cursor()
        dbconn.select_db('video1')
        # Parameterized query — driver escapes the vid value.
        sql = "select status from video where vid=%s"
        sql_result = dbcursor.execute(sql, "vXUekH2")
#!/usr/bin/env python
# -*- encoding:utf-8 -*-
'''
Created on 20131101
@author: wye
Copyright @ 2011 - 2012 Cloudiya Tech . Inc
'''
'''
Analyse a video file and work out the best split points.
'''
import sys
import baseclass

# Expected argv: video name, mv path, duration, seconds per block, part count.
videoname, videomvpath, VideoDuration = sys.argv[1:4]
BlockSizeSecondNum, PartNums = int(sys.argv[4]), int(sys.argv[5])

ffprobe = "/usr/local/bin/ffprobe"
logger = baseclass.getlog(videoname)

# Delegate the key-frame split-point computation and emit the result.
KFTSobj = baseclass.getKFSplitPoint(logger, ffprobe, videoname, videomvpath,
                                    VideoDuration, BlockSizeSecondNum, PartNums)
print(KFTSobj.main())