def __report_usage(self, authid, mfileid):
    """Flush the cached usage metrics for (authid, mfileid) to the usage store.

    Holds ``self.usagelock`` for the whole flush so concurrent writers
    cannot interleave with the report-and-reset.  After reporting, the
    cache slot is reset to an empty dict (only when it existed).
    """
    with self.usagelock:
        # Guard clauses: nothing cached for this auth id / mfile id pair.
        per_auth = self.usagecache.get(authid)
        if per_auth is None:
            return
        metrics = per_auth.get(mfileid)
        if metrics is None:
            return
        for metric_name, metric_value in metrics.items():
            logging.info("MServeFUSE reporting %s = %s ", metric_name, metric_value)
            # report=False: the store records without triggering a re-report.
            usage_store.record(mfileid, metric_name, metric_value, report=False)
        # Everything reported — start the next accumulation window empty.
        per_auth[mfileid] = {}
def digitalrapids(inputs, outputs, options=None, callbacks=None):
    """Transcode a video through the Digital Rapids drop-folder workflow.

    inputs[0]  -- id of the source MFile
    outputs[0] -- id of the JobOutput that receives the transcoded file
    options, callbacks -- unused here; kept for task-signature compatibility.
        Defaults are None (not ``{}``/``[]``) to avoid the shared
        mutable-default-argument pitfall.

    Returns a success dict on completion, or False when the source video
    does not exist on disk.  Re-raises any other failure after logging it.
    """
    baseinputdir = settings.DIGITAL_RAPIDS_INPUT_DIR
    baseoutputdir = settings.DIGITAL_RAPIDS_OUTPUT_DIR
    inputdir = os.path.join(baseinputdir)
    outputdir = os.path.join(baseoutputdir)
    try:
        mfileid = inputs[0]
        joboutput = outputs[0]
        from dataservice.models import MFile
        mf = MFile.objects.get(id=mfileid)
        videopath = mf.file.path
        logging.info("Processing video Digital Rapids %s" % (videopath))
        if not os.path.exists(videopath):
            logging.info("Video %s does not exist" % (videopath))
            return False
        # Blocks until the transcoder has produced its output file
        # (presumably — behavior of _drop_folder is defined elsewhere).
        video = _drop_folder(videopath, inputdir, outputdir)
        from dataservice import usage_store
        inputfilesize = os.path.getsize(videopath)
        usage_store.record(mfileid, "http://mserve/digitalrapids", inputfilesize)
        # 'rb', not 'r': the output is binary video, and text mode would
        # corrupt it on platforms that translate line endings.  The context
        # manager closes the handle even if the save below raises.
        with open(video, 'rb') as videofile:
            from jobservice.models import JobOutput
            jo = JobOutput.objects.get(id=joboutput)
            jo.file.save('transcode.mov', File(videofile), save=True)
        return {"success": True, "message": "Digital Rapids transcode of video successful"}
    except Exception as e:
        logging.info("Error with digitalrapids %s" % e)
        # Bare raise preserves the original traceback ('raise e' would not).
        raise
def mxfframecount(inputs, outputs, options=None, callbacks=None):
    """Count the frames ingested from an MXF file using ``d10sumchecker``.

    inputs[0] -- id of the MFile to inspect (resolved to a path via _get_mfile)
    callbacks -- optional celery subtask signatures fired after recording.
        Defaults are None (not ``{}``/``[]``) to avoid the shared
        mutable-default-argument pitfall.

    Returns {"success": True, ..., "frames": n} on success, False when the
    input file is missing.  Raises Exception when d10sumchecker exits non-zero;
    other failures are logged and re-raised.
    """
    options = options or {}
    callbacks = callbacks or []
    try:
        mfileid = inputs[0]
        inputfile = _get_mfile(mfileid)
        # NamedTemporaryFile is kept open so the file survives until this
        # function returns; d10sumchecker writes into it by name.
        outputfile = tempfile.NamedTemporaryFile()
        logging.info("Processing mxfframecount job on %s" % (inputfile))
        if not os.path.exists(inputfile):
            logging.info("Inputfile %s does not exist" % (inputfile))
            return False
        args = ["d10sumchecker", "-i", inputfile, "-o", outputfile.name]
        logging.info(args)
        ret = subprocess.call(args)
        if ret != 0:
            raise Exception("mxfframecount failed")
        # One line per frame; the tool emits one extra non-frame line,
        # hence the -1.  'with' ensures the handle is closed (the original
        # leaked it).
        with open(outputfile.name) as checkfile:
            frames = sum(1 for _ in checkfile) - 1
        import dataservice.usage_store as usage_store
        usage_store.record(mfileid, "http://prestoprime/mxf_frames_ingested", frames)
        for callback in callbacks:
            subtask(callback).delay()
        return {"success": True, "message": "mxfframecount successful", "frames": frames}
    except Exception as e:
        logging.info("Error with mxfframecount %s" % e)
        # Bare raise preserves the original traceback ('raise e' would not).
        raise
def mfile_get_handler( sender, mfile=False, **kwargs):
    # Signal handler: on MFile access, verify an .mxf file against its stored
    # D10 checksum file and, for every mismatching (corrupted) frame, queue a
    # celery task that extracts that frame as a PNG (with thumbnail callback).
    # All failures are logged and swallowed so the triggering request is
    # never broken by this check.
    logging.info("In Prestoprime handler" )
    try:
        if mfile.file.name.endswith(".mxf"):
            mfiled10check=MFileD10Check.objects.get(mfile=mfile)
            # Recompute the checksum file into a temp file, then compare it
            # byte-for-byte with the stored reference.
            tmpfile = tempfile.NamedTemporaryFile(mode="w")
            d10mxfchecksum([mfile.file.path],[tmpfile.name])
            if not filecmp.cmp(tmpfile.name,mfiled10check.checkfile.path):
                lines1 = open(tmpfile.name).readlines()
                lines2 = open(mfiled10check.checkfile.path).readlines()
                # TODO: Make this a celery task
                corrupted = 0
                i = 0
                frames = []
                # One Job groups all frame-extraction outputs; deleted below
                # if no task ends up being created.
                job = Job(name="Extract Corrupted Frames",service=mfile.service)
                job.save()
                tasks = []
                for line1 in lines1:
                    # i tracks the matching line in the reference file.
                    # NOTE(review): assumes both files have the same line
                    # count — a shorter reference would raise IndexError,
                    # caught by the broad handler below.
                    line2 = lines2[i]
                    if line1 != line2:
                        # Checksum lines are tab-separated; first field is
                        # the frame number.
                        split = line1.split("\t")
                        frameS = split[0]
                        try:
                            frame = int(frameS)
                            frames.append(frame)
                            logging.info("frame %s corrupted " % frame )
                            output = JobOutput(name="Job 'Corrupted Frame %s'"%frame,job=job,mimetype="image/png")
                            output.save()
                            # NOTE(review): fname is the same for every frame,
                            # so multiple corrupted frames share one output
                            # path under this job id — confirm intended.
                            fname = "%s.%s" % ("corruptedframe","png")
                            outputpath = os.path.join( str(job.id) , fname)
                            output.file = outputpath
                            thumbfolder = os.path.join( settings.THUMB_ROOT, str(job.id))
                            if not os.path.exists(thumbfolder):
                                os.makedirs(thumbfolder)
                            # Ensure the storage directory for the output
                            # file exists before the extraction task runs.
                            (head,tail) = os.path.split(output.file.path)
                            if not os.path.isdir(head):
                                os.makedirs(head)
                            thumbfile= os.path.join( thumbfolder , "%s%s" % (fname,".thumb.png"))
                            thumbpath = os.path.join( str(job.id) , "%s%s" % (fname,".thumb.png"))
                            output.thumb = thumbpath
                            output.save()
                            thumboptions = {"width":settings.thumbsize[0],"height":settings.thumbsize[1]}
                            # Thumbnail generation chained as a callback of
                            # the frame-extraction task.
                            callback = thumbimage.subtask([output,thumbfile,thumboptions])
                            logging.info("Creating task %s %s " % (mfile.file.path,output.file.path))
                            inputs = [mfile.file.path]
                            outputs = [output.file.path]
                            options = {"frame":frame}
                            callbacks = [callback]
                            task = extractd10frame.subtask([inputs,outputs,options],callbacks=callbacks)
                            tasks.append(task)
                            logging.info("task created %s" % task)
                            corrupted += 1
                        except ValueError as e:
                            # First field was not an integer frame number
                            # (e.g. a header line) — skip it.
                            logging.info("PP handler %s " % e)
                    i = i+1
                if len(tasks) > 0:
                    # NOTE(review): logs only the last task created, not the
                    # whole list.
                    logging.info("tasks to execute created \n %s" % task)
                    job.save()
                    ts = TaskSet(tasks=tasks)
                    tsr = ts.apply_async()
                    tsr.save()
                    job.taskset_id=tsr.taskset_id
                    job.name="Extract Corrupted Frames %s" % frames
                    job.save()
                else:
                    # No extractable frames — drop the placeholder job.
                    job.delete()
                if corrupted > 0:
                    usage_store.record(mfile.id,pp_mxfframe_corrupted_metric,corrupted)
                    logging.info("Lines Corrupted = %s " % corrupted)
            else:
                logging.info("Files are same")
    except MFileD10Check.DoesNotExist:
        # No reference checksum stored for this mfile — nothing to verify.
        logging.info("Prestoprime mfile_get_handler ")
    except Exception as e:
        # Best-effort handler: never propagate failures to the caller.
        logging.info("Prestoprime mfile get handler failed %s " % e)
    logging.info("Done Prestoprime handler" )
    return
def read(self, request, outputid, field=None):
    # Handler for reading a JobOutput.
    #
    # field == "file": resolve the output's file, publish it into a
    # bandwidth-throttled secure-download folder (hardlink, falling back to
    # copy), record an access-usage metric, and redirect to the secure link.
    # Otherwise return the JobOutput object itself.  No outputid -> 405.
    if outputid:
        if field == "file":
            joboutput = JobOutput.objects.get(id=outputid)
            file=joboutput.file
            # Empty FileField name means no file was ever attached.
            if file == "":
                return HttpResponseNotFound()
            outputfilepath = file.path
            logging.info(joboutput)
            job = joboutput.job
            logging.info(job)
            # Access speed: service property overrides container property
            # overrides the global default.
            accessspeed = settings.DEFAULT_ACCESS_SPEED
            service = job.mfile.service
            container = service.container
            logging.info("Finding limit for %s " % (job))
            try:
                prop = ManagementProperty.objects.get(base=service,property="accessspeed")
                accessspeed = prop.value
                logging.info("Limit set from service property to %s for %s " % (accessspeed,job.name))
            except ObjectDoesNotExist:
                try:
                    prop = ManagementProperty.objects.get(base=container,property="accessspeed")
                    accessspeed = prop.value
                    logging.info("Limit set from container property to %s for %s " % (accessspeed,job.name))
                except ObjectDoesNotExist:
                    # Neither level sets a limit — keep the default.
                    pass
            # The download folder name encodes the throttle rate; "dl" is
            # the unthrottled folder (presumably matched by the web server's
            # rate-limit config — confirm against deployment).
            if accessspeed == "unlimited":
                dlfoldername = "dl"
            else:
                dlfoldername = "dl%s" % accessspeed
            p = str(file)
            # Build the signed/secured link, dropping its leading slash.
            redirecturl = utils.gen_sec_link_orig(p,dlfoldername)
            redirecturl = redirecturl[1:]
            SECDOWNLOAD_ROOT = settings.SECDOWNLOAD_ROOT
            fullfilepath = os.path.join(SECDOWNLOAD_ROOT,dlfoldername,p)
            fullfilepathfolder = os.path.dirname(fullfilepath)
            outputfilepath = file.path
            logging.info("Redirect URL = %s " % redirecturl)
            logging.info("fullfilepath = %s " % fullfilepath)
            logging.info("fullfilefolder = %s " % fullfilepathfolder)
            logging.info("mfilefp = %s " % outputfilepath)
            logging.info("mfilef = %s " % file)
            if not os.path.exists(fullfilepathfolder):
                os.makedirs(fullfilepathfolder)
            # Publish the file into the throttled folder once; later reads
            # reuse the existing link.
            if not os.path.exists(fullfilepath):
                logging.info("linking %s (exist=%s) to %s (exists=%s)" % (outputfilepath,os.path.exists(outputfilepath),fullfilepath,os.path.exists(fullfilepath)))
                try:
                    # Hardlink avoids duplicating the data on disk.
                    os.link(outputfilepath,fullfilepath)
                except Exception as e:
                    # Hardlink can fail (e.g. across filesystems) — fall
                    # back to a full copy.
                    logging.info("Caught error linking file, trying copy. \n %s" % str(e))
                    shutil.copy(outputfilepath,fullfilepath)
            # Every read is billed as an access of the file's full size.
            usage_store.record(joboutput.id,models.METRIC_ACCESS,joboutput.file.size)
            logging.info("Redirecting to %s " % redirecturl)
            return redirect("/%s"%redirecturl)
        else:
            joboutput = JobOutput.objects.get(id=outputid)
            return joboutput
    else:
        return HttpResponseNotAllowed([])