def makeMultiNetCDFFromDPL(session,DPL,DPLParms,templatefilename): picnicsession.updateSessionComment(session,'loading artist') #import hsrl.data_stream.open_config as oc import hsrl.dpl_experimental.dpl_artists as artists ftpbase=os.getenv('FTPPATH','/var/ftp/data') ftpurlbase=os.getenv('FTPURL','ftp://lidar.ssec.wisc.edu/data') if len(session['username'])==0: print 'bad username' raise RuntimeError,'Bad username' baseftpdir=picnicsession.safejoin(ftpbase,session['username']) sessiondir=picnicsession.safejoin(baseftpdir,session['sessionid']) try: os.mkdir(baseftpdir) except: pass try: os.mkdir(sessiondir) except: pass tarname=session['fileprefix'] + DPLParms['start_time_datetime'].strftime('_%Y%m%dT%H%M') + DPLParms['end_time_datetime'].strftime('_%Y%m%dT%H%M') + session['filesuffix'] + '_' + session['sessionid'] + '.tar.bz2' tarcompoutputfilename=picnicsession.safejoin(baseftpdir,tarname) session['ftpfolder']=ftpurlbase+'/'+session['username']+'/'+session['sessionid'] session['ftpfile']=ftpurlbase+'/'+session['username']+'/'+tarname namer=artists.default_multi_netcdf_namer(sessiondir,session['fileprefix'],session['filesuffix']+'.nc') times=artists.multi_netcdf_filewindow('start_time_datetime','end_time_datetime', DPLParms['start_time_datetime'],DPLParms['end_time_datetime'],session['filemode']) artist=artists.dpl_multi_netcdf_artist(DPL,DPLParms,template=templatefilename,filewindowgenerator=times,filename_maker=namer,selected_bindings=session['selected_fields']) picnicsession.updateSessionComment(session,'processing') findTimes=['rs_raw','rs_mean','rs_inv'] for frame in artist: timewindow='blank' for f in findTimes: if hasattr(frame,f) and hasattr(getattr(frame,f),'times') and len(getattr(frame,f).times)>0: t=getattr(frame,f).times timewindow=t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[-1].strftime('%Y.%m.%d %H:%M') picnicsession.updateSessionComment(session,'appended data %s' % (timewindow)) del artist pid=os.fork() if pid==0: 
os.execvp('tar',('tar','-jcvf',tarcompoutputfilename,'--directory='+baseftpdir,session['sessionid'])) if pid<0: raise RuntimeError,"compression failed due to fork" (pid,status)=os.waitpid(pid,0) if os.WEXITSTATUS(status)!=0: raise RuntimeError,"Compression failed on error %i" % os.WEXITSTATUS(status)
def image_resource(request):
    """Serve a generated image or data file over HTTP.

    Parameters:
        request -- pyramid request.  matchdict must contain 'filename';
                   it must also contain 'accesstype' plus 'access', 'year',
                   'month', 'day', which locate the dataset day directory
                   through the module-level imagepathcache / lib() lookup.

    Returns:
        A Response streaming the file with a content type derived from the
        filename, or HTTPNotFound when the route lacks 'accesstype', the
        access key is invalid, the file is unreadable, or the extension is
        unrecognized.
    """
    fn = request.matchdict['filename']
    # Guard clause: without an access type there is nothing to serve.
    if 'accesstype' not in request.matchdict:
        return HTTPNotFound("File doesn't exist")
    global imagepathcache
    global imagepathcacheage
    methodtype = request.matchdict['accesstype']
    methodkey = request.matchdict['access']
    yearno = int(request.matchdict['year'])
    monthno = int(request.matchdict['month'])
    dayno = int(request.matchdict['day'])
    # FIXME HACKY CACHY: dataset paths are cached forever; the original
    # mtime-based invalidation against /etc/dataarchive.plist is disabled.
    if methodtype not in imagepathcache or \
            methodkey not in imagepathcache[methodtype]:
        if methodtype not in imagepathcache:
            imagepathcache[methodtype] = {}
        try:
            imagepathcache[methodtype][methodkey] = lib(
                **{methodtype: methodkey})['Path']
        except RuntimeError:
            return HTTPNotFound(methodtype + "-" + methodkey + " is invalid")
    f = picnicsession.safejoin(imagepathcache[methodtype][methodkey],
                               '%04i' % yearno, '%02i' % monthno,
                               '%02i' % dayno, 'images', fn)
    if not os.access(f, os.R_OK):
        return HTTPNotFound("File doesn't exist")
    # Map the filename to a content type; 'logfile' is an exact-name
    # special case, everything else is matched by suffix.
    m = None
    if fn == 'logfile':
        m = 'text/plain'
    else:
        for suffix, mimetype in (('.jpg', 'image/jpeg'),
                                 ('.png', 'image/png'),
                                 ('.json', 'application/json'),
                                 ('.nc', 'application/x-netcdf'),
                                 ('.cdf', 'application/x-netcdf')):
            if fn.endswith(suffix):
                m = mimetype
    if m is None:  # was 'm == None'; identity test is the correct idiom
        return HTTPNotFound("File inaccessible")
    return Response(content_type=m, app_iter=file(f))
def image_resource(request):
    """Look up and stream an image/data file for a dataset day.

    The file lives under <dataset path>/YYYY/MM/DD/images/<filename>,
    where the dataset path comes from the module-level imagepathcache
    (populated on demand via lib()).  Returns HTTPNotFound for missing
    route pieces, invalid access keys, unreadable files, or unknown
    file extensions.
    """
    filename = request.matchdict['filename']
    if 'accesstype' in request.matchdict:
        global imagepathcache
        global imagepathcacheage
        atype = request.matchdict['accesstype']
        akey = request.matchdict['access']
        year = int(request.matchdict['year'])
        month = int(request.matchdict['month'])
        day = int(request.matchdict['day'])
        # FIXME HACKY CACHY: entries are cached with no invalidation.
        if atype not in imagepathcache or akey not in imagepathcache[atype]:
            if atype not in imagepathcache:
                imagepathcache[atype] = {}
            try:
                imagepathcache[atype][akey] = lib(**{atype: akey})['Path']
            except RuntimeError:
                return HTTPNotFound(atype + "-" + akey + " is invalid")
        fullpath = picnicsession.safejoin(imagepathcache[atype][akey],
                                          '%04i' % year, '%02i' % month,
                                          '%02i' % day, 'images', filename)
    else:
        return HTTPNotFound("File doesn't exist")
    if not os.access(fullpath, os.R_OK):
        return HTTPNotFound("File doesn't exist")
    # Pick a content type from the name; the suffix tests are mutually
    # exclusive, so an elif chain behaves the same as separate ifs.
    mimetype = None
    if filename.endswith('.jpg'):
        mimetype = 'image/jpeg'
    elif filename.endswith('.png'):
        mimetype = 'image/png'
    elif filename == 'logfile':
        mimetype = 'text/plain'
    elif filename.endswith('.json'):
        mimetype = 'application/json'
    elif filename.endswith('.nc') or filename.endswith('.cdf'):
        mimetype = 'application/x-netcdf'
    if mimetype == None:
        return HTTPNotFound("File inaccessible")
    return Response(content_type=mimetype, app_iter=file(fullpath))
def makeMultiNetCDFFromDPL(session, DPL, DPLParms, templatefilename): picnicsession.updateSessionComment(session, 'loading artist') #import hsrl.data_stream.open_config as oc import hsrl.dpl_experimental.dpl_artists as artists ftpbase = os.getenv('FTPPATH', '/var/ftp/data') ftpurlbase = os.getenv('FTPURL', 'ftp://lidar.ssec.wisc.edu/data') if len(session['username']) == 0: print 'bad username' raise RuntimeError, 'Bad username' baseftpdir = picnicsession.safejoin(ftpbase, session['username']) sessiondir = picnicsession.safejoin(baseftpdir, session['sessionid']) try: os.mkdir(baseftpdir) except: pass try: os.mkdir(sessiondir) except: pass tarname = session['fileprefix'] + DPLParms['start_time_datetime'].strftime( '_%Y%m%dT%H%M') + DPLParms['end_time_datetime'].strftime( '_%Y%m%dT%H%M' ) + session['filesuffix'] + '_' + session['sessionid'] + '.tar.bz2' tarcompoutputfilename = picnicsession.safejoin(baseftpdir, tarname) session['ftpfolder'] = ftpurlbase + '/' + session[ 'username'] + '/' + session['sessionid'] session['ftpfile'] = ftpurlbase + '/' + session['username'] + '/' + tarname namer = artists.default_multi_netcdf_namer(sessiondir, session['fileprefix'], session['filesuffix'] + '.nc') times = artists.multi_netcdf_filewindow('start_time_datetime', 'end_time_datetime', DPLParms['start_time_datetime'], DPLParms['end_time_datetime'], session['filemode']) artist = artists.dpl_multi_netcdf_artist( DPL, DPLParms, template=templatefilename, filewindowgenerator=times, filename_maker=namer, selected_bindings=session['selected_fields']) picnicsession.updateSessionComment(session, 'processing') findTimes = ['rs_raw', 'rs_mean', 'rs_inv'] for frame in artist: timewindow = 'blank' for f in findTimes: if hasattr(frame, f) and hasattr(getattr( frame, f), 'times') and len(getattr(frame, f).times) > 0: t = getattr(frame, f).times timewindow = t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[ -1].strftime('%Y.%m.%d %H:%M') picnicsession.updateSessionComment(session, 'appended data %s' 
% (timewindow)) del artist pid = os.fork() if pid == 0: os.execvp('tar', ('tar', '-jcvf', tarcompoutputfilename, '--directory=' + baseftpdir, session['sessionid'])) if pid < 0: raise RuntimeError, "compression failed due to fork" (pid, status) = os.waitpid(pid, 0) if os.WEXITSTATUS(status) != 0: raise RuntimeError, "Compression failed on error %i" % os.WEXITSTATUS( status)