Exemple #1
0
    def test_floor(self):
        """DasTime.floor() snaps a time down to an even multiple of seconds."""
        # (floor interval in seconds, expected snapped time)
        lCases = [
            (60,   '2014-01-01T12:57'),
            (25,   '2014-01-01T12:57:30'),
            (600,  '2014-01-01T12:50'),
            (1200, '2014-01-01T12:40'),
            (3600, '2014-01-01T12:00'),
        ]
        for (nSec, sExpect) in lCases:
            dtGot = das2.DasTime('2014-01-01T12:57:34.445')
            dtGot.floor(nSec)
            self.assertEqual(dtGot, das2.DasTime(sExpect))
Exemple #2
0
def checkAgeAccess(dConf, fLog, form, sResource, sValue):
	"""Check that the query only asks for data old enough to be public.

	Only works for Das 2.1 queries right now.

	Args:
		dConf: The server configuration (unused here).
		fLog: A logger object providing a write() method.
		form: A cgi.FieldStorage style object holding the query keywords.
		sResource: Resource name, passed through to _ageToTime().
		sValue: Age specification, passed through to _ageToTime().

	Returns:
		AUTH_SUCCESS if the whole query range ends before the lockout
		time, AUTH_SVR_ERR if the age value can't be converted, and
		AUTH_FAIL otherwise.
	"""
	
	(sBeg, sEnd) = (form.getfirst('start_time',''), form.getfirst('end_time',''))
	if sBeg == '' or sEnd == '':
		# Fall back to the older das 2.1 style range keywords
		(sBeg, sEnd) = (form.getfirst('time.min',''), form.getfirst('time.max',''))
		if sBeg == '' or sEnd == '':
			fLog.write("   Authorization: Can't determine query time range, start_time or end_time missing")
			return AUTH_FAIL
	
	try:
		dtBeg = das2.DasTime(sBeg)
		dtEnd = das2.DasTime(sEnd)
	except ValueError:
		fLog.write("   Authorization: Bad Query can't parse time range (%s to %s)"%(sBeg, sEnd))
		return AUTH_FAIL
	
	# Convert the age specification to an absolute lockout start time.
	# Bug fix: check for failure *before* formatting it into the log
	# message below (previously str(None) was silently logged).
	dtLockBeg = _ageToTime(fLog, sResource, sValue)
	if dtLockBeg is None:
		return AUTH_SVR_ERR
	
	fLog.write("   Authorization: Lockout begins %s, query ends %s."%(
	            str(dtLockBeg)[:-7], str(dtBeg)[:-7]))
	
	if dtBeg <= dtEnd and dtEnd < dtLockBeg:
		return AUTH_SUCCESS
	
	return AUTH_FAIL
Exemple #3
0
def findWfr(log, sDataDir, dtBeg, dtEnd, bLowBand, bHighBand):
    """Find WFR waveform files overlapping a time range.

    Args:
        log: A logging.Logger style object.
        sDataDir (str): Root directory of the archive volume.
        dtBeg (DasTime): Start of the coverage period.
        dtEnd (DasTime): End of the coverage period.
        bLowBand (bool): Include 25 Hz band files.
        bHighBand (bool): Include 2.5 kHz band files.

    Returns:
        list: Paths of all matching files, in glob order.
    """

    log.info("Finding files between %s and %s" % (dtBeg, dtEnd))

    lFiles = []

    # For each 24 hours in the coverage period, try to find files
    dt = D.DasTime(dtBeg.year(), dtBeg.month(), dtBeg.dom())

    lPtrns = []
    if bLowBand:
        lPtrns.append("T%4d%1dXX/T%4d%03d/T%4d%03d_25HZ*.DAT")

    if bHighBand:
        lPtrns.append("T%4d%1dXX/T%4d%03d/T%4d%03d_2_5KHZ*.DAT")

    while dt < dtEnd:
        for sPtrn in lPtrns:
            # Bug fix: use floor division.  Under Python 3 doy()/100 is a
            # float; %d happens to truncate it, but the intent is the
            # integer hundreds digit of the day-of-year.
            sGlob = pjoin(
                sDataDir, sPtrn % (dt.year(), dt.doy() // 100, dt.year(),
                                   dt.doy(), dt.year(), dt.doy()))
            log.info("Checking time %s using pattern: %s" % (dt, sGlob))
            lFiles += glob.glob(sGlob)

        dt.adjust(0, 0, 1)  # Get next day

    return lFiles
	def next(self):
		"""Return the next VgrWfrmRecord from the underlying file.

		Raises:
			StopIteration: When less than a full record remains.
		"""
		# Renamed from 'bytes' so the builtin type isn't shadowed
		xRec = self.fIn.read(1024)
		if len(xRec) < 600:
			raise StopIteration
			
		# The first 1024-byte block of the file carries the frame time in
		# a text field; parse it then read the first real data record
		if self.fIn.tell() == 1024:
			if sys.version_info[0] > 2:
				self.dtFrame = das2.DasTime(xRec[274:297].decode('utf-8'))
			else:
				self.dtFrame = das2.DasTime(xRec[274:297])
			xRec = self.fIn.read(1024)
		
		if len(xRec) < 600:
			raise StopIteration
		else:
			return VgrWfrmRecord(self.dtFrame, xRec)
Exemple #5
0
def findWbr(log, sDataDir, dtBeg, dtEnd, b10khz, b80khz, bHFR):
    """Find WBR waveform files overlapping a time range.

    Args:
        log: A logging.Logger style object.
        sDataDir (str): Root directory of the archive volume.
        dtBeg (DasTime): Start of the coverage period.
        dtEnd (DasTime): End of the coverage period.
        b10khz (bool): Search for 10 kHz band files.
        b80khz (bool): Search for 75 kHz band files.
        bHFR (bool): Search for HFR-sampled files.

    Returns:
        list: Paths of all matching files, in glob order.
    """

    lFiles = []

    # For each hour in the coverage period, try to find files
    dt = D.DasTime(dtBeg.year(), dtBeg.month(), dtBeg.dom(), dtBeg.hour())

    # NOTE(review): unlike findWfr these band flags are exclusive (elif),
    # so only the first true flag is searched -- confirm that's intended
    lPtrns = []
    if b10khz:
        lPtrns.append("T%4d%1dXX/T%4d%03d/T%4d%03d_%02d_10KHZ*.DAT")
    elif b80khz:
        lPtrns.append("T%4d%1dXX/T%4d%03d/T%4d%03d_%02d_75KHZ*.DAT")
    elif bHFR:
        lPtrns.append("T%4d%1dXX/T%4d%03d/T%4d%03d_%02d_???*KHZ*.DAT")

    while dt < dtEnd:
        for sPtrn in lPtrns:
            # Bug fix: floor division; doy()/100 is a float under Python 3
            sGlob = pjoin(
                sDataDir, sPtrn % (dt.year(), dt.doy() // 100, dt.year(),
                                   dt.doy(), dt.year(), dt.doy(), dt.hour()))
            log.debug(" Searching for files matching: %s" % sGlob)
            lFiles += glob.glob(sGlob)

        dt.adjust(0, 0, 0, 1)  # Step one hour

    log.debug("%d files found" % len(lFiles))
    return lFiles
Exemple #6
0
    def begin(self):
        """Returns the data collection initiation time of the first sample
        in the record as a Das2 Time object.  Code assumes you've called
        pspice.furnsh()
        """

        # Lazily compute and cache the start time on first access
        if self.dtBeg:
            return self.dtBeg

        # Build the spacecraft clock string from the record fields and let
        # SPICE convert it to an ISO calendar UTC string.  (-82 is
        # presumably the spacecraft NAIF id -- confirm against the kernels)
        sSclk = "%d/%d:%d" % (self.nSclkPart, self.nSclkSec, self.nSclkFine)
        et = pspice.scs2e(-82, sSclk)
        sScet = pspice.et2utc(et, "ISOC", 6)
        self.dtBeg = D.DasTime(sScet)

        return self.dtBeg
Exemple #7
0
    def test_ceil(self):
        """DasTime.ceil() snaps a time up to an even multiple of seconds."""
        # (ceiling interval in seconds, expected snapped time)
        lCases = [
            (60,   '2014-01-01T12:08'),
            (600,  '2014-01-01T12:10'),
            (1200, '2014-01-01T12:20'),
            (3600, '2014-01-01T13:00'),
        ]
        for (nSec, sExpect) in lCases:
            dtGot = das2.DasTime('2014-01-01T12:07:34.445')
            dtGot.ceil(nSec)
            self.assertEqual(dtGot, das2.DasTime(sExpect))
def getVgrFileBegTime(sPath):
	"""Read the Waveform Frame start time from the filename.

	Args:
		sPath (str): Path to a candidate data file.

	Returns:
		das2.DasTime: The frame start time, or None if the name doesn't
		look like a Voyager waveform file.
	"""
	s = bname(sPath)
	if not s.startswith('VG'):
		return None
		
	# Fixed-column fields: VGn_YYYY.MM.DD_HH.MM.SS.mmm style names.
	# Only int() can raise here (slicing a short string just yields a
	# shorter string), so catch ValueError instead of a bare except.
	try:
		nYr = int(s[4:8], 10)
		nMn = int(s[9:11], 10)
		nDom = int(s[12:14], 10)
		nHr = int(s[15:17], 10)
		nMin = int(s[18:20], 10)
		nSec = int(s[21:23], 10)
		nMilli = int(s[24:27], 10)
		fSec = float(nSec) + nMilli / 1000.0
	except ValueError:
		# Doesn't look like a voyager file, skip it
		return None
	
	return das2.DasTime(nYr, nMn, nDom, nHr, nMin, fSec)
def main(argv):
	"""Smoke test DasTime parsing, adjustment and rounding using the das2
	module found in a build directory.

	Args:
		argv (list): argv[1] must be the build directory containing das2.

	Returns:
		int: 0 on success, 4 if the directory argument is missing,
		5 if the directory doesn't exist.
	"""

	perr = sys.stderr.write

	if len(argv) < 2:
		perr("BUILD_DIRectory missing")
		return 4
		
	if not os.path.isdir(argv[1]):
		perr("Directory %s doesn't exist"%argv[1])
		# Bug fix: previously fell through and attempted the import anyway
		return 5

	sys.path.insert(0, "%s/%s"%(os.getcwd(), argv[1]))
	sys.path.insert(0, ".")
	
	import das2 as D
	
	dt = D.DasTime("1971-001")
	print("A parsed time")
	print(str(dt)[:-3])
	print("")
	
	dt.adjust(0,0,364,23,59,59.999999)
	
	print("Adding 364 days, 23 hours, 59 minutes, and 59.999999 seconds")
	print(str(dt))
	print("")
	
	# Demonstrates why naive %06.3f printing of the seconds field is wrong:
	# it can render 59.999999 as 60.000 without carrying into the minute
	print("Blindly printing to millisecond resolution (error)")
	print("%04d-%03dT%02d:%02d:%06.3f"%(dt.year(), dt.doy(), dt.hour(), 
	                                   dt.minute(), dt.sec()))
	print("")
	
	print("You can use the round_doy() function to help")
	print(dt.round_doy(dt.MILLISEC))
	print("")
	
	print("Or the round() function too")
	print(dt.round(dt.MILLISEC))
	print("")
	
	return 0
Exemple #10
0
def sendDataPackets(cdf, dtBeg, dtEnd, lComp, bPolar):

	"""Output all packets for a given file that are within the specified time
	range.
	
	Globals:
		g_bHdrSent: Tracks whether a header has been sent for this particular
		            variable set, and if so what is it's packet ID.	
	Args:
		cdf (pycdf.CDF): The opened CDF file to read records from.
		
		dtBeg (DasTime): The inclusive lower bound minimum time to output
		
		dtEnd (DasTime): The exclusive upper bound maximum time to output
		
		lComp (list) : A list of the correlations to output, defaults to all
		               the autocorrelations
							
		bPolar (bool) : If True output Magnitude and Phase angle instead of 
		                real and imaginary components
	Returns:
		int : The number of data packets written to standard output.
		
	"""
	
	global g_bHdrSent
	
	# Resolve each requested correlation to a (name, CDF variable, index,
	# is-autocorrelation) tuple
	lCompInfo = getComp(cdf, lComp)
	
	aTime = cdf['Epoch']
	
	# Output data in range
	nPkts = 0
	for i in range( aTime.shape[0] ):
		dt = das2.DasTime(aTime[i])
		
		if dt < dtBeg or dt >= dtEnd: continue
		
		# Send the packet header lazily, only once in-range data exists
		if not g_bHdrSent:
			sendPktHdr(cdf, lComp, bPolar)
			g_bHdrSent = True
		
		write(':01:')
		
		# Various time encodings are supported by das2, we've somewhat randomly
		# chosen T2000, which happens to be seconds since 2000-01-01T00:00:00
		# ignoring leap seconds
		xTime = struct.pack("=d", dt.t2000())
		
		write( xTime )
		
		# Write the data for all the selected components
		for tInfo in lCompInfo:
			(sComp, sVar, iIdx, bAuto) = tInfo
			
			aRe = cdf[sVar][i, iIdx, 0, : ]
			
			# Optimization:  We specified numpy byte order so that we can
			# kick raw numpy bytes out the door without converting them.
			# only works if we don't have to do any calculations
			if bAuto:
				write( aRe.tobytes() )
			else:
				aIm = cdf[sVar][i, iIdx, 1, : ]
				
				
				if bPolar:
					aCplx = np.empty(aRe.shape, dtype=complex)
					aCplx.real = aRe
					aCplx.imag = aIm
					
					# Make sure we use the same internal representation as the 
					# original arrays because we are dumping binaries...
					aMag = np.absolute(aCplx).astype(aRe.dtype)
					aPhase = np.angle(aCplx, deg=True).astype(aRe.dtype)
					
					write(aMag.tobytes())
					write(aPhase.tobytes())
				else:
					write(aRe.tobytes())
					write(aIm.tobytes())
			
		# Flush packets as we go
		flush()
		nPkts += 1
			
	return nPkts
Exemple #11
0
def snapToTimeBlks(fLog, dsdf, sBeg, sEnd, nLevel, bCoverage=False):
    """Given a dsdf object which has fully initialized defaults and a
    cache level, snap begin and end times for cache generation to
    cache block boundaries.

    Args:
        fLog: A logger object (currently unused).
        dsdf: A dsdf object with a 'cacheLevel' mapping.
        sBeg (str): The requested begin time string.
        sEnd (str): The requested end time string.
        nLevel: The key into dsdf['cacheLevel'] to use.
        bCoverage (bool): Unused, kept for interface compatibility.

    Returns a three tuple of (dtBeg, tAdj, dtEnd)

    dtBeg - A DasTime object for the begining of the cache period

    tAdj  - A tuple of time adjustments needed to step from one file start
            time to the next.  This has the format
            (nYear, nMonth, nDay, nHour, nMinute, nSec)

    dtEnd - A DasTime object for the ending of the cache period

    Raises:
        E.TodoError: If the resolution units aren't (milli)seconds.
        E.ServerError: If the storage period is unrecognized.
    """

    (nRes, sUnits, sPeriod, sParams) = dsdf['cacheLevel'][nLevel]

    # Validate the resolution units; only seconds and milliseconds are
    # supported (the normalized unit itself isn't needed in this function)
    if nRes != 0:
        sLow = sUnits.lower()
        if sLow not in ('sec', 's', 'second', 'seconds', 'ms', 'millisec',
                        'millisecond', 'milliseconds'):
            raise E.TodoError("Handling resolution in units other than seconds"+\
                           " or milliseconds is not yet implemented")

    dtB = das2.DasTime(sBeg)
    dtE = das2.DasTime(sEnd)

    if sPeriod == 'persecond':
        # Round the end time up to the next whole second if needed
        fFrac = dtE.sec() - int(dtE.sec())
        if fFrac > 0.0:
            dtE.adjust(0, 0, 0, 0, 0, 1)

        return (das2.DasTime(dtB.year(), dtB.month(), dtB.dom(), dtB.hour(),
                             dtB.minute(), dtB.sec()), (0, 0, 0, 0, 0, 1),
                das2.DasTime(dtE.year(), dtE.month(), dtE.dom(), dtE.hour(),
                             dtE.minute(), dtE.sec()))

    elif sPeriod == 'perminute':
        if dtE.sec() > 0.0:
            dtE.adjust(0, 0, 0, 0, 1)

        return (das2.DasTime(dtB.year(), dtB.month(), dtB.dom(), dtB.hour(),
                             dtB.minute()), (0, 0, 0, 0, 1, 0),
                das2.DasTime(dtE.year(), dtE.month(), dtE.dom(), dtE.hour(),
                             dtE.minute()))

    elif sPeriod == 'hourly':
        if dtE.minute() > 0 or dtE.sec() > 0.0:
            dtE.adjust(0, 0, 0, 1)

        return (das2.DasTime(dtB.year(), dtB.month(), dtB.dom(),
                             dtB.hour()), (0, 0, 0, 1, 0, 0),
                das2.DasTime(dtE.year(), dtE.month(), dtE.dom(), dtE.hour()))

    elif sPeriod == 'daily':
        if dtE.hour() > 0 or dtE.minute() > 0 or dtE.sec() > 0.0:
            dtE.adjust(0, 0, 1, 0)

        return (das2.DasTime(dtB.year(), dtB.month(),
                             dtB.dom()), (0, 0, 1, 0, 0, 0),
                das2.DasTime(dtE.year(), dtE.month(), dtE.dom()))

    elif sPeriod == 'monthly':
        if dtE.dom() > 1 or dtE.hour() > 0 or dtE.minute() > 0 or dtE.sec(
        ) > 0.0:
            dtE.adjust(0, 1, 0, 0)

        return (das2.DasTime(dtB.year(), dtB.month(), 1), (0, 1, 0, 0, 0, 0),
                das2.DasTime(dtE.year(), dtE.month(), 1))

    else:
        # Bug fix: this formerly referenced the undefined name 'sDsdf',
        # raising NameError instead of the intended ServerError
        raise E.ServerError("Unknown storage period %s, in DSDF %s" %
                            (sPeriod, dsdf))
Exemple #12
0
            sys.stdout.buffer.write(thing.encode('utf-8'))
        else:
            sys.stdout.buffer.write(thing)


def flush():
    """Flush standard output (the binary buffer under Python 3)."""
    if sys.version_info[0] > 2:
        sys.stdout.buffer.flush()
    else:
        sys.stdout.flush()


##############################################################################
# now for the reader ...

# Required positional arguments: start and end times of the query range
arg = sys.argv

start_time = das2.DasTime(arg[1])
end_time = das2.DasTime(arg[2])

# Emit 200 evenly spaced points across the requested range
ndata = 200
delta_t = (end_time - start_time) / 200.

# Stream header: declares the fill value, the overall X range, and a title
header = '''<stream version="2.2">
  <properties 
    double:zFill="-1.0e+31"
    DatumRange:xRange="%s to %s UTC"
    String:title="Random points" 
  />
</stream>
''' % (str(start_time), str(end_time))

# das2 header packets are prefixed with [ID] and a 6-digit length field
write("[00]{:06d}{}".format(len(header), header))
Exemple #13
0
def main(argv):
	"""Transmit a test dataset of 300 spectra given any input times.

	Args:
		argv (list): argv[1] and argv[2] are the parseable start and end
			times; any further arguments are ignored.

	Returns:
		int: 0 on success, 10 for missing arguments, 11 for a bad range.
	"""
	
	perr = sys.stderr.write
	
	if len(argv) < 3:
		# Bug fix: the %s directive was never filled in; supply the program name
		perr("Expected command line %s START_TIME END_TIME [PARAMETERS]\n"%argv[0])
		perr("Any times are fine as long as they are parseable and start < end\n")
		return 10
	
	dtBeg = D.DasTime(argv[1])
	dtEnd = D.DasTime(argv[2])
	
	if dtBeg >= dtEnd:
		perr("Start time (%s) is >= end time (%s)"%(str(dtBeg), str(dtEnd)))
		return 11
	
	sendHdr(dtBeg, dtEnd, sys.stdout)
	
	rSec = dtEnd - dtBeg
	rCadence = rSec/300
	
	# Randomly pick 3 sets of ten vectors to skip
	lSkip = []
	for i in range(0, 3):
		n = random.randrange(300)
		for j in range(0,10):
			if n < 289:
				lSkip.append(n + j)
			else:
				lSkip.append(n - j)
	
	# Randomly pick 30 other vectors to contain fill
	lFill = []
	for i in range(0, 30):
		lFill.append( random.randrange(300) )
	
	lSkip.sort()
	
	# Okay, start producing data
	dt = dtBeg.copy()
	for iX in range(0, 300):
		
		dt.adjust(0, 0, 0, 0, 0, rCadence)  # Yea this can get round off errors
		                                    # but it's only a test function
		if iX in lSkip:
			continue
		
		if iX in lFill:
			lVec = [-1.0]*100
		else:
			lVec = getLogVec(300, iX, 100, -18.0, -6.0)
	
		buf = D.DasPktBuf(1)
		buf.add(dt.round(dt.MILLISEC))
		buf.add(" ")
		
		# 100 space-separated amplitudes, newline terminated
		for iY in range(0, 100):
			if iY < 99:
				buf.add("%10.3e "%lVec[iY])
			else:
				buf.add("%10.3e\n"%lVec[iY])
				
		buf.send(sys.stdout)
		
	return 0
Exemple #14
0
def main(argv):
	"""Das2 reader entry point for Themis auto-correlation spectra.

	Reads Themis spectral density auto-correlation values from archive
	CDFs and writes a das2 stream to standard output.

	Args:
		argv (list): [prog, DATA_DIRECTORY, BEGIN, END, [PARAMETERS...]]

	Returns:
		int: 0 on success, otherwise whatever serverErr/queryErr return.
	"""
	
	sUsage = "%%prog [options] DATA_DIRECTORY BEGIN END"
	sDesc = """
Reads Themis spectral density auto-correlation values from archive CDFs.
Format is similar to the Cluster Active Archive, see document: CAA-EST-UG-002
for details.
"""

	psr = optparse.OptionParser(
		usage=sUsage, description=sDesc, prog=bname(argv[0])
	)
	
	psr.add_option('-l', "--log-level", dest="sLevel", metavar="LOG_LEVEL",
	               help="Logging level one of [critical, error, warning, "+\
	               "info, debug].  The default is info.", type="string",
	               action="store", default="info")
	
	(opts, lArgs) = psr.parse_args(argv[1:])
	log = setupLogger(opts.sLevel)
	log = logging.getLogger('')
	
	if len(lArgs) < 1:
		return serverErr(log, "Misconfigured DSDF, data directory is missing")
	sRoot = lArgs[0]
	
	if len(lArgs) < 3:
		return queryErr(log, "Start and or Stop time is missing")
		
	try:
		dtBeg = das2.DasTime(lArgs[1])
	except Exception:
		return queryErr(log, "Couldn't parse time value '%s'"%lArgs[1])
	try:
		dtEnd = das2.DasTime(lArgs[2])
	except Exception:
		return queryErr(log, "Couldn't parse time value '%s'"%lArgs[2])
	
	# Take all the rest of the arguments and glop them together in a single
	# string.  That way running the reader from the command line feels the
	# same as running it from Autoplot
	sParams = ''
	if len(lArgs) > 3: sParams = ' '.join(lArgs[3:])
	
	# pull out the polar style output, i.e: Magnitude and Phase Angle
	bPolar = True
	if sParams.find('complex') != -1:
		sParams = sParams.replace('complex','').strip()
		bPolar = False

	# Default to printing all the autocorrelations
	sComp = 'BxBx ByBy BzBz ExEx EyEy EzEz'
	if len(sParams) > 0: sComp = sParams
	lComp = sComp.split()
	lComp.sort()
	
	# Look in directory tree for files that match.  We sort the file names
	# under the assumption that sort order = numerical time order, but that
	# may not be true for some file types
	lDir = os.listdir(sRoot)
	lDir.sort()
	
	nSent = 0
	bSentHdr = False
	for sF in lDir:
		if not sF.endswith('.cdf'): continue             # only want CDFs
		if not sF.startswith('tha_l3_sm'): continue      # Only want L3 SM
		
		# Make ISO-8601 strings from the fixed-column file name fields
		sBeg = "%s-%s-%sT%s:%s:%s"%(
			sF[10:14], sF[14:16], sF[16:18], sF[19:21], sF[21:23], sF[23:25]
		)
		sEnd = "%s-%s-%sT%s:%s:%s"%(
			sF[26:30], sF[30:32], sF[32:34], sF[35:37], sF[37:39], sF[39:41]
		)
		
		sPath = pjoin(sRoot, sF)
		
		try:
			dtFileBeg = das2.DasTime(sBeg)
			dtFileEnd = das2.DasTime(sEnd)
			
			# Since the themis files truncate the seconds field, round up by
			# one second for the file end time...
			dtFileEnd += 1.0
			
		except ValueError:
			# Bug fix: this used to call the nonexistent log.waring()
			log.warning("Unknown file %s in data area"%sPath)
			continue
		
		# If overlaps with desired range, include it in the output, send header
		# if haven't done so
		if (dtFileBeg < dtEnd) and (dtFileEnd > dtBeg):
			log.info("Reading %s"%sPath)
			cdf = pycdf.CDF(sPath)
		
			# Assume all the files are similar enough that an informational
			# header can be created from the first one that fits the range
			if not bSentHdr:
				lIgnore = ['TIME_MAX','TIME_MIN', 'TIME_resolution']
				dExtra = {
					'title':getVespaTitle(cdf, 'THEMIS', lComp), 
					'Datum:xTagWidth':'0.5 s'  # Max interp width for Autoplot
				}
				cdfToDas22Hdr(cdf, lIgnore, dExtra)
				bSentHdr = True
			
			nSent += sendDataPackets(cdf, dtBeg, dtEnd, lComp, bPolar)
		
	if nSent == 0:
		# Tell the client there was no data instead of sending nothing
		if not bSentHdr: writeHdrPkt(0, '<stream version="2.2" />\n')
		sFmt = '<exception type="NoDataInInterval" message="%s" />\n'
		sOut = sFmt%("No data in interval %s to %s UTC"%(str(dtBeg), str(dtEnd)))
		writeHdrPkt('xx', sOut)
	
	return 0
def main(argv):
	"""Das2 reader entry point for Voyager 1 high-rate waveform data.

	Args:
		argv (list): [prog, DATA_DIRECTORY, BEGIN, END]

	Returns:
		int: 0 on success, otherwise whatever serverErr/queryErr return.
	"""
	
	global g_bStrmHdrWritten
	
	sUsage = "%s [options] DATA_DIRECTORY BEGIN END"%bname(argv[0])
	sDesc = """
Reads Voyager 1 High-Rate waveform values and produce a Das2 Stream.  Three
parameters are required, (1) The path to the directory where the datafiles
reside, (2) The minmum time value of records to transmit, and (3) the
maximum time value.
"""
	psr = optparse.OptionParser(usage=sUsage, description=sDesc, prog=bname(argv[0]))
	
	psr.add_option('-l', "--log-level", dest="sLevel", metavar="LOG_LEVEL",
	               help="Logging level one of [critical, error, warning, "+\
	               "info, debug].  The default is info.", type="string",
	               action="store", default="info")
	
	(opts, lArgs) = psr.parse_args(argv[1:])
	log = setupLogger(opts.sLevel)
	log = logging.getLogger('')
	
	if len(lArgs) < 1:
		return serverErr(log, "Misconfigured DSDF, data directory is missing")
	sRoot = lArgs[0]
	
	if len(lArgs) < 3:
		return queryErr(log, "Query error, Start and or Stop time is missing")
		
	try:
		dtBeg = das2.DasTime(lArgs[1])
	except Exception:
		return queryErr(log, "Couldn't parse time value '%s'"%lArgs[1])
	try:
		dtEnd = das2.DasTime(lArgs[2])
	except Exception:
		return queryErr(log, "Couldn't parse time value '%s'"%lArgs[2])
	
	# Send the stream header as soon as you can. This way if data loading
	# takes a while the client program knows the reader is alive and will
	# not shutoff the connection.
	sHdr = streamHeader({
		'String:renderer':'waveform',  # Tell Autoplot to use Waveform Rendering
		'String:title':'Voyager PWS Wideband, Jupiter Encounter',
		'Datum:xTagWidth': '120 ms',  # Twice the time between rows 
		'DatumRange:xCacheRange': "%s to %s UTC"%(str(dtBeg)[:-3], str(dtEnd)[:-3]),
		'String:xLabel' : 'SCET (UTC)'
	})
	write(sHdr)
	g_bStrmHdrWritten = True
	write( VgrWfrmRecord.das2HeaderPacket(1) )
	
	# It's good to flush stdout output right after sending headers so
	# Autoplot gets something right away.
	flush()
	
	# File search range starts 48 seconds before given time range since Voyager
	# waveform frame files contain 48 seconds worth of data
	dtSearchBeg = dtBeg.copy()
	dtSearchBeg -= 48.0
	
	# Though it's not needed for the small time extent of the sample dataset,
	# sending task progress messages allows Autoplot to display a data loading
	# progress bar (aka Human Amusement Device)
	progress = None
	
	lFiles = os.listdir(sRoot)
	lFiles.sort()
	
	# Iteration below assumes file list and file records are in ascending
	# time order, this is typically the case.
	nSent = 0
	for sFile in lFiles:
		dtFileBeg = getVgrFileBegTime(sFile)
		
		# Skip unknown files and files that are out of the query range
		if dtFileBeg is None:
			continue
		if dtFileBeg < dtSearchBeg or dtEnd <= dtFileBeg:
			continue
		
		if progress is None:
			progress = TimeProgressTracker(bname(argv[0]), dtFileBeg, dtEnd, 100)
		
		for rec in VgrFileReader(log, pjoin(sRoot, sFile)):
		
			# since input data are monotonic, quit when encountering a
			# record that is past the end point
			if rec.dtBeg >= dtEnd:
				break
			
			if rec.dtBeg < dtEnd and rec.dtEnd > dtBeg:
				write(rec.das2DataPacket(1))
				nSent += 1
		
			# Check/Send progress
			progress.status(rec.dtBeg)
	
	# If no data were available in the given time range inform the client
	if nSent == 0:
		sendNoData(log, dtBeg, dtEnd)
	
	return 0
def makePlot(log,
             sServer,
             sSource,
             sRange,
             sInterval,
             sParams,
             sJavaBin,
             sAltAutoplot=None):
    '''Run Autoplot headless against a das2 server data source and save
    a PNG plot of the source's example range.

    Returns:  The 2-tuple (nRetCode, sMsg)
    then return codes are:

    0 - Plot constructed properly
    1 - 3 Setup problem, can't find autoplot or something like that
    7 - Resource limit hit on sub-program
    101-104 - Bad plot, but everything looked like it should have run
    111 - DSDF problem
    '''

    log.info("Testing %s" % sSource)

    # ExampleRange looks like "BEGIN to END [| label]"
    lRange = sRange.split('|')[0].split()
    if len(lRange) < 3 or lRange[1].lower() != 'to':
        return (111, "Improper ExampleRange value")

    sMsg = "Range: %s to %s" % (lRange[0], lRange[2])

    if sInterval:
        sUri = 'vap+das2server:%s?dataset=%s&start_time=%s&end_time=%s&interval=%s' % (
            sServer, sSource, lRange[0], lRange[2], sInterval)
        sMsg += ", interval: %s" % sInterval
    else:
        # Ask for ~1000 bins across the example range
        fRes = (das2.DasTime(lRange[2]) - das2.DasTime(lRange[0])) / 1000
        sMsg += ", resolution: %f s" % fRes
        sUri = 'vap+das2server:%s?dataset=%s&start_time=%s&end_time=%s&resolution=%f' % (
            sServer, sSource, lRange[0], lRange[2], fRes)

    if sParams:
        sUri += "&%s" % sParams.replace(' ', '%20')
        sMsg += ", parameters: %s" % sParams

    log.info(sMsg)

    sPngPath = pjoin(os.getcwd(), sSource.replace('/', '_') + ".png")

    # Remove any stale output so a leftover PNG can't mask a failure
    if os.path.isfile(sPngPath):
        os.remove(sPngPath)

    sScript = g_sScriptTplt % (sUri, sSource, sPngPath)

    sScriptName = sSource.replace('/', '_') + ".jy"
    fOut = open(sScriptName, 'w')
    fOut.write(sScript)
    fOut.close()

    sAutoplot = g_sJenkinsAutoplot
    if sAltAutoplot is not None:
        sAutoplot = sAltAutoplot

    sScriptPath = pjoin(os.getcwd(), sScriptName)

    sCmd = '%s -cp %s -Djava.awt.headless=true org.autoplot.AutoplotUI --script "%s"' % (
        sJavaBin, sAutoplot, sScriptPath)

    log.info("Exec: %s" % sCmd)

    # Run command for up to 10 minutes (600 s) with 1MB StdOut and StdErr
    (nLimit, nReturn, sStdOut, sStdErr) = runProc(sCmd, 600, 1048576, 1048576)

    if nLimit is not None:
        return (7, "Sub-process resource limit hit: %s" % g_LimitMsg[nLimit])

    if nReturn != 0:
        if nReturn == 101:
            sMsg = "No data in interval"
        elif nReturn == 104:
            sMsg = "Dataset not created"
        elif nReturn == 105:
            sMsg = "Zero length data set, but NoData exception is missing."
        elif nReturn == 17:
            sMsg = "Network Connection Timeout"
        else:
            sMsg = sStdOut

        log.warning("Exec: %s\n          Return: %d\n          Reason: %s" %
                    (sCmd, nReturn, sMsg))
        return (nReturn, sMsg)

    if not os.path.isfile(sPngPath):
        log.error(sStdOut)
        return (
            1,
            "Proper Autoplot exit, but %s is missing (HINT: If using pyServer check TEST_FROM in das2server.conf."
            % sPngPath)

    return (0, "%s tested okay" % sSource)
Exemple #17
0
(1980,  1,	1, 19.0),
(1981,  7,	1, 20.0),
(1982,  7,	1, 21.0),
(1983,  7,	1, 22.0),
(1985,  7,	1, 23.0),
(1988,  1,	1, 24.0),
(1990,  1,	1, 25.0),
(1991,  1,	1, 26.0),
(1992,  7,	1, 27.0),
(1993,  7,	1, 28.0),
(1994,  7,	1, 29.0),
(1996,  1,	1, 30.0),
(1997,  7,	1, 31.0),
(1999,  1,	1, 32.0),
(2006,  1,	1, 33.0),
(2009,  1,	1, 34.0),
(2012,  7,	1, 35.0),
(2015,  7,	1, 36.0),
(2017,  1,	1, 37.0)
]


# Emit one C initializer row per leap-second entry above.  Each row is
# {offset relative to 32 s, entry time in microseconds since 2000-01-01}
for t in l:
	dt = das2.DasTime(t[0], t[1], t[2])
	t2K = dt.t2000()  # seconds since 2000-01-01, ignoring leap seconds
	if t2K < 0:
		# NOTE(review): before 2000 the offset convention flips sign and
		# adds one -- presumably matches the consuming C table, confirm there
		print("{%f, %.8e}, /* %04d-%02d-%02d  */"%(32 - t[3] + 1, t2K*1e6, t[0], t[1], t[2]) )
	else:
		print("{%f, %.8e}, /* %04d-%02d-%02d  */"%(t[3] - 32, t2K*1e6, t[0], t[1], t[2]) )