def dt_to_carchive(input_dt):
    """Convert a datetime to a Channel Archiver timestamp tuple.

    The conversion is exact: the input datetime carries microsecond
    resolution while the output format carries nanoseconds, so no
    precision is lost.

    NOTE(review): delegates to timeTuple(), which appears to interpret
    *input_dt* as local time -- confirm against timeTuple()'s definition.
    """
    return timeTuple(input_dt)
def cmd(archive=None, opt=None, args=None, conf=None, **kws): archs = opt.archive if len(args) == 0: print 'Missing PV names' defer.returnValue(0) T0, Tend = makeTimeInterval(opt.start, opt.end) TT0, TT1 = timeTuple(T0), timeTuple(Tend) count = opt.count if opt.count > 0 else conf.getint('defaultcount') h5file, _, path = opt.h5file.partition(':') if path == '': path = '/' F = h5py.File(h5file, 'a') pvgroup = F.require_group(path) Chk = opt.chunk Ds = [None] * len(args) for i, pv in enumerate(args): pvstore = pvgroup.require_group(pv) # store complete time range covering all *requests* aT0 = tuple(pvstore.attrs.get('T0', ())) try: if aT0 is None or TT0 < aT0: pvstore.attrs['T0'] = TT0 except TypeError: pvstore.attrs['T0'] = TT0 aT1 = tuple(pvstore.attrs.get('T1', ())) try: if aT1 is None or TT1 < aT1: pvstore.attrs['T1'] = TT1 except TypeError: pvstore.attrs['T1'] = TT1 P = printInfo() P.file = F P.pvstore = pvstore P.pv = pv P.metaset = pvstore.get('meta') if P.metaset is None: P.metaset = pvstore.create_dataset('meta', shape=(0, ), dtype=dbr_time, maxshape=(None, ), chunks=(Chk, ), shuffle=True, compression='gzip') P.valset = None print pv D = archive.fetchraw(pv, printData, archs=archs, cbArgs=(archive, P), T0=T0, Tend=Tend, count=count, chunkSize=Chk, enumAsInt=opt.enumAsInt) @D.addCallback def show(C, pv=pv): _log.info('%s received %s points', pv, C) Ds[i] = D yield defer.DeferredList(Ds, fireOnOneErrback=True) defer.returnValue(0)
def cmd(archive=None, opt=None, args=None, conf=None, **kws): archs=opt.archive if len(args)==0: print 'Missing PV names' defer.returnValue(0) T0, Tend = makeTimeInterval(opt.start, opt.end) TT0, TT1 = timeTuple(T0), timeTuple(Tend) count = opt.count if opt.count>0 else conf.getint('defaultcount') h5file, _, path = opt.h5file.partition(':') if path=='': path='/' F = h5py.File(h5file, 'a') pvgroup = F.require_group(path) Chk = opt.chunk Ds = [None]*len(args) for i,pv in enumerate(args): pvstore = pvgroup.require_group(pv) # store complete time range covering all *requests* aT0 = tuple(pvstore.attrs.get('T0', ())) try: if aT0 is None or TT0 < aT0: pvstore.attrs['T0'] = TT0 except TypeError: pvstore.attrs['T0'] = TT0 aT1 = tuple(pvstore.attrs.get('T1', ())) try: if aT1 is None or TT1 < aT1: pvstore.attrs['T1'] = TT1 except TypeError: pvstore.attrs['T1'] = TT1 P = printInfo() P.file = F P.pvstore = pvstore P.pv=pv P.metaset = pvstore.get('meta') if P.metaset is None: P.metaset = pvstore.create_dataset('meta', shape=(0,), dtype=dbr_time, maxshape=(None,), chunks=(Chk,), shuffle=True, compression='gzip') P.valset = None print pv D = archive.fetchraw(pv, printData, archs=archs, cbArgs=(archive, P), T0=T0, Tend=Tend, count=count, chunkSize=Chk, enumAsInt=opt.enumAsInt) @D.addCallback def show(C, pv=pv): _log.info('%s received %s points', pv,C) Ds[i] = D yield defer.DeferredList(Ds, fireOnOneErrback=True) defer.returnValue(0)