def get_archive_pv_value(
    PV,
    label,
    start_time,
    end_time,
    scan_archives=True,
    limit=100000,
    interpolation='raw',
    archiver=None,
):
    '''Get archived values for a single PV between start_time and end_time.

    Parameters
    ----------
    PV : str
        Process-variable name to fetch from the archiver.
    label : str
        Column label for the values.  Labels starting with 'Acc' are routed
        to the accelerator archiver (second element of *archiver*) when one
        is supplied.
    start_time, end_time : str
        Time range understood by the archiver (e.g. '2017-04-11 09:00').
    scan_archives : bool
        Passed through to ``Archiver.get``.
    limit : int
        Maximum number of data points to fetch.
    interpolation : str
        'raw' gives the raw archived data.
    archiver : list of channelarchiver.Archiver or None
        ``[beamline_archiver, accelerator_archiver]``.  When None, a default
        single-element list with the beamline archiver is created.

    Return
    ------
    pandas.DataFrame with columns 'td' (time string), 'tf' (float time) and
    *label* (values).
    '''
    import pandas as pd
    import numpy as np

    if archiver is None:
        from channelarchiver import Archiver
        archiver = [Archiver(
            'http://xf11id-ca.nsls2.bnl.local/cgi-bin/ArchiveDataServer.cgi'
        )]
    # BUG FIX: the original indexed archiver[1] for every 'Acc*' label, which
    # raises IndexError with the single-element default list above.  Fall back
    # to the first archiver when no accelerator archiver was supplied.
    if label[:3] == 'Acc' and len(archiver) > 1:
        arch = archiver[1]
    else:
        arch = archiver[0]
    data = arch.get(PV, start_time, end_time, scan_archives=scan_archives,
                    limit=limit, interpolation=interpolation)
    td = np.array(data.times)
    v = np.array(data.values, dtype=float)
    # Convert point samples into 'square-wave like' step data for plotting.
    td = make_wave_data(td, dtype='x')
    v = make_wave_data(v, dtype='y')
    tf = trans_td_to_tf(td)
    tv = np.array([td, tf, v]).T
    df = pd.DataFrame(tv, index=np.arange(len(tv)),
                      columns=['td', 'tf', label])
    print('Found results: from: %s to :%s with %s points.' % (
        df.td[0], df.td[len(df.td) - 1], len(df.td)))
    return df
def get_archive_pvlist_values(PVs, labels, start_time, end_time,
                              limit=100000, interpolation='raw'):
    '''Get values for a list of PVs from the archiver between start_time
    and end_time.

    Parameters
    ----------
    PVs : sequence of str
        Process-variable names to fetch.
    labels : sequence of str
        One label per PV; used as the dict keys and DataFrame column names.
    start_time, end_time : str
        Time range understood by the archiver.
    limit : int
        Maximum number of data points per PV.
    interpolation : str
        'raw' gives the raw archived data.

    Return
    ------
    dict mapping label -> pandas.DataFrame with columns 'td' (time string),
    'tf' (float time) and label (values).
    '''
    from channelarchiver import Archiver
    archiver = Archiver(
        'http://xf11id-ca.nsls2.bnl.local/cgi-bin/ArchiveDataServer.cgi')
    archiver_acc = Archiver(
        'http://ca.cs.nsls2.local:8888/cgi-bin/ArchiveDataServer.cgi')
    dict_tv = {}
    for PV, label in zip(PVs, labels):
        # NOTE: the original tried to scan archives only for the first PV
        # but then unconditionally overrode the flag with True, so every
        # call scanned.  That effective behavior is kept, minus the dead
        # code.
        dict_tv[label] = get_archive_pv_value(
            PV, label, start_time, end_time,
            scan_archives=True, limit=limit,
            interpolation=interpolation,
            archiver=[archiver, archiver_acc])
    return dict_tv
Based off of cafetch.py, written by Elena, with a few features removed to limit dependencies. Changelog: 2020-06-08 DG Ported to Python 3 and ObserverTools, takes a series of keys or just one. """ from channelarchiver import Archiver from argparse import ArgumentParser from astropy.time import Time import socket try: telemetry = Archiver('http://sdss-telemetry.apo.nmsu.edu/' 'telemetry/cgi/ArchiveDataServer.cgi') telemetry.scan_archives() except (socket.gaierror, ConnectionRefusedError): try: telemetry = Archiver('http://*****:*****@' 'sdss-gateway.apo.nmsu.edu') __version__ = '3.1.1'
def setUp(self):
    """Create an Archiver for the XF23ID endpoint and swap in a mock backend."""
    self.archiver = Archiver(
        'https://xf23id-ca/cgi-bin/ArchiveDataServer.cgi')
    # Replace the live XML-RPC proxy so the tests never touch the network.
    self.archiver.archiver = MockArchiver()
#!/usr/bin/env python3
# bernie   Tue. 26 June '18
# playing around
#
# Fetch raw LN2-temperature telemetry for three BOSS spectrograph sensors
# over a fixed one-day-plus window.
from channelarchiver import Archiver, codes, utils

ss = 'http://sdss-telemetry.apo.nmsu.edu/telemetry/cgi/ArchiveDataServer.cgi'
#ss='http://localhost:5080/telemetry/cgi/ArchiveDataServer.cgi'
archiver = Archiver(ss)
# Scan the archives once up front (the original called this twice);
# the get() calls below pass scan_archives=False to reuse that scan.
archiver.scan_archives()

start = '2018-06-25 10:00:00'
end = '2018-06-26 11:00:00'

data1 = archiver.get('25m:boss:SP1B2LN2TempRead', start, end,
                     interpolation='raw', scan_archives=False)
data2 = archiver.get('25m:boss:SP1R0LN2TempRead', start, end,
                     interpolation='raw', scan_archives=False)
data3 = archiver.get('25m:boss:SP2B2LN2TempRead', start, end,
                     interpolation='raw', scan_archives=False)
def setUp(self):
    """Build an Archiver against a dummy URL and patch in the mock backend."""
    instance = Archiver('http://fake')
    # No real server behind 'http://fake'; the mock answers all calls.
    instance.archiver = MockArchiver()
    self.archiver = instance
def get_archived_pv(pv, start_time, end_time, label=None, limit=None,
                    make_wave=True, interpolation='raw'):
    '''Yugang May 15, 2017
    Get an archived PV value.

    Input:
        pv: str, the PV name
        start_time: str, e.g., '2017-04-11 09:00'
        end_time: str, e.g., '2017-04-12 11:00'
        label: str, a meaningful label for the pv; defaults to pv itself
        limit: integer, the limit on the number of data points
        make_wave: if True, make a 'square-wave like' data
        interpolation: 'raw', gives the raw archived data
    Return:
        a pandas.DataFrame with columns 'td' (datetime), 'tf' (float time)
        and label (value data)
    An example:
        data = get_archived_pv('XF:11IDA-OP{Mono:DCM-Ax:Bragg}T-I',
                               '2017-04-11 09:00', '2017-04-11 11:00')
    '''
    from channelarchiver import Archiver
    import numpy as np
    import pandas as pd

    archiver = Archiver(
        'http://xf11id-ca.cs.nsls2.local/cgi-bin/ArchiveDataServer.cgi')
    if label is None:
        label = pv
    print('Seraching PV: %s from: %s---to: %s' %
          (label, start_time, end_time))
    res = archiver.get(pv, start_time, end_time, scan_archives=True,
                       limit=limit, interpolation=interpolation)
    key = pv
    v = np.array(res[key][0], dtype=float)
    k1 = res[key][1]
    # Per the original variable names, each timestamp row carries seconds at
    # index 2 and nanoseconds at index 3 — TODO confirm against the archiver
    # response format.  Combine them into float seconds.
    sec = np.array([row[2] for row in k1])
    nsec = np.array([row[3] for row in k1])
    tf = sec + nsec * 10**(-9)
    if make_wave:
        v = make_wave_data(v, dtype='y')
        tf = make_wave_data(tf, dtype='x')
    td = trans_tf_to_td(tf, dtype='array')
    NN = len(td)
    tv = np.array([td, tf.reshape(NN), v.reshape(NN)]).T
    # BUG FIX: the column was previously named label[0] — the first
    # *character* of the label string (label is a str, defaulting to pv).
    # Use the full label, as the docstring promises.
    df = pd.DataFrame(tv, index=np.arange(len(tv)),
                      columns=['td', 'tf', label])
    return df
def archiver():
    """Fixture: an Archiver whose XML-RPC backend is replaced by a mock."""
    fixture = Archiver("http://fake")
    fixture.archiver = MockArchiver()
    return fixture