def target_sweep_dirfile(self, save_path = '/mnt/iqstream/dirfiles', write = True, span = 100.0e3, attenuation=None):
		if attenuation is None:
			attenuation = float(raw_input("Attenuation level [dB] ?"))
		print "    Previous attenuation setting was %.1f dB"%self.get_attenuation()
		self.set_atten(attenuation)
		print "    Attenuation level is now %.1f dB"%attenuation
		write = raw_input('Write tones? (y/n) ')
		kid_freqs = np.load('/mnt/iqstream/last_kid_freqs.npy')
		dirfile_dir = raw_input('dirfile dir ? ')
		save_path = os.path.join(save_path, dirfile_dir+'_'+str(int(time.time())))

		#kid_freqs = np.array(np.loadtxt('BLASTResonatorPositionsVer2.txt', delimiter=','))
		center_freq = (np.max(kid_freqs) + np.min(kid_freqs))/2.   #Determine LO position to put tones centered around LO
		self.v1.set_frequency(0,center_freq / (1.0e6), 0.01) # LO
		bb_freqs = kid_freqs - center_freq
		bb_freqs = np.roll(bb_freqs, - np.argmin(np.abs(bb_freqs)) - 1)
		np.save('/mnt/iqstream/last_bb_freqs.npy',bb_freqs)
		rf_freqs = bb_freqs + center_freq
		np.save('/mnt/iqstream/last_rf_freqs.npy',rf_freqs)
		channels = np.arange(len(rf_freqs))
		print channels
		np.save('/mnt/iqstream/last_channels.npy',channels)
		self.v1.set_frequency(0,center_freq / (1.0e6), 0.01) # LO
		print '\nTarget baseband freqs (MHz) =', bb_freqs/1.0e6
		print '\nTarget RF freqs (MHz) =', rf_freqs/1.0e6
		if write == 'y':
			self.writeQDR(bb_freqs)
		self.fpga.write_int('sync_accum_reset', 0)
		self.fpga.write_int('sync_accum_reset', 1)
		
		print save_path,gd.CREAT|gd.RDWR
		
		dirf = gd.dirfile(save_path,gd.CREAT|gd.RDWR)
		symlink_path = '/mnt/iqstream/active_dirfile.lnk'
		try:
			os.unlink(symlink_path)
		except OSError:
			pass
		os.symlink(save_path,symlink_path)
		
		for chan in range(1024):
			dirf.add(gd.entry(gd.RAW_ENTRY,'I%04d'%chan,0,(gd.INT32,1)))
			dirf.add(gd.entry(gd.RAW_ENTRY,'Q%04d'%chan,0,(gd.INT32,1)))
		dirf.add(gd.entry(gd.CONST_ENTRY,'attenuation',0,(gd.FLOAT32,)))
		dirf.put_constant('attenuation',attenuation)
		dirf.close()
		
		f,i,q = self.sweep_lo_dirfile(Npackets_per = 10, channels = channels, center_freq = center_freq, span = span, bb_freqs=bb_freqs,  save_path = save_path)
				
		self.write_dirfile_format_file(save_path,f,i,q)
		
		last_target_dir = save_path
		np.save('/mnt/iqstream/last_target_dir.npy',np.array([last_target_dir]))
		self.plot_kids_dirfile(save_path = last_target_dir, channels = channels)
		#plt.figure()
		#plt.plot()
		return
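	# A minimal usage sketch, assuming `ri` is an initialised instance of this
	# interface class (the instance name is not given in the source):
	#
	#     ri.target_sweep_dirfile(span=200.0e3, attenuation=20.0)
	#
	# This prompts for the dirfile directory, optionally writes the tones, runs
	# the LO sweep, and saves and plots the resulting target-sweep dirfile.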
	def open_stored_dirfile(self, save_path = '/mnt/iqstream/lo_sweeps/'):
		df=gd.dirfile(save_path,gd.RDONLY)
		fields=df.field_list()
		numchannels = int(sorted([f for f in fields if f.startswith('Q')])[-1][1:])+1
		f=[]
		i=[]
		q=[]
		for chan in range(numchannels):
			f.append(df.get_carray('sweep_f_%04d'%chan,gd.FLOAT32))
			i.append(df.get_carray('sweep_i_%04d'%chan,gd.FLOAT32))
			q.append(df.get_carray('sweep_q_%04d'%chan,gd.FLOAT32))
		f=np.array(f)
		i=np.array(i)
		q=np.array(q)
		return f, i, q
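A minimal usage sketch for the reader above (assuming `ri` is an initialised instance of the same class; the path is illustrative):

f, i, q = ri.open_stored_dirfile(save_path='/mnt/iqstream/lo_sweeps/sweep_dir')
print f.shape, i.shape, q.shape  # one row of sweep points per channel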
Example #3
def plot_sweep(sweepfile='latest', html=None):
    """ No slash when inputting sweepfile name
    HTML file - template to save in folder (as index.html)
    """

    if sweepfile == 'latest':
        list_of_sweeps = glob.glob(
            '/data/dirfiles/roach0/20??????_??????_sweep')
        mysweep = max(list_of_sweeps, key=os.path.getctime)
    else:
        mysweep = sweepfile

    figdir = '/data/tuning/roach0/' + mysweep[-21:-6]
    if not os.path.isdir(figdir):
        os.mkdir(figdir)

    if html is not None:
        cmd = 'cp ' + html + ' ' + figdir + '/index.html'
        os.system(cmd)

    df = gd.dirfile(mysweep, gd.RDONLY | gd.UNENCODED)
    lo = df.get_carray('sweep.lo_freqs')
    off_freq = (lo - np.mean(lo)) / 1e3

    for key in df.field_list():

        if (key[-4] == 'K') or (key[-4] == 'B'):

            sweep = df.get_carray('sweep.' + key[-4:])
            s21 = 20 * np.log10(np.abs(sweep))
            dSdf = np.gradient(np.real(sweep))**2 + \
                   np.gradient(np.imag(sweep))**2
            plt.figure(1, figsize=(13, 5))
            plt.subplot(1, 2, 1)
            plt.plot(off_freq, s21)
            plt.plot(np.array(
                [off_freq[np.argmax(dSdf)], off_freq[np.argmax(dSdf)]]),
                     np.array([np.min(s21), np.max(s21)]),
                     '--r',
                     label='Max dSdf')
            plt.plot(np.array([0, 0]),
                     np.array([np.min(s21), np.max(s21)]),
                     '-k',
                     label='Current Tone')
            plt.grid()
            plt.legend(loc='best')
            plt.xlabel('Offset Frequency [kHz]')
            plt.ylabel(r'$20*\log_{10}(|S_{21}|)$, arb. offset')
            plt.title('$S_{21}$')

            plt.subplot(1, 2, 2)
            plt.plot(np.real(sweep) / 1e5, np.imag(sweep) / 1e5)
            plt.axis('scaled')
            plt.xlabel('I [arb]')
            plt.ylabel('Q [arb]')
            plt.grid()
            plt.title('I/Q')

            plt.suptitle(key[-4:] + ': ' + mysweep[-21:-6])
            plt.savefig(figdir + '/' + key[-4:] + '.png')
            plt.clf()

    plt.close()
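A minimal usage sketch (paths illustrative, not from the source): plot the most recent sweep, or a named one with an index.html template copied alongside the figures.

plot_sweep()    # picks the latest *_sweep dirfile under /data/dirfiles/roach0
plot_sweep('/data/dirfiles/roach0/20190101_120000_sweep', html='template.html')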
Example #4
bindport = 1234
buffer_size = 8234  # total bytes per roach packet, including 42 byte header

NTONES = 1010
filename = os.path.join('testing', 'run', '20180416_testdatawrite_dirfile')

dq = deque()

if __name__ == '__main__':
    #if False:

    # generate dirfile
    dirf = create_format_file(filename, NTONES)

    dirf = gd.dirfile(
        filename,
        gd.RDWR | gd.UNENCODED)  # add GD_EXCL to stop accidental overwriting

    # configure socket
    #s = funcs_network.generate_socket()
    #funcs_network.configure_socket_and_bind(s, bindaddress, bindport, buffer_size)
    eventmonitor = threading.Event()
    eventmonitor.set()
    # create multi-threaded queue
    filewritethread = threading.Thread(name='writer_thread',
                                       target=append_to_dirfile,
                                       args=(
                                           dirf,
                                           dq,
                                           NTONES,
                                           eventmonitor,
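A hedged sketch of the writer-thread target referenced above; `append_to_dirfile`, the packet layout and the 'I%04d' field names are assumptions, not confirmed by the source. It drains packets from the shared deque and appends one INT32 sample per tone while the event is set.

import time
import numpy as np

def append_to_dirfile(dirf, dq, ntones, event):
    frame = 0
    while event.is_set():
        while dq:
            packet = dq.popleft()
            # skip the assumed 42-byte header; one little-endian int32 per tone
            data = np.fromstring(packet[42:], dtype='<i4')
            for t in range(ntones):
                dirf.putdata('I%04d' % t, data[t:t + 1], first_frame=frame)
            frame += 1
        dirf.flush()
        time.sleep(0.01)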
Example #5
with open(sys.argv[1]) as handle:
    pc = json.loads(handle.read())

TSTEP = pc['samp_rate']
TSTR = pc['time_start']
TEND = pc['time_end']
ivec = np.arange(TEND+1-TSTR)
tvec = ivec * TSTEP
t_span = [ivec[0], ivec[-1]]

ROOTDIR = "/data/mole/"
FILESET = pc['fileset']
TDATASET = pc['t_dataset']
HDATASET = pc['h_dataset']

df = gd.dirfile(os.path.join(ROOTDIR, FILESET))
Tdf = df.getdata(TDATASET)[TSTR:TEND+1]
Qdf = np.maximum(QMIN, np.minimum(QMAX, df.getdata(HDATASET)))[TSTR:TEND+1]
i_Qon = np.where(Qdf>0.5)[0][0]
t_Qon = i_Qon * TSTEP

###########################################################################
# From circuit parameters, simulate timestream
circ_params = np.array([pc['Ch'], pc['Rh'], pc['Ct'], pc['Rt']], dtype=float)
Tb = float(pc['Tb'])
sim_params = np.append(1./circ_params, Tb)

T0 = [0, 0]     # ODE initial conditions ([T0, Tdot0])
soln = solve_ivp(lambda t, T: HeaterIVP(t, T, updateSimParams(t, t_Qon, sim_params), Q=np.interp(t, tvec, Qdf)),
    t_span, T0, t_eval=tvec, max_step=TSTEP)
Example #6
    # center Vsig on 0 V.  Accounts for amplifier offset
    lowAve = np.average(Vsig[np.where(Vbias < -cutoff)])
    hiAve = np.average(Vsig[np.where(Vbias > cutoff)])
    offset = (lowAve + hiAve) / 2
    Vsig -= offset
    return offset


##############################################################################

BUFFER = 100000
PROCESSES = 3
prefix = '/home/nick/'

files = sorted(os.listdir(prefix + 'Bolometers/LJBolos/'))
data = gd.dirfile(prefix + 'Bolometers/run7_dir')
totalSamples = data.nframes * data.spf('UnixTime')
'''
prevFrame = 0
i = 0
while (i < len(files)):
    f = files[i]
    if (not re.match('B1_([0-9]{2}-?){3}(_[0-9]{2}){3}.txt', f)):
        files.remove(f)
        continue

    # extract the load curve time and date from the file name
    date = f[3:11]
    Time = f[12:20].replace('_', ':')
    t = time.strptime('%s, %s' %(date, Time), '%m-%d-%y, %X')
    t = time.mktime(t)
Example #7
    def ctime_roach(self, roach_number, kind, mode='average', write=False):

        '''
        Function to generate ctime for a given roach.
        roach_number: may be a str, int or float. List or numpy array is accepted too if time is 
                      required for multiple roaches
        kind: it is the method that is used to compute ctime for the roach. str for single entry 
              and list for multiple entries
              Possible methods:
              - Packet: A method that uses only packet count information
              - Clock: A method that uses only clock information
        mode: in case of multiple methods how the time information coming from the different
              methods will be combined
              Possible modes:
              - average
              - max
              - min
        write: choose to write the created ctime in the original dirfile
        '''

        kind_list = ['Packet', 'Clock']
        mode_list = ['average', 'max', 'min']

        try:
            if isinstance(roach_number, str) or isinstance(roach_number, int) or \
               isinstance(roach_number, float):
                self.time_roach = {}
                if isinstance(roach_number, str):
                    roach_number = roach_number.strip()

                roach_number = int(roach_number)
            
            elif isinstance(roach_number, list) or isinstance(roach_number, np.ndarray):               
                roach_number = np.array(roach_number).astype(int)
                self.time_roach = {}

                for i in range(len(roach_number)):
                    key = 'roach'+str(roach_number[i])
                    self.time_roach[key] = {}
            
            else:
                raise InputError
        
        except InputError:

            print('roach_number is not a str, int, float, list or numpy.array')
            sys.exit(1)

        try:
            if isinstance(kind, str):                
                kind = kind.strip()
                
                if kind in kind_list:
                    pass

                else:
                    raise InputError

            elif isinstance(kind, list):               
                kind = list(map(str.strip, kind))

                for i in kind:
                    if i in kind_list:
                        pass
                    else:
                        raise InputError
            
            else:                
                raise InputError
            
        except InputError:
            print('The method (kind) chosen to compute ctime_roach is not correct or is not a str or a list')
            sys.exit(1)

        try: 
            if isinstance(mode, str):
                if mode in mode_list:
                    pass
                else:
                    raise InputError

            else:
                raise InputError
        
        except InputError:
            print('The mode chosen for combining the time methods is not one of')
            print('average, max or min, or the mode is not a string')
            sys.exit(1)

        for i in range(np.size(roach_number)):

            if np.size(roach_number) == 1:

                if isinstance(roach_number, np.ndarray):
                    roach_number_temp = roach_number[0]
                    key = 'roach'+str(roach_number[0])
                else:
                    roach_number_temp = roach_number
                    key = 'roach'+str(roach_number)
            else:
                roach_number_temp = roach_number[i]
                key = 'roach'+str(roach_number[i])

            if write:
                mode_dirfile = gd.RDWR
            else:
                mode_dirfile = gd.RDONLY

            self.d = gd.dirfile(self.roach_path[roach_number_temp-1], mode_dirfile)
            ctime_roach_name = 'ctime_roach'+str(int(roach_number_temp))
            self.ctime_roach_temp = (self.d.getdata(ctime_roach_name)).astype(np.float64)

            pps_roach_name = 'pps_count_roach'+str(int(roach_number_temp))
            self.pps_roach = (self.d.getdata(pps_roach_name)).astype(np.float64)

            for j in range(np.size(kind)):

                if np.size(kind) == 1:
                    if isinstance(kind, list):
                        kind_temp = kind[j]
                    else:    
                        kind_temp = kind
                else:
                    kind_temp = kind[j]

                if kind_temp.lower() == 'clock':
                    ctime_temp = self.clock_ctime_roach(roach_number_temp)

                elif kind_temp.lower() == 'packet':
                    ctime_temp = self.packet_ctime_roach(roach_number_temp)
                
                ctime_temp += 1570000000.
                ctime_temp += self.ctime_roach_temp*1e-2

                if j == 0:
                    ctime = ctime_temp
                else:
                    ctime = np.vstack((ctime_temp, ctime))

            del self.pps_roach
            del self.ctime_roach_temp

            if j != 0:
                print('ROACH ', key, 'completed')
                if mode == 'average':
                    self.time_roach[key] = np.average(ctime, axis=0)
                elif mode == 'max':
                    self.time_roach[key] = np.amax(ctime, axis=0)
                elif mode == 'min':
                    self.time_roach[key] = np.amin(ctime, axis=0)

            else:
                print('ROACH ', key, 'completed')
                self.time_roach[key] = ctime

            if write:

                self.write_ctime(roach_number_temp)
            
            self.d.close()
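Example #8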
 def dirfile_all_chan(self, time_interval):
     nchannel = len(self.freq_comb)
     channels = range(nchannel)
     data_path = "./data"
     sub_folder_1 = "meas"
     sub_folder_2 = raw_input("Insert subfolder name (e.g. single_tone): ")
     Npackets = int(time_interval * self.accum_freq)
     self.fpga.write_int(
         self.regs[np.where(self.regs == 'pps_start_reg')[0][0]][1], 1)
     save_path = os.path.join(data_path, sub_folder_1, sub_folder_2)
     if not os.path.exists(save_path):
         os.makedirs(save_path)
     filename = save_path + '/' + \
                str(int(time.time())) + '-' + time.strftime('%b-%d-%Y-%H-%M-%S') + '.dir'
     # make the dirfile
     d = gd.dirfile(filename, gd.CREAT | gd.RDWR | gd.UNENCODED)
     # add fields
     phase_fields = []
     for chan in range(nchannel):
         phase_fields.append('chP_' + str(chan))
         d.add_spec('chP_' + str(chan) + ' RAW FLOAT64 1')
     d.add_spec('time RAW FLOAT64 1')
     d.add_spec('packet_count RAW UINT32 1')
     d.close()
     d = gd.dirfile(filename, gd.RDWR | gd.UNENCODED)
     #nfo_I = map(lambda x: save_path + "/chI_" + str(x), range(nchannel))
     #nfo_Q = map(lambda y: save_path + "/chQ_" + str(y), range(nchannel))
     nfo_phase = map(lambda z: filename + "/chP_" + str(z), range(nchannel))
     #fo_I = map(lambda x: open(x, "ab"), nfo_I)
     #fo_Q = map(lambda y: open(y, "ab"), nfo_Q)
     fo_phase = map(lambda z: open(z, "ab"), nfo_phase)
     fo_time = open(filename + "/time", "ab")
     fo_count = open(filename + "/packet_count", "ab")
     count = 0
     while count < Npackets:
         ts = time.time()
         packet = self.s.recv(
             8234)  # total number of bytes including 42 byte header
         data = np.fromstring(packet[42:], dtype='<i').astype('float')
         packet_count = (np.fromstring(packet[-4:], dtype='>I'))
         for chan in channels:
             if (chan % 2) > 0:
                 I = data[1024 + ((chan - 1) / 2)]
                 Q = data[1536 + ((chan - 1) / 2)]
             else:
                 I = data[0 + (chan / 2)]
                 Q = data[512 + (chan / 2)]
             #fo_I[chan].write(struct.pack('i',I))
             #fo_Q[chan].write(struct.pack('i',Q))
             fo_phase[chan].write(struct.pack('d', np.arctan2(Q, I)))
             #fo_I[chan].flush()
             #fo_Q[chan].flush()
             fo_phase[chan].flush()
         count += 1
         fo_time.write(struct.pack('d', ts))
         fo_count.write(struct.pack('L', packet_count[0]))
         fo_time.flush()
         fo_count.flush()
     for chan in channels:
         #fo_I[chan].close()
         #fo_Q[chan].close()
         fo_phase[chan].close()
     fo_time.close()
     fo_count.close()
     d.close()
     return
Example #9
    print "linenum =", pdata["linenum"]
    sys.exit(1);

  if (re.search("dirfile/format$", pdata["filename"]) == None):
    print "filename =", pdata["filename"]
    sys.exit(1);

  return pygetdata.SYNTAX_IGNORE;

# create the dirfile first
data=array.array("H",range(3,7000,7))
os.system("rm -rf dirfile")
os.mkdir("dirfile")
file=open("dirfile/data", 'w')
data.tofile(file)
file.close()

file=open("dirfile/format", "w")
file.write("data RAW UINT16 8\nbad line\n")
file.close()

d=pygetdata.dirfile("dirfile", pygetdata.RDONLY, callback=parser_callback,
    extra="extra stuff");
error=d.error;

os.system("rm -rf dirfile")

if (error != pygetdata.E_OK):
  print "error = ", error
  sys.exit(1)
Example #10
           "mult MULTIPLY data sbit\n"
           "div DIVIDE mult bit\n"
           "recip RECIP div 6.5;4.3\n"
           "phase PHASE data 11\n"
           "window WINDOW linterp mult LT 4.1\n"
           "/ALIAS alias data\n"
           "string STRING \"Zaphod Beeblebrox\"\n")
file.close()

file = open("dirfile/form2", 'w')
file.write("const2 CONST INT8 -19\n")
file.close()

# 0: error check
try:
    d = pygetdata.dirfile("x", pygetdata.RDONLY)
except:
    CheckException(0, pygetdata.OpenError)

# 1: dirfile check
try:
    d = pygetdata.dirfile("dirfile", pygetdata.RDWR)
except:
    CheckOK(1)

# 2: getdata (int) check
try:
    n = d.getdata("data", pygetdata.INT, first_frame=5, num_frames=1)
except:
    CheckOK(2)
CheckSimple(2, len(n), 8)
Example #11
#!/usr/bin/python
import time
import numpy as np
from datetime import datetime
import pygetdata as gd
import soco

DEVICE_ADDRESS = "192.168.0.135"
DATAFILE = "/data/etc/mole.lnk"

#sonos = soco.SoCo(DEVICE_ADDRESS)
sonos = soco.discovery.any_soco()
df = gd.dirfile(DATAFILE, gd.RDONLY)

# find the tracks


def get_current_track_title():
    return sonos.get_current_track_info()['title']


class AutoSonos:
    def __init__(self, trackname, fieldname, trueval=None):
        self.trackname = trackname
        self.fieldname = fieldname
        self.trueval = trueval
        self.timeout = 0
        self.lastval = 0
        self.framenum = 0
        self.changed = False
        self.timesteady = 0
Example #12
    def _process_command(self):
        """
        Internal method to handle and process any event triggered by event.set(). Note that when running,
        this is a copy of the main memory space and has direct access to the daemon process memory space,
        Also, self in this method does not update self in the main process.

        """
        def send_response_to_eventqueue(data_to_send):
            if self._eventqueue.empty():
                self._cmdevent.set()
                _logger.debug("putting data to send on eventqueue, {0}".format(
                    data_to_send))
                self._eventqueue.put(data_to_send)
            else:
                _logger.error(
                    "queue not empty - something bad has happened - nothing done"
                )

        try:
            command_to_process = self._eventqueue.get(timeout=1.)

        except mp.queues.Empty:
            _logger.error("queue empty, nothing done")
            return

        # make sure that the command conforms to some simple requirements
        assert  type(command_to_process) == tuple \
            and len(command_to_process) == 2 \
            and type(command_to_process[0]) == str, " item on queue not in expected format {0} ".format(command_to_process)

        command, args = command_to_process

        _logger.debug("\nReceived command ({0}, {1})\n".format(command, args))
        #print  "\nReceived command ({0}, {1})\n".format( command, args )

        #  a set of if statements to handle all available options.
        if command == "SET_DATAPACKET_DICT":
            # reinitialise datapacket_dict
            self._datapacket_dict = args if isinstance(args, dict) else None
            self._datapipe_in.send((command, self._datapacket_dict))

        elif command == "SET_FILE":

            # don't proceed if the writer is currently saving data. Return the current dirfile name
            if self.is_writing.value:
                _logger.error(
                    "writer is currently saving data. Stop and try again.")
                send_response_to_eventqueue(self.current_filename)
                return

            # close the current dirfile - this will probably be done in the main thread too
            lib_dirfiles.close_dirfile(self.current_dirfile)

            # extract new dirfile path from queue arguments
            new_filename = args if isinstance(
                args, str) else args[0] if isinstance(args, tuple) else None

            # open dirfile (note that for some reason we can't pass an open dirfilehandle between proceses)
            self.current_dirfile = _gd.dirfile(new_filename, _gd.RDWR)

            try:
                _logger.debug("current dirfile is {0}".format(
                    self.current_dirfile.name))
                self.current_filename = self.current_dirfile.name

            except:
                _logger.error("Bad dirfile. Possibly the dirfile is closed?")
                self.current_filename = None
                return

            # send new dirfile filename to main process
            self._datapipe_in.send((command, self.current_filename))
            send_response_to_eventqueue(self.current_filename)

        elif command == "GET_FILE":
            send_response_to_eventqueue(self.current_filename)

        elif command == "START_WRITE":

            # clear all packets currently in data queue
            self._writer_queue.clear()
            _logger.debug("starting writing")
            print "Starting writing"

            # sets the writing flag to True (see inner loop of _writer_thread_function)
            self.is_writing.value = True

            # set cmd event to indicate that the main thread can read the queue
            #send_response_to_eventqueue( self.is_writing.value )

        elif command == "STOP_WRITE":
            self.is_writing.value = False
            _logger.info("pausing writer")

            send_response_to_eventqueue(self.is_writing.value)

        elif command == "STATUS_WRITER":
            send_response_to_eventqueue(self._filewritethread.is_alive())

        elif command == "CHECK_PACKET":
            # select on the socket to see if there packets are being received. Doesn't do anything with
            # the packet, and so this could be triggered by stray packets if using SOCK_RAW

            rd, wr, err = select.select([self._sockethandle], [], [], 1.)
            send_response_to_eventqueue(bool(rd))

        elif command == "CHECK_PACKET_DATA":
            # select on the socket to see if there packets are being received. Doesn't do anything with
            # the packet, and so this could be triggered by stray packets if using SOCK_RAW

            rd, wr, err = select.select([self._sockethandle], [], [], 1.)
            send_response_to_eventqueue(self._sockethandle.recv(9000))

        elif command == "CLEAR_QUEUE":
            print "Clearing queue with {0} packets".format(
                len(self._writer_queue))
            self._writer_queue.clear()
            print "Queue cleared"

        elif command == "SET_LOGLEVEL":
            level = args if isinstance(args, int) else 0
            logfile.set_log_level(level=level)

        elif command == "TERMINATE":
            print "Terminating datalogger. Goodbye."

        else:
            print "command not recognised - nothing done "
Example #13
    def __init__(self, path, idx_start, idx_end, field_list=[], mode='frames', ref_field=None, \
                 roach=False, roach_num=None):
        '''
        Class to handle dirfile: load, select a subsample or save data
        Parameters:
        - path: path of the dirfile
        - idx_start: first frame or sample or starting time of the subset.
        - idx_end: last frame or sample or final time of the subset. For the last sample of the 
                   array, it is possible to use -1
        - field_list: a list of strings with the fields to be loaded. If 'full', all the
                      fields in the dirfile are read
        - mode: can be frames, samples or time. If frames, idx_start(_end) are read as
                the first and last frame of interest. If samples, the parameters idx_start(_end)
                refer to the field given by the parameter ref_field. If time, the parameters idx_start(_end)
                refer to the starting and ending time of the slice of interest.
                BE CAREFUL: reading data in time with this class does not synchronize pointing and detector
                data. In order to do that, it is necessary to use the interpolation function in timing.py
        - ref_field: reference field for the mode 'samples' to assign the idx_start(_end). 
                     If None, the reference field is considered the ctime
        - roach: If true, the dirfile to be read is the roach file and not the master. Default is FALSE
        - roach_num: the number of the roach if a roach dirfile is used. Default is 3 (350um array)
        '''

        self.d = gd.dirfile(path)

        if roach:
            if roach_num is None:
                roach_num = 3
            time_field = 'ctime_built_roach' + str(int(roach_num))
        else:
            time_field = 'ctime_master_built'

        if ref_field is None:
            self.ref_field = time_field
        else:
            self.ref_field = ref_field

        if mode == 'frames':
            if idx_end == -1:
                idx_end = self.d.nframes

            first_frame = int(idx_start)
            num_frames = int(idx_end) - int(idx_start)

            self.time = self.d.getdata(time_field,
                                       first_frame=first_frame,
                                       num_frames=num_frames)

        else:
            self.time = self.d.getdata(time_field)

            if mode == 'time':
                idx_start = np.nanargmin(np.abs(self.time - idx_start))
                idx_end = np.nanargmin(np.abs(self.time - idx_end))

            else:
                if idx_end == -1:
                    if field_list == 'full':
                        idx_end = self.d.array_len(time_field)
                    else:
                        idx_end = self.d.array_len(self.ref_field)

            self.time = self.time[idx_start:idx_end]

        if len(field_list) != 0:

            self.resample_completed = False
            self.data_values = {}

            if field_list == 'full':
                field_list = self.d.field_list()

            len_fields = np.array([])

            for i in field_list:
                if mode == 'frames':
                    self.data_values[i] = self.d.getdata(
                        i, first_frame=first_frame, num_frames=num_frames)
                else:
                    first_sample = int(idx_start * self.d.spf(i) /
                                       self.d.spf(self.ref_field))
                    num_samples = int((idx_end - idx_start) * self.d.spf(i) /
                                      self.d.spf(self.ref_field))

                    self.data_values[i] = self.d.getdata(
                        i, first_sample=first_sample, num_samples=num_samples)

                len_fields = np.append(len_fields, len(self.data_values[i]))

            if self.ref_field in field_list:
                self.ref_field_array = self.data_values[self.ref_field]
            else:
                self.ref_field_array = self.d.getdata(self.ref_field, first_sample=int(idx_start), \
                                                      num_samples=int(idx_end-idx_start))

            if np.all(np.diff(len_fields) == 0):
                self.resample_required = False
            else:
                self.resample_required = True
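A minimal usage sketch under stated assumptions: the class name `DirfileHandler` is hypothetical (the snippet does not show it), and the path and field names are illustrative.

data = DirfileHandler('/data/master', 0, 10000, field_list=['az', 'el'], mode='frames')
print data.time[0], data.data_values['az'][0]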
Example #14
    def _writer_thread_function(self,
                                _logger):  #, datapipe_out):#, data_queue):
        """
        Function to run as the writer thread spawned in the daemon process. Runs self.parse_packet_and_append_to_dirfile()
        continuously until both is_writing is set to False and _exitevent is set. Note that this means that to terminate
        gracefully the writer should be stopped before the _exitevent is set. That said, if the daemon_process is terminated,
        as setDaemon is True by default, this thread will die at the same time, possibly not gracefully.

        Inner while loop is set by is_writing flag, and allows the writer to be paused by the user. From the main thread, use
        self.pause_writing() and self.start_writing().

        TODO:

        Scope : thread spawned in daemon_process

        """
        # ignore ctrl+c signals from main thread
        signal.signal(signal.SIGINT, signal.SIG_IGN)

        i = 0  # loop iteration index, used for debugging

        # set niceness of this process

        os.nice(15)

        #assert isinstance(self._writer_queue, deque), "queue object doesn't appear to be correct"
        # check that a dirfile exists before starting writer loop
        if not type(self.current_dirfile) == _gd.dirfile:
            _logger.warning("no dirfile set")

        #print datapipe_out.__repr__
        datatowrite = []

        # BUFFER SIZE REQUIRED BEFORE WRITING TO DISK (# TODO: be able to change on the fly?)
        sizetowrite = roach_config[self.roachid]["buffer_len_to_write"]

        while not self._exitevent.is_set():

            # POLL PIPE TO CHECK FOR UPDATES TO DATAPACKET_DICT or CURRENT DIRFILE
            if self._datapipe_out.poll():
                pipedata = self._datapipe_out.recv()

                if not (isinstance(pipedata, tuple) and len(pipedata) == 2):
                    _logger.debug(
                        "data on pipe not of correct format. Found {0}".format(
                            pipedata))
                    pass
                else:
                    command, data = pipedata
                    _logger.debug("command, data on pipe: {0},{1}".format(
                        command, data))
                    #
                    if command == "SET_DATAPACKET_DICT":
                        self._datapacket_dict = data
                    elif command == "SET_FILE":
                        self.current_dirfile = _gd.dirfile(str(data), _gd.RDWR)
                    else:
                        _logger.warning(
                            "command not recognised {0}. nothing done.".format(
                                command))

            prevcnt = 0
            # MAIN DATA WRITING LOOP #
            last_iteration = False
            while self.is_writing.value or last_iteration:

                _logger.debug("in writing loop; {0},{1}".format(
                    len(self._writer_queue), sizetowrite))

                # WRITE TO DISK WHEN BUFFER_LEN IS REACHED
                if (len(self._writer_queue) >= sizetowrite) or last_iteration:

                    datatowrite = [
                        self._writer_queue.pop()
                        for i in range(len(self._writer_queue))
                    ]  # get all data currently in queue
                    _logger.debug("length of datatowrite {0}".format(
                        len(datatowrite)))

                    for packet in datatowrite:
                        newcnt = np.frombuffer(packet[0][-9:-5], ">u4")
                        if newcnt - prevcnt > 1:
                            _logger.warning(
                                "!! Dropped packets !! - {0} packets lost at packet number {1}"
                                .format(newcnt - prevcnt, prevcnt))
                        prevcnt = newcnt

                    retcode = self._parse_packet_and_append_to_dirfile(
                        datatowrite
                    )  # parse the packets using the datapacket_dict and append to the dirfile
                    datatowrite = []
                    last_iteration = False
                else:
                    time.sleep(sizetowrite / 488. * 0.5)  # <-- tidy this up?
                    # check is_writing is still true, otherwise check flag to save data on the last iteration
                    if self.is_writing.value == False:
                        last_iteration = True

            if datatowrite:
                _logger.warning("{0} packets didn't get saved!!".format(
                    len(datatowrite)
                ))  # just in case some data is left in the buffer
                datatowrite = []
            time.sleep(0.1)
Example #15
 def saveDirfile_chanRangeIQ(self, time_interval, stage_coords=False):
     start_chan = input("Start chan # ? ")
     end_chan = input("End chan # ? ")
     chan_range = range(start_chan, end_chan + 1)
     data_path = self.gc[np.where(self.gc == 'DIRFILE_SAVEPATH')[0][0]][1]
     sub_folder = raw_input("Insert subfolder name (e.g. single_tone): ")
     Npackets = int(np.ceil(time_interval * self.data_rate))
     self.zeroPPS()
     save_path = os.path.join(data_path, sub_folder)
     if not os.path.exists(save_path):
         os.makedirs(save_path)
     filename = save_path + '/' + \
                str(int(time.time())) + '-' + time.strftime('%b-%d-%Y-%H-%M-%S') + '.dir'
     print filename
     # make the dirfile
     d = gd.dirfile(filename, gd.CREAT | gd.RDWR | gd.UNENCODED)
     # add fields
     I_fields = []
     Q_fields = []
     for chan in chan_range:
         I_fields.append('I_' + str(chan))
         Q_fields.append('Q_' + str(chan))
         d.add_spec('I_' + str(chan) + ' RAW FLOAT64 1')
         d.add_spec('Q_' + str(chan) + ' RAW FLOAT64 1')
     d.close()
     d = gd.dirfile(filename, gd.RDWR | gd.UNENCODED)
     nfo_I = map(lambda z: filename + "/I_" + str(z), chan_range)
     nfo_Q = map(lambda z: filename + "/Q_" + str(z), chan_range)
     fo_I = map(lambda z: open(z, "ab"), nfo_I)
     fo_Q = map(lambda z: open(z, "ab"), nfo_Q)
     fo_time = open(filename + "/time", "ab")
     fo_count = open(filename + "/packet_count", "ab")
     count = 0
     while count < Npackets:
         ts = time.time()
         try:
             packet, data, header, saddr = self.parsePacketData()
             if not packet:
                 continue
         #### Add field for stage coords ####
         except TypeError:
             continue
         packet_count = (np.fromstring(packet[-4:], dtype='>I'))
         idx = 0
         for chan in range(start_chan, end_chan + 1):
             I, Q, __ = self.parseChanData(chan, data)
             fo_I[idx].write(struct.pack('d', I))
             fo_Q[idx].write(struct.pack('d', Q))
             fo_I[idx].flush()
             fo_Q[idx].flush()
             idx += 1
         fo_count.write(struct.pack('L', packet_count[0]))
         fo_count.flush()
         fo_time.write(struct.pack('d', ts))
         fo_time.flush()
         count += 1
     for idx in range(len(fo_I)):
         fo_I[idx].close()
         fo_Q[idx].close()
     fo_time.close()
     fo_count.close()
     d.close()
     return
Example #16
    if (match):
      lst = (float(match.group("day"))*24*3600
             + float(match.group("hour"))*3600)
      schedule.append( (lst, match.group("name")) )

print "\nSchedule parsed with", len(schedule), "command lines"
print
print "************************************************************"
print "* Starting main loop, waiting for new scans                *"
print "************************************************************"
print

#main loop. read LST_SCHED and if it crosses a schedule threshold, play the song
try:
  while True:
    df = gd.dirfile(dirfilePath, gd.RDONLY)
    lst = df.getdata("LST_SCHED", gd.FLOAT,
                     first_frame=df.nframes-1, num_frames=1)[0]
    df.close()
    newsong = False
    try:
      while lst > schedule[1][0]:
        schedule.pop(0)
        print "Passed threshold", schedule[0][0], "<", lst, \
              "for region", schedule[0][1]
        newsong = True
      if newsong:
        #can't do direct songLookup access because names may have modifier chars
        for region in songLookup.iterkeys():
          if schedule[0][1].find(region) >= 0:
            print "New song!", schedule[0][1], "matches", region
Example #17
    "/ALIAS alias data\n"
    "string STRING \"Zaphod Beeblebrox\"\n"
    "sarray SARRAY one two three four five six seven\n"
    "data/msarray SARRAY eight nine ten eleven twelve\n"
    "indir INDIR data carray\n"
    "sindir SINDIR data sarray\n"
    )
file.close()

file=open("dirfile/form2", 'w')
file.write("const2 CONST INT8 -19\n")
file.close()

# 1: error check
try:
  d = pygetdata.dirfile("x", pygetdata.RDONLY)
except:
  CheckException(1, pygetdata.IOError)

# 2: dirfile check
try:
  d = pygetdata.dirfile("dirfile", pygetdata.RDWR)
except:
  CheckOK(2)

# 3: getdata (int) check
try:
  n = d.getdata("data", pygetdata.INT, first_frame=5, num_frames=1)
except:
  CheckOK(3)
CheckSimple(3,len(n),8)
Example #18
roach_comparison = {}

for i in range(len(kind)):

    print('Kind', kind[i])

    roach = t.ctime_roach(roach_number, kind[i])

    roach_comparison[kind[i]] = t.time_roach

chunk = 10000

number = 10

for j in range(len(roach_number)):
    d = gd.dirfile(t.roach_path[roach_number[j] - 1])

    ctime_roach_name = 'ctime_roach' + str(int(roach_number[j]))
    ctime_roach = (d.getdata(ctime_roach_name)).astype(np.float64)

    pps_roach_name = 'pps_count_roach' + str(int(roach_number[j]))
    pps_roach = (d.getdata(pps_roach_name)).astype(np.float64)

    length_chunk = np.floor(len(pps_roach) / number)

    roach_str = 'roach' + str(int(roach_number[j]))
    path = '/home/gabriele/Documents/pyBLASTtools/plots/'

    for k in range(number):
        min_val = k * length_chunk
        max_val = (k + 1) * length_chunk - 2 * chunk
Example #19
def saveTimestreamDirfile(subfolder, start_chan, end_chan, time_interval):
    """Saves a dirfile containing the I and Q values for a range of channels, streamed
       over a time interval specified by time_interval
       inputs:
           float time_interval: Time interval to integrate over, seconds"""
    # Roach PPC object
    fpga = getFPGA()
    # UDP socket
    s = socket(AF_PACKET, SOCK_RAW, htons(3))
    # Roach interface
    ri = roachInterface(fpga, gc, regs, None)
    # UDP object
    udp = roachDownlink(ri, fpga, gc, regs, s, ri.accum_freq)
    udp.configSocket()
    chan_range = range(start_chan, end_chan + 1)
    data_path = gc[np.where(gc == 'DIRFILE_SAVEPATH')[0][0]][1]
    Npackets = int(np.ceil(time_interval * ri.accum_freq))
    udp.zeroPPS()
    save_path = os.path.join(data_path, subfolder)
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    filename = save_path + '/' + \
               str(int(time.time())) + '-' + time.strftime('%b-%d-%Y-%H-%M-%S') + '.dir'
    print filename
    np.save('last_data_path.npy', filename)
    # make the dirfile
    d = gd.dirfile(filename, gd.CREAT | gd.RDWR | gd.UNENCODED)
    # add fields
    I_fields = []
    Q_fields = []
    for chan in chan_range:
        I_fields.append('I_' + str(chan))
        Q_fields.append('Q_' + str(chan))
        d.add_spec('I_' + str(chan) + ' RAW FLOAT64 1')
        d.add_spec('Q_' + str(chan) + ' RAW FLOAT64 1')
    d.close()
    d = gd.dirfile(filename, gd.RDWR | gd.UNENCODED)
    nfo_I = map(lambda z: filename + "/I_" + str(z), chan_range)
    nfo_Q = map(lambda z: filename + "/Q_" + str(z), chan_range)
    fo_I = map(lambda z: open(z, "ab"), nfo_I)
    fo_Q = map(lambda z: open(z, "ab"), nfo_Q)
    fo_time = open(filename + "/time", "ab")
    fo_count = open(filename + "/packet_count", "ab")
    count = 0
    while count < Npackets:
        ts = time.time()
        try:
            packet, data, header, saddr = udp.parsePacketData()
            if not packet:
                continue
        except TypeError:
            continue
        packet_count = (np.fromstring(packet[-4:], dtype='>I'))
        idx = 0
        for chan in range(start_chan, end_chan + 1):
            I, Q, __ = udp.parseChanData(chan, data)
            fo_I[idx].write(struct.pack('d', I))
            fo_Q[idx].write(struct.pack('d', Q))
            fo_I[idx].flush()
            fo_Q[idx].flush()
            idx += 1
        fo_count.write(struct.pack('L', packet_count[0]))
        fo_count.flush()
        fo_time.write(struct.pack('d', ts))
        fo_time.flush()
        count += 1
    for idx in range(len(fo_I)):
        fo_I[idx].close()
        fo_Q[idx].close()
    fo_time.close()
    fo_count.close()
    d.close()
    return
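A minimal usage sketch (argument values illustrative): stream I/Q for channels 0-100 for 60 seconds into a new timestamped dirfile under DIRFILE_SAVEPATH.

saveTimestreamDirfile('single_tone', 0, 100, 60.0)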
Example #20
 def read_existing_sweep_file(self, path_to_sweep):
     # check if filename appears to be a valid dirfile
     assert _lib_dirfiles.is_path_a_dirfile(path_to_sweep)
     self.current_sweep_dirfile = _gd.dirfile(path_to_sweep, _gd.RDWR)
Example #21
File: sync.py  Project: BlastTNG/NaMap
def intersync_roach(data, bins):

    start = np.append(0, np.cumsum(bins[:-1]))
    end = np.cumsum(bins)

    ln = np.linspace(start, end - 1, 488)
    idx = np.reshape(np.transpose(ln), np.size(ln))
    idx_plus = np.append(idx[:-1] + 1, idx[-1])

    return (data[idx_plus.astype(int)] - data[idx.astype(int)]) * (
        idx - idx.astype(int)) + data[idx.astype(int)]


path = '/mnt/d/xystage/'
d = gd.dirfile(path)

starting_frame = 21600
ending_frame = 42070
buffer_frame = 100
num_frames = ending_frame - starting_frame

x = d.getdata('x_stage',
              first_frame=starting_frame - buffer_frame,
              num_frames=num_frames + 2 * buffer_frame)
y = d.getdata('y_stage',
              first_frame=starting_frame - buffer_frame,
              num_frames=num_frames + 2 * buffer_frame)
pps = d.getdata('pps_count_roach3',
                first_frame=starting_frame - buffer_frame,
                num_frames=num_frames + 2 * buffer_frame)
Example #22
    def sweep_lo(self, stop_event=None, **sweep_kwargs):
        """
        Function to sweep the LO. Takes in a number of optional keyword arguments. If not given,
        defaults from the configuration files are assumed.

        Keyword Arguments
        -----------------

        sweep_span : float
            Frequency span, in Hz, about which to sweep the LO around its currently set value.

        sweep_step : float
            Frequency step, in Hz, in which to sweep the LO around its currently set value. Note that some
            synthesizers have a minimum step size. Every attempt has been made to let the user know
            if the hardware is limiting the step, but care should still be taken.

        sweep_avgs : int
            Number of packets to average per LO frequency. This is used to calculate an approximate integration
            time to collect sweep_avgs. There is a 5% time addition to ensure that at least this many packets are
            collected.

        startidx : int
            user defined number of samples to skip after lo switch (to be read from config, or set at run time)

        stopidx : int
            same as startidx, but for the other end (None reads all samples up to lo_switch)

        save_data : bool
            Flag to turn off data writing. Mainly for testing purposes. Default is, of course, True.

        filename_suffix : str
            allows the user to append an additional string to the end of the filename
        """
        # create the stop event for use when running all roaches at once through the muxChannelList
        stop_event = _multiprocessing.Event() if not isinstance(
            stop_event, _multiprocessing.synchronize.Event) else stop_event

        # configure sweep parameters and start writing
        sweep_params = self._configure_sweep_and_start_writing(**sweep_kwargs)

        # # get time for avg factor + 10%
        sleeptime = np.round(sweep_params["sweep_avgs"] / self.sample_rate *
                             1.1,
                             decimals=3)
        _logger.debug("sleep time for sweep is {0}".format(sleeptime))

        step_times = []

        # actually do the sweep - loop over LO frequencies, while saving time at lo_step
        try:
            sweepdirection = np.sign(np.diff(self.toneslist.sweep_lo_freqs))[
                0]  # +/- 1 for forward/backward - not used right now
            _logger.info(
                'Sweeping LO %3.1f kHz around %3.3f MHz in %1.1f kHz steps' %
                (np.ptp(self.toneslist.sweep_lo_freqs / 1.e3),
                 np.mean(self.toneslist.sweep_lo_freqs / 1.e6),
                 np.median(np.diff(self.toneslist.sweep_lo_freqs)) / 1.e3))
            for ix, lo_freq in enumerate(self.toneslist.sweep_lo_freqs):

                if self.loswitch == True:  # only switch if the muxchannel is configured to do so
                    self.synth_lo.frequency = lo_freq
                else:
                    # wait until synth_lo.frequency => lo_freq
                    t0 = time.time()
                    while self.synth_lo.frequency <= lo_freq and time.time(
                    ) <= t0 + sleeptime:
                        time.sleep(sleeptime / 100.)
                pytime = self.writer_daemon.pytime.value
                step_times.append(pytime)
                #print "lo stepped at ", pytime
                #_logger.info('LO stepped to ' + str(lo_freq/1.e6))
                # check the stop event to break out of the loop
                if stop_event.is_set():
                    break
                #pbar.set_description(cm.BOLD + "LO: %i" % lo_freq + cm.ENDC)
                time.sleep(sleeptime)

                # should we wait for a number of samples per frequency? can sample self.current_dirfile.nframes

            #pbar.close()
            #print cm.OKGREEN + "Sweep done!" + cm.ENDC
        except KeyboardInterrupt:
            pass

        # sweep has finished, pause the writing and continue to process the data
        #time.sleep(2.5)
        _logger.debug("pausing writing at ", self.writer_daemon.pytime.value)

        self.writer_daemon.pause_writing()

        #  get only the indexes that were swept
        lofreqs_that_were_swept = self.toneslist.sweep_lo_freqs[np.arange(ix +
                                                                          1)]
        #lofreqs_that_were_swept = self.toneslist.sweep_lo_freqs
        # Back to the central frequency
        if self.loswitch == True:
            self.synth_lo.frequency = self.toneslist.lo_freq

        # save lostep_times to current timestream dirfile (why are these not arrays?)
        self.current_dirfile.add(
            _gd.entry(_gd.RAW_ENTRY, "lo_freqs", 0, (_gd.FLOAT64, 1)))
        self.current_dirfile.add(
            _gd.entry(_gd.RAW_ENTRY, "lostep_times", 0, (_gd.FLOAT64, 1)))

        self.current_dirfile.putdata(
            "lo_freqs",
            np.ascontiguousarray(lofreqs_that_were_swept, dtype=np.float64))
        self.current_dirfile.putdata(
            "lostep_times", np.ascontiguousarray(step_times, dtype=np.float64))

        # on mac, we need to close and reopen the dirfile to flush the data before reading back in the data
        # - not sure why, or if this is a problem on linux - it doesn't hurt too much though
        self.current_dirfile.close()
        self.current_dirfile = _gd.dirfile(self.writer_daemon.current_filename,
                                           _gd.RDWR)

        #delay appears to be required to finish write/open operations before continuing
        time.sleep(0.5)
        # analyse the raw sweep dirfile and write to disk
        self.reduce_and_write_sweep_data(self.current_dirfile)
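A minimal usage sketch (the muxChannel instance name `mc` is assumed): sweep a 200 kHz span in 1 kHz steps, averaging 10 packets per LO frequency.

mc.sweep_lo(sweep_span=200.0e3, sweep_step=1.0e3, sweep_avgs=10)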
Example #23
    "div DIVIDE mult bit\n"
    "recip RECIP div 6.5;4.3\n"
    "phase PHASE data 11\n"
    "window WINDOW linterp mult LT 4.1\n"
    "/ALIAS alias data\n"
    "string STRING \"Zaphod Beeblebrox\"\n"
    )
file.close()

file=open("dirfile/form2", 'w')
file.write("const2 CONST INT8 -19\n")
file.close()

# 0: error check
try:
  d = pygetdata.dirfile("x", pygetdata.RDONLY)
except:
  CheckException(0, pygetdata.OpenError)

# 1: dirfile check
try:
  d = pygetdata.dirfile("dirfile", pygetdata.RDWR)
except:
  CheckOK(1)

# 2: getdata (int) check
try:
  n = d.getdata("data", pygetdata.INT, first_frame=5, num_frames=1)
except:
  CheckOK(2)
CheckSimple(2,len(n),8)
Example #24
from astropy.modeling import rotations
from astropy import wcs
import pygetdata as gd
import numpy as np
import os 
import matplotlib.pyplot as plt


#Load Data
path = '/mnt/c/Users/gabri/Documents/GitHub/mapmaking/2012_data/'
fname = gd.dirfile(path, gd.RDONLY)

ra = fname.getdata('ra', gd.UINT32, num_frames=fname.nframes)
dec = fname.getdata('dec', gd.INT32, num_frames=fname.nframes)
lst = fname.getdata('lst', gd.UINT32, num_frames=fname.nframes)
lat = fname.getdata('lat', gd.INT32, num_frames=fname.nframes)

ra = ra*5.587935447693e-09
dec = dec*8.381903171539e-08
lat = lat*8.381903171539e-08
lst = lst*2.77777777778e-04

frame1 = 1918381
frame2 = 1921000
offset = 0

raf = ra[frame1:frame2]
decf = (dec[frame1:frame2])
latf = (lat[frame1+offset:frame2+offset])
lstf = lst[frame1+offset:frame2+offset]
Example #25
    elif option == "xsc0":
        xsc = 0
    elif option == "xsc1":
        xsc = 1
    elif option == "fieldrotation":
        fieldrotation = float(value)
    elif option == "minflux":
        minflux = float(value)
    elif option == "minsize":
        minsize = float(value)
    else:
        print("Unrecognized option " + option)
        sys.exit()

# Load GPS data from dirfile
df = gd.dirfile(dirfilename, gd.RDONLY)
TIME = df.getdata("TIME",
        first_frame=0,
        first_sample=0,
        num_frames=df.nframes-1,
        num_samples=0,
        return_type=gd.FLOAT64)
LAT = df.getdata("LAT",
        first_frame=0,
        first_sample=0,
        num_frames=df.nframes-1,
        num_samples=0,
        return_type=gd.FLOAT64)
LON = df.getdata("LON",
        first_frame=0,
        first_sample=0,
Example #26
def create_pcp_dirfile(roachid, dfname="", dftype = "stream", tonenames = [], *df_creation_flags, **kwargs):
    """
    High level function to create a new dirfile according to the pcp standards. This creates a format file with a number of tones,
    and other packet information.

    Parameters
    ============
    dfname: str
        Path to a dirfile, or directory in which to create a new dirfile. If no dirfilename is given (or ""), or if dirfilename
        is a valid path but not a valid dirfile, then a new dirfile will be created in this directory using the default filename
        format given in general_config['default_datafilename_format'].

        If dfname is a valid dirfile path and exclusive = False, the dirfile will be opened and returned. If exclusive = True
        (default) then an error is raised.

    dftype: str
        Currently, one of ["stream", "sweep"]. Anything else raises an error.

    tones: list
        A list of field names that will be used as the dirfile fields.

    df_creation_flags:
        Bit-wise or'd args that are passed to gd.dirfile().

    Valid kwargs
    ============
    filename_suffix: str (default: "")
        string to add to the end of the file path, prepended by a leading underscore. This is added before checking whether
        the resulting path exists.

    exclusive: bool (default: True)
        Switch to handle existing dirfiles. If True, an error is raised if the given path is an existing dirfile.
        If False, this function opens and returns the dirfile object to continued writing/processing.

    array_size: int (default: 101)
        Used for sweep dirfiles only. Sets the array size for the sweep fields.

    inc_derived_fields: bool (default: True)
        Switch to include the derived fields relevant to the type of the dirfile requested.

    Returns
    ============
    dirfile: pygetdata.dirfile
        Initialised and opened pygetdata dirfile object of requested type.

    TODO:
     - handle empty string with filename_suffix

    """
    # preliminary checks
    assert type(dfname) == str
    assert dftype in ["stream", "sweep"]

    # --- handle kwargs ---
    filename_suffix = kwargs.pop("filename_suffix", "") # str to add to file path
    filename_suffix = "_"  + filename_suffix if filename_suffix else ""

    dfname = dfname.rstrip("/") + filename_suffix

    _logger.debug("dirfile path to write: {0}".format(dfname))

    exclusive          = kwargs.pop("exclusive", True) # raise an error if the given path is an existing dirfile
    array_size         = kwargs.pop("array_size", 101) # default size used for sweep file creation
    inc_derived_fields = kwargs.pop("inc_derived_fields", True) # option to include derived fields to dirfile

    if kwargs:
        raise NameError("Unknown kwarg(s) given {0}".format(kwargs.keys()))
    # ------

    # parse user specified set of dirfile flags, else use defaults (note _gd.EXCL prevents accidental overwriting)
    dfflagint = np.bitwise_or.reduce(df_creation_flags) if df_creation_flags \
                                                        else _gd.CREAT|_gd.RDWR|_gd.UNENCODED|_gd.EXCL
    # check if the file path is a valid dirfile
    if is_path_a_dirfile(dfname):
        _logger.debug("{0} is a valid dirifle".format(dfname))
        if exclusive:
            raise IOError, "{0} exists and exclusive = True. Use exclusive = False to return this dirfile".format(dfname)
        else:
            _logger.info( "It looks like {0} is a valid dirfile. Opening and returning dirfile.".format(dfname) )
            return _gd.dirfile(dfname, _gd.RDWR|_gd.UNENCODED)

    # check if path exists - join new filename to existing path. Or if no path is given, create file in cwd
    elif os.path.exists(dfname) or dfname == "":
        dfname = os.path.join( dfname, time.strftime(general_config['default_datafilename_format']) + filename_suffix )
        _logger.debug("path exists; assume this is a directory in which to create the new dirfile".format(dfname))
    # assume that the path given is the intended path of the new dirfile
    else:
        pass # not required, but better to be explicit than implicit :)

    # create the new dirfile
    dirfile = _gd.dirfile(dfname, dfflagint)
    _logger.info( "new dirfile created; {0}".format(dirfile.name) )
    # add main fields according to the type required

    if dftype == "stream":
        dirfile = generate_main_rawfields(dirfile, roachid, tonenames, fragnum = 0)#, field_suffix = field_suffix)
        if inc_derived_fields: # true by default
            dirfile = generate_main_derivedfields(dirfile, tonenames)

    elif dftype == "sweep":
        dirfile = generate_sweep_fields(dirfile, tonenames, array_size = array_size)#, field_suffix = field_suffix)

    return dirfile
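A minimal usage sketch (roachid, path and tone names illustrative): create a new stream dirfile with three tone fields, refusing to overwrite an existing one.

dirf = create_pcp_dirfile('roach0', dfname='/data/dirfiles/roach0',
                          dftype='stream', tonenames=['K000', 'K001', 'K002'])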
Example #27
    def __init__(self, loading_method, path_master=None, path_roach=None, roach_num=1, idx_start=0, idx_end=-1, \
                 time_start=None, time_end=None, time_master=None, time_roach=None, offset=0.):

        '''
        Input parameters for interpolating the data:
        loading_method: a string between:
                        - idx: Using two indices on the master file to select the data
                        - idx_roach: Using two indices of the roach file to select data
                        - time_val: Using two time values on the master file to select the data
                        - time_array: using two time arrays, one from master and one from the roach as 
                                      reference
        path_master: path of the master file
        path_roach: path of the roach file
        roach_num: which roach is going to be analyzed
        idx_start: Starting index of the data that need to be analyzed. The index is from the 
                   master file if the loading method is 'idx' and is from the roach file if the 
                   loading method is 'idx_roach'
        idx_end: Ending index of the data that need to be analyzed. The index is from the 
                 master file if the loading method is 'idx' and is from the roach file if the 
                 loading method is 'idx_roach'
        time_start: Starting time of the data that need to be analyzed. The time is from the 
                    master file. The array needs to be already sliced 
        time_end: Ending time of the data that need to be analyzed. The time is from the 
                  master file. The array needs to be already sliced
        time_master: Array with the time data from master file
        time_roach: Array with the time data from one of the roach file
        offset: time offset in seconds of the roach time array with respect to the master time.
                This is defined as the time to be added (or subtracted if negative) to the master time.
                This offset is not applied when the 'time_array' loading option is used.
        '''

        loading_method_list = ['idx', 'idx_roach', 'time_val', 'time_array']

        if loading_method.strip().lower() not in loading_method_list:
            print('The loading method chosen is not valid. Choose between: idx, idx_roach, time_val, time_array')
            sys.exit(1)

        if loading_method.strip().lower() == 'time_array':

            self.time_master = time_master
            self.time_roach = time_roach

        else:

            self.d_master = gd.dirfile(path_master)
            self.d_roach = gd.dirfile(path_roach)

            self.roach_num = roach_num
            roach_time_str = 'ctime_built_roach'+str(int(self.roach_num))

            if loading_method.strip().lower() == 'idx_roach':

                self.time_roach = self.d_roach.getdata(roach_time_str)

                self.idx_start_roach = idx_start
                self.idx_end_roach = idx_end

                self.time_roach = self.time_roach[self.idx_start_roach:self.idx_end_roach]

                self.time_master = self.d_master.getdata('ctime_master_built')

                self.idx_start_master = np.nanargmin(np.abs(self.time_master-self.time_roach[0]+offset))
                self.idx_end_master = np.nanargmin(np.abs(self.time_master-self.time_roach[-1]+offset))
                
                self.time_master = self.time_master[self.idx_start_master:self.idx_end_master]
                self.time_master += offset

            else:
                if loading_method.strip().lower() == 'idx':

                    self.time_master = self.d_master.getdata('ctime_master_built')

                    self.idx_start_master = idx_start
                    self.idx_end_master = idx_end

                    self.time_master = self.time_master[self.idx_start_master:self.idx_end_master]

                elif loading_method.strip().lower() == 'time_val':

                    self.time_master = self.d_master.getdata('ctime_master_built')

                    self.idx_start_master = np.nanargmin(np.abs(self.time_master-time_start))
                    self.idx_end_master = np.nanargmin(np.abs(self.time_master-time_end))

                    self.time_master = self.time_master[self.idx_start_master:self.idx_end_master]

                self.time_roach = self.d_roach.getdata(roach_time_str)

                self.idx_start_roach = np.nanargmin(np.abs(self.time_roach-self.time_master[0]-offset))
                self.idx_end_roach = np.nanargmin(np.abs(self.time_roach-self.time_master[-1]-offset))

                self.time_roach = self.time_roach[self.idx_start_roach:self.idx_end_roach]
                #return the array corrected by the offset 
                #this is the time array shifted to pointing
                self.time_roach -= offset

            self.d_master.flush('ctime_master_built')
            self.d_roach.flush(roach_time_str)
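
# Hypothetical usage of the class this __init__ belongs to (the class name
# DataSync and the file paths are placeholders; only the keyword arguments
# come from the docstring above):
#
# sync = DataSync('idx', path_master='/path/to/master_dirfile',
#                 path_roach='/path/to/roach_dirfile', roach_num=1,
#                 idx_start=1000, idx_end=200000, offset=0.05)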
示例#28
0
def open_dirfile(dirfilename, **dirfile_flags):

    if is_path_a_dirfile(dirfilename):
        return _gd.dirfile(dirfilename, **dirfile_flags)
    else:
        _logger.warning("can't open dirfile {0}".format(dirfilename))
示例#29
0
    if (re.search("dirfile/format$", pdata["filename"]) == None):
        print("filename =", pdata["filename"])
        sys.exit(1)

    return pygetdata.SYNTAX_IGNORE


# create the dirfile first
data = array.array("H", range(3, 7000, 7))
os.system("rm -rf dirfile")
os.mkdir("dirfile")
file = open("dirfile/data", 'wb')
data.tofile(file)
file.close()

file = open("dirfile/format", "w")
file.write("data RAW UINT16 8\nbad line\n")
file.close()

d = pygetdata.dirfile("dirfile",
                      pygetdata.RDONLY,
                      callback=parser_callback,
                      extra="extra stuff")
error = d.error

os.system("rm -rf dirfile")

if (error != pygetdata.E_OK):
    print("error = ", error)
    sys.exit(1)
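
# The snippet above begins mid-function; a complete minimal parser_callback
# (the two-argument signature follows the pygetdata callback convention, and
# is an assumption here) would look like:
import re
import sys
import pygetdata

def parser_callback(pdata, extra):
    # only tolerate the deliberately bad line in our own test format file
    if re.search("dirfile/format$", pdata["filename"]) is None:
        print("filename =", pdata["filename"])
        sys.exit(1)
    return pygetdata.SYNTAX_IGNORE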
示例#30
0
    B("_b2\n") + B("o3 LINCOM ") + xstring + B("_i1 ") + xstring + B("_m1 ") +
    xstring + B("_b1 ") + xstring + B("_i2 ") + xstring + B("_m2 ") + xstring +
    B("_b2 ") + xstring + B("_i3 ") + xstring + B("_m3 ") + xstring +
    B("_b3\n") + B("b1 BIT ") + xstring + B("_i ") + xstring + B("_bn ") +
    xstring + B("_nb\n") + B("e1 RECIP in ") + xstring + B("_dv\n") +
    B("p1 PHASE ") + xstring + B("_i ") + xstring + B("_ps\n") +
    B("m1 MPLEX a b ") + xstring + B("_cv ") + xstring + B("_pd\n") +
    B("y1 POLYNOM ") + xstring + B("_i ") + xstring + B("_y1 2 ") + xstring +
    B("_y3\n") + B("w1 WINDOW a b EQ ") + xstring + B("_t1\n") +
    B("w2 WINDOW a b SET ") + xstring + B("_t2\n") + B("w3 WINDOW a b GT ") +
    xstring + B("_t3\n") + B("d1 DIVIDE ") + xstring + B("_i1 ") + xstring +
    B("_i2\n"))
f.close()

try:
    D = pygetdata.dirfile("dirfile", pygetdata.RDONLY)
except pygetdata.DirfileError:
    CheckOK(0)

# Attempt 2: no encoding

try:
    D.validate(estring)
except pygetdata.DirfileError:
    CheckEOS(2, D.error_string, estring)

c = D.carrays(return_type=pygetdata.NULL)
CheckSimple(3, len(c), 2)
CheckSimple(4, c[0][0], estring + B("_a1"))
CheckSimple(5, c[1][0], estring + B("_a2"))
示例#31
0
def dirfileToUseful(file_name, data_type):
    nom_path = '../roach_data'
    q = gd.dirfile(nom_path, gd.RDONLY)
    values = q.getdata(file_name, data_type)
    return values
示例#32
0
def read_stream(filename):
    firstframe = 0
    firstsample = 0
    d = gd.dirfile(filename, gd.RDWR | gd.UNENCODED)
    #print "Number of frames in dirfile =", d.nframes
    nframes = d.nframes

    vectors = d.field_list()
    ifiles = [i for i in vectors if i[0] == "I"]
    qfiles = [q for q in vectors if q[0] == "Q"]
    ifiles.remove("INDEX")
    ivals = d.getdata(ifiles[0],
                      gd.FLOAT32,
                      first_frame=firstframe,
                      first_sample=firstsample,
                      num_frames=nframes)
    qvals = d.getdata(qfiles[0],
                      gd.FLOAT32,
                      first_frame=firstframe,
                      first_sample=firstsample,
                      num_frames=nframes)
    ivals = ivals[~np.isnan(ivals)]
    qvals = qvals[~np.isnan(qvals)]
    i_stream = np.zeros((len(ivals), len(ifiles)))
    q_stream = np.zeros((len(qvals), len(qfiles)))

    for n in range(len(ifiles)):
        ivals = d.getdata(ifiles[n],
                          gd.FLOAT32,
                          first_frame=firstframe,
                          first_sample=firstsample,
                          num_frames=nframes)
        qvals = d.getdata(qfiles[n],
                          gd.FLOAT32,
                          first_frame=firstframe,
                          first_sample=firstsample,
                          num_frames=nframes)
        i_stream[:, n] = ivals[~np.isnan(ivals)]
        q_stream[:, n] = qvals[~np.isnan(qvals)]
    d.close()
    #read in the time file
    with open(filename + "/time", 'rb') as content_file:
        content = content_file.read()
    time_val = []
    for i in range(len(content) // 8):
        time_val.append(struct.unpack('d', content[0 + 8 * i:8 + 8 * i])[0])

    #read in the time file
    with open(filename + "/packet_count", 'rb') as content_file:
        content = content_file.read()
    packet_val = []
    for i in range(len(content) // 8):
        packet_val.append(struct.unpack('L', content[0 + 8 * i:8 + 8 * i])[0])
    packet = np.asarray(packet_val)
    if ((packet - np.roll(packet, 1))[1:] != 1).any():  # packets were dropped
        print(
            "!!!!WARNING!!!! you dropped some packets during your measurement; "
            "consider increasing your system buffer size"
        )
        plt.figure(1)
        plt.title("Delta t between packets")
        plt.plot((time_val - np.roll(time_val, 1))[1:])
        plt.figure(2)
        plt.title("Delta packet")
        plt.plot((packet - np.roll(packet, 1))[1:])
        plt.show()

    dictionary = {
        'I_stream': i_stream,
        'Q_stream': q_stream,
        'time': time_val,
        'packet_count': packet_val
    }
    return dictionary
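
# Quick illustration of the dropped-packet test used above: consecutive packet
# counters should differ by exactly 1, so any other first difference marks a
# gap (the counter values here are made up):
import numpy as np

packet = np.array([101, 102, 103, 105, 106])      # one packet (104) missing
dropped = ((packet - np.roll(packet, 1))[1:] != 1).any()
print(dropped)                                    # True -> a gap was detected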
示例#33
0
    def sync_data(self, telemetry=True):
        '''
        Wrapper for the previous functions to return the slices of the detector and coordinate TODs,
        and the associated time.
        '''

        if self.experiment.lower() == 'blast-tng':
            d = gd.dirfile(self.roach_pps_path)

            first_frame = self.startframe - self.bufferframe
            num_frames = self.endframe - self.startframe + 2 * self.bufferframe
            interval = self.endframe - self.startframe

            ctime_mcp = d.getdata('time',
                                  first_frame=first_frame,
                                  num_frames=num_frames)
            ctime_usec = d.getdata('time_usec',
                                   first_frame=first_frame,
                                   num_frames=num_frames)

            if self.xystage is True:
                #frequency_ctime = 100
                sample_ctime = 100
            else:
                #frequency_ctime = self.coord_fs
                sample_ctime = self.coord_sample_frame
            ctime_mcp = ctime_mcp + ctime_usec / 1e6 + 0.2  # fold in sub-second counter and fixed 0.2 s lag
            ctime_mcp = ctime_mcp[self.bufferframe *
                                  sample_ctime:self.bufferframe *
                                  sample_ctime + interval * sample_ctime]

            if self.offset is not None:
                ctime_mcp += self.offset / 1000.

            ctime_start = ctime_mcp[0]
            ctime_end = ctime_mcp[-1]

            coord1 = self.coord1_data[self.bufferframe*self.coord_sample_frame:self.bufferframe*self.coord_sample_frame+\
                                      interval*self.coord_sample_frame]
            coord2 = self.coord2_data[self.bufferframe*self.coord_sample_frame:self.bufferframe*self.coord_sample_frame+\
                                      interval*self.coord_sample_frame]

            if self.xystage is True:
                freq_array = np.append(
                    0,
                    np.cumsum(
                        np.repeat(1 / self.coord_sample_frame,
                                  self.coord_sample_frame * interval - 1)))
                coord1time = ctime_start + freq_array
                coord2time = coord1time.copy()
            else:
                if self.coord_sample_frame != 100:
                    freq_array = np.append(
                        0,
                        np.cumsum(
                            np.repeat(1 / self.coord_sample_frame,
                                      self.coord_sample_frame * interval - 1)))
                    coord1time = ctime_start + freq_array
                    coord2time = coord1time.copy()
                else:
                    coord1time = ctime_mcp.copy()
                    coord2time = ctime_mcp.copy()

            if telemetry:

                kidutils = det.kidsutils()

                start_det_frame = self.startframe - self.bufferframe
                end_det_frame = self.endframe + self.bufferframe

                frames = np.array([start_det_frame, end_det_frame],
                                  dtype='int')

                dettime, pps_bins = kidutils.det_time(self.roach_pps_path, self.roach_number, frames, \
                                                      ctime_start, ctime_mcp[-1], self.det_fs)

                coord1int = interp1d(coord1time, coord1, kind='linear')
                coord2int = interp1d(coord2time, coord2, kind='linear')

                idx_roach_start, = np.where(
                    np.abs(dettime - ctime_start) == np.amin(
                        np.abs(dettime - ctime_start)))
                idx_roach_end, = np.where(
                    np.abs(dettime -
                           ctime_end) == np.amin(np.abs(dettime - ctime_end)))

                if len(np.shape(self.det_data)) == 1:
                    self.det_data = kidutils.interpolation_roach(
                        self.det_data, pps_bins[pps_bins > 350], self.det_fs)
                    self.det_data = self.det_data[
                        idx_roach_start[0]:idx_roach_end[0]]
                else:
                    for i in range(len(self.det_data)):
                        self.det_data[i] = kidutils.interpolation_roach(
                            self.det_data[i], pps_bins[pps_bins > 350],
                            self.det_fs)
                        self.det_data[i] = self.det_data[
                            i, idx_roach_start[0]:idx_roach_end[0]]

                dettime = dettime[idx_roach_start[0]:idx_roach_end[0]]

            else:
                if len(np.shape(self.det_data)) == 1:
                    dettime = ctime_start + np.append(
                        0,
                        np.cumsum(
                            np.repeat(1 / self.det_fs, len(self.det_data))))
                else:
                    dettime = ctime_start + np.append(
                        0,
                        np.cumsum(
                            np.repeat(1 / self.det_fs, len(self.det_data[0]))))

            index1, = np.where(
                np.abs(dettime - coord1time[0]) == np.amin(
                    np.abs(dettime - coord1time[0])))
            index2, = np.where(
                np.abs(dettime - coord1time[-1]) == np.amin(
                    np.abs(dettime - coord1time[-1])))

            coord1_inter = coord1int(dettime[index1[0] + 200:index2[0] - 200])
            coord2_inter = coord2int(dettime[index1[0] + 200:index2[0] - 200])
            dettime = dettime[index1[0] + 200:index2[0] - 200]

            if len(np.shape(self.det_data)) == 1:
                self.det_data = self.det_data[index1[0] + 200:index2[0] - 200]
            else:
                for i in range(len(self.det_data)):
                    self.det_data[i] = self.det_data[i, index1[0] +
                                                     200:index2[0] - 200]

        elif self.experiment.lower() == 'blastpol':
            dettime, self.det_data = self.frame_zoom(self.det_data, self.det_sample_frame, \
                                                     self.det_fs, np.array([self.startframe,self.endframe]), \
                                                     self.offset)
            coord1time, coord1 = self.frame_zoom(self.coord1_data, self.coord_sample_frame, \
                                                 self.coord_fs, np.array([self.startframe,self.endframe]))

            coord2time, coord2 = self.frame_zoom(self.coord2_data, self.coord_sample_frame, \
                                                 self.coord_fs, np.array([self.startframe,self.endframe]))

            dettime = dettime - dettime[0]
            coord1time = coord1time - coord1time[0]

            index1, = np.where(
                np.abs(dettime - coord1time[0]) == np.amin(
                    np.abs(dettime - coord1time[0])))
            index2, = np.where(
                np.abs(dettime - coord1time[-1]) == np.amin(
                    np.abs(dettime - coord1time[-1])))

            coord1_inter, coord2_inter = self.coord_int(coord1, coord2, \
                                                        coord1time, dettime[index1[0]+10:index2[0]-10])

            dettime = dettime[index1[0] + 10:index2[0] - 10]
            self.det_data = self.det_data[:, index1[0] + 10:index2[0] - 10]

        if isinstance(self.hwp_data, np.ndarray):

            if self.experiment.lower() == 'blastpol':
                hwptime, hwp = self.frame_zoom(self.hwp_data, self.hwp_sample_frame, \
                                               self.hwp_fs, np.array([self.startframe,self.endframe]))

                hwptime = hwptime - hwptime[0]
                index1, = np.where(
                    np.abs(dettime -
                           hwptime[0]) == np.amin(np.abs(dettime -
                                                         hwptime[0])))
                index2, = np.where(
                    np.abs(dettime - hwptime[-1]) == np.amin(
                        np.abs(dettime - hwptime[-1])))

                hwp_interpolation = interp1d(hwptime, hwp, kind='linear')
                hwp_inter = hwp_interpolation(dettime[index1[0] +
                                                      10:index2[0] - 10])

            else:

                hwp = self.hwp_data[self.bufferframe*self.coord_sample_frame:self.bufferframe*self.coord_sample_frame+\
                                    interval*self.coord_sample_frame]

                freq_array = np.append(
                    0,
                    np.cumsum(
                        np.repeat(1 / self.hwp_sample_frame,
                                  self.hwp_sample_frame * interval - 1)))
                hwptime = ctime_start + freq_array

                hwp_interpolation = interp1d(hwptime, hwp, kind='linear')
                hwp_inter = hwp_interpolation(dettime)

            del hwptime
            del hwp

        else:

            hwp_inter = np.zeros_like(coord1_inter)

        del coord1time
        del coord2time
        del coord1
        del coord2

        if self.lst_data is not None and self.lat_data is not None:

            if self.experiment.lower() == 'blastpol':
                lsttime, lst = self.frame_zoom(self.lst_data, self.lstlat_sample_frame, \
                                               self.lstlatfreq, np.array([self.startframe,self.endframe]))

                lattime, lat = self.frame_zoom(self.lat_data, self.lstlat_sample_frame, \
                                               self.lstlatfreq, np.array([self.startframe,self.endframe]))

                lsttime = lsttime - lsttime[0]
                index1, = np.where(
                    np.abs(dettime -
                           lsttime[0]) == np.amin(np.abs(dettime -
                                                         lsttime[0])))
                index2, = np.where(
                    np.abs(dettime - lsttime[-1]) == np.amin(
                        np.abs(dettime - lsttime[-1])))

                lst_inter, lat_inter = self.coord_int(lst, lat, \
                                                      lsttime, dettime[index1[0]+10:index2[0]-10])

            else:
                lst = self.lst_data[self.bufferframe*self.coord_sample_frame:self.bufferframe*self.coord_sample_frame+\
                                    interval*self.coord_sample_frame]
                lat = self.lat_data[self.bufferframe*self.coord_sample_frame:self.bufferframe*self.coord_sample_frame+\
                                    interval*self.coord_sample_frame]

                lsttime = ctime_mcp.copy()
                lattime = ctime_mcp.copy()

                lstint = interp1d(lsttime, lst, kind='linear')
                latint = interp1d(lattime, lat, kind='linear')

                lst_inter = lstint(dettime)
                lat_inter = latint(dettime)

            del lst
            del lat

            return (dettime, self.det_data, \
                    coord1_inter, coord2_inter, hwp_inter, lst_inter, lat_inter)

        else:
            return (dettime, self.det_data, \
                    coord1_inter, coord2_inter, hwp_inter)
示例#34
0
        if (match):
            lst =  (float(match.group("day"))*24*3600 \
         + float(match.group("hour"))*3600)
            schedule.append((lst, match.group("name")))

print "\nSchedule parsed with", len(schedule), "command lines"
print
print "************************************************************"
print "* Starting main loop, waiting for new scans                *"
print "************************************************************"
print

#main loop. read LST_SCHED and if it crosses a schedule threshold, play the song
try:
    while True:
        df = gd.dirfile(dirfilePath, gd.RDONLY)
        lst = df.getdata("LST_SCHED", gd.FLOAT, \
     first_frame=df.nframes-1, num_frames=1)[0]
        df.close()
        newsong = False
        try:
            while lst > schedule[1][0]:
                schedule.pop(0)
                print "Passed threshold", schedule[0][0], "<", lst, \
                    "for region", schedule[0][1]
                newsong = True
            if newsong:
                #can't do direct songLookup access because names may have modifier chars
                for region in songLookup.iterkeys():
                    if schedule[0][1].find(region) >= 0:
                        print "New song!", schedule[0][1], "matches", region
示例#35
0
	def write_dirfile_format_file(self,dirfile_path, f, i, q):
		print f
		print i
		print q
		
		print "Writing format file"
		dirf=gd.dirfile(dirfile_path,gd.RDWR)
		print 'including format file fragments...','format_sweep','format_calibration'
		sweepfrag = dirf.include('sweep',flags=gd.CREAT)
		calfrag   = dirf.include('calibration',flags=gd.CREAT)
		
		for chan,(ff,ii,qq) in enumerate(zip(f,i,q)):

			di = np.diff(ii)
			dq = np.diff(qq)
			mididx=ff.size//2
			df = ff[mididx+1]-ff[mididx]
			f_tone = ff[mididx]
			i_tone  = ii[mididx]
			q_tone  = qq[mididx]
			di_tone = di[mididx]
			dq_tone = dq[mididx]
			#di_tone = np.mean(di[mididx-1:mididx+1])
			#dq_tone = np.mean(dq[mididx-1:mididx+1])
			didf_tone = di_tone/df
			dqdf_tone = dq_tone/df
			c,r = self.least_sq_circle(ii,qq)
			phi_tone = np.arctan2(q_tone-c[1],i_tone-c[0])


			#Sweeps
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_f_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_i_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_q_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.put_carray('sweep_f_%04d'%chan,ff)
			dirf.put_carray('sweep_i_%04d'%chan,ii)
			dirf.put_carray('sweep_q_%04d'%chan,qq)

			##Resonant Frequency
			#dirf.add(gd.entry(gd.CONST_ENTRY,'cal_res_freq_%04d'%chan,calfrag,(gd.FLOAT32,)))
			#dirf.put_constant('cal_res_freq_%04d'%chan,fres)

			#Tone Frequency
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_tone_freq_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.put_constant('_cal_tone_freq_%04d'%chan,f_tone) 

			#i-i0 q-q0
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_i_sub_i0_%04d'%chan,calfrag,
			(("I%04d"%chan,),(1,),(-1*i_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_q_sub_q0_%04d'%chan,calfrag,
			(("Q%04d"%chan,),(1,),(-1*q_tone,))))
			

			#Complex values
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_complex_%04d'%chan,calfrag,
			(("I%04d"%chan,"Q%04d"%chan),(1,1j),(0,0))))

			#Amplitude
			dirf.add(gd.entry(gd.PHASE_ENTRY,'amplitude_%04d'%chan,calfrag,
			(('_cal_complex_%04d.m'%chan),0)))

			#Phase
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'phase_raw_%04d'%chan,calfrag,
			(('_cal_complex_%04d.a'%chan,),(1,),(0,))))
				
			#Complex_centered:    
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_centred_%04d'%chan,calfrag,
			(("_cal_complex_%04d"%chan,),(1,),(-c[0]-1j*c[1],))))

			#Complex_rotated
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_rotated_%04d'%chan,calfrag,
			(("_cal_centred_%04d"%chan,),(np.exp(-1j*phi_tone),),(0,))))

			#Phase
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'phase_rotated_%04d'%chan,calfrag,
			(('_cal_rotated_%04d.a'%chan,),(1,),(0,))))
			
			#df = ((i[0]-i)(di/df) + (q[0]-q)(dq/df) ) / ((di/df)**2 + (dq/df)**2)
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_didf_mult_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_dqdf_mult_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.put_constant('_cal_didf_mult_%04d'%chan,didf_tone/(didf_tone**2+dqdf_tone**2))
			dirf.put_constant('_cal_dqdf_mult_%04d'%chan,dqdf_tone/(didf_tone**2+dqdf_tone**2))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_i0_sub_i_%04d'%chan,calfrag,
				(("I%04d"%chan,),(-1,),(i_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_q0_sub_q_%04d'%chan,calfrag,
				(("Q%04d"%chan,),(-1,),(q_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY, 'delta_f_%04d'%chan, calfrag,
				(("_cal_i0_sub_i_%04d"%chan,"_cal_q0_sub_q_%04d"%chan),
				("_cal_didf_mult_%04d"%chan,"_cal_dqdf_mult_%04d"%chan),
				(0,0))))
			
			#x = df/f0
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'x_%04d'%chan,calfrag,
				(('delta_f_%04d'%chan,),(1./f_tone,),(0,))))

		dirf.close()
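
# The delta_f LINCOM fields above encode the small-signal frequency-shift
# estimate from the commented formula; a direct numpy version (a sketch, with
# i0/q0 the tone-center IQ values and didf/dqdf the local sweep gradients) is:
import numpy as np

def delta_f(i, q, i0, q0, didf, dqdf):
    # df = ((i0 - i)*di/df + (q0 - q)*dq/df) / ((di/df)**2 + (dq/df)**2)
    norm = didf**2 + dqdf**2
    return ((i0 - i) * didf + (q0 - q) * dqdf) / norm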
示例#36
0
# Quickly plot up last sweep and save pngs

import matplotlib.pyplot as plt
import numpy as np
import pygetdata as gd
import glob
import os

list_of_sweeps = glob.glob('/data/dirfiles/roach0/20??????_??????_sweep')
latest_sweep = max(list_of_sweeps, key=os.path.getctime)

figdir = '/data/tuning/roach0/' + latest_sweep[-21:-6]
if not os.path.isdir(figdir):
    os.mkdir(figdir)

df = gd.dirfile(latest_sweep, gd.RDONLY | gd.UNENCODED)
lo = df.get_carray('sweep.lo_freqs')

for key in df.field_list():

    if (key[-4] == 'K') or (key[-4] == 'B'):

        sweep = df.get_carray('sweep.' + key[-4:])

        plt.figure(1)
        plt.plot((lo - np.mean(lo)) / 1e3, 20 * np.log10(np.abs(sweep)))
        plt.grid()
        plt.xlabel('Offset Frequency [kHz]')
        plt.ylabel('20*log10(mag)')
        plt.title(key[-4:] + ': ' + latest_sweep[-21:-6])