def target_sweep_dirfile(self, save_path = '/mnt/iqstream/dirfiles', write = True, span = 100.0e3, attenuation=None):
		if attenuation is None:
			attenuation = float(raw_input("Attenuation level [dB] ?"))
		print "    Previous attenuation setting was %.1f dB"%self.get_attenuation()
		self.set_atten(attenuation)
		print "    Attenuation level is now %.1f dB"%attenuation
		write = raw_input('Write tones? (y/n) ')
		kid_freqs = np.load('/mnt/iqstream/last_kid_freqs.npy')
		dirfile_dir = raw_input('dirfile dir ? ')
		save_path = os.path.join(save_path, dirfile_dir+'_'+str(int(time.time())))

		#kid_freqs = np.array(np.loadtxt('BLASTResonatorPositionsVer2.txt', delimiter=','))
		center_freq = (np.max(kid_freqs) + np.min(kid_freqs))/2.   #Determine LO position to put tones centered around LO
		self.v1.set_frequency(0,center_freq / (1.0e6), 0.01) # LO
		bb_freqs = kid_freqs - center_freq
		bb_freqs = np.roll(bb_freqs, - np.argmin(np.abs(bb_freqs)) - 1)
		np.save('/mnt/iqstream/last_bb_freqs.npy',bb_freqs)
		rf_freqs = bb_freqs + center_freq
		np.save('/mnt/iqstream/last_rf_freqs.npy',rf_freqs)
		channels = np.arange(len(rf_freqs))
		print channels
		np.save('/mnt/iqstream/last_channels.npy',channels)
		self.v1.set_frequency(0,center_freq / (1.0e6), 0.01) # LO
		print '\nTarget baseband freqs (MHz) =', bb_freqs/1.0e6
		print '\nTarget RF freqs (MHz) =', rf_freqs/1.0e6
		if write == 'y':
			self.writeQDR(bb_freqs)
		self.fpga.write_int('sync_accum_reset', 0)
		self.fpga.write_int('sync_accum_reset', 1)
		
		print save_path,gd.CREAT|gd.RDWR
		
		dirf = gd.dirfile(save_path,gd.CREAT|gd.RDWR)
		symlink_path = '/mnt/iqstream/active_dirfile.lnk'
		try:
			os.unlink(symlink_path)
		except OSError:
			pass
		os.symlink(save_path,symlink_path)
		
		for chan in range(1024):
			dirf.add(gd.entry(gd.RAW_ENTRY,'I%04d'%chan,0,(gd.INT32,1)))
			dirf.add(gd.entry(gd.RAW_ENTRY,'Q%04d'%chan,0,(gd.INT32,1)))
		dirf.add(gd.entry(gd.CONST_ENTRY,'attenuation',0,(gd.FLOAT32,)))
		dirf.put_constant('attenuation',attenuation)
		dirf.close()
		
		f,i,q = self.sweep_lo_dirfile(Npackets_per = 10, channels = channels, center_freq = center_freq, span = span, bb_freqs=bb_freqs,  save_path = save_path)
				
		self.write_dirfile_format_file(save_path,f,i,q)
		
		last_target_dir = save_path
		np.save('/mnt/iqstream/last_target_dir.npy',np.array([last_target_dir]))
		self.plot_kids_dirfile(save_path = last_target_dir, channels = channels)
		#plt.figure()
		#plt.plot()
		return
Example No. 2
def create_dirfile(filename):

    dirf = gd.dirfile(
        filename, gd.CREAT | gd.RDWR
        | gd.UNENCODED)  # add GD_EXCL to stop accidental overwriting

    #subdirf = gd.dirfile(os.path.join(filename, "sweep"), gd.CREAT|gd.RDWR|gd.UNENCODED)

    sweepfrag = dirf.include('sweep', flags=gd.CREAT)
    #sweepfrag = dirf.include(subdirf, flags=gd.CREAT)
    dirf.add(
        gd.entry(gd.CARRAY_ENTRY, 'sweep_f_%04d' % (1), sweepfrag,
                 (gd.FLOAT64, 11)))
    dirf.put_carray('sweep_f_%04d' % (1), np.arange(11))

    dirf.add_spec('PACKETCOUNT' +
                  ' RAW UINT32 1')  # Initialized to 0 with 'GbE_pps_start'

    kidlist = [
        "K{kidnum:04d} RAW COMPLEX128 1".format(kidnum=i) for i in range(10)
    ]

    l = map(dirf.add_spec, kidlist)

    #dirf.close()
    return dirf
Example No. 3
    def __init__(self, directory):
        directory = os.path.expandvars(directory)
        print "Trying directory ", directory
        basename = current_yyyymmdd_hhmmss()
        filename = os.path.join(directory, basename)
        self.dirfile = pygetdata.dirfile(filename,
                                         pygetdata.CREAT | pygetdata.RDWR)
        self.filename = filename

        # Not sure whether curfiles or soft links are now preferred in
        # kst2, so for now, set up one of each!

        # Set up a "curfile" that always points to the current dirfile.
        curfilename = os.path.join(directory, "temperatures.cur")
        prevfilename = os.path.join(directory, "temperatures.prev")
        if os.path.exists(prevfilename):
            os.unlink(prevfilename)
        if os.path.exists(curfilename):
            os.rename(curfilename, prevfilename)
        fp = open(curfilename, "w")
        fp.write("%s\n" % basename)
        fp.close()

        # Also set up a soft link to the current dirfile
        softlink = os.path.join(directory, "temperatures.lnk")
        if os.path.exists(softlink):
            os.unlink(softlink)
        os.symlink(basename, softlink)

        # Set up the dirfile fields
        fields = {
            'temperature': {
                'type': pygetdata.FLOAT32,
                'spf': 1
            },
            'current': {
                'type': pygetdata.FLOAT32,
                'spf': 1
            },
            'ctime': {
                'type': pygetdata.FLOAT64,
                'spf': 1
            }
        }
        for name, params in fields.iteritems():
            entry = pygetdata.entry(pygetdata.RAW_ENTRY, name, 0, params)
            self.dirfile.add(entry)
        self.dirfile.metaflush()

        # Open file handles for the raw data
        # Do this instead of self.dirfile.putdata(), because that's annoying
        self.dirfile.raw_close()
        self.raw_files = {}
        for f in fields.keys():
            fp = open(os.path.join(self.filename, f), "wb")
            self.raw_files[f] = fp
Example No. 4
def generate_main_derivedfields(dirfile, field_names):
    """Generate the calibration fragment for the streaming dirfiles. To calculate the derived fields such as dff0,
    the LINCOM entries require combinations of the parameters from the sweep, defined in
    pcp.lib.lib_dirfiles.SWEEP_CALPARAM_FIELDS. """
    # requires calibration data - this can be written to the dirfile later
    calns = ""

    calfrag = dirfile.include("calibration", namespace = calns, flags = _gd.CREAT|_gd.EXCL|_gd.RDWR)
    nfields = len(field_names)

    cal_entries = [_gd.entry(_gd.CARRAY_ENTRY, calfield, calfrag, (_gd.FLOAT64, nfields) ) for calfield in DERIVED_CALPARAM_FIELDS]
    # create a fragment for the derived fields
    derivedfrag = dirfile.include("derived", flags = _gd.CREAT|_gd.EXCL|_gd.RDWR )

    # create the entries for the derived fields for a stream file
    zentries, magzentries, angzentries, dfentries, dff0entries = [],[],[],[],[]
    for idx, field_name in enumerate( sorted( field_names ) ):
        # complex combination of i and q
        zentries.append( _gd.entry(_gd.LINCOM_ENTRY, field_name + "_z", derivedfrag, ( (field_name + "_I", field_name + "_Q" ), (1,1j), (0,0) ) ) )
        # raw amplitude
        magzentries.append ( _gd.entry(_gd.PHASE_ENTRY, field_name + '_magz', derivedfrag, ( (field_name + '_z.m'), 0) ) )
        # raw phase
        angzentries.append ( _gd.entry(_gd.PHASE_ENTRY, field_name + '_angz', derivedfrag, ( (field_name + '_z.a'), 0) ) )
        # fraction frequency shift
        dfentries.append ( _gd.entry(_gd.LINCOM_ENTRY, field_name + '_df', derivedfrag, ( (field_name + "_I", field_name + "_Q" ), \
                                                                                            ("didf_sumdidq2", "dqdf_sumdidq2"),\
                                                                                            ("i0_didf_sumdidq2", "q0_dqdf_sumdidq2") ) ) )

        #dff0entries.append ( _gd.entry(_gd.DIVIDE_ENTRY, field_name + '_dff0', derivedfrag, ( (field_name + "_df", "f0s"]) ) ) ) )

        derived_entries = zentries + magzentries + angzentries + dfentries# + dff0entries

    # add all the entries to the dirfile
    map(dirfile.add, cal_entries + derived_entries)

    dirfile.flush()
    # hack to fix the derived format file from a bug in pygetdata
    dfname = dirfile.name
    _fix_format_file(dirfile) # <- this makes the dirfile invalid. so, close and reopen
    dirfile.close()

    return _gd.dirfile(dfname, _gd.EXCL|_gd.RDWR)
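The docstring above notes that the calibration data needed by the LINCOM entries can be written to the dirfile later. A minimal sketch of how that write might look, assuming the CARRAY names match the constants referenced above (didf_sumdidq2, dqdf_sumdidq2, i0_didf_sumdidq2, q0_dqdf_sumdidq2) and that one value per tone has already been computed from a sweep; this helper is illustrative, not part of pcp:

import numpy as np

def write_derived_calparams(dirfile, calparams):
    # calparams: dict mapping each DERIVED_CALPARAM_FIELDS name to a 1-D array
    # with one value per (sorted) tone. Assumed names, not a confirmed pcp API.
    for calfield, values in calparams.items():
        dirfile.put_carray(calfield, np.asarray(values, dtype=np.float64))
    dirfile.flush()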
Example No. 5
def generate_sweep_fragment(dirfile, tones, array_size = 501, datatag=""):
    """Generate fragment file for the derived sweep file """

    namespace = "sweep"

    sweep_frag = dirfile.include("sweep_frag", namespace = namespace, flags = _gd.CREAT|_gd.EXCL)

    sep = "_" if datatag else ""; datatag = sep + datatag # str to add to file path (only applies to new filenames)

    swp_fields = check_tones_type(tones)
    # old - to delete -
    #swp_fields = ["K{kidnum:04d}{datatag}".format(kidnum=i, datatag=datatag) for i in range(ntones)]

    sweep_entry_freq       = [ _gd.entry(_gd.CARRAY_ENTRY, ".".join((namespace, "lo_freqs")), sweep_frag, (_gd.FLOAT64, array_size)) ]
    sweep_entries_to_write = [ _gd.entry(_gd.CARRAY_ENTRY, ".".join((namespace, field_name)), sweep_frag, (_gd.COMPLEX64, array_size)) for field_name in swp_fields ]

    #return sweep_entries_to_write
    for entry in sweep_entry_freq + sweep_entries_to_write:
        dirfile.add(entry)

    dirfile.sync()

    return dirfile
Example No. 6
    def write_ctime(self, roach_number = None, spf=1):

        if roach_number is not None:
            ctime_name = 'ctime_built_roach'+str(int(roach_number))
            key = 'roach'+str(int(roach_number))
            val = self.time_roach[key]
        else:
            ctime_name = 'ctime_master_built'
            val = self.time_master

        if ctime_name not in list(map(bytes.decode, self.d.field_list())):
            ctime_entry = gd.entry(gd.RAW_ENTRY, ctime_name, 0, (gd.FLOAT64, spf))
            self.d.add(ctime_entry)
        self.d.putdata(ctime_name, val, gd.FLOAT64)
Example No. 7
def generate_main_rawfields(dirfile, roachid, tonenames, fragnum=0 ):#, field_suffix = ""):
    # function to generate a standard set of dirfile entries for the roach readout
    # will be used for both timestreams and raw sweep files

    if type(dirfile) != _gd.dirfile:
        _logger.error( "given dirfile is of type {0}, and not a valid dirfile. Nothing done.".format(type(dirfile)) )
        return
    elif roachid not in roach_config.keys():
        _logger.error( "Unrecognised roachid = {0}".format(roachid) )
        return

    # add metadata fragment and add "type" field
    add_metadata_to_dirfile(dirfile, {"type": "stream"})

    # get the appropriate namespace for the fragment to add to the fields
    namespace = dirfile.fragment(fragnum).namespace

    # read in auxillary fields as defined in the configuration file and create getdata entries
    firmware_dict = lib_config.get_firmware_register_dict( firmware_registers, roach_config[roachid]["firmware_file"] )
    aux_fields = firmware_dict["packet_structure"]["aux_field_cfg"]

    # write the python_timestamp field manually
    aux_entries_to_write = [ _gd.entry( _GDENTRYMAP[ firmware_dict["packet_structure"]['python_timestamp'][0] ], namespace + 'python_timestamp', \
                                            fragnum, \
                                            (_GDDATAMAP[ firmware_dict["packet_structure"]['python_timestamp'][1] ], 1) ) ]

    for field_name, (entry_type, field_datatype, __, __, __, __) in aux_fields.items():
        #print eval(entry_type), eval(field_datatype)
        #print _GDENTRYMAP[entry_type], _GDDATAMAP[field_datatype]
        aux_entries_to_write.append( _gd.entry( _GDENTRYMAP[entry_type], namespace + field_name, \
                                                fragnum, \
                                                (_GDDATAMAP[field_datatype], 1) ) )
                                                 # field_type, name, fragment_idx, (data_type, sample_rate)
    # generate the field names for tones
    kid_fields_I, kid_fields_Q = toneslist.gen_tone_iq_fields(tonenames, namespace=namespace) #, field_suffix = field_suffix)

    kid_entries_to_write = [ _gd.entry(_gd.RAW_ENTRY, field_name, fragnum, (_gd.FLOAT64, 1)) for field_name in kid_fields_I ] \
                         + [ _gd.entry(_gd.RAW_ENTRY, field_name, fragnum, (_gd.FLOAT64, 1)) for field_name in kid_fields_Q ]

    # generate entries for bbfreqs and lofreq
    lofreq_entry   = _gd.entry(_gd.CONST_ENTRY, "lofreq",   fragnum, (_gd.FLOAT64, ) )
    bbfreq_entry   = _gd.entry(_gd.CARRAY_ENTRY, "bbfreqs", fragnum, (_gd.FLOAT64, len(tonenames)) )
    tonename_entry = _gd.entry(_gd.SARRAY_ENTRY, "tonenames", fragnum, (len(tonenames), ) )

    for entry in aux_entries_to_write + kid_entries_to_write + [lofreq_entry, bbfreq_entry, tonename_entry]:
        dirfile.add(entry)

    dirfile.sync()
    return dirfile
Example No. 8
    def save(self, path, file_format):
        '''
        Function to save selected data in different formats.
        It currently supports saving only to csv or dirfile. Dirfiles are currently created
        with 1 sample per frame.
        Parameters:
        - path: path of the file (including filename but not extension)
        - file_format: format of the file to be created
        '''

        format_list = ['csv', 'dirfile']

        try:
            if file_format in format_list:
                pass
            else:
                raise InputError

        except InputError:
            print(
                'The file format chosen for saving the data is not implemented yet. \
                   Please choose a file format in', format_list)
            sys.exit(1)

        if file_format == 'csv':
            ascii.write(self.data_values,
                        names=self.data_values.keys(),
                        output=path + '.csv',
                        format='csv')

        else:
            dd = gd.dirfile(path, gd.RDWR | gd.CREAT)

            for i in self.data_values.keys():
                if i in list(map(bytes.decode, dd.field_list())):
                    temp = dd.getdata(i)
                    if len(temp) == len(self.data_values[i]):
                        if np.all(np.diff(temp - self.data_values[i]) == 0):
                            pass
                else:
                    entry = gd.entry(gd.RAW_ENTRY, i, 0, (gd.FLOAT32, 1))
                    dd.add(entry)

                dd.putdata(i, self.data_values[i], gd.FLOAT32)
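A brief usage sketch of save(); 'saver' is a placeholder for whatever object exposes this method with its data_values dictionary already populated, and the paths are illustrative:

# hypothetical usage - not part of the original class
saver.save('/data/run42/session', 'csv')      # writes /data/run42/session.csv
saver.save('/data/run42/session', 'dirfile')  # creates or appends to a dirfile at that path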
Example No. 9
def generate_sweep_fields(dirfile, tonenames, array_size = 501, fragnum = 0 ):#, field_suffix=""):
    """Generate fragment file for the derived sweep file """

    # add metadata fragment and add "type" field
    add_metadata_to_dirfile(dirfile, {"type": "sweep"})

    swp_fields = toneslist.get_tone_fields( tonenames )
    #swp_fields = ["K{kidnum:04d}".format(kidnum=i) for i in range(tones)]
    _logger.debug("size of carray for sweep data = {0}".format(array_size) )

    # Parameters
    # sweep_entry_freq       = [ _gd.entry(_gd.CARRAY_ENTRY, "sweep." + "lo_freqs", 0, (_gd.FLOAT64,   array_size)) ]
    # sweep_entry_bb         = [ _gd.entry(_gd.CARRAY_ENTRY, "sweep." + "bb_freqs", 0, (_gd.FLOAT64,   len(tones))) ]
    # sweep_entries_to_write = [ _gd.entry(_gd.CARRAY_ENTRY, "sweep." + field_name, 0, (_gd.COMPLEX64, array_size)) for field_name in swp_fields ]

    sweep_entry_freq       = [ _gd.entry(_gd.CARRAY_ENTRY, "lo_freqs",  fragnum, (_gd.FLOAT64,   array_size)) ]
    sweep_entry_bb         = [ _gd.entry(_gd.CARRAY_ENTRY, "bb_freqs",  fragnum, (_gd.FLOAT64,   len(tonenames))) ]
    sweep_entry_tonenames  = [ _gd.entry(_gd.SARRAY_ENTRY, "tonenames", fragnum, (len(tonenames),)) ]
    sweep_entries_to_write = [ _gd.entry(_gd.CARRAY_ENTRY, field_name,  fragnum, (_gd.COMPLEX64, array_size)) for field_name in swp_fields ]

    _logger.debug("generating new sweep fields: {0}".format(sweep_entries_to_write) )

    map(dirfile.add, sweep_entry_freq + sweep_entry_bb + sweep_entry_tonenames + sweep_entries_to_write)

    # --- add calibration fragment ---
    # constants for F0s, i0, q0, didf0, dqdf0, didq0 -
    # arrays for cal data
    # TODO constants for centres and rotation for phase (just start with df)

    caldata_frag  = dirfile.include("caldata",  namespace = 'caldata',  flags = _gd.CREAT|_gd.EXCL)
    calparam_frag = dirfile.include("calparam", namespace = 'calparam', flags = _gd.CREAT|_gd.EXCL)

    caldata_ns  = dirfile.fragment(caldata_frag).namespace
    calparam_ns = dirfile.fragment(calparam_frag).namespace

    # add calibration fields to the dirfile
    # i0s, q0s, didf0s ...etc for all the tones
    calparam_entries = [ _gd.entry(_gd.CARRAY_ENTRY, ".".join((calparam_ns, field_name)), calparam_frag, (_gd.FLOAT64, len(tonenames)) )  for field_name in SWEEP_CALPARAM_FIELDS]

    # didf, dqdf, didq...etc for each tone - maybe not needed?
    caldata_entries = [ _gd.entry(_gd.CARRAY_ENTRY, ".".join((caldata_ns, field_name)), caldata_frag, (_gd.COMPLEX64, array_size)) for field_name in swp_fields ]

    _logger.debug("generating new sweep cal fields: {0}".format(calparam_entries + caldata_entries) )

    map(dirfile.add, calparam_entries + caldata_entries)

    dirfile.sync()

    return dirfile
Example No. 10
def add_metadata_to_dirfile(dirfile, metadata_dict):
    """

    Function to add metadata to a dirfile as a new fragment.

    Currently all data is written as a string for simplicity.

    """
    # check if metadata fragment exists, and create if not
    is_frag_valid, metadata_frag = check_fragment_valid(dirfile, "metadata")

    if not is_frag_valid:
        metadata_frag = dirfile.include("metadata", flags = _gd.CREAT|_gd.EXCL)

    # add new entries to the dirfile
    map(dirfile.add, [ _gd.entry(_gd.STRING_ENTRY, ".".join(("metadata", field_name)), metadata_frag) for field_name in metadata_dict.keys() ] )

    # write metadata to file
    for field_name, metadata in metadata_dict.items():
        dirfile.put_string(".".join(("metadata", field_name)), str(metadata))

    dirfile.flush()
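A short usage sketch for add_metadata_to_dirfile; the path and metadata values are placeholders:

# hypothetical usage - tag an open dirfile with a few metadata strings
df = _gd.dirfile("/data/roach0_stream", _gd.CREAT | _gd.RDWR)
add_metadata_to_dirfile(df, {"type": "stream", "roachid": "roach0", "comment": "test run"})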
Example No. 11
    def write_data(self, data: Mapping, arange: List[int] = (0, -1)):
        """
        Writes a data block to the dirfile over an index range.

        Arguments:
        - data:     The data block to be written.
        - arange:   Tuple (start, end) of frames [start, end) to write.
        """
        # Sanitize arange
        start_frame, end_frame = arange
        if end_frame < 0:
            end_frame = max(0, end_frame + data.nframes + 1)
        if start_frame < 0:
            start_frame = max(0, start_frame + data.nframes + 1)
        num_frames = end_frame - start_frame

        for name, value in data.items():
            if data.spf is None:
                spf = value.spf 
            else:
                spf = data.spf
            v = np.array(value[(start_frame*spf):(end_frame*spf)])
            # Create the entry if not already present in the dirfile
            if name not in self._df.field_list():
                t = GDTYPE_LOOKUP[value.dtype]
                entry = gd.entry(gd.RAW_ENTRY, name, 0, (t ,spf)) 
                self._df.add(entry)

            # putdata into the dirfile
            print("%20s => %d frames (%d spf) starting at frame %d" % 
                    (name, num_frames, spf, start_frame))
            self._df.putdata(name, v, first_frame=start_frame)

        self._df.flush()

        return num_frames
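A minimal sketch of the data-block interface that write_data() expects: a mapping with nframes and spf attributes whose values support slicing and expose a dtype known to GDTYPE_LOOKUP. The SimpleBlock class and 'writer' instance below are stand-ins, not part of the original code:

import numpy as np

class SimpleBlock(dict):
    nframes = 100
    spf = 1  # one sample per frame for every field

block = SimpleBlock(temperature=np.random.rand(100).astype(np.float32),
                    ctime=np.arange(100, dtype=np.float64))
# 'writer' is assumed to be an instance of the class defining write_data()
writer.write_data(block, arange=[0, 100])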
	def write_dirfile_format_file(self,dirfile_path, f, i, q):
		print f
		print i
		print q
		
		print "Writing format file"
		dirf=gd.dirfile(dirfile_path,gd.RDWR)
		print 'including format file fragments...','format_sweep','format_calibration'
		sweepfrag = dirf.include('sweep',flags=gd.CREAT)
		calfrag   = dirf.include('calibration',flags=gd.CREAT)
		
		for chan,(ff,ii,qq) in enumerate(zip(f,i,q)):

			di = np.diff(ii)
			dq = np.diff(qq)
			mididx=ff.size//2
			df = ff[mididx+1]-ff[mididx]
			f_tone = ff[mididx]
			i_tone  = ii[mididx]
			q_tone  = qq[mididx]
			di_tone = di[mididx]
			dq_tone = dq[mididx]
			#di_tone = np.mean(di[mididx-1:mididx+1])
			#dq_tone = np.mean(dq[mididx-1:mididx+1])
			didf_tone = di_tone/df
			dqdf_tone = dq_tone/df
			c,r = self.least_sq_circle(ii,qq)
			phi_tone = np.arctan2(q_tone-c[1],i_tone-c[0])


			#Sweeps
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_f_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_i_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.add(gd.entry(gd.CARRAY_ENTRY,'sweep_q_%04d'%chan,sweepfrag,(gd.FLOAT32,ff.size)))
			dirf.put_carray('sweep_f_%04d'%chan,ff)
			dirf.put_carray('sweep_i_%04d'%chan,ii)
			dirf.put_carray('sweep_q_%04d'%chan,qq)

			##Resonant Frequency
			#dirf.add(gd.entry(gd.CONST_ENTRY,'cal_res_freq_%04d'%chan,calfrag,(gd.FLOAT32,)))
			#dirf.put_constant('cal_res_freq_%04d'%chan,fres)

			#Tone Frequency
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_tone_freq_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.put_constant('_cal_tone_freq_%04d'%chan,f_tone) 

			#i-i0 q-q0
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_i_sub_i0_%04d'%chan,calfrag,
			(("I%04d"%chan,),(1,),(-1*i_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_q_sub_q0_%04d'%chan,calfrag,
			(("Q%04d"%chan,),(1,),(-1*q_tone,))))
			

			#Complex values
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_complex_%04d'%chan,calfrag,
			(("I%04d"%chan,"Q%04d"%chan),(1,1j),(0,0))))

			#Amplitude
			dirf.add(gd.entry(gd.PHASE_ENTRY,'amplitude_%04d'%chan,calfrag,
			(('_cal_complex_%04d.m'%chan),0)))

			#Phase
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'phase_raw_%04d'%chan,calfrag,
			(('_cal_complex_%04d.a'%chan,),(1,),(0,))))
				
			#Complex_centered:    
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_centred_%04d'%chan,calfrag,
			(("_cal_complex_%04d"%chan,),(1,),(-c[0]-1j*c[1],))))

			#Complex_rotated
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_rotated_%04d'%chan,calfrag,
			(("_cal_centred_%04d"%chan,),(np.exp(-1j*phi_tone),),(0,))))

			#Phase
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'phase_rotated_%04d'%chan,calfrag,
			(('_cal_rotated_%04d.a'%chan,),(1,),(0,))))
			
			#df = ((i[0]-i)(di/df) + (q[0]-q)(dq/df) ) / ((di/df)**2 + (dq/df)**2)
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_didf_mult_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.add(gd.entry(gd.CONST_ENTRY,'_cal_dqdf_mult_%04d'%chan,calfrag,(gd.FLOAT32,)))
			dirf.put_constant('_cal_didf_mult_%04d'%chan,didf_tone/(didf_tone**2+dqdf_tone**2))
			dirf.put_constant('_cal_dqdf_mult_%04d'%chan,dqdf_tone/(didf_tone**2+dqdf_tone**2))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_i0_sub_i_%04d'%chan,calfrag,
				(("I%04d"%chan,),(-1,),(i_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'_cal_q0_sub_q_%04d'%chan,calfrag,
				(("Q%04d"%chan,),(-1,),(q_tone,))))
			dirf.add(gd.entry(gd.LINCOM_ENTRY, 'delta_f_%04d'%chan, calfrag,
				(("_cal_i0_sub_i_%04d"%chan,"_cal_q0_sub_q_%04d"%chan),
				("_cal_didf_mult_%04d"%chan,"_cal_dqdf_mult_%04d"%chan),
				(0,0))))
			
			#x = df/f0
			dirf.add(gd.entry(gd.LINCOM_ENTRY,'x_%04d'%chan,calfrag,
				(('delta_f_%04d'%chan,),(1./f_tone,),(0,))))

		dirf.close()
Example No. 13
try:
  ent = d.entry("string")
except:
  CheckOK(134)
CheckSimple2(134,1,ent.field_type,pygetdata.STRING_ENTRY)
CheckSimple2(134,2,ent.field_type_name,"STRING_ENTRY")
CheckSimple2(134,3,ent.fragment,0)

# 27: fragment_index check
try:
  n = d.fragment_index("data")
except:
  CheckOK(27)
CheckSimple(27,n,0)

# 28: add / entry (raw) check
ent = pygetdata.entry(pygetdata.RAW_ENTRY, "new1", 0, (pygetdata.FLOAT64, 3))
try:
  d.add(ent)
except:
  CheckOK2(28,1)

try:
  ent = d.entry("new1")
except:
  CheckOK2(28,2)
CheckSimple2(28,1,ent.field_type,pygetdata.RAW_ENTRY)
CheckSimple2(28,2,ent.field_type_name,"RAW_ENTRY")
CheckSimple2(28,3,ent.fragment,0)
CheckSimple2(28,4,ent.data_type,pygetdata.FLOAT64)
CheckSimple2(28,5,ent.data_type_name,"FLOAT64")
CheckSimple2(28,6,ent.spf,3)
Example No. 14
    def sweep_lo(self, stop_event=None, **sweep_kwargs):
        """
        Function to sweep the LO. Takes in a number of optional keyword arguments. If not given,
        defaults from the configuration files are assumed.

        Keyword Arguments
        -----------------

        sweep_span : float
            Frequency span, in Hz, about which to sweep the LO around its currently set value.

        sweep_step : float
            Frequency step, in Hz, in which to sweep the LO around its currently set value. Note that some
            synthesizers have a minimum step size. Every attempt has been made to let the user know
            if the hardware is limiting the step, but care should still be taken.

        sweep_avgs : int
            Number of packets to average per LO frequency. This is used to calculate an approximate integration
            time to collect sweep_avgs. There is a 10% time addition to ensure that at least this many packets are
            collected.

        startidx : int
            User-defined number of samples to skip after the LO switch (to be read from config, or set at run time).

        stopidx : int
            Same as startidx, but for the other end (None reads all samples up to the LO switch).

        save_data : bool
            Flag to turn off data writing. Mainly for testing purposes. Default is, of course, True.

        filename_suffix : str
            Allows the user to append an additional string to the end of the filename.
        """
        # create the stop event for use when running all roaches at once through the muxChannelList
        stop_event = _multiprocessing.Event() if not isinstance(
            stop_event, _multiprocessing.synchronize.Event) else stop_event

        # configure sweep parameters and start writing
        sweep_params = self._configure_sweep_and_start_writing(**sweep_kwargs)

        # # get time for avg factor + 10%
        sleeptime = np.round(sweep_params["sweep_avgs"] / self.sample_rate *
                             1.1,
                             decimals=3)
        _logger.debug("sleep time for sweep is {0}".format(sleeptime))

        step_times = []

        # actually do the sweep - loop over LO frequencies, while saving time at lo_step
        try:
            sweepdirection = np.sign(np.diff(self.toneslist.sweep_lo_freqs))[
                0]  # +/- 1 for forward/backward - not used right now
            _logger.info(
                'Sweeping LO %3.1f kHz around %3.3f MHz in %1.1f kHz steps' %
                (np.ptp(self.toneslist.sweep_lo_freqs / 1.e3),
                 np.mean(self.toneslist.sweep_lo_freqs / 1.e6),
                 np.median(np.diff(self.toneslist.sweep_lo_freqs)) / 1.e3))
            for ix, lo_freq in enumerate(self.toneslist.sweep_lo_freqs):

                if self.loswitch:  # only switch if the muxchannel is configured to do so
                    self.synth_lo.frequency = lo_freq
                else:
                    # wait until synth_lo.frequency >= lo_freq
                    t0 = time.time()
                    while (self.synth_lo.frequency <= lo_freq
                           and time.time() <= t0 + sleeptime):
                        time.sleep(sleeptime / 100.)
                pytime = self.writer_daemon.pytime.value
                step_times.append(pytime)
                #print "lo stepped at ", pytime
                #_logger.info('LO stepped to ' + str(lo_freq/1.e6))
                # check the stop event to break out of the loop
                if stop_event.is_set():
                    break
                #pbar.set_description(cm.BOLD + "LO: %i" % lo_freq + cm.ENDC)
                time.sleep(sleeptime)

                # should we wait for a number of samples per frequency? can sample self.current_dirfile.nframes

            #pbar.close()
            #print cm.OKGREEN + "Sweep done!" + cm.ENDC
        except KeyboardInterrupt:
            pass

        # sweep has finished, pause the writing and continue to process the data
        #time.sleep(2.5)
        _logger.debug("pausing writing at ", self.writer_daemon.pytime.value)

        self.writer_daemon.pause_writing()

        #  get only the indexes that were swept
        lofreqs_that_were_swept = self.toneslist.sweep_lo_freqs[np.arange(ix + 1)]
        #lofreqs_that_were_swept = self.toneslist.sweep_lo_freqs
        # Back to the central frequency
        if self.loswitch:
            self.synth_lo.frequency = self.toneslist.lo_freq

        # save lostep_times to current timestream dirfile (why are these not arrays?)
        self.current_dirfile.add(
            _gd.entry(_gd.RAW_ENTRY, "lo_freqs", 0, (_gd.FLOAT64, 1)))
        self.current_dirfile.add(
            _gd.entry(_gd.RAW_ENTRY, "lostep_times", 0, (_gd.FLOAT64, 1)))

        self.current_dirfile.putdata(
            "lo_freqs",
            np.ascontiguousarray(lofreqs_that_were_swept, dtype=np.float64))
        self.current_dirfile.putdata(
            "lostep_times", np.ascontiguousarray(step_times, dtype=np.float64))

        # on mac, we need to close and reopen the dirfile to flush the data before reading back in the data
        # - not sure why, or if this is a problem on linux - it doesn't hurt too much though
        self.current_dirfile.close()
        self.current_dirfile = _gd.dirfile(self.writer_daemon.current_filename,
                                           _gd.RDWR)

        #delay appears to be required to finish write/open operations before continuing
        time.sleep(0.5)
        # analyse the raw sweep dirfile and write to disk
        self.reduce_and_write_sweep_data(self.current_dirfile)
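A hedged usage sketch for sweep_lo(), assuming a configured readout channel object (here called mux_channel) and using only the keyword arguments documented in the docstring above:

# hypothetical call; 'mux_channel' is a placeholder for an object exposing sweep_lo()
mux_channel.sweep_lo(sweep_span=200.0e3,  # 200 kHz span about the current LO frequency
                     sweep_step=2.5e3,    # 2.5 kHz steps
                     sweep_avgs=10)       # ~10 packets averaged per LO step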
Example No. 15
try:
  ent = d.entry("string")
except:
  CheckOK(51)
CheckSimple2(51,1,ent.field_type,pygetdata.STRING_ENTRY)
CheckSimple2(51,2,ent.field_type_name,"STRING_ENTRY")
CheckSimple2(51,3,ent.fragment,0)

# 52: fragment_index check
try:
  n = d.fragment_index("data")
except:
  CheckOK(52)
CheckSimple(52,n,0)

# 53: add / entry (raw) check
ent = pygetdata.entry(pygetdata.RAW_ENTRY, "new1", 0, (pygetdata.FLOAT64, 3))
try:
  d.add(ent)
except:
  CheckOK2(53,1)

try:
  ent = d.entry("new1")
except:
  CheckOK2(53,2)
CheckSimple2(53,1,ent.field_type,pygetdata.RAW_ENTRY)
CheckSimple2(53,2,ent.field_type_name,"RAW_ENTRY")
CheckSimple2(53,3,ent.fragment,0)
CheckSimple2(53,4,ent.data_type,pygetdata.FLOAT64)
CheckSimple2(53,5,ent.data_type_name,"FLOAT64")
CheckSimple2(53,6,ent.spf,3)