def NumpyTensorInitializerForVacancy(gridShape, filename, vacancyfile=None):
    """
    Initialize a 10-component plasticity state by reading from a numpy "tofile" type file or two files.
    """
    dict = {('x','x') : (0,0), ('x','y') : (0,1), ('x','z') : (0,2),\
            ('y','x') : (1,0), ('y','y') : (1,1), ('y','z') : (1,2),\
            ('z','x') : (2,0), ('z','y') : (2,1), ('z','z') : (2,2)}
    data = fromfile(filename)
    if vacancyfile is None:
        data = data.reshape([10] + list(gridShape))
    else:
        data = data.reshape([3,3] + list(gridShape))
        dataV = fromfile(vacancyfile)
        dataV = dataV.reshape(list(gridShape))
    state = VacancyState.VacancyState(gridShape)
    field = state.GetOrderParameterField() 
    if vacancyfile is None:
        i = 0
        for component in field.components:
            field[component] = copy(data[i]) 
            i += 1
    else:
        for component in field.components:
            if component[0] not in ['x', 'y', 'z']:
                field[component] = copy(dataV) 
            else:
                field[component] = copy(data[dict[component]]) 
    return state
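A minimal usage sketch, assuming a state file written with ndarray.tofile in the [10] + gridShape layout the reshape above expects (the grid shape and file name here are made up for illustration):

import numpy as np

# Hypothetical example: write a zero-valued 10-component state and read it back.
gridShape = (32, 32, 32)
np.zeros((10,) + gridShape).tofile("vacancy_state.dat")
state = NumpyTensorInitializerForVacancy(gridShape, "vacancy_state.dat")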
Example 2
    def get_data(self):
        self.text_sym.set_text("Symbol: %d" % (self.symbol))

        derot_data = scipy.fromfile(self.h_derot_file, dtype=scipy.complex64, count=self.occ_tones)
        acq_data = scipy.fromfile(self.h_acq_file, dtype=scipy.complex64, count=self.occ_tones)
        fft_data = scipy.fromfile(self.h_fft_file, dtype=scipy.complex64, count=self.fft_size)
        if(len(acq_data) == 0):
            print "End of File"
        else:
            self.acq_data_reals = [r.real for r in acq_data]
            self.acq_data_imags = [i.imag for i in acq_data]
            self.derot_data_reals = [r.real for r in derot_data]
            self.derot_data_imags = [i.imag for i in derot_data]

            self.unequalized_angle = [math.atan2(x.imag, x.real) for x in fft_data]
            self.equalized_angle = [math.atan2(x.imag, x.real) for x in acq_data]
            self.derot_equalized_angle = [math.atan2(x.imag, x.real) for x in derot_data]

            self.time = [i*(1/self.sample_rate) for i in range(len(acq_data))]
            ffttime = [i*(1/self.sample_rate) for i in range(len(fft_data))]

            self.freq = self.get_freq(ffttime, self.sample_rate)

            for i in range(len(fft_data)):
                if(abs(fft_data[i]) == 0.0):
                    fft_data[i] = complex(1e-6,1e-6)
            self.fft_data = [20*log10(abs(f)) for f in fft_data]
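For reference, the per-sample list comprehensions above collapse into vectorized NumPy calls; a sketch on synthetic complex64 samples standing in for the file contents:

import numpy as np

# Stand-in for fft_data as read with fromfile(..., dtype=complex64).
fft_data = (np.random.randn(512) + 1j * np.random.randn(512)).astype(np.complex64)
angles = np.angle(fft_data)                                   # replaces the math.atan2 loops
power_db = 20 * np.log10(np.maximum(np.abs(fft_data), 1e-6))  # guards against log10(0), like the loop above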
Example 3
def main():
    print "opening files (this may take a while)..."
    # open with each bit as an element in a numpy array
    f1 = numpy.unpackbits(scipy.fromfile(open(FILE1), dtype="uint8"))
    f2 = numpy.unpackbits(scipy.fromfile(open(FILE2), dtype="uint8"))

    print f1[:100]
    print f2[:100]

    num_bits = len(f1)

    # split these into a few arrays so that we can print a progress indicator
    print "splitting arrays..."
    inputs = numpy.array_split(f1, NUM_SPLITS)
    outputs = numpy.array_split(f2, NUM_SPLITS)

    errors = 0
    # determine BER for each second/pair of arrays
    for i in range(NUM_SPLITS):
        ins = inputs[i]
        outs = outputs[i]
        # count the number of bits that differ
        errors += numpy.sum(ins != outs)
        print str((i + 1) / float(NUM_SPLITS) * 100) + "%..."

    print "...done!"

    print "total bits: " + str(num_bits)
    print "errors: " + str(errors)
    print "bit error ratio: " + str(errors) + "/" + str(
        num_bits) + " = " + str(errors / float(num_bits))
Example 4
    def get_data(self):
        self.text_sym.set_text("Symbol: %d" % (self.symbol))

        derot_data = scipy.fromfile(self.h_derot_file, dtype=scipy.complex64, count=self.occ_tones)
        acq_data = scipy.fromfile(self.h_acq_file, dtype=scipy.complex64, count=self.occ_tones)
        fft_data = scipy.fromfile(self.h_fft_file, dtype=scipy.complex64, count=self.fft_size)
        if(len(acq_data) == 0):
            print "End of File"
        else:
            self.acq_data_reals = [r.real for r in acq_data]
            self.acq_data_imags = [i.imag for i in acq_data]
            self.derot_data_reals = [r.real for r in derot_data]
            self.derot_data_imags = [i.imag for i in derot_data]

            self.unequalized_angle = [math.atan2(x.imag, x.real) for x in fft_data]
            self.equalized_angle = [math.atan2(x.imag, x.real) for x in acq_data]
            self.derot_equalized_angle = [math.atan2(x.imag, x.real) for x in derot_data]

            self.time = [i*(1/self.sample_rate) for i in range(len(acq_data))]
            ffttime = [i*(1/self.sample_rate) for i in range(len(fft_data))]

            self.freq = self.get_freq(ffttime, self.sample_rate)

            for i in range(len(fft_data)):
                if(abs(fft_data[i]) == 0.0):
                    fft_data[i] = complex(1e-6,1e-6)
            self.fft_data = [20*log10(abs(f)) for f in fft_data]
Example 5
def main(argv):
    inputfile = ''
    original_file = ''
    indices_file = ''
    try:
        opts, args = getopt.getopt(argv, "h:d:i:o:",
                                   ["dfile=", "itype=", "ofile="])
    except getopt.GetoptError:
        print 'file.py -d <file_to_decode>  -o <original_file>  -i <indices_file>'
        sys.exit(2)
    for opt, arg in opts:
        print opt, arg,
        if opt == '-h':
            print 'file.py -d <file_to_decode>  -o <original_file> -i <indices_file> '
            sys.exit()
        elif opt in ("-d", "--dfile"):
            inputfile = arg
        elif opt in ("-i", "--itype"):
            indices_file = arg
        elif opt in ("-o", "--ofile"):
            original_file = arg
        else:
            print "check help for usage"
            sys.exit()

    print inputfile
    print inputfile.split('_')

    to_decode_file = scipy.fromfile(open(inputfile), dtype=scipy.float32)
    original_string = scipy.fromfile(open(original_file), dtype=scipy.float32)
    oracle_indices = np.load(indices_file)
    print "\n lengths for measured data:", len(
        to_decode_file), "length of orig transmission: ", len(original_string)
    get_index = start_index(to_decode_file)
    start_data_index = get_index + 1  # get_index+1 #(m-get_index)  #m - (len(preamble) - get_index) +1
    print "starting of data is  ", start_data_index
    plt.plot(to_decode_file[:start_data_index], '*-')
    plt.savefig('abhinav.pdf')
    plt.clf()
    original_message = original_string[len(preamble):]
    to_decode_data1 = to_decode_file[start_data_index:]
    to_decode_data = to_decode_data1.astype(np.int64)
    #print "lengths of data going in:", len(original_message), len(to_decode_data)
    #bin_rep_to_decode=decoding_maj3(oracle_indices,to_decode_data)
    bin_rep_to_decode = single_demod(oracle_indices, to_decode_data)
    ber_single(oracle_indices, to_decode_data, original_message)
    print "\nGoing to decode"
    rs = reedsolo.RSCodec(32)
    message_decoded = ''
    try:
        message_decoded = rs.decode(bin_rep_to_decode)
    except:
        print "Cannot decode using RS decoder "
    print "decoded message is ", message_decoded
    print "\n"
Example 6
def loadData(input_data_path, output_data_path):
    """
    load Training Data, include real and imag part
    :return: dict contains all data
    """
    train_input = scipy.fromfile(open(input_data_path), dtype=scipy.complex64)
    train_output = scipy.fromfile(open(output_data_path),
                                  dtype=scipy.complex64)

    data = {"X": train_input, "Y": train_output}
    return data
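Note that scipy.fromfile and scipy.complex64 were plain re-exports of the NumPy names and are gone from recent SciPy releases; a sketch of the same loader written against NumPy directly:

import numpy as np

def load_data(input_data_path, output_data_path):
    """Same loader with np.fromfile, the drop-in replacement for scipy.fromfile."""
    train_input = np.fromfile(input_data_path, dtype=np.complex64)
    train_output = np.fromfile(output_data_path, dtype=np.complex64)
    return {"X": train_input, "Y": train_output}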
Example 7
def loadData(path_data1, path_data2, binSizeTim):
    ch0 = scipy.fromfile(path_data1, dtype=scipy.complex64)
    ch1 = scipy.fromfile(path_data2, dtype=scipy.complex64)

    N = min(len(ch0), len(ch1))
    rem = N % binSizeTim
    bNum = int(math.floor(N / binSizeTim))

    ch0rshp = np.reshape(ch0[0:N - rem], [binSizeTim, bNum])
    ch1rshp = np.reshape(ch1[0:N - rem], [binSizeTim, bNum])

    return ch0rshp, ch1rshp
Example 8
def main(argv):
    inputfile=''
    original_file=''
    indices_file=''
    try:
        opts, args = getopt.getopt(argv,"h:d:i:o:",["dfile=","itype=","ofile="])
    except getopt.GetoptError:
        print 'file.py -d <file_to_decode>  -o <original_file>  -i <indices_file>'
        sys.exit(2)
    for opt, arg in opts:
        print opt ,arg,
        if opt == '-h':
            print 'file.py -d <file_to_decode>  -o <original_file> -i <indices_file> '
            sys.exit()
        elif opt in ("-d", "--dfile"):
            inputfile = arg
        elif opt in ("-i", "--itype"):
            indices_file = arg
        elif opt in ("-o", "--ofile"):
            original_file = arg
        else:
            print "check help for usage" 
            sys.exit()

    print inputfile
    print    inputfile.split('_')

    to_decode_file = scipy.fromfile(open(inputfile), dtype=scipy.float32)
    original_string = scipy.fromfile(open(original_file), dtype=scipy.float32)
    oracle_indices = np.load(indices_file)
    print "\n lengths for measured data:" , len(to_decode_file), "length of orig transmission: ",len(original_string)
    get_index=start_index(to_decode_file)
    start_data_index = get_index+1 # get_index+1 #(m-get_index)  #m - (len(preamble) - get_index) +1
    print "starting of data is  ", start_data_index
    plt.plot(to_decode_file[:start_data_index],'*-')
    plt.savefig('abhinav.pdf')
    plt.clf()
    original_message =original_string[len(preamble):]
    to_decode_data1= to_decode_file[start_data_index:]
    to_decode_data= to_decode_data1.astype(np.int64)
    #print "lengths of data going in:", len(original_message), len(to_decode_data)
    #bin_rep_to_decode=decoding_maj3(oracle_indices,to_decode_data)
    bin_rep_to_decode=single_demod(oracle_indices,to_decode_data)
    ber_single(oracle_indices,to_decode_data, original_message)
    print "\nGoing to decode"
    rs= reedsolo.RSCodec(32)
    message_decoded =''
    try:
        message_decoded = rs.decode(bin_rep_to_decode)
    except:
        print "Cannot decode using RS decoder " 
    print "decoded message is ",message_decoded
    print "\n"
Example 9
def plot_crosscorrelation(path_data1, path_data2, sample_size):
    # Read in data
    ch0 = scipy.fromfile(path_data1, dtype=scipy.complex64, count=sample_size)
    ch1 = scipy.fromfile(path_data2, dtype=scipy.complex64, count=sample_size)

    bucketSize = 100

    # Number of samples
    N = len(ch0)
    bNum = int(math.floor(N / bucketSize))

    # Sampling frequency
    f_s = 1.0E6

    # Sampling period
    T_s = 1 / f_s

    # Doppler frequency range
    n_f_dop = 120
    f_dop = np.arange(0, n_f_dop, step=1)

    # time delay range
    R = 100

    plt.ion()
    # plt.colorbar()

    # corr = np.zeros((bucketSize, len(f_dop), bNum), dtype=np.complex64)
    corr_tmp = np.zeros((R, len(f_dop)), dtype=np.complex64)
    print(corr_tmp.shape)

    for bi in range(bNum):
        iStart = bi * bucketSize
        #iEnd = iStart + bucketSize - 1
        #i0 = 0
        den_1 = 1 / R
        for i in range(R):
            for j in range(len(f_dop)):
                # This equation is wrong, there must be a shift between ch0 and ch1
                # corr[i0,j, bi] = ch0[i]  * ch1[i] * np.exp(-1j *2*np.pi*f_dop[j]/(len(ch1)))
                corr_tmp[i, j] = (ch0[i + iStart] * ch1[i + iStart - j]
                                  * np.exp(-1j * 2 * np.pi * f_dop[j] * den_1))
            #i0 += 1

        plt.clf()
        # plt.plot(np.sin(np.arange(100)/float(bi+1)))
        plt.title("Bin number " + str(bi))
        plt.imshow(corr_tmp.imag)
        # plt.pcolormesh(corr_tmp.real)
        plt.pause(0.01)
    plt.ioff()
    plt.show()
Example 10
def readdatafile(fname):
    root, ext = os.path.splitext(fname)
    path, base = os.path.split(root)
    datafilename = base + ext
    parfile = root + '.par'
    
    (common, channels) = readparfile(parfile)

    channelindex = [n for n,d in enumerate(channels) if d['file'] == datafilename][0]
    thechannel = channels[channelindex]
    
    i = Data.Image()
    i.Name = datafilename
    i.ImageType = 'Topo'
    i.XPos = float(common['X Offset'])
    i.YPos = float(common['Y Offset'])
    i.XSize = float(common['Field X Size in nm'])
    i.YSize = float(common['Field Y Size in nm'])
    i.XRes = int(common['Image Size in X'])
    i.YRes = int(common['Image Size in Y'])
    i.ZScale = (float(thechannel['maxphys']) - float(thechannel['minphys'])) / \
        (float(thechannel['maxraw']) - float(thechannel['minraw']))
    i.UBias = float(pickValueForMode(common,thechannel,'Gap Voltage'))
    i.ISet = float(pickValueForMode(common,thechannel,'Feedback Set'))
    i.ScanSpeed = float(common['Scan Speed'])
    i.d = scipy.fromfile(file=fname,dtype=scipy.int16)
    i.d = i.d.byteswap()
    i.d.shape = i.XRes, i.YRes
    i.updateDataRange()
    return i
Example 11
def read_sample_file(path, filename, basetype=scipy.float32):
    cwd = os.path.dirname(os.path.realpath(__file__))
    os.chdir(path)
    packed_samples = scipy.fromfile(open(filename), dtype=basetype)
    y = packed_samples[::2] + 1j * packed_samples[1::2]
    os.chdir(cwd)
    return y
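Incidentally, the de-interleaving above (even floats as I, odd floats as Q) is exactly what reading the same file as complex64 gives for free; a sketch, assuming the file really holds interleaved float32 I/Q pairs:

import numpy as np

def read_sample_file_as_complex(path):
    # Each consecutive (I, Q) float32 pair becomes one complex64 sample,
    # matching packed_samples[::2] + 1j * packed_samples[1::2] above.
    return np.fromfile(path, dtype=np.complex64)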
Example 12
def read_pst(pst_path):
    """ read tillvision based .pst files as uint16.
    note: this func was flagged deprecated ("use the version in gioIO" instead,
    but that one never existed ... ")
    problematic: does not work on all .pst on my machine """

    inf_path = os.path.splitext(pst_path)[0] + '.inf'

    # reading stack size from inf
    meta = {}
    with open(inf_path,'r') as fh:
    #    fh.next()
        for line in fh.readlines():
            try:
                k,v = line.strip().split('=')
                meta[k] = v
            except:
                pass

    shape = sp.int32((meta['Width'],meta['Height'],meta['Frames']))


    raw = sp.fromfile(pst_path,dtype='int16')
    data = sp.reshape(raw,shape,order='F')
    return data.astype('uint16')
Example 13
def count(plot=False):  # Use plot to allow calling from other (supervisor) functions
    # File operations
    f = scipy.fromfile(open(getcwd() + '/' + relative_path_to_file),
                       dtype=scipy.float32)
    if Verbose:
        print("Number of datapoints is: %i" % f.size)

    #f = f[first_sample:last_sample]
    abs_f = abs(f[0::2] + 1j * f[1::2])
    try:
        abs_f = abs_f / np.amax(abs_f)
    except Exception as e:
        print("Error when normalising: \"%s\" " % e)
        return 0
    # Matched filter to reduce hf noise
    abs_f = scipy.signal.correlate(abs_f, np.ones(decim), mode='same') / decim

    count_rn16 = count_rn16s_gate(abs_f, plot)

    if plot:
        #        plt.plot(abs_f)
        plt.plot(np.around(abs_f / np.amax(abs_f)))
        plt.show()
    return count_rn16
Example 14
 def __init__(self, filename, center_freq=850e6, sample_rate=5e5, file_datatype = np.complex64):
     self.filename = filename;
     self.freq = center_freq;
     self.samp_rate = sample_rate
     self.cmplx_data = scipy.fromfile(open(filename), dtype = file_datatype)
     self.n = len(self.cmplx_data)
     self.samp_spacing = 1.0/self.samp_rate
Example 15
def after_record(tb, cf):
    tb.stop()
    tb.wait()

    import scipy
    vals = scipy.fromfile(open("out"), dtype=float)
    #print vals
    print len(vals)

    readings_per_sample = 2048
    samples = len(vals)/readings_per_sample
    sums = ([0] * readings_per_sample)
    for i in range(samples):
        sample = vals[i*readings_per_sample:(i+1)*readings_per_sample]
        for j in range(readings_per_sample):
            sums[j] += sample[j]
    for i in xrange(readings_per_sample):
        sums[i] /= samples 
         
    #print sums
    #for i in range(readings_per_sample):
     #print sums[i].imag ,sums[i].real 
     #print sums[i].real 
    print len(sums)
    x = [cf-1e6+i*(4e6/readings_per_sample) for i in range(readings_per_sample/2)]
    y = [10*(math.log10((sums[i])/10**5)) for i in range(readings_per_sample/2)]


      #if x[i] == cf:
        #y=0
    with open(get_record_filename(cf), "w") as f:
        f.write(str(zip(x, y)))
Example 16
def count(plot=False): #Use plot to allow calling from other (supervisor) functions
    # File operations
    f = scipy.fromfile(open(getcwd() + '/' + relative_path_to_file), dtype=scipy.float32)
    if Verbose:    
        print("Number of datapoints is: %i"% f.size)

    #f = f[first_sample:last_sample]
    abs_f = abs(f[0::2] + 1j * f[1::2])
    try:
        abs_f = abs_f / np.amax(abs_f)
    except Exception as e:
        print("Error when normalising: \"%s\" "%e)
        return 0        
    # Matched filter to reduce hf noise
    abs_f = scipy.signal.correlate(abs_f, np.ones(decim), mode='same') / decim


    #Remove dc
    to_remove = abs_f < 0.25
    abs_f = np.delete(abs_f, np.where(to_remove))

    mean = np.mean(abs_f)
    if mean<0.7:
        print("Mean value is %0.4f <0.7 so reads present: "%mean,end='')
        count_rn16 = count_rn16s_filter(abs_f,plot)
    else:
        print("Mean value is %0.4f >0.7 so no reads present"%mean,end='')
        return 0

    if plot:
#        plt.plot(abs_f)
        plt.plot(abs_f)   
        plt.show()
    return count_rn16
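The correlate-with-ones step above is a length-decim moving average; a sketch of the same smoothing with a dedicated filter (decim and the input are placeholders), identical up to edge handling:

import numpy as np
from scipy.ndimage import uniform_filter1d

decim = 8                                       # assumed value, for illustration only
abs_f = np.abs(np.random.randn(10000))          # stands in for the demodulated magnitude
smoothed = uniform_filter1d(abs_f, size=decim)  # boxcar average, like correlate(abs_f, ones(decim)) / decim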
Example 17
    def get_data(self, hfile):
        self.text_file_pos.set_text("FilePosition: %d" %
                                    (hfile.tell() // self.sizeof_data))
        try:
            f = scipy.fromfile(hfile,
                               dtype=self.datatype,
                               count=self.block_length)
        except MemoryError:
            print "End of File"
        else:
            self.f = scipy.array(f)
            self.time = scipy.array(
                [i * (1 / self.sample_rate) for i in range(len(self.f))])
            fhh2 = open(r'/home/andy/kk.txt', 'w')
            print str(len(self.f))
            data = []
            for j in range(len(self.f)):
                data.append(self.f[j])
            fhh2.write(str(data))
            fhh2.close()
Example 18
    def get_data(self):

        self.position = self.hfile.tell() / self.sizeof_data
        self.text_file_pos.set_text("File Position: %d" % self.position)
        try:
            self.iq = scipy.fromfile(self.hfile,
                                     dtype=self.datatype,
                                     count=self.block_length)
        except MemoryError:
            print("End of File")
            return False
        else:
            # retesting length here as newer version of scipy
            # does not throw a MemoryError, just returns a zero-length array.
            if (len(self.iq) > 0):
                tstep = 1.0 / self.sample_rate
                #self.time = scipy.array([tstep*(self.position + i)
                #            for i in xrange(len(self.iq))])
                self.time = scipy.array(
                    [tstep * (i) for i in xrange(len(self.iq))])
                self.iq_psd, self.freq = self.dopsd(self.iq)
                return True
            else:
                print "End of File"
                return False
Example 19
    def get_all_values(self, p_channels_num=1):
        assert (p_channels_num > 0)
        self.finish_reading()
        self.start_reading()
        f_len = float(os.path.getsize(self._file_path))
        ch_len = int((f_len / self._sample_size) / p_channels_num)
        if ch_len * self._sample_size * p_channels_num != f_len:
            LOGGER.info(''.join([
                "Remained samples ",
                str(f_len - ch_len * self._sample_size * p_channels_num),
                " .Should be 0."
            ]))

        vals = scipy.zeros((p_channels_num, ch_len))
        k = 0
        while True:
            try:
                r = scipy.fromfile(self._data_file, self._sample_struct_type,
                                   p_channels_num)
            except MemoryError:
                break
            if len(r) < p_channels_num:
                break
            for i in range(p_channels_num):
                vals[i, k] = r[i]
            k += 1
        return vals
Example 20
def read_pst(pst_path):
    """ read tillvision based .pst files as uint16.
    note: this func was flagged deprecated ("use the version in gioIO" instead,
    but that one never existed ... ")
    problematic: does not work on all .pst on my machine """

    inf_path = os.path.splitext(pst_path)[0] + '.inf'

    # reading stack size from inf
    meta = {}
    with open(inf_path,'r') as fh:
    #    fh.next()
        for line in fh.readlines():
            try:
                k,v = line.strip().split('=')
                meta[k] = v
            except:
                pass

    shape = sp.int32((meta['Width'],meta['Height'],meta['Frames']))


    raw = sp.fromfile(pst_path,dtype='int16')
    data = sp.reshape(raw,shape,order='F')
    return data.astype('uint16')
Example 21
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('file', help="Raw sample file")
    parser.add_argument('--thres',
                        type=int,
                        default=1000,
                        help="Power threshold")
    parser.add_argument('--out', help="Output file")
    parser.add_argument(
        '--skip',
        type=int,
        default=int(0.1 * 20e6),
        help="Inital samples to skip (USRP stabilization period)")

    args = parser.parse_args()

    wave = scipy.fromfile(args.file, dtype=scipy.int16)
    raw_samples = [
        complex(i, q) for i, q in itertools.izip(wave[::2], wave[1::2])
    ]
    print "%d raw samples" % (len(raw_samples))

    if args.out is None:
        args.out = '_condensed'.join(os.path.splitext(args.file))

    fh = open(args.out, 'wb')

    state = 'idle'
    count = 0
    countdown = 0
    for i in xrange(0, len(raw_samples), WINDOW):
        if i < args.skip:
            continue

        if state == 'idle':
            if any(
                [abs(c.real) > args.thres for c in raw_samples[i:i + WINDOW]]):
                state = 'trigger'
                countdown = 80 * 10
                count += 1

        if state == 'trigger':
            if countdown > 0:
                do_write = True
            elif any(
                [abs(c.real) > args.thres for c in raw_samples[i:i + WINDOW]]):
                do_write = True
            else:
                do_write = False
                state = 'idle'

            if do_write:
                for c in raw_samples[i:i + WINDOW]:
                    fh.write(struct.pack('<hh', int(c.real), int(c.imag)))
                countdown -= WINDOW

    fh.close()
    print "%d packets" % (count)
    print "Output written to %s" % (args.out)
Example 22
	def do_GET(self):
		self.send_response(200)
		self.send_header("Content-type","text/html")
		self.end_headers()
		query_string = urlparse.urlparse(self.path).query
		if(self.path == PATH_FLOW_GRAPH):
			fp = open(PATH_FLOW_GRAPH,"rb")
			self.wfile.write(fp.read())
		else:
			query_string = urllib.unquote(query_string)
			#Gather individual parameters
			param_list = query_string.split("&")
		
			#append filename of xml parsing file
			param_list.insert(0,"./xmlparse.py")
			xmlproc = subprocess.Popen(param_list)	
			xmlproc.wait()
			process = subprocess.Popen([PATH_TOP_BLOCK], stdout=subprocess.PIPE)
			time.sleep(2)
			process.kill()
			arr = scipy.fromfile(OUT_FILE_PATH,dtype=scipy.float32,count=NUM_VALUES)
			value = []
			for i in range(NUM_VALUES):
				value.append([str(i),float(arr[i])])
			description = [('Output number','string'),('Result','number')]
			table = gviz_api.DataTable(description)
			path_flow_graph = PATH_FLOW_GRAPH
			res = """
			<html>
	 	 		<head>
	    				<script type="text/javascript" src="https://www.google.com/jsapi"></script>
	    				<script type="text/javascript">
	      					google.load("visualization", "1", {packages:["corechart"]});
	      					google.setOnLoadCallback(drawChart);
	      					function drawChart()
						{
							var data = new google.visualization.DataTable(%(values)s,0.6);
							var options = {
		  						title: 'Square Plot'
								};

							var chart = new google.visualization.LineChart(document.getElementById('chart_div'));
							chart.draw(data, options);
	      					}
	    				</script>
	  			</head>
	  			<body>
					<h1>FLOW GRAPH:</h1><br/>
					<img src=%(path_flow_graph)s alt="flow graph" height="700" width="900" align="middle">
					<h1>PLOT:</h1><br/>
	    				<div id="chart_div" style="width: 1200px; height: 350px;"></div>
	  			</body>
			</html>
	"""
			
			table.AppendData(value)
			values = table.ToJSon(columns_order=('Output number','Result'))
			result = res % vars()
			self.wfile.write(result)
Example 23
def volume_from_file(filename, frameshape=(512, 512)):
    with file(filename) as fp:
        arr = scipy.fromfile(fp, dtype=scipy.uint8)
    x, y = frameshape
    n, = arr.shape
    n /=  x * y
    arr.shape = n, x, y
    return arr
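The frame count can also be left to NumPy by reshaping with -1; a sketch of the same stacking, written for Python 3 since the file() builtin above exists only in Python 2:

import numpy as np

def volume_from_file_np(filename, frameshape=(512, 512)):
    """Read a raw uint8 volume and reshape it into (frames, x, y)."""
    arr = np.fromfile(filename, dtype=np.uint8)
    return arr.reshape((-1,) + frameshape)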
Example 24
 def read_dat(self, filename=None):
     if not filename:
         filename = self.filename
     try:
         return sp.fromfile(filename)
     except:
         CXP.log.error('Could not extract data from data file.')
         raise
Example 25
 def read_dat(self, filename=None):
     if not filename:
         filename = self.filename
     try:
         return sp.fromfile(filename)
     except:
         CXP.log.error('Could not extract data from data file.')
         raise
Example 26
def NumpyTensorInitializer(gridShape, filename, bin=True):
    """
    Initialize a 9-component plasticity state by reading from a numpy "tofile" type file.
    """
    if bin:
        data = fromfile(filename)
    else:
        data = fromfile(filename,sep='  ')
    data = data.reshape([3,3] + list(gridShape))
    state = PlasticityState.PlasticityState(gridShape)
    dict = {('x','x') : (0,0), ('x','y') : (0,1), ('x','z') : (0,2),\
            ('y','x') : (1,0), ('y','y') : (1,1), ('y','z') : (1,2),\
            ('z','x') : (2,0), ('z','y') : (2,1), ('z','z') : (2,2)}
    field = state.GetOrderParameterField()
    for component in field.components:
        field[component] = copy(data[dict[component]]) 
    return state
Example 27
    def read_samples_scipy(self, filename, blocklen):
        import scipy
        print filename
        iq_samples = scipy.fromfile(filename,
                                    dtype=scipy.complex64,
                                    count=blocklen)
        return iq_samples
Example 28
    def loadData(self):
        """
        Load raw data from file
        """
        signal = scipy.fromfile(open(self.fileName + '.raw'),
                                dtype=scipy.complex64)
        signal = np.array(signal)
        return signal
Example 29
	def make_prediction(self,sample_file):
		import scipy
		print sample_file
		iq_samples = scipy.fromfile(sample_file, dtype=scipy.complex64)
		print iq_samples.shape
		fi, Pxi = self.PSD(iq_samples,3e6)	# Compute PSD
		tmpdf = self.ComputeFreqFeatures(Pxi,fi) # Feature Engineering
		return self.le_l.inverse_transform(self.predict_h2(tmpdf)) # Return predictions
Example 30
def plot_crosscorrelation(path_data1, path_data2, sample_size=None):
    # Read in data
    sample_size = 50000
    ch0 = scipy.fromfile(path_data1, dtype=scipy.complex64, count=sample_size)
    ch1 = scipy.fromfile(path_data2, dtype=scipy.complex64, count=sample_size)

    c = float(3E8)
    f0 = float(88.8E6)
    maxrange = float(100E3)
    maxvel = 500 / 3.6
    maxdoppler = (c + maxvel) / c * f0 - f0

    maxdelay = maxrange / c

    fs = float(200E3)
    min_len = min(len(ch0), len(ch1))

    maxshift = int(round(maxdelay * fs))
    maxtrans = int(round(min_len * maxdoppler / fs))
    print(maxtrans, maxdoppler, len(ch0), len(ch1))

    ch0_fft = fftpack.fft(ch0)
    ch1_fft = fftpack.fft(ch1)

    out = np.zeros((2 * maxtrans + 1, 2 * maxshift), dtype=np.complex64)
    # plt.ion()

    for i in range(-maxtrans, maxtrans):
        # ch1s = np.copy(ch1)
        corr_fft = ch0_fft * np.roll(ch1, i)
        corr = fftpack.ifft(corr_fft)

        out[i + maxtrans + 1] = np.concatenate(
            (corr[-maxshift:], corr[:maxshift]))

    print(out.shape)
    out = np.absolute(out)
    # plt.clf()
    # plt.plot(np.sin(np.arange(100)/float(bi+1)))
    # plt.title("Bin number "+str(bi))
    plt.imshow(out)
    # plt.pcolormesh(corr_tmp.real)
    # plt.pause(0.01)
    # plt.ioff()
    plt.show()
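For reference, the textbook circular cross-correlation via the FFT multiplies one spectrum by the conjugate of the other before inverse-transforming; a minimal sketch, not the Doppler-shifted variant the snippets above experiment with:

import numpy as np
from scipy import fftpack

def xcorr_fft(a, b):
    """Circular cross-correlation of two equal-length 1-D arrays via the FFT."""
    n = min(len(a), len(b))
    A = fftpack.fft(a[:n])
    B = fftpack.fft(b[:n])
    return fftpack.ifft(A * np.conj(B))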
Example 31
 def get_data(self, hfile):
     self.text_file_pos.set_text("File Position: %d" % (hfile.tell()//self.sizeof_data))
     f = scipy.fromfile(hfile, dtype=self.datatype, count=self.block_length)
     #print "Read in %d items" % len(self.f)
     if(len(f) == 0):
         print "End of File"
     else:
         self.f = f
         self.time = [i*(1/self.sample_rate) for i in range(len(self.f))]
Example 32
def generate_plot(filepath):
    '''
    Make a basic plot of the binary output data (from gnuradio)
    '''
    dataLen = 1000
    data = scipy.fromfile(open(filepath), dtype=scipy.complex64, count=dataLen)
    plt.figure()
    plt.plot(data)
    plt.show()
Example 33
 def _fromfile(self, fileid):
     self.header=sc.rec.fromfile(fileid, dtype=_HEADER_1, shape=1, byteorder='<')[0]
     dtype=self.get_dtype(self.header.datatype)
     xdim=self.header.xdim
     ydim=self.header.ydim
     nfram=self.header.NumFrames
     
     data=sc.fromfile(fileid, dtype=dtype, count=nfram*xdim*ydim)
     self.frames=sc.reshape(data,(nfram, ydim, xdim,))
Example 34
 def get_data(self, hfile):
     self.text_file_pos.set_text("File Position: %d" % (hfile.tell() // self.sizeof_data))
     try:
         f = scipy.fromfile(hfile, dtype=self.datatype, count=self.block_length)
     except MemoryError:
         print "End of File"
     else:
         self.f = scipy.array(f)
         self.time = scipy.array([i * (1 / self.sample_rate) for i in range(len(self.f))])
Example 35
def gr_f32_file_input(fp, verbose=0):
    #desc:  Imports GNU Radio Float32 File
    #input:  full path to file, assumes is valid
    #output: list of floats for doppler data

    #import gnuradio float32 type, uses scipy
    f = scipy.fromfile(open(fp, 'r'), dtype=scipy.float32)
    if verbose: print "Found {:d} 32 bit floats".format(len(f))
    return f
Example 36
 def get_data(self, hfile):
     self.text_file_pos.set_text("File Position: %d" % (hfile.tell()//self.sizeof_data))
     try:
         f = scipy.fromfile(hfile, dtype=self.datatype, count=self.block_length)
     except MemoryError:
         print("End of File")
     else:
         self.f = scipy.array(f)
         self.time = scipy.array([i*(1 / self.sample_rate) for i in range(len(self.f))])
Example 37
 def get_data(self):
     self.text_file_pos.set_text("File Position: %d" % (self.hfile.tell()//self.sizeof_data))
     self.iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
     #print "Read in %d items" % len(self.iq)
     if(len(self.iq) == 0):
         print "End of File"
     else:
         self.reals = [r.real for r in self.iq]
         self.imags = [i.imag for i in self.iq]
         self.time = [i*(1/self.sample_rate) for i in range(len(self.reals))]
Example 38
 def process(self, limit):
     '''Get upto limit values from outfile and return values to caller'''
     process = subprocess.Popen(self.path)
     time.sleep(2)
     process.kill()
     arr = scipy.fromfile(self.outfile, dtype=scipy.float32, count=limit)
     value = []
     for i in range(limit):
         value.append([str(i), float(arr[i])])
     return value
Example 39
 def _read_index_file(self, ext):
     f = self.__dict__['f' + ext]
     logger.info('Reading file %s ...' % f.name)
     if ext == 'fts':
         vals = f.read().split('\n')[:-1]
     else:
         vals = scipy.fromfile(f, sep='\n', dtype=scipy.int32)
     if ext == 'fts' or ext == 'ids':
         vals = dict((v, i) for i, v in enumerate(vals))
     self.__dict__[ext] = vals
Example 40
 def __init__(self,
              filename,
              center_freq=850e6,
              sample_rate=5e5,
              decimation=1,
              file_datatype=np.complex64):
     self.filename = filename
     self.freq = center_freq
     self.samp_rate = sample_rate
     self.cmplx_data = scipy.fromfile(open(filename), dtype=file_datatype)
     self.decimation = decimation
     if self.decimation > 1:
         self.cmplx_data = scipy.signal.resample(
             scipy.fromfile(open(filename), dtype=file_datatype),
             self.samp_rate / self.decimation)
         self.samp_rate = sample_rate / self.decimation
     self.n = len(self.cmplx_data)
     print 'self.n: ', self.n
     self.samp_spacing = 1.0 / self.samp_rate
Example 41
 def _read_index_file(self, ext):
     f = self.__dict__['f'+ext]
     logger.info('Reading file %s ...' % f.name)
     if ext == 'fts':
         vals = f.read().split('\n')[:-1]
     else:
         vals = scipy.fromfile(f, sep='\n', dtype=scipy.int32)
     if ext == 'fts' or ext == 'ids':
         vals = dict((v, i) for i, v in enumerate(vals))
     self.__dict__[ext] = vals
Example 42
 def get_data(self):
     self.text_file_pos.set_text("File Position: %d" % (self.hfile.tell()//self.sizeof_data))
     try:
         self.iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
     except MemoryError:
         print "End of File"
     else:
         self.reals = scipy.array([r.real for r in self.iq])
         self.imags = scipy.array([i.imag for i in self.iq])
         self.time = scipy.array([i*(1/self.sample_rate) for i in range(len(self.reals))])
Example 43
def filereader(filename): 
    z= scipy.fromfile(open(filename), dtype=scipy.complex64)
    # dtype with scipy.int16, scipy.int32, scipy.float32, scipy.complex64 or whatever type you were using.
    mag, phase,x,y = [], [], [], []
    for i in range(0, len(z)):
        mag.append(np.absolute(z[i]))
        x.append(z[i].real)
        y.append(z[i].imag)
        phase.append(np.angle(z[i]))
    return [x,y,mag, phase,z]
Example 44
	def process(self,limit):
		'''Get upto limit values from outfile and return values to caller'''
		process = subprocess.Popen(self.path)
		time.sleep(2)	
		process.kill()
		arr = scipy.fromfile(self.outfile, dtype=scipy.float32, count = limit)
		value = []
		for i in range(limit):
			value.append([str(i),float(arr[i])])
		return value
Example 45
    def read_grc_data(self):
        """Loads data from given path (self.path) and identifier (self.ident)

        :returns: Array of [time and phase] values.
        """
        self.phase = scipy.fromfile(open(self.path + '/' + self.ident + '/' +
                                         self.ident + '-phase-new.dat'),
                                    dtype=scipy.float32)
        self.time = scipy.fromfile(open(self.path + '/' + self.ident + '/' +
                                        self.ident + '-time-new.dat'),
                                   dtype=scipy.float32)

        # fixes different lengths of data phase and time; this does not significantly "destroy" data
        if len(self.phase) < len(self.time):
            self.time = self.time[:len(self.phase)]
        elif len(self.phase) > len(self.time):
            self.phase = self.phase[:len(self.time)]

        return [self.time, self.phase]
Example 46
    def get_data(self):
        self.text_file_pos.set_text("File Position: %d" % (self.hfile.tell()//self.sizeof_data))
        self.floats = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
        #print "Read in %d items" % len(self.floats)
        if(len(self.floats) == 0):
            print "End of File"
        else:
            self.f_fft = self.dofft(self.floats)

            self.time = [i*(1/self.sample_rate) for i in range(len(self.floats))]
            self.freq = self.calc_freq(self.time, self.sample_rate)
Example 47
def avim(dtype = float, pix=2048):
    '''
    averages scipy stored binary DATs.
    Inputs: the dtype and the image size
    '''
    filenames = tkFileDialog.askopenfilenames(title = 'select files to average',
                filetypes=[('Scipy DATs','.dat')])
    im = sp.zeros((pix,pix))
    for file in filenames:
        im += 1.*sp.fromfile(file,dtype).reshape(pix,pix)
    return im/len(filenames)
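The accumulate-and-divide loop above can also be written as a single mean over a stacked array; a sketch under the same assumptions (square pix x pix images stored flat):

import numpy as np

def average_dats(filenames, dtype=float, pix=2048):
    """Average a list of raw .dat images of shape (pix, pix)."""
    stack = [np.fromfile(f, dtype).reshape(pix, pix) for f in filenames]
    return np.mean(stack, axis=0)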
Example 48
    def get_data(self):
        self.position = self.hfile.tell()/self.sizeof_data
        self.text_file_pos.set_text("File Position: %d" % self.position)
        self.iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
        #print "Read in %d items" % len(self.iq)
        if(len(self.iq) == 0):
            print "End of File"
        else:
            tstep = 1.0 / self.sample_rate
            self.time = [tstep*(self.position + i) for i in xrange(len(self.iq))]

            self.iq_psd, self.freq = self.dopsd(self.iq)
Example 49
def zeroextract2d(N,filename):
    a = fromfile(filename)
    a = a.reshape(9,N,N)
    b = numpy.zeros((9,N/2,N/2),float)
    for i in range(9):
        ka = fft.rfftn(a[i])
        kb = numpy.zeros((N/2,N/4+1),complex)
        kb[:N/4,:]=ka[:N/4,:N/4+1]
        kb[-N/4:,:]=ka[-N/4:,:N/4+1]
        b[i] = fft.irfftn(kb)
    b /= 4.
    b.tofile(filename.replace(str(N),str(N/2)))
Example 50
    def get_data(self):
        self.position = self.hfile.tell()/self.sizeof_data
        self.text_file_pos.set_text("File Position: %d" % self.position)
        try:
            self.iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
        except MemoryError:
            print "End of File"
        else:
            tstep = 1.0 / self.sample_rate
            #self.time = scipy.array([tstep*(self.position + i) for i in xrange(len(self.iq))])
            self.time = scipy.array([tstep*(i) for i in xrange(len(self.iq))])

            self.iq_psd, self.freq = self.dopsd(self.iq)
Example 51
def NumpyTensorInitializerForRho(gridShape, filename):
    """
    Initialize a 9-component plasticity state by reading from a numpy "tofile" type file.
    """
    data = fromfile(filename)
    data = data.reshape([3,3] + list(gridShape))
    dict = {('x','x') : (0,0), ('x','y') : (0,1), ('x','z') : (0,2),\
            ('y','x') : (1,0), ('y','y') : (1,1), ('y','z') : (1,2),\
            ('z','x') : (2,0), ('z','y') : (2,1), ('z','z') : (2,2)}
    rho = Fields.TensorField(gridShape) 
    for component in rho.components:
        rho[component] = copy(data[dict[component]]) 
    state = RhoState.RhoState(gridShape, field=rho)
    return state
Example 52
    def get_data(self):
        self.text_file_pos.set_text("File Position: %d" % (self.hfile.tell() // self.sizeof_data))
        try:
            iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
        except MemoryError:
            print "End of File"
        else:
            # retesting length here as newer version of scipy does not throw a MemoryError, just
            # returns a zero-length array
            if len(iq) > 0:
                self.reals = scipy.array([r.real for r in iq])
                self.imags = scipy.array([i.imag for i in iq])

                self.time = scipy.array([i * (1 / self.sample_rate) for i in range(len(self.reals))])
                return True
            else:
                print "End of File"
                return False
Example 53
    def _get_channels(self, chans, kind='a'):
        """returns a numpy array of the channel data"""

        # init and check
        if kind not in ['a', 'd', 'e']:
            raise ValueError('unknown kind (%s), accepts: \'a\', \'d\' and \'e\'!')
        ch = {'a': self._ch_analog,
              'd': self._ch_digital,
              'e': self._ch_event}[kind]
        dtype = {'a': sp.int16,
                 'd': sp.int32,
                 'e': sp.int32}[kind]
        if not chans:
            raise ValueError('chans not valid!')
        n_sample = 0
        for c in chans:
            try:
                if ch[c].n_sample > n_sample:
                    n_sample = ch[c].n_sample
            except KeyError:
                warnings.warn('invalid channel: %s' % c, RuntimeWarning)
        if n_sample == 0:
            warnings.warn('no data for chans %s' % str(chans), RuntimeWarning)
            return sp.zeros((n_sample, len(chans)), dtype=dtype)

        # collect data
        rval = sp.zeros((n_sample, len(chans)), dtype=dtype)
        for i, c in enumerate(chans):
            # load from file
            try:
                header = ch[c]
                self._file.seek(header.data_offset)
                load_item = sp.fromfile(file=self._file, count=header.n_sample, dtype=dtype)
            except KeyError:
                warnings.warn('invalid channel: %s!' % c, RuntimeWarning)
                load_item = sp.zeros(n_sample, dtype=dtype)

            # unfortunately the channel shapes are not always consistent
            # across the tetrode. but we preallocate space such that the
            # largest item may fit in the buffer.
            rval[:load_item.size, i] = load_item

        # return stuff
        return rval
Example 54
def read_samples_c(filename, start, in_size, min_size=0):
    # Complex samples are handled differently
    fhandle = open(filename, 'r')
    fhandle.seek(start*gr.sizeof_gr_complex, 0)
    data = scipy.fromfile(fhandle, dtype=scipy.complex64, count=in_size)
    data_min = 1.1*float(min(data.real.min(), data.imag.min()))
    data_max = 1.1*float(max(data.real.max(), data.imag.max()))
    data = data.tolist()
    fhandle.close()

    if(min_size > 0):
        if(len(data) < in_size):
            print "Warning: read in {0} samples but asked for {1} samples.".format(
                len(data), in_size)
    else:
        # If we have to, append 0's to create min_size samples of data
        if(len(data) < min_size):
            data += (min_size - len(data)) * [complex(0,0)]

    return data, data_min, data_max
Example 55
def filereader(filename,fs): 
    z= scipy.fromfile(open(filename), dtype=scipy.complex64)
    print "length of total z= ", len(z)
    # dtype with scipy.int16, scipy.int32, scipy.float32, scipy.complex64 or whatever type you were using.
    mag, phase,x,y = [], [], [], []
    start_sec=0.001
    end_sec=0.1
    z_needed = z[start_sec*fs: end_sec*fs]
    #z_needed = z[600+start_sec*fs: 600+end_sec*fs]
    z=z_needed
    
    len_z = int(len(z))
    print "length of needed z= ", len(z)
    #len_z = int(20.0/6*len(z))
    #len_z = int(400.0/6000*len(z))
    for i in range(0, len_z):
        mag.append(np.absolute(z[i]))
        x.append(z[i].real)
        y.append(z[i].imag)
        phase.append(np.angle(z[i]))
    return [x,y,mag, phase,z]
Example 56
    def get_data(self):
        self.position = self.hfile.tell()/self.sizeof_data
        self.text_file_pos.set_text("File Position: %d" % self.position)
        try:
            self.iq = scipy.fromfile(self.hfile, dtype=self.datatype, count=self.block_length)
        except MemoryError:
            print "End of File"
            return False
        else:
            # retesting length here as newer version of scipy does not throw a MemoryError, just
            # returns a zero-length array
            if(len(self.iq) > 0):
                tstep = 1.0 / self.sample_rate
                #self.time = scipy.array([tstep*(self.position + i) for i in xrange(len(self.iq))])
                self.time = scipy.array([tstep*(i) for i in xrange(len(self.iq))])

                self.iq_psd, self.freq = self.dopsd(self.iq)
                return True
            else:
                print "End of File"
                return False
Example 57
def read_samples(filename, start, in_size, min_size, dtype, dtype_size):
    # Read in_size number of samples from file
    fhandle = open(filename, 'r')
    fhandle.seek(start*dtype_size, 0)
    data = scipy.fromfile(fhandle, dtype=dtype, count=in_size)
    data_min = 1.1*data.min()
    data_max = 1.1*data.max()
    data = data.tolist()
    fhandle.close()

    if(min_size > 0):
        if(len(data) < in_size):
            print "Warning: read in {0} samples but asked for {1} samples.".format(
                len(data), in_size)
    else:
        # If we have to, append 0's to create min_size samples of data
        if(len(data) < min_size):
            data += (min_size - len(data)) * [dtype(0)]


    return data, data_min, data_max
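Finally, the seek-then-read pattern used throughout these examples maps directly onto the offset argument of np.fromfile (NumPy >= 1.17); a sketch of the same windowed read:

import numpy as np

def read_samples_np(filename, start, in_size, dtype=np.complex64):
    """Read in_size items starting at item index `start` (offset is given in bytes)."""
    itemsize = np.dtype(dtype).itemsize
    return np.fromfile(filename, dtype=dtype, count=in_size, offset=start * itemsize)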