Example 1
 def test_complex(self):
     a = rand(13,4) + 1j*rand(13,4)
     fname = tempfile.mktemp('.dat')
     io.write_array(fname,a)
     b = io.read_array(fname,atype=N.Complex)
     assert_array_almost_equal(a,b,decimal=4)
     os.remove(fname)
Example 2
 def test_complex(self):
     a = rand(13, 4) + 1j * rand(13, 4)
     fname = tempfile.mktemp('.dat')
     io.write_array(fname, a)
     b = io.read_array(fname, atype=N.Complex)
     assert_array_almost_equal(a, b, decimal=4)
     os.remove(fname)
Example 3
 def test_float(self):
     a = rand(3,4)*30
     fname = tempfile.mktemp('.dat')
     io.write_array(fname,a)
     b = io.read_array(fname)
     assert_array_almost_equal(a,b,decimal=4)
     os.remove(fname)
Example 4
 def test_float(self):
     a = rand(3, 4) * 30
     fname = tempfile.mktemp('.dat')
     io.write_array(fname, a)
     b = io.read_array(fname)
     assert_array_almost_equal(a, b, decimal=4)
     os.remove(fname)
Example 5
 def test_integer(self):
     from scipy import stats
     a = stats.randint.rvs(1,20,size=(3,4))
     fname = tempfile.mktemp('.dat')
     io.write_array(fname,a)
     b = io.read_array(fname,atype=a.dtype.char)
     assert_array_equal(a,b)
     os.remove(fname)
Example 6
 def test_integer(self):
     from scipy import stats
     a = stats.randint.rvs(1, 20, size=(3, 4))
     fname = tempfile.mktemp('.dat')
     io.write_array(fname, a)
     b = io.read_array(fname, atype=a.dtype.char)
     assert_array_equal(a, b)
     os.remove(fname)
Example 7
def walk_and_run(top_dir, tempdir='', action=default_action):
    """recurse through directory structure, looking for .LS8 files.
       Each .LS8 file is run.
       """
    if tempdir == '': # let tempfile library choose a tempdir by default
        tempdir = tempfile.mkdtemp()
    sys.stderr.write( "Temporary directory will be %s.\n" % 
            os.path.abspath(tempdir) )
    top_dir = os.path.abspath(top_dir)
    not_converged = file('not_converged', 'w')

    for dirpath, dirnames, filenames in os.walk(top_dir):
        sys.stderr.write( "Searching %s...\n" % dirpath )
        for filename in filenames:
            if filename.upper().endswith('LS8'):
                sys.stderr.write( "Found %s.\n" % filename )
                lisfile = LisrelInput(os.path.join(dirpath, filename))
                # adjust input to output matrices separately
                lisfile.write_to_file(lisfile.get_modified_input())
                try:
                    lisfile.run_lisrel(tempdir) # run lisrel to write output to tempdir
                except:
                    print "LISREL encountered an error, skipping...\n"
                    break
                finally:
                    if os.path.exists(lisfile.path + '.backup'):
                        os.remove(lisfile.path)
                        os.rename(lisfile.path + '.backup', lisfile.path)
                    else:
                        sys.stderr.write('WARNING: Could not restore backup LS8 file.\n')
                if solution_obtained(os.path.join(tempdir, 'OUT')):
                    smats = lisfile.standardize_matrices()
                    for igrp in range(len(smats)):
                        for matname, stanmat in smats[igrp].iteritems():
                            sys.stderr.write( "INPUT %s, GROUP %d, MATRIX %s:" % \
                                    (filename, igrp+1, matname))
                            action(stanmat, matname=matname, group_num = igrp+1,
                                    filename=filename, dirpath=dirpath)
                    # try to write the variance matrix of 
                    #   the standardized estimates
                    try:
                        vnames, vmat = lisfile.get_var_standardized(path = \
                                tempdir)
                        vfile = open(os.path.join(dirpath, 
                                    'vcov_standardized.txt'), 'w')
                        io.write_array(vfile, vmat, separator='\t',
                                    linesep='\n', precision=10,) # closes vfile
                        write_tuple_to_file(vnames, 
                                    path=(dirpath, 'vcov_standardized.names'))
                    except Exception, e:
                        sys.stderr.write('ERROR writing or getting vcov matrix of standardized estimates for group %d. Error: %s\n' % (igrp, str(e.args)))
                else:
                    print "No solution could be obtained, skipping...\n"
                    not_converged.write(os.path.join(dirpath, filename) + "\n")

    not_converged.close()
Example 8
    def saveSample(self, path):
        outData = self.dataContainer.data

        if path[-3:] == 'csv':
            outFile = file(path, 'wb')
            csvWriter = csv.writer(outFile, dialect='excel')
            #csvWriter.writerow([self.dataContainer.dimensions[0].label, self.dataContainer.label])
            csvWriter.writerows(outData.tolist())
        else:
            outFile = file(path, 'w')
            #outFile.write(str([self.dataContainer.dimensions[0].label, self.dataContainer.label])+"\n")
            write_array(outFile, outData.tolist())
        outFile.close()
Example 9
    def saveSample(self, path):
        outData = self.dataContainer.data

        if path[-3:] == 'csv':
            outFile = file(path,'wb')
            csvWriter = csv.writer(outFile,dialect='excel')
            #csvWriter.writerow([self.dataContainer.dimensions[0].label, self.dataContainer.label])
            csvWriter.writerows(outData.tolist())
        else:
            outFile = file(path,'w')
            #outFile.write(str([self.dataContainer.dimensions[0].label, self.dataContainer.label])+"\n")
            write_array(outFile,outData.tolist())
        outFile.close()
Example 10
 def saveField(self, path):
     if not isQuantity(self.dataContainer.unit):
         self.ordinate = self.dataContainer.data*self.dataContainer.unit
     else:
         self.ordinate = self.dataContainer.data*self.dataContainer.unit.value
     self.abscissa = self.dataContainer.dimensions[0].data
     outData = scipy.transpose(scipy.array([self.abscissa,self.ordinate]))
     if path[-3:] == 'csv':
         outFile = file(path,'wb')
         csvWriter = csv.writer(outFile,dialect='excel')
         csvWriter.writerow([self.dataContainer.dimensions[0].label, self.dataContainer.label])
         csvWriter.writerows(outData.tolist())
     else:
         outFile = file(path,'w')
         outFile.write(str([self.dataContainer.dimensions[0].label, self.dataContainer.label])+"\n")
         write_array(outFile,outData)
     outFile.close()
Example 11
def convert_to_midi(reffile,savefile):
    """ converts REF files from mirex05 to MIDI data
    """
    target_file = DATA_PATH + reffile
    data = read_array(target_file)
    
    # take the frequency column (dropping the time axis) and convert to MIDI
    midi = np.round( audiotools.freq_to_midi(data[:,1]) )
    
    # calc chromas
    chromas = np.ones(len(midi)) * (-1)
    for n in range(len(midi)):
        if midi[n]>0:
            chromas[n] = midi[n] % 12
    
    # write data to disk
    writedata = np.c_[data[:,0], midi, chromas]
    FILE = open(DATA_PATH + savefile,"w")
    write_array(FILE,writedata,',')
    FILE.close()
Example 12
 def saveField(self, path):
     if not isQuantity(self.dataContainer.unit):
         self.ordinate = self.dataContainer.data * self.dataContainer.unit
     else:
         self.ordinate = self.dataContainer.data * self.dataContainer.unit.value
     self.abscissa = self.dataContainer.dimensions[0].data
     outData = scipy.transpose(scipy.array([self.abscissa, self.ordinate]))
     if path[-3:] == 'csv':
         outFile = file(path, 'wb')
         csvWriter = csv.writer(outFile, dialect='excel')
         csvWriter.writerow([
             self.dataContainer.dimensions[0].label,
             self.dataContainer.label
         ])
         csvWriter.writerows(outData.tolist())
     else:
         outFile = file(path, 'w')
         outFile.write(
             str([
                 self.dataContainer.dimensions[0].label,
                 self.dataContainer.label
             ]) + "\n")
         write_array(outFile, outData)
     outFile.close()
Example 13
def deact_batch(filename=""):
    """Fits deactivation time constants: Monoexponential until <=-70,
    biexponential for >-70 mV.

    filename -- If not an empty string, stores the best-fit parameters
                in this file."""

    stf = __import__("stf")
    # Some ugly definitions for the time being
    gNMono = 5  # Monoexponential fits
    gNBi = 4  # Biexponential fits
    gFMono = 0  # id of monoexponential function
    gFBi = 3  # id of biexponential function
    gMonoDictSize = stf.leastsq_param_size(gFMono) + 1  # Parameters, chisqr
    gBiDictSize = stf.leastsq_param_size(gFBi) + 1  # Parameters, chisqr

    if (gMonoDictSize < 0 or gBiDictSize < 0):
        print('Couldn\'t retrieve function; aborting now.')
        return False

    if (not (stf.check_doc())):
        print('Couldn\'t find an open file; aborting now.')
        return False

    # set the test pulse window cursors:
    if (not (stf.set_peak_start(70.84, True))):
        return False
    if (not (stf.set_peak_end(74.84, True))):
        return False

    if (not (stf.set_base_start(69.5, True))):
        return False
    if (not (stf.set_base_end(70.5, True))):
        return False

    if (not (stf.set_peak_mean(1))):
        return False
    if (not (stf.set_peak_direction("down"))):
        return False

    # Monoexponential loop ---------------------------------------------------

    firstpass = True
    # A list for dictionary keys...
    mono_keys = []
    # ... and values:
    mono_values = np.empty((gMonoDictSize, gNMono))
    if not filename == "":
        ls_file = np.empty((gNMono, stf.leastsq_param_size(gFMono)))

    # Monoexponential fits:
    for n in range(0, gNMono):
        if (stf.set_trace(n) == False):
            print("Couldn't set a new trace; aborting now.")
            return False

        print('Analyzing trace %d of %d' % (n + 1, stf.get_size_channel()))

        # set the fit window cursors:

        # use the index for the start cursor:
        if (not (stf.set_fit_start(stf.peak_index(True)))):
            return False

        # fit 1.5 ms:
        fit_end_time = stf.get_fit_start(True) + 1.0
        if (not (stf.set_fit_end(fit_end_time, True))):
            return False

        # Least-squares fitting:
        p_dict = stf.leastsq(gFMono)
        if not filename == "":
            ls_file[n][0] = p_dict["Amp_0"]
            ls_file[n][1] = p_dict["Tau_0"]
            ls_file[n][2] = p_dict["Offset"]

        if (p_dict == 0):
            print('Couldn\'t perform a fit; aborting now.')
            return False

        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if (firstpass == True):
                mono_keys.append(k)
            mono_values[row][n] = v
            row = row + 1

        firstpass = False

    monoDict = dict()
    # Create the dictionary for the table:
    entry = 0
    for elem in mono_keys:
        monoDict[elem] = mono_values[entry].tolist()
        entry = entry + 1

    if (not (stf.show_table_dictlist(monoDict))):
        return False

    # Biexponential loop ---------------------------------------------------

    firstpass = True
    # A list for dictionary keys...
    bi_keys = []
    # ... and values:
    bi_values = np.empty((gBiDictSize, gNBi))

    # Biexponential fits:
    for n in range(gNMono, gNBi + gNMono):
        if (stf.set_trace(n) == False):
            print('Couldn\'t set a new trace; aborting now.')
            return False

        print('Analyzing trace %d of %d' % (n + 1, stf.get_size_channel()))

        # set the fit window cursors:

        # use the index for the start cursor:
        if (not (stf.set_fit_start(stf.peak_index(True)))):
            return False

        # fit 4 ms:
        fit_end_time = stf.get_fit_start(True) + 3.5
        if (not (stf.set_fit_end(fit_end_time, True))):
            return False

        # Least-squares fitting:
        p_dict = stf.leastsq(gFBi)

        if (p_dict == 0):
            print('Couldn\'t perform a fit; aborting now.')
            return False

        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if (firstpass == True):
                bi_keys.append(k)
            bi_values[row][n - gNMono] = v
            row = row + 1

        firstpass = False

    biDict = dict()

    # Create the dictionary for the table:
    entry = 0
    for elem in bi_keys:
        biDict[elem] = bi_values[entry].tolist()
        entry = entry + 1

    if not filename == "":
        write_array(file(filename, 'w'), ls_file, precision=15)

    if (not (stf.show_table_dictlist(biDict))):
        return False

    return True
Example 14
def save( ar, fileName ):
    from scipy.io import write_array
    write_array( fileName, ar, precision = 8 )
Example 15
def exportPointset(thepointset, infodict, separator='   ', linesep='\n',
                   precision=12, suppress_small=0, varvaldir='col',
                   ext='', append=False):

    assert varvaldir in ['col', 'row'], \
           "invalid variable value write direction"
    # in order to avoid import cycles, cannot explicitly check that
    # thepointset is of type Pointset, because Points.py imports this file
    # (utils.py), so check an attribute instead.
    try:
        thepointset.coordnames
    except AttributeError:
        raise TypeError, "Must pass Pointset to this function: use arrayToPointset first!"
    infodict_usedkeys = []
    for key, info in infodict.iteritems():
        if isinstance(info, str):
            infodict_usedkeys += [info]
        elif info == []:
            infodict[key] = copy.copy(thepointset.coordnames)
            infodict_usedkeys.extend(thepointset.coordnames)
        else:
            infodict_usedkeys += list(info)
    allnames = copy.copy(thepointset.coordnames)
    if thepointset._parameterized:
        allnames.append(thepointset.indepvarname)
    remlist = remain(infodict_usedkeys, allnames+range(len(allnames)))
    if remlist != []:
        print "Coords not found in pointset:", remlist
        raise ValueError, \
              "invalid keys in infodict - some not present in thepointset"
    assert isinstance(ext, str), "'ext' extension argument must be a string"
    if ext != '':
        if ext[0] != '.':
            ext = '.'+ext
    if append:
        assert varvaldir == 'col', ("append mode not supported for row "
                                     "format of data ordering")
        modestr = 'a'
    else:
        modestr = 'w'
    totlen = len(thepointset)
    if totlen == 0:
        raise ValueError, ("Pointset is empty")
    for fname, tup in infodict.iteritems():
        try:
            f = open(fname+ext, modestr)
        except IOError:
            print "There was a problem opening file "+fname+ext
            raise
        try:
            if isinstance(tup, str):
                try:
                    varray = thepointset[tup]
                except TypeError:
                    raise ValueError, "Invalid specification of coordinates"
            elif isinstance(tup, int):
                try:
                    varray = thepointset[:,tup].toarray()
                except TypeError:
                    raise ValueError, "Invalid specification of coordinates"
            elif type(tup) in [list, tuple]:
                if alltrue([type(ti)==str for ti in tup]):
                    thetup=list(tup)
                    if thepointset.indepvarname in tup:
                        tix = thetup.index(thepointset.indepvarname)
                        thetup.remove(thepointset.indepvarname)
                    try:
                        vlist = thepointset[thetup].toarray().tolist()
                    except TypeError:
                        raise ValueError, "Invalid specification of coordinates"
                    if len(thetup)==1:
                        vlist = [vlist]
                    if thepointset.indepvarname in tup:
                        vlist.insert(tix, thepointset.indepvararray.tolist())
                    varray = array(vlist)
                elif alltrue([type(ti)==int for ti in tup]):
                    try:
                        varray = thepointset[:,tup].toarray()
                    except TypeError:
                        raise ValueError, "Invalid specification of coordinates"
                else:
                    raise ValueError, "Invalid specification of coordinates"
            else:
                f.close()
                raise TypeError, \
                   "infodict values must be singletons or tuples/lists of strings or integers"
        except IOError:
            f.close()
            print "Problem writing to file"+fname+ext
            raise
        except KeyError:
            f.close()
            raise KeyError, ("Keys in infodict not found in pointset")
        if varvaldir == 'row':
            write_array(f, varray, separator, linesep,
                        precision, suppress_small, keep_open=0)
        else:
            write_array(f, transpose(varray), separator, linesep,
                        precision, suppress_small, keep_open=0)
Example 16
def act_batch(nFunc=5, filename="", lat=60):
    """Fits activation and inactivation of 15 iv pulses
    using a biexponential funtion with a delay, creates a
    table showing the results.
    
    Keyword argument:
    nFunc --    Index of function used for fitting. At present,
                10 is the HH gNa function,
                5  is a sum of two exponentials with a delay.
    filename -- If not an empty string, stores the best-fit parameters
                in this file."""

    stf = __import__("stf")
    # Some ugly definitions for the time being
    gFitStart = 70.5 + lat / 1000.0  # fit start cursor varies with lat
    gFSelect = nFunc  # HH function
    gDictSize = stf.leastsq_param_size(
        gFSelect) + 2  # Parameters, chisqr, peak value
    gBaseStartCtrl = 69.5  # Start and end of the baseline before the control pulse, in ms
    gBaseEndCtrl = 70.5
    gPeakStartCtrl = 70.64  # Start and end of the peak cursors for the control pulse, in ms
    gPeakWindowSizes = (2.5, 2, 1.5, 1, 1, 0.8, 0.8, 0.8, 0.6, 0.6, 0.5, 0.5,
                        0.4, 0.4, 0.4)
    gFitDurations = (8, 8, 7, 6, 5.5, 5, 4.5, 3.5, 2.5, 2, 1.5, 1.5, 1.0, 0.8,
                     0.8)
    gPulses = len(gFitDurations)  # Number of traces

    if (gDictSize < 0):
        print("Couldn\'t retrieve function id=%d; aborting now." % gFSelect)
        return False

    if (not (stf.check_doc())):
        print("Couldn\'t find an open file; aborting now.")
        return False

    # set cursors:
    if (not (stf.set_peak_start(gPeakStartCtrl, True))):
        return False
    if (not (stf.set_peak_end(stf.get_size_trace(0) - 1))):
        return False
    if (not (stf.set_base_start(gBaseStartCtrl, True))):
        return False
    if (not (stf.set_base_end(gBaseEndCtrl, True))):
        return False

    if (not (stf.set_peak_mean(3))):
        return False
    if (not (stf.set_peak_direction("both"))):
        return False

    firstpass = True
    # A list for dictionary keys and values:
    dict_keys = []
    dict_values = np.empty((gDictSize, stf.get_size_channel()))
    if not filename == "":
        ls_file = np.empty((gPulses, stf.leastsq_param_size(nFunc)))
    for n in range(0, gPulses):
        if (stf.set_trace(n) == False):
            print('Couldn\'t set a new trace; aborting now.')
            return False

        print('Analyzing trace %d of %d' % (n + 1, stf.get_size_channel()))
        # set the fit window cursors:
        if (not (stf.set_peak_end(gPeakStartCtrl + gPeakWindowSizes[n],
                                  True))):
            return False
        if (not (stf.set_fit_start(gFitStart, True))):
            return False
        if (not (stf.set_fit_end(gFitStart + gFitDurations[n], True))):
            return False

        stf.measure()

        # Least-squares fitting:
        p_dict = stf.leastsq(gFSelect)
        if not filename == "":
            ls_file[n][0] = p_dict["gprime_na"]
            ls_file[n][1] = p_dict["tau_m"]
            ls_file[n][2] = p_dict["tau_h"]
            ls_file[n][3] = p_dict["offset"]

        if (p_dict == 0):
            print('Couldn\'t perform a fit; aborting now.')
            return False

        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if (firstpass == True):
                dict_keys.append(k)
            dict_values[row][n] = v
            row = row + 1

        if (firstpass):
            dict_keys.append("Peak amplitude")
        dict_values[row][n] = stf.get_peak() - stf.get_base()

        firstpass = False

    if not filename == "":
        write_array(file(filename, 'w'), ls_file, precision=15)

    retDict = dict()
    # Create the dictionary for the table:
    entry = 0
    for elem in dict_keys:
        retDict[elem] = dict_values[entry].tolist()
        entry = entry + 1

    return stf.show_table_dictlist(retDict)
Example 17
def gc_homeo_af():
    import contrib.jsldefs
    import topo.command.pylabplots
    import contrib.jacommands
    from topo.command.analysis import save_plotgroup
    from topo.analysis.featureresponses import FeatureResponses , PatternPresenter, FeatureMaps            
    #FeatureResponses.repetitions=10

    FeatureMaps.selectivity_multiplier=20

    PatternPresenter.duration=0.2
    PatternPresenter.apply_output_fns=False
    import topo.command.pylabplots
    reload(topo.command.pylabplots)

    
    on = topo.sim["LGNOn"].in_connections[0].strength
    off = topo.sim["LGNOff"].in_connections[0].strength
    if __main__.__dict__.get("GC",False):
       topo.sim["LGNOn"].in_connections[0].strength=0
       topo.sim["LGNOff"].in_connections[0].strength=0
    
    contrib.jsldefs.homeostatic_analysis_function()
    topo.command.pylabplots.fftplot(topo.sim["V1"].sheet_views["OrientationPreference"].view()[0],filename="V1ORMAPFFT")
    
    from topo.misc.filepath import normalize_path, application_path    
    from scipy.io import write_array
    import numpy
    write_array(normalize_path(str(topo.sim.time())+"orprefmap.txt"), topo.sim["V1"].sheet_views["OrientationPreference"].view()[0])
    write_array(normalize_path(str(topo.sim.time())+"orselmap.txt"), topo.sim["V1"].sheet_views["OrientationSelectivity"].view()[0])
    topo.sim["LGNOn"].in_connections[0].strength = on
    topo.sim["LGNOff"].in_connections[0].strength = off

    print float(topo.sim.time())
    if(float(topo.sim.time()) > 19002.0): 
	#topo.sim["V1"].output_fns[2].scale=0.0
	save_plotgroup("Position Preference")
	PatternPresenter.duration=1.0
        PatternPresenter.apply_output_fns=True
	import topo.command.pylabplots
        reload(topo.command.pylabplots)
        topo.command.pylabplots.measure_or_tuning_fullfield.instance(sheet=topo.sim["V1"],repetitions=10)(repetitions=10)
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,0]",sheet=topo.sim["V1"],coords=[(0,0)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,0]",sheet=topo.sim["V1"],coords=[(0.1,0)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,0]",sheet=topo.sim["V1"],coords=[(0.1,0.1)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,0]",sheet=topo.sim["V1"],coords=[(0,0.1)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0.1,0.1]",sheet=topo.sim["V1"],coords=[(0.1,0.1)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0.1,-0.1]",sheet=topo.sim["V1"],coords=[(0.1,-0.1)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[-0.1,0.1]",sheet=topo.sim["V1"],coords=[(-0.1,0.1)])()    
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[-0.1,-0.1]",sheet=topo.sim["V1"],coords=[(-0.1,-0.1)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0.2,0.2]",sheet=topo.sim["V1"],coords=[(0.2,0.2)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0.2,-0.2]",sheet=topo.sim["V1"],coords=[(0.2,-0.2)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[-0.2,0.2]",sheet=topo.sim["V1"],coords=[(-0.2,0.2)])()    
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[-0.2,-0.2]",sheet=topo.sim["V1"],coords=[(-0.2,-0.2)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,0.1]",sheet=topo.sim["V1"],coords=[(0.0,0.1)])()
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0,-0.1]",sheet=topo.sim["V1"],coords=[(0.0,-0.1)])()
	topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[-0.1,0]",sheet=topo.sim["V1"],coords=[(-0.1,0.0)])()    
        topo.command.pylabplots.cyclic_tuning_curve.instance(x_axis="orientation",filename="ORTC[0.1,0]",sheet=topo.sim["V1"],coords=[(0.1,-0.0)])()

    if(float(topo.sim.time()) > 20000.0): 
        topo.sim["V1"].output_fns[1].plastic=False
        contrib.jacommands.measure_histogram(iterations=1000)
Example 18
	obj = GpuSpace()
	obj.cuda_make_context( mpi.rank )

	t1 = datetime.now()
	for i, m in enumerate( m_list ):
		mpi.world.barrier()	
		t2 = datetime.now()
		print t2-t1, m, 'MB'
		obj.set_nx( m*mbytes_float )

		if mpi.rank == 0:
			t_rate_htod[0][i], t_rate_dtoh[0][i] = obj.get_transfer_rate()
			for dev in xrange( 1, num_gpus ):
				t_rate_htod[dev][i], t_rate_dtoh[dev][i] = mpi.world.recv( source=dev )

		else:
			mpi.world.send( dest=0, value=obj.get_transfer_rate() )
	

	if mpi.rank == 0:
		from scipy.io import write_array
		fpath = './bandwidth-mpi_barrier-%dgpus.ascii' % num_gpus
		data_list = [m_list]
		for dev in xrange( num_gpus ):
			data_list.append( t_rate_htod[dev] )
			data_list.append( t_rate_dtoh[dev] )
		write_array( fpath, sc.transpose(data_list), separator='\t', linesep='\n' )


	obj.cuda_context_pop()
Example 19
def gc_homeo_af():
    import contrib.jsldefs
    import topo.command.pylabplot
    import contrib.jacommands
    from topo.command.analysis import save_plotgroup
    from topo.analysis.featureresponses import FeatureResponses, PatternPresenter, FeatureMaps
    #FeatureResponses.repetitions=10

    FeatureMaps.selectivity_multiplier = 20

    PatternPresenter.duration = 0.2
    PatternPresenter.apply_output_fns = False
    import topo.command.pylabplot
    reload(topo.command.pylabplot)

    on = topo.sim["LGNOn"].in_connections[0].strength
    off = topo.sim["LGNOff"].in_connections[0].strength
    if __main__.__dict__.get("GC", False):
        topo.sim["LGNOn"].in_connections[0].strength = 0
        topo.sim["LGNOff"].in_connections[0].strength = 0

    contrib.jsldefs.homeostatic_analysis_function()
    topo.command.pylabplot.fftplot(
        topo.sim["V1"].sheet_views["OrientationPreference"].view()[0],
        filename="V1ORMAPFFT")

    from topo.misc.filepath import normalize_path, application_path
    from scipy.io import write_array
    import numpy
    write_array(normalize_path(str(topo.sim.time()) + "orprefmap.txt"),
                topo.sim["V1"].sheet_views["OrientationPreference"].view()[0])
    write_array(normalize_path(str(topo.sim.time()) + "orselmap.txt"),
                topo.sim["V1"].sheet_views["OrientationSelectivity"].view()[0])
    topo.sim["LGNOn"].in_connections[0].strength = on
    topo.sim["LGNOff"].in_connections[0].strength = off

    print float(topo.sim.time())
    if (float(topo.sim.time()) > 19002.0):
        #topo.sim["V1"].output_fns[2].scale=0.0
        save_plotgroup("Position Preference")
        PatternPresenter.duration = 1.0
        PatternPresenter.apply_output_fns = True
        import topo.command.pylabplot
        reload(topo.command.pylabplot)
        topo.command.pylabplot.measure_or_tuning_fullfield.instance(
            sheet=topo.sim["V1"], repetitions=10)(repetitions=10)
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,0]",
            sheet=topo.sim["V1"],
            coords=[(0, 0)])()

        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,0]",
            sheet=topo.sim["V1"],
            coords=[(0.1, 0)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,0]",
            sheet=topo.sim["V1"],
            coords=[(0.1, 0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,0]",
            sheet=topo.sim["V1"],
            coords=[(0, 0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0.1,0.1]",
            sheet=topo.sim["V1"],
            coords=[(0.1, 0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0.1,-0.1]",
            sheet=topo.sim["V1"],
            coords=[(0.1, -0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[-0.1,0.1]",
            sheet=topo.sim["V1"],
            coords=[(-0.1, 0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[-0.1,-0.1]",
            sheet=topo.sim["V1"],
            coords=[(-0.1, -0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0.2,0.2]",
            sheet=topo.sim["V1"],
            coords=[(0.2, 0.2)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0.2,-0.2]",
            sheet=topo.sim["V1"],
            coords=[(0.2, -0.2)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[-0.2,0.2]",
            sheet=topo.sim["V1"],
            coords=[(-0.2, 0.2)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[-0.2,-0.2]",
            sheet=topo.sim["V1"],
            coords=[(-0.2, -0.2)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,0.1]",
            sheet=topo.sim["V1"],
            coords=[(0.0, 0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0,-0.1]",
            sheet=topo.sim["V1"],
            coords=[(0.0, -0.1)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[-0.1,0]",
            sheet=topo.sim["V1"],
            coords=[(-0.1, 0.0)])()
        topo.command.pylabplot.cyclic_tuning_curve.instance(
            x_axis="orientation",
            filename="ORTC[0.1,0]",
            sheet=topo.sim["V1"],
            coords=[(0.1, -0.0)])()

    if (float(topo.sim.time()) > 20000.0):
        topo.sim["V1"].output_fns[1].plastic = False
        contrib.jacommands.measure_histogram(iterations=1000)
Example 20
def deact_batch( filename="" ):
    """Fits deactivation time constants: Monoexponential until <=-70,
    biexponential for >-70 mV.

    filename -- If not an empty string, stores the best-fit parameters
                in this file."""

    stf = __import__("stf")
    # Some ugly definitions for the time being
    gNMono = 5   # Monoexponential fits 
    gNBi   = 4   # Biexponential fits
    gFMono = 0   # id of monoexponential function
    gFBi   = 3   # id of biexponential function
    gMonoDictSize =  stf.leastsq_param_size( gFMono ) + 1 # Parameters, chisqr
    gBiDictSize =    stf.leastsq_param_size( gFBi ) + 1   # Parameters, chisqr

    if ( gMonoDictSize < 0 or gBiDictSize < 0 ):
        print('Couldn\'t retrieve function; aborting now.')
        return False        
    
    if ( not(stf.check_doc()) ):
        print('Couldn\'t find an open file; aborting now.')
        return False

    # set the test pulse window cursors:
    if ( not(stf.set_peak_start( 70.84, True )) ):
        return False
    if ( not(stf.set_peak_end( 74.84, True )) ):
        return False

    if ( not(stf.set_base_start( 69.5, True )) ):
        return False
    if ( not(stf.set_base_end( 70.5, True )) ):
        return False
    
    if ( not(stf.set_peak_mean( 1 )) ):
        return False
    if ( not(stf.set_peak_direction( "down" )) ):
        return False

    # Monoexponential loop ---------------------------------------------------
    
    firstpass = True
    # A list for dictionary keys...
    mono_keys = []
    # ... and values:
    mono_values = np.empty( (gMonoDictSize, gNMono) )
    if not filename=="":
        ls_file=np.empty((gNMono,stf.leastsq_param_size(gFMono)))
    
    # Monoexponential fits:
    for n in range( 0, gNMono ):
        if ( stf.set_trace( n ) == False ):
            print("Couldn't set a new trace; aborting now.")
            return False
        
        print('Analyzing trace %d of %d'%( n+1, stf.get_size_channel()))
        
        # set the fit window cursors:
        
        # use the index for the start cursor:
        if ( not(stf.set_fit_start( stf.peak_index( True ) )) ):
            return False
        
        # fit 1.5 ms:
        fit_end_time = stf.get_fit_start( True )+1.0
        if ( not(stf.set_fit_end( fit_end_time, True)) ):
            return False
        
        # Least-squares fitting:
        p_dict = stf.leastsq( gFMono )
        if not filename=="":
            ls_file[n][0]=p_dict["Amp_0"]
            ls_file[n][1]=p_dict["Tau_0"]
            ls_file[n][2]=p_dict["Offset"]
        
        if ( p_dict == 0 ):
            print('Couldn\'t perform a fit; aborting now.')
            return False
            
        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if ( firstpass == True ):
                mono_keys.append( k )
            mono_values[row][n] = v 
            row = row+1
        
        firstpass = False
    
    monoDict = dict()
    # Create the dictionary for the table:
    entry = 0
    for elem in mono_keys:
        monoDict[ elem ] = mono_values[entry].tolist()
        entry = entry+1
   
    if ( not(stf.show_table_dictlist( monoDict )) ):
        return False
    
    # Biexponential loop ---------------------------------------------------
    
    firstpass = True
    # A list for dictionary keys...
    bi_keys = []
    # ... and values:
    bi_values = np.empty( (gBiDictSize, gNBi) )
    
    # Biexponential fits:
    for n in range( gNMono, gNBi+gNMono ):
        if ( stf.set_trace( n ) == False ):
            print('Couldn\'t set a new trace; aborting now.')
            return False
        
        print('Analyzing trace %d of %d'%( n+1, stf.get_size_channel()))
        
        # set the fit window cursors:
        
        # use the index for the start cursor:
        if ( not(stf.set_fit_start( stf.peak_index( True ) )) ):
            return False
        
        # fit 4 ms:
        fit_end_time = stf.get_fit_start( True )+3.5
        if ( not(stf.set_fit_end( fit_end_time, True)) ):
            return False
        
        # Least-squares fitting:
        p_dict = stf.leastsq( gFBi )
        
        if ( p_dict == 0 ):
            print('Couldn\'t perform a fit; aborting now.')
            return False
            
        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if ( firstpass == True ):
                bi_keys.append( k )
            bi_values[row][n-gNMono] = v 
            row = row+1
        
        firstpass = False
    
    biDict = dict()
    
    # Create the dictionary for the table:
    entry = 0
    for elem in bi_keys:
        biDict[ elem ] = bi_values[entry].tolist()
        entry = entry+1

    if not filename=="":
        write_array(file(filename,'w'), ls_file, precision=15)
   
    if ( not(stf.show_table_dictlist( biDict )) ):
        return False
    
    return True
Example 21
    from mystic.metropolis import *
    import time

    t1 = time.time()
    for i in xrange(L):
        scem(Ck, ak, Sk, Sak, target, 0.1)
    t2 = time.time()
    print "SCEM 1 chain for x[%d] took %0.3f ms" % (len(Sk), (t2 - t1) * 1000)
    Sk = array(Sk)

    t1 = time.time()
    x = [[0, 10]]
    for i in xrange(L):
        x.append(metropolis_hastings(proposal, target, x[-1]))
    t2 = time.time()
    print "2D Metropolis for x[%d] took %0.3f ms" % (len(x), (t2 - t1) * 1000)
    x = array(x)

    # have a look at vrugt.nb for reading and postprocessing
    # the datafile below
    from scipy.io import write_array
    #write_array(open('twisted1.dat','w'),x)
    write_array(open('twisted1.dat', 'w'), Sk)

    import pylab
    pylab.plot(Sk[:, 0], Sk[:, 1], 'r.')
    pylab.plot(x[:, 0] + 30, x[:, 1], 'b.')
    pylab.show()

# end of file
Example 22
def save( ar, fileName ):
    io.write_array( fileName, ar, precision = 8 )
Example 23
        cc.logging(fname,dt)
        
    else :
        print 'discarded cell # %d ' % run
    del cc



angle_moyen =  S.array(angle_moyen)
longueur_moyenne= S.array(longueur_moyenne)
ecartype =  S.array(ecartype)
duree2 = S.array(duree2)
duree1 = S.array(duree1)
angle_onset = S.array(angle_onset)*180/pi
angle_maxi = S.array(angle_maxi)*180/pi


angle_moyen = fabs(angle_moyen)*180/pi


data = S.column_stack((duree1, duree2, angle_moyen,ecartype,longueur_moyenne))

io.write_array("results.txt", data, separator=' ', linesep='\n')

figure(3)

plot(duree2,angle_moyen,'o')


show()
Example 24
 def save_event(self):
     dataout = np.vstack((self.x, self.y, self.z))
     dataout = dataout.transpose()
     write_array("/home/pcuser/data/RVsT1.txt", dataout)
Example 25
def exportPointset(thepointset,
                   infodict,
                   separator='   ',
                   linesep='\n',
                   precision=12,
                   suppress_small=0,
                   varvaldir='col',
                   ext='',
                   append=False):

    assert varvaldir in ['col', 'row'], \
           "invalid variable value write direction"
    # in order to avoid import cycles, cannot explicitly check that
    # thepointset is of type Pointset, because Points.py imports this file
    # (utils.py), so check an attribute instead.
    try:
        thepointset.coordnames
    except AttributeError:
        raise TypeError, "Must pass Pointset to this function: use arrayToPointset first!"
    infodict_usedkeys = []
    for key, info in infodict.iteritems():
        if isinstance(info, str):
            infodict_usedkeys += [info]
        elif info == []:
            infodict[key] = copy.copy(thepointset.coordnames)
            infodict_usedkeys.extend(thepointset.coordnames)
        else:
            infodict_usedkeys += list(info)
    allnames = copy.copy(thepointset.coordnames)
    if thepointset._parameterized:
        allnames.append(thepointset.indepvarname)
    remlist = remain(infodict_usedkeys, allnames + range(len(allnames)))
    if remlist != []:
        print "Coords not found in pointset:", remlist
        raise ValueError, \
              "invalid keys in infodict - some not present in thepointset"
    assert isinstance(ext, str), "'ext' extension argument must be a string"
    if ext != '':
        if ext[0] != '.':
            ext = '.' + ext
    if append:
        assert varvaldir == 'col', ("append mode not supported for row "
                                    "format of data ordering")
        modestr = 'a'
    else:
        modestr = 'w'
    totlen = len(thepointset)
    if totlen == 0:
        raise ValueError, ("Pointset is empty")
    for fname, tup in infodict.iteritems():
        try:
            f = open(fname + ext, modestr)
        except IOError:
            print "There was a problem opening file " + fname + ext
            raise
        try:
            if isinstance(tup, str):
                try:
                    varray = thepointset[tup]
                except TypeError:
                    raise ValueError, "Invalid specification of coordinates"
            elif isinstance(tup, int):
                try:
                    varray = thepointset[:, tup].toarray()
                except TypeError:
                    raise ValueError, "Invalid specification of coordinates"
            elif type(tup) in [list, tuple]:
                if alltrue([type(ti) == str for ti in tup]):
                    thetup = list(tup)
                    if thepointset.indepvarname in tup:
                        tix = thetup.index(thepointset.indepvarname)
                        thetup.remove(thepointset.indepvarname)
                    try:
                        vlist = thepointset[thetup].toarray().tolist()
                    except TypeError:
                        raise ValueError, "Invalid specification of coordinates"
                    if len(thetup) == 1:
                        vlist = [vlist]
                    if thepointset.indepvarname in tup:
                        vlist.insert(tix, thepointset.indepvararray.tolist())
                    varray = array(vlist)
                elif alltrue([type(ti) == int for ti in tup]):
                    try:
                        varray = thepointset[:, tup].toarray()
                    except TypeError:
                        raise ValueError, "Invalid specification of coordinates"
                else:
                    raise ValueError, "Invalid specification of coordinates"
            else:
                f.close()
                raise TypeError, \
                   "infodict values must be singletons or tuples/lists of strings or integers"
        except IOError:
            f.close()
            print "Problem writing to file" + fname + ext
            raise
        except KeyError:
            f.close()
            raise KeyError, ("Keys in infodict not found in pointset")
        if varvaldir == 'row':
            write_array(f,
                        varray,
                        separator,
                        linesep,
                        precision,
                        suppress_small,
                        keep_open=0)
        else:
            write_array(f,
                        transpose(varray),
                        separator,
                        linesep,
                        precision,
                        suppress_small,
                        keep_open=0)
Example 26
    def writeAscii(self, filename):
        from scipy.io import write_array

        self.info('Writing data to file: ' + filename + '.asc')
        write_array(filename + ".asc", np.transpose(self.data[1:-1, :]), separator=' ', linesep='\n')
        self.done()
Example 27
#   print 'i', i
    for input in range(T):
#       print 'j', j
        col = a[:,input]
        if options.bylab:
            if options.sum:
                v[maxindices[output]][input] += sum([abs(k) for k in col])
            else:
                v[maxindices[output]][input] = sum([abs(k) for k in col])
        else:
            v[output][input] = sum([abs(k) for k in col])   
#       print 'v[i][j]', v[i][j]
if options.softmax:
    for t in range(T):
        col = v[:,t]
        Z = sum([exp(x) for x in col])
        for y in range(len(col)):
            v[y][t] = exp(col[y]) / Z
if options.max:
    for t in range(T):
        c = list(v[:,t])
        i = c.index(max(c))
        v[:,t] = 0
        v[i][t] = 1
print shape(v)
out = file(outfile, 'w')
if options.bylab:
    print >> out,'LABELS:', ' '.join(labels)
print >> out,'DIMENSIONS:', T 
io.write_array(out, v)
Example 28
def act_batch( nFunc = 5, filename="", lat=60 ):
    """Fits activation and inactivation of 15 iv pulses
    using a biexponential funtion with a delay, creates a
    table showing the results.
    
    Keyword argument:
    nFunc --    Index of function used for fitting. At present,
                10 is the HH gNa function,
                5  is a sum of two exponentials with a delay.
    filename -- If not an empty string, stores the best-fit parameters
                in this file."""

    stf = __import__("stf")
    # Some ugly definitions for the time being
    gFitStart = 70.5 + lat/1000.0 # fit start cursor varies with lat
    gFSelect  =  nFunc    # HH function
    gDictSize =  stf.leastsq_param_size( gFSelect ) + 2 # Parameters, chisqr, peak value
    gBaseStartCtrl  = 69.5 # Start and end of the baseline before the control pulse, in ms
    gBaseEndCtrl    = 70.5
    gPeakStartCtrl  = 70.64 # Start and end of the peak cursors for the control pulse, in ms
    gPeakWindowSizes = ( 2.5,   2, 1.5, 1,   1, 0.8, 0.8, 0.8, 0.6, 0.6, 0.5, 0.5, 0.4, 0.4, 0.4 )  
    gFitDurations   =  (   8,   8,   7, 6, 5.5,   5, 4.5, 3.5, 2.5,   2, 1.5, 1.5, 1.0, 0.8, 0.8 )
    gPulses = len( gFitDurations )    # Number of traces 
    
    if ( gDictSize < 0 ):
        print("Couldn\'t retrieve function id=%d; aborting now." % gFSelect)
        return False        
    
    if ( not(stf.check_doc()) ):
        print("Couldn\'t find an open file; aborting now.")
        return False
    
    # set cursors:
    if ( not(stf.set_peak_start( gPeakStartCtrl, True )) ):
        return False
    if ( not(stf.set_peak_end( stf.get_size_trace(0)-1 )) ):
        return False
    if ( not(stf.set_base_start( gBaseStartCtrl, True )) ):
        return False
    if ( not(stf.set_base_end( gBaseEndCtrl, True )) ):
        return False
    
    if ( not(stf.set_peak_mean( 3 )) ):
        return False
    if ( not(stf.set_peak_direction( "both" )) ):
        return False

    firstpass = True
    # A list for dictionary keys and values:
    dict_keys = []
    dict_values = np.empty( (gDictSize, stf.get_size_channel()) )
    if not filename=="":
        ls_file=np.empty((gPulses,stf.leastsq_param_size(nFunc)))
    for n in range( 0, gPulses ):
        if ( stf.set_trace( n ) == False ):
            print('Couldn\'t set a new trace; aborting now.')
            return False
        
        print('Analyzing trace %d of %d'%( n+1, stf.get_size_channel()))
        # set the fit window cursors:
        if ( not(stf.set_peak_end( gPeakStartCtrl + gPeakWindowSizes[n], True )) ):
            return False
        if ( not(stf.set_fit_start( gFitStart, True )) ):
            return False
        if ( not(stf.set_fit_end( gFitStart + gFitDurations[n], True )) ):
            return False
        
        stf.measure()
        
        # Least-squares fitting:
        p_dict = stf.leastsq( gFSelect )
        if not filename=="":
            ls_file[n][0]=p_dict["gprime_na"]
            ls_file[n][1]=p_dict["tau_m"]
            ls_file[n][2]=p_dict["tau_h"]
            ls_file[n][3]=p_dict["offset"]

        if ( p_dict == 0 ):
            print('Couldn\'t perform a fit; aborting now.')
            return False
            
        # Create an empty list:
        tempdict_entry = []
        row = 0
        for k, v in p_dict.iteritems():
            if ( firstpass == True ):
                dict_keys.append( k )
            dict_values[row][n] = v 
            row = row+1
        
        if ( firstpass ):
            dict_keys.append( "Peak amplitude" )
        dict_values[row][n] = stf.get_peak()-stf.get_base()
        
        firstpass = False
    
    
    if not filename=="":
        write_array(file(filename,'w'), ls_file, precision=15)

    retDict = dict()
    # Create the dictionary for the table:
    entry = 0
    for elem in dict_keys:
        retDict[ elem ] = dict_values[entry].tolist()
        entry = entry+1
    
    return stf.show_table_dictlist( retDict )
Example 29
    from mystic.metropolis import *
    import time

    t1 = time.time()
    for i in xrange(L):
        scem(Ck, ak, Sk, Sak, target, 0.1)
    t2 = time.time()
    print "SCEM 1 chain for x[%d] took %0.3f ms" % (len(Sk), (t2-t1)*1000)
    Sk = array(Sk)

    t1 = time.time()
    x = [ [0,10] ]
    for i in xrange(L):
        x.append(metropolis_hastings(proposal, target, x[-1]))
    t2 = time.time()
    print "2D Metropolis for x[%d] took %0.3f ms" % (len(x), (t2-t1)*1000)
    x = array(x)

    # have a look at vrugt.nb for reading and postprocessing
    # the datafile below
    from scipy.io import write_array
    #write_array(open('twisted1.dat','w'),x)
    write_array(open('twisted1.dat','w'),Sk)
    
    import pylab
    pylab.plot(Sk[:,0],Sk[:,1],'r.')
    pylab.plot(x[:,0] + 30,x[:,1],'b.')
    pylab.show()

# end of file
Example 30
def save(ar, fileName):
    from scipy.io import write_array
    write_array(fileName, ar, precision=8)
Example 31
    t1 = datetime.now()
    for i, m in enumerate(m_list):
        mpi.world.barrier()
        t2 = datetime.now()
        print t2 - t1, m, 'MB'
        obj.set_nx(m * mbytes_float)

        if mpi.rank == 0:
            t_rate_htod[0][i], t_rate_dtoh[0][i] = obj.get_transfer_rate()
            for dev in xrange(1, num_gpus):
                t_rate_htod[dev][i], t_rate_dtoh[dev][i] = mpi.world.recv(
                    source=dev)

        else:
            mpi.world.send(dest=0, value=obj.get_transfer_rate())

    if mpi.rank == 0:
        from scipy.io import write_array
        fpath = './bandwidth-mpi_barrier-%dgpus.ascii' % num_gpus
        data_list = [m_list]
        for dev in xrange(num_gpus):
            data_list.append(t_rate_htod[dev])
            data_list.append(t_rate_dtoh[dev])
        write_array(fpath,
                    sc.transpose(data_list),
                    separator='\t',
                    linesep='\n')

    obj.cuda_context_pop()
Example 32
		if max_tavg < max_tavg_val:
			max_tavg_count += 1
		else:
			max_tavg_count = 0
		
		# save the field at last period
		if last_period == 'off':
			if max_tavg_count == max_tavg_count_val:
				last_period = 'on'
		elif last_period == 'on':
			field_tavg = field_tsum/period 
			field2_tavg = field2_tsum/period 

			# write the binary file
			filename = dirname + '/' + 'tavg_Ez_%.4dperiod.scbin' % (tstep/period)
			fd	=	open(filename, 'wb')
			fwrite(fd, Nx, field_tavg)
			fd.close()

			filename = dirname + '/' + 'tavg_Ez2_%.4dperiod.scbin' % (tstep/period)
			fd	=	open(filename, 'wb')
			fwrite(fd, Nx, field2_tavg)
			fd.close()

			# write the ascii file
			filename = dirname + '/' + 'i_epr_tavgEz_tavgEz2.txt'
			write_array(filename, transpose((range(Nx+1), epr, field_tavg, field2_tavg)), separator='\t', linesep='\n')


			sys.exit()
Example 33
    #	print 'i', i
    for input in range(T):
        #		print 'j', j
        col = a[:, input]
        if options.bylab:
            if options.sum:
                v[maxindices[output]][input] += sum([abs(k) for k in col])
            else:
                v[maxindices[output]][input] = sum([abs(k) for k in col])
        else:
            v[output][input] = sum([abs(k) for k in col])
#		print 'v[i][j]', v[i][j]
if options.softmax:
    for t in range(T):
        col = v[:, t]
        Z = sum([exp(x) for x in col])
        for y in range(len(col)):
            v[y][t] = exp(col[y]) / Z
if options.max:
    for t in range(T):
        c = list(v[:, t])
        i = c.index(max(c))
        v[:, t] = 0
        v[i][t] = 1
print shape(v)
out = file(outfile, 'w')
if options.bylab:
    print >> out, 'LABELS:', ' '.join(labels)
print >> out, 'DIMENSIONS:', T
io.write_array(out, v)
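
Note: scipy.io.write_array and its companion read_array were deprecated in SciPy 0.8 and removed in later releases, so the examples above only run on old SciPy/Python 2 stacks. On a current stack the same write/read round-trip is usually expressed with numpy.savetxt and numpy.loadtxt; the following is a minimal sketch of that equivalence (the file name, format string, and tolerance are illustrative choices, not taken from the examples above):

import numpy as np

a = np.random.rand(3, 4) * 30              # same kind of data as the test_float examples
np.savetxt('example.dat', a, fmt='%.8e')   # rough analogue of write_array(..., precision=8)
b = np.loadtxt('example.dat')              # rough analogue of read_array
assert np.allclose(a, b, atol=1.5e-4)      # mirrors assert_array_almost_equal(decimal=4)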