Example #1
def getMission(self):
    async_mission_list = self.async_client.get_missions().result()
    missDict = self.regroupIntoDict(async_mission_list, "mission")
    # savemat opens and closes the file itself (in binary mode);
    # no separate text-mode open() is needed
    scy.savemat("mission.mat", missDict)
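
Note: scipy.io.savemat accepts either a filename or an already-open binary file object, so no manual open()/close() is required. A minimal sketch with placeholder data:

from scipy.io import savemat

# Either form works; if you open the file yourself it must be binary mode.
savemat("mission.mat", {"mission": [1, 2, 3]})
with open("mission.mat", "wb") as fh:
    savemat(fh, {"mission": [1, 2, 3]})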
Example #2
    def write(self):
        from postprocess import tfidf
        K = len(self.nnz[MAP.PREDICATE])
        N = len(self.nnz[MAP.ENTITY])
        nnz_tensor = sum(self.nnz[0])
        subs = np.zeros((nnz_tensor, 3), dtype=np.int64)  # np.int alias removed in modern numpy
        vals = np.zeros((nnz_tensor, 1), dtype=np.double)
        offset = 0
        for s, p, o, val in self.relations():
            # awesome matlab start-at-1 indexing...
            subs[offset, :] = (s + 1, o + 1, p + 1)
            vals[offset] = val
            offset += 1

        # remove zeros
        nnzidx = vals.nonzero()[0]
        vals = vals[nnzidx]
        subs = subs[nnzidx, :]

        eattr = self.__create_matlab_attr(self.entity_attributes, N, self.nnz[MAP.EATTR], postprocessor=tfidf)
        rattr = self.__create_matlab_attr(self.predicate_attributes, K, self.nnz[MAP.RATTR], postprocessor=tfidf)

        log.debug('Writing MATLAB tensor')
        savemat(fjoin(TZArchive.SUBS_FOUT, 'mat', self.fname), {
            'subs': subs,
            'vals': vals,
            'size': (N, N, K),
            'eattr': eattr,
            'rattr': rattr
        }, oned_as='column')
        return subs, vals
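
To use the tensor on the Python side again, the subs/vals/size triplet can be read back with loadmat and densified. A hedged sketch, assuming the file was saved as 'tensor.mat' (the real path comes from the fjoin(...) call above):

import numpy as np
from scipy.io import loadmat

m = loadmat('tensor.mat')
subs = m['subs'].astype(int) - 1            # undo the MATLAB 1-based indexing
vals = m['vals'].ravel()
shape = tuple(m['size'].ravel().astype(int))
dense = np.zeros(shape)
dense[subs[:, 0], subs[:, 1], subs[:, 2]] = vals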
Example #3
def cleanTip(file, outname):
    dat = sio.loadmat(file)

    tip_raw = dat['tip']
    temp = tip_raw.copy()  # work on a copy so the NaN fill below does not leak into tip_raw
    temp[np.isnan(temp)] = np.nanmedian(tip_raw)
    idx = abs(temp) > 2000
    tip_raw[idx] = np.nan

    plt.title('Click to the left of the first contact')
    l = int(np.round(tip_raw.shape[0] / 5))  # slice bounds must be ints
    plt.plot(tip_raw[:l, :])
    x, y = plt.ginput(1, timeout=0)[0]
    plt.close('all')
    x = int(np.round(x))

    tip_raw[:x, :] = np.nanmedian(tip_raw, axis=0)
    tip_interp = pd.DataFrame(tip_raw).interpolate(method=INTERP_METHOD,
                                                   limit=INTERP_WINDOW).values
    # tip_interp = medfilt(tip_interp)

    scaler = RobustScaler()
    imp = Imputer(strategy='median')
    tip_interp = imp.fit_transform(tip_interp)
    tip = scaler.fit_transform(tip_interp)
    plt.plot(tip)
    plt.title('Cleaned')
    plt.draw()

    save_dict = {'tip_clean': tip}
    sio.savemat(outname, save_dict)
Example #4
def write_arr_to_matfile(arr, filepath, filename):
    '''
    Store the given numpy array in a MAT-file at filepath,
    under the MATLAB variable name given by filename.
    '''
    output_dict = {}
    output_dict[filename] = arr
    ml.savemat(filepath, output_dict)
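
A short round-trip check for the helper above; note that filename is used as the MATLAB variable name inside the file, not as a path:

import numpy as np
from scipy.io import loadmat

arr = np.arange(6).reshape(2, 3)
write_arr_to_matfile(arr, 'example.mat', 'my_var')
assert np.array_equal(loadmat('example.mat')['my_var'], arr)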
Example #5
def subtract_background_from_stacks(scanfile, indir, outdir, scannumber=-1):
    """Subtract background from SAXS data in MAT-file stacks.
    """
    scans = read_yaml(scanfile)
    if scannumber > 0:
        scannos = [ scannumber ]
    else:
        scannos = sorted(scans.keys())
    for scanno in scannos:
        print("Scan #%03d" % scanno)
        try:
            bufscan = scans[scanno][0]
        except TypeError:
            print("Scan #%03d is a buffer" % scanno)
            continue
        try:
            conc = scans[scanno][1]
        except TypeError:
            print("No concentration for scan #02d." % scanno)
            conc = 1.0
        print("Using concentration %g g/l." % conc)
        stackname = "s%03d" % scanno
        stack = loadmat(indir+'/'+stackname+'.mat')[stackname]
        subs = np.zeros_like(stack)
        (npos, nrep, _, _) = stack.shape
        for pos in range(npos):
            print(pos)
            buf = get_bg(indir, bufscan, pos)
            for rep in range(nrep):
                subs[pos,rep,...] = errsubtract(stack[pos,rep,...], buf)
                subs[pos,rep,1:3,:] = subs[pos,rep,1:3,:] / conc
        outname = "subs%03d" % scanno
        savemat(outdir+'/'+outname + ".mat", {outname: subs}, do_compression=True,
                oned_as='row')
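
do_compression=True zlib-compresses each variable in the MAT 5 container, which matters for large stacks like these. A quick way to see the effect on any array:

import os
import numpy as np
from scipy.io import savemat

data = {'x': np.zeros((1000, 1000))}
savemat('plain.mat', data)
savemat('packed.mat', data, do_compression=True)
print(os.path.getsize('plain.mat'), os.path.getsize('packed.mat'))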
Example #6
 def save(self,outfile):
     # Populate matfile-friendly data structures for censoring regions
     tmp = tempfile.NamedTemporaryFile()
     save_attrs = []
     for k in self.editable_traits():
         if k.endswith("ts"):
             continue
         if k == "available_widgets":
             continue
         if k == "bpoint_classifier":
             continue
         if k == "bpoint_classifier_file":
             continue
         if k in ("censored_regions","event_names"):
             continue
         v = getattr(self,k)
         if type(v) == np.ndarray:
             if v.size == 0: continue
         if type(v) is set: continue
         save_attrs.append(k)
     savedict = dict([(k,getattr(self,k)) \
                      for k in save_attrs if not (getattr(self,k) is None)])
     savedict["censoring_sources"] = np.array(self.censoring_sources)
     for evt in self.event_names:
         savedict[evt] = getattr(self,evt)
     savedict["event_names"] = np.array(self.event_names)
      for k, v in savedict.items():
          try:
              savemat(tmp, {k: v}, long_field_names=True)
          except Exception as e:
              logger.warning("unable to save %s because of %s", k, e)
Example #7
    def _post_run_hook(self, runtime):
        atlas_config = self.inputs.atlas_config
        atlas_name = self.inputs.atlas_name

        # Aggregate the connectivity/network data from DSI Studio
        official_labels = np.array(atlas_config['node_ids']).astype(int)
        connectivity_data = {
            atlas_name + "_region_ids": official_labels,
            atlas_name + "_region_labels": np.array(atlas_config['node_names'])
        }

        # Gather the connectivity matrices
        matfiles = glob(runtime.cwd + "/*.connectivity.mat")
        for matfile in matfiles:
            measure = "_".join(matfile.split(".")[-4:-2])
            connectivity_data[atlas_name + "_" + measure + "_connectivity"] = \
                _sanitized_connectivity_matrix(matfile, official_labels)

        # Gather the network measure files
        network_results = glob(runtime.cwd + "/*network*txt")
        for network_result in network_results:
            measure = "_".join(network_result.split(".")[-4:-2])
            connectivity_data.update(
                _sanitized_network_measures(network_result, official_labels,
                                            atlas_name, measure))
        merged_matfile = op.join(runtime.cwd, atlas_name + "_connectivity.mat")
        savemat(merged_matfile, connectivity_data, long_field_names=True)
        return runtime
Example #8
    def save(self,matfile):
        if self.graph is None:
            raise ValueError("No graph to save")
        if self.graph.numberOfNodes() > self.nvoxels:
            logger.warning("Non-voxel nodes are present in your graph.")

        logger.info("Converting networkit Graph to csr matrix")
        m = {
                "flat_mask": self.flat_mask,
                "nvoxels":self.nvoxels,
                "voxel_size":self.voxel_size,
                "volume_grid":self.volume_grid,
                "weighting_scheme":self.weighting_scheme,
                "step_size":self.step_size,
                "odf_resolution":self.odf_resolution,
                "angle_max":self.angle_max,
                "angle_weights":self.angle_weights,
                "normalize_doubleODF":self.normalize_doubleODF,
                "angle_weighting_power":self.angle_weighting_power,
                "graph":networkit.algebraic.adjacencyMatrix(self.graph, matrixType="sparse"),
                "ras_affine":self.ras_affine,
                "real_affine":self.real_affine
        }

        savemat(matfile,m,do_compression=True)
        logger.info("Saved matfile to %s", matfile)
Example #9
 def _run_interface(self, runtime):
     mat = loadmat(self.inputs.matfile, squeeze_me=True)
     outfile = fname_presuffix(self.inputs.matfile, suffix="_controllability",
                               newpath=runtime.cwd)
     connectivity_info = _calculate_controllability(mat)
     LOGGER.info("writing %s", outfile)
     savemat(outfile, connectivity_info, do_compression=True)
     self._results['controllability'] = outfile
     return runtime
Example #10
def _merge_conmats(matfile_lists, recon_args, outfile):
    """Merge the many matfiles output by dsi studio and ensure they conform"""
    connectivity_values = {}

    for matfile_list, (atlas_name, atlas_config, ifargs) in zip(matfile_lists, recon_args):
        matfiles = [f for f in matfile_list if f.endswith('.mat')]
        txtfiles = [f for f in matfile_list if f.endswith('.txt')]

        labels = np.array(atlas_config['node_ids']).astype(int)
        connectivity_values[atlas_name + "_region_ids"] = labels
        connectivity_values[atlas_name + "_region_labels"] = np.array(atlas_config['node_names'])
        n_atlas_labels = len(labels)

        for conmat in matfiles:
            m = loadmat(conmat)
            measure = "_".join(conmat.split(".")[-4:-2])
            # Column names are binary strings. Very confusing.
            column_names = "".join(
                [s.decode('UTF-8') for s in m["name"].squeeze().view("S1")]).split("\n")[:-1]
            region_ids = np.array([int(name[6:]) for name in column_names])

            # Where does each column go? Make an index array
            connectivity = m['connectivity']
            in_this_mask = np.in1d(labels, region_ids)
            truncated_labels = labels[in_this_mask]
            assert np.all(truncated_labels == region_ids)
            output = np.zeros((n_atlas_labels, n_atlas_labels))
            new_row = np.searchsorted(labels, region_ids)

            for row_index, conn in zip(new_row, connectivity):
                tmp = np.zeros(n_atlas_labels)
                tmp[in_this_mask] = conn
                output[row_index] = tmp
            connectivity_values[atlas_name + "_" + measure + "_connectivity"] = output

        for network_txt in txtfiles:
            measure = "_".join(network_txt.split(".")[-4:-2])
            network_data = _parse_network_file(network_txt)

            # Make sure to get the full atlas
            region_ids = np.array(network_data.pop('region_ids')).astype(int)
            in_this_mask = np.in1d(labels, region_ids)
            truncated_labels = labels[in_this_mask]
            assert np.all(truncated_labels == region_ids)
            new_row = np.searchsorted(labels, region_ids)

            for net_measure_name, net_measure_data in network_data.items():
                variable_name = atlas_name + "_" + measure + "_" + net_measure_name
                if type(net_measure_data) is np.ndarray:
                    tmp = np.zeros_like(net_measure_data)
                    tmp[in_this_mask] = net_measure_data
                    connectivity_values[variable_name] = tmp
                else:
                    connectivity_values[variable_name] = net_measure_data

    savemat(outfile, connectivity_values)
Example #11
def main(args):
    dset = biggie.Stash(args.input_file)
    labseg = json.load(open(args.labseg))
    out_dir = futils.create_directory(args.output_directory)
    total_count = len(dset)
    for idx, key in enumerate(dset.keys()):
        out_file = path.join(out_dir, "%s.%s" % (key, FILE_EXT))
        mdict = entity_to_mdict(dset.get(key), labseg[key])
        MLAB.savemat(out_file, mdict=mdict)
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, total_count, key)
Example #13
def _merge_conmats(matfile_list, outfile):
    """Merge the many matfiles output by dsi studio and ensure they conform"""
    connectivity_values = {}

    for matfile in matfile_list:
        connectivity_values.update(loadmat(matfile))
    savemat(outfile,
            connectivity_values,
            long_field_names=True,
            do_compression=True)
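
One caveat with merging this way: loadmat also returns bookkeeping entries ('__header__', '__version__', '__globals__'), which end up in the merged dict. A sketch of the same merge with those filtered out (merge_matfiles is a hypothetical name):

from scipy.io import loadmat, savemat

def merge_matfiles(matfile_list, outfile):
    merged = {}
    for matfile in matfile_list:
        contents = loadmat(matfile)
        # keep only real MATLAB variables, not loadmat metadata
        merged.update({k: v for k, v in contents.items()
                       if not k.startswith('__')})
    savemat(outfile, merged, long_field_names=True, do_compression=True)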
Example #14
def getObstacles(self):
    async_future = self.async_client.get_obstacles()
    stationary, moving = async_future.result()
    obstacles = stationary + moving
    dictObs = self.regroupIntoDict(obstacles, "obst")
    self.addMovingObsField(dictObs)
    # savemat opens and closes the file itself (in binary mode);
    # no separate text-mode open() is needed
    scy.savemat("obstacles.mat", dictObs)
Example #15
def save_fake_fib(fname):
    """returns a dict to get saved"""
    inds = np.arange(QSDR_SHAPE[0] * QSDR_SHAPE[1] * QSDR_SHAPE[2])
    mx, my, mz = np.unravel_index(inds,QSDR_SHAPE,order="F")
    fop = gzopen(fname,"wb")
    savemat(fop,
            {"dimension":np.array(QSDR_SHAPE),
                     "mx":mx,"my":my,"mz":mz},
            format='4'
            )
    fop.close()
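
loadmat also accepts an open file object, so the gzip trick above can be reversed the same way (format='4' is the old MAT 4 layout). A hedged sketch, assuming the file was written as 'fake.fib.gz':

from gzip import open as gzopen
from scipy.io import loadmat

with gzopen('fake.fib.gz', 'rb') as fop:
    fib = loadmat(fop)
print(fib['dimension'])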
Example #16
 def segments_pixeldist(self):
     segmdist_fn = os.path.join(self.ds.path, "cpmc", "MySegmentsMat", self.name, "top_masks_dists.mat")
     if os.path.exists(segmdist_fn):
         data = ml.loadmat(segmdist_fn)
         dist = data["top_masks_dist"]
         segm_ids = data["segment_ids"]
     else:
         prop = self.top_masks
         gt = self.ground_truth
         dist, segm_ids = segments.get_class_distributions(gt, prop, num_classes=self.ds.classnum)
         ml.savemat(segmdist_fn, {"top_masks_dist": dist, "segment_ids": segm_ids})
     segm_ids = list(np.array(segm_ids).ravel())
     return dist, segm_ids
Example #17
 def regions(self):
     """ regions created for the top proposals"""
     region_fn = os.path.join(self.ds.path, "cpmc", "MySegmentsMat", self.name, "top_regions.mat")
     if os.path.exists(region_fn):
         regions = ml.loadmat(region_fn)["top_regions"]
     else:
         proposals = self.top_masks
         regions = reg.produce_regions(proposals)
         ml.savemat(region_fn, {"top_regions": regions})
         logging.debug("Storing regions in %s" % region_fn)
         if not np.any(np.isnan(regions)):
             logging.debug("All pixels are covered in one region")
     return regions
Example #18
def save_fake_fib(fname):
    """returns a dict to get saved"""
    inds = np.arange(QSDR_SHAPE[0] * QSDR_SHAPE[1] * QSDR_SHAPE[2])
    mx, my, mz = np.unravel_index(inds, QSDR_SHAPE, order="F")
    fop = gzopen(fname, "wb")
    savemat(fop, {
        "dimension": np.array(QSDR_SHAPE),
        "mx": mx,
        "my": my,
        "mz": mz
    },
            format='4')
    fop.close()
Example #19
File: matlab.py Project: afcarl/isa
def main(argv):
	for patch_size in ['8x8', '16x16']:
		data = load('data/vanhateren.{0}.0.npz'.format(patch_size))['data']
		data = preprocess(data)

		savemat('data/vanhateren.{0}.test.mat'.format(patch_size), {'data': data})

		data = load('data/vanhateren.{0}.1.npz'.format(patch_size))['data']
		data = preprocess(data)

		savemat('data/vanhateren.{0}.train.mat'.format(patch_size), {'data': data})

	return 0
Example #20
 def segments_pixeldist(self):
     segmdist_fn = os.path.join(self.ds.path, 'cpmc', 'MySegmentsMat', self.name, 'top_masks_dists.mat')    
     if os.path.exists(segmdist_fn):
         data = ml.loadmat(segmdist_fn)
         dist = data['top_masks_dist']
         segm_ids = data["segment_ids"]
     else:    
         prop = self.top_masks
         gt = self.ground_truth
         dist, segm_ids = segments.get_class_distributions(gt, prop, num_classes=self.ds.classnum)
         ml.savemat(segmdist_fn, {'top_masks_dist':dist, 'segment_ids': segm_ids})
     segm_ids = list(np.array(segm_ids).ravel())        
     return dist, segm_ids
Example #21
 def regions(self):
     """ regions created for the top proposals"""
     region_fn = os.path.join(self.ds.path, 'cpmc', 'MySegmentsMat', self.name, 'top_regions.mat')    
     if os.path.exists(region_fn):
         regions = ml.loadmat(region_fn)['top_regions']
     else:    
         proposals = self.top_masks
         regions = reg.produce_regions(proposals)
         ml.savemat(region_fn, {'top_regions':regions})
         logging.debug("Storing regions in %s" % region_fn)
         if not np.any(np.isnan(regions)):
             logging.debug("All pixels are covered in one region")
     return regions
Example #22
 def write_dict_to_mat(mat_file_path,
                       dict_to_write,
                       version='7.3'):  # field must be a dict
     assert HAVE_HDF5STORAGE, "To use the MATSortingExtractor write_dict_to_mat function install hdf5storage: " \
                              "\n\n pip install hdf5storage\n\n"
     if version == '7.3':
         hdf5storage.write(dict_to_write,
                           '/',
                           mat_file_path,
                           matlab_compatible=True,
                           options='w')
     elif version < '7.3' and version > '4':
         savemat(mat_file_path, dict_to_write)
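
The version check exists because scipy's savemat writes only formats '4' and '5', and loadmat cannot read v7.3 files: those are HDF5 containers, hence the hdf5storage dependency. A hedged sketch of inspecting a v7.3 file directly with h5py (file and variable names are placeholders):

import h5py

with h5py.File('sorting.mat', 'r') as f:
    print(list(f.keys()))        # top-level MATLAB variable names
    # data = f['some_var'][()]   # read one variable as a numpy array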
Example #23
def applyModel(mdl, data_fname):
    data = sio.loadmat(data_fname)
    root = re.search(r'rat\d{4}_\d\d_[A-Z]{3}\d\d_VG_[A-Z]\d_t\d\d',
                     data_fname).group()
    p = os.path.split(data_fname)[0]
    data_out_name = os.path.join(p, root + '_contact2D.mat')
    X = data['X'][:, :4]
    C = np.zeros(X.shape[0], dtype='bool')
    window_size = mdl.layers[0].input_shape[1] // 2  # integer division: used as an index
    XX = make_tensor(X, window_size)
    C = mdl.predict_classes(XX)
    C = C.astype('bool')
    data_struct = {'C': C, 'X': X, 'tip': data['tip']}
    sio.savemat(data_out_name, data_struct)
Example #24
    def save_spm_mat_for_1st_level_glm(self, mat_file, session=0):
        ordered_names = sorted(self.stimOnsets.keys())
        to_save = {
            'onsets' : np.array([self.stimOnsets[n][session] \
                                     for n in  ordered_names],
                                dtype=object),
            'names' : np.array(ordered_names, dtype=object),
            'durations' : np.array([self.stimDurations[n][session] \
                                        for n in  ordered_names], dtype=object),
            }

        # print 'to_save:'
        # print to_save
        # print 'stimOnsets:', self.stimOnsets
        savemat(mat_file, to_save, oned_as='row')
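
The dtype=object arrays are what make savemat emit MATLAB cell arrays, which is the layout SPM expects for ragged per-condition onsets and durations. A standalone sketch:

import numpy as np
from scipy.io import savemat

onsets = np.array([np.array([0.0, 10.0, 20.0]),   # condition A
                   np.array([5.0, 15.0])],        # condition B (different length)
                  dtype=object)
names = np.array(['condA', 'condB'], dtype=object)
savemat('conditions.mat', {'onsets': onsets, 'names': names}, oned_as='row')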
Example #25
def matlab_get_smooth_circ():
    for f in glob.glob(os.path.join(p_in, 'rat*.pkl')):
        print(os.path.basename(f))
        fid = PIO(f)
        blk = fid.read_block()
        M = get_var(blk)
        MB, MD = get_MB_MD(M)
        for unit in blk.channel_indexes[-1].units:
            unit_num = int(unit.name[-1])
            r, b = get_rate_b(blk, unit_num)
            root = get_root(blk, unit_num)
            output_fname = 'circ_stats_{}.mat'.format(root)

            w, edges = PD_fitting(MD, r)
            save_dict = {'w': w, 'alpha': edges[:-1], 'root': root}
            savemat(os.path.join(p_out, output_fname), save_dict)
Example #26
def _merge_conmats(matfile_list, recon_args, outfile):
    """Merge the many matfiles output by dsi studio and ensure they conform"""
    connectivity_values = {}

    for matfile, (atlas_name, atlas_config, tck_file, ifargs) in zip(matfile_list, recon_args):
        labels = np.array(atlas_config['node_ids']).astype(int)
        connectivity_values[atlas_name + "_region_ids"] = labels
        connectivity_values[atlas_name + "_region_labels"] = np.array(atlas_config['node_names'])
        measure_name = atlas_name + '_' + ifargs['stat_edge']
        if isdefined(ifargs['length_scale']):
            measure_name += "_" + ifargs['length_scale']
        if isdefined(ifargs['scale_invnodevol']):
            measure_name += "_invroiscale"
        connectivity_values[measure_name + "_connectivity"] = np.loadtxt(matfile)
        connectivity_values[measure_name + "_tck"] = tck_file
        connectivity_values[measure_name + "_image"] = atlas_config['dwi_resolution_mif']
    savemat(outfile, connectivity_values, do_compression=True)
Example #27
def smoothed_best():
    df = pd.read_csv(min_entropy, index_col='id')
    smooth_vals = np.arange(5, 100, 10).tolist()
    best_smooth = df.mode(axis=1)[0]
    best_idx = [smooth_vals.index(x) for x in best_smooth]
    best_idx = pd.DataFrame({'idx': best_idx}, index=best_smooth.index)

    for f in glob.glob(os.path.join(p_load, '*NEO.h5')):
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                if root not in best_idx.index:
                    print('{} not found in best smoothing derivative data'.
                          format(root))
                    continue
                outname = os.path.join(
                    p_save, 'best_smoothing_deriv',
                    '{}_best_smooth_pillowX.mat'.format(root))
                X = GLM.create_design_matrix(blk, varlist)
                smoothing_to_use = best_idx.loc[root][0]

                Xdot = GLM.get_deriv(blk,
                                     blk_smooth,
                                     varlist,
                                     smoothing=[smoothing_to_use])[0]
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num)

                sio.savemat(
                    outname, {
                        'X': X,
                        'y': y,
                        'cbool': cbool,
                        'smooth': best_smooth.loc[root],
                        'arclengths': arclengths
                    })
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
Example #28
    def save_spm_mat_for_1st_level_glm(self, mat_file, session=0):
        ordered_names = sorted(self.stimOnsets.keys())
        to_save = {
            'onsets':
            np.array([self.stimOnsets[n][session] for n in ordered_names],
                     dtype=object),
            'names':
            np.array(ordered_names, dtype=object),
            'durations':
            np.array([self.stimDurations[n][session] for n in ordered_names],
                     dtype=object),
        }

        # print 'to_save:'
        # print to_save
        # print 'stimOnsets:', self.stimOnsets
        savemat(mat_file, to_save, oned_as='row')
Example #29
def main(argv):

    # Set defaults:
    n_components_embedding = 25
    comp_min = 2
    comp_max = 20 + 1
    varname = 'data'
    filename = './test'

    # Import files
    f = h5py.File(('%s.mat' % filename), 'r')
    dataCorr = np.array(f.get('%s' % varname))

    # Prep matrix
    K = (dataCorr + 1) / 2.
    v = np.sqrt(np.sum(K, axis=1))
    A = K / (v[:, None] * v[None, :])
    del K
    A = np.squeeze(A * [A > 0])

    # Run embedding
    lambdas, vectors = eigsh(A, k=n_components_embedding)
    lambdas = lambdas[::-1]
    vectors = vectors[:, ::-1]
    psi = vectors / vectors[:, 0][:, None]
    lambdas = lambdas[1:] / (1 - lambdas[1:])
    embedding = psi[:, 1:(n_components_embedding +
                          1)] * lambdas[:n_components_embedding][None, :]

    # Run kmeans clustering

    def kmeans(embedding, n_components):
        est = KMeans(n_clusters=n_components,
                     n_jobs=-1,
                     init='k-means++',
                     n_init=300)
        est.fit_transform(embedding)
        labels = est.labels_
        data = labels.astype(float)
        return data

    results = list()
    for n_components in range(comp_min, comp_max):
        results.append(kmeans(embedding, n_components))

    savemat(('%s_results.mat' % filename), {'results': results})
Example #30
    def _post_run_hook(self, runtime):
        atlas_config = self.inputs.atlas_config
        atlas_name = self.inputs.atlas_name

        # Aggregate the connectivity/network data from DSI Studio
        official_labels = np.array(atlas_config['node_ids']).astype(int)
        connectivity_data = {
            atlas_name + "_region_ids": official_labels,
            atlas_name + "_region_labels": np.array(atlas_config['node_names'])
        }

        # get the connectivity matrix
        prefix = atlas_name + "_" + self.inputs.measure
        connectivity_data[prefix + "_connectivity"] = np.loadtxt(
            self.inputs.out_file)
        merged_matfile = op.join(runtime.cwd, prefix + "_connectivity.mat")
        savemat(merged_matfile, connectivity_data, long_field_names=True)
        return runtime
Example #31
    def object_masks(self):

        ob_fn = os.path.join(self.ds.path, "cpmc", "MySegmentsMat", self.name, "obj_gt_masks.mat")
        if os.path.exists(ob_fn):
            data = ml.loadmat(ob_fn)
            masks = data["masks"]
            classes = data["classes"]

        else:
            masks = segments.seperate_gt_segments(self.object_ground_truth)
            gt = self.ground_truth
            dists, ids = segments.get_class_distributions(gt, masks)
            classes = np.argmax(dists, axis=0)
            assert masks.shape[2] == len(ids)
            ml.savemat(ob_fn, {"masks": masks, "classes": classes})

        assert masks.shape[2] == classes.size
        return masks, classes
Example #32
 def regions_pixeldist(self):
     """ returns the regions distributions for the given image."""
     regiondist_fn = os.path.join(self.ds.path, 'cpmc', 'MySegmentsMat', self.name, 'region_dists.mat')    
     if os.path.exists(regiondist_fn):
         data = ml.loadmat(regiondist_fn)
         dist = data['dists']
         segm_ids = data["region_ids"]
     else:    
         regions = self.regions
         n_reg = np.max(regions) + 1
         gt = self.ground_truth
         prop = np.zeros((regions.shape[0], regions.shape[1], n_reg), dtype='bool')
         for i in range(n_reg):
             prop[:, :, i] = (regions == i)
         dist, segm_ids = segments.get_class_distributions(gt, prop, num_classes=self.ds.classnum)
         ml.savemat(regiondist_fn, {'dists':dist, 'region_ids': segm_ids})
     segm_ids = list(np.array(segm_ids).ravel())
     return dist, segm_ids
Example #33
 def object_masks(self):
     
     ob_fn = os.path.join(self.ds.path, 'cpmc', 'MySegmentsMat', self.name, 'obj_gt_masks.mat')    
     if os.path.exists(ob_fn):
         data = ml.loadmat(ob_fn)
         masks = data['masks']
         classes = data["classes"]
         
     else:    
         masks = segments.seperate_gt_segments(self.object_ground_truth)
         gt = self.ground_truth
         dists, ids = segments.get_class_distributions(gt, masks)
         classes = np.argmax(dists, axis=0)
         assert masks.shape[2] == len(ids)
         ml.savemat(ob_fn, {'masks':masks, 'classes': classes})
     
     assert masks.shape[2] == classes.size
     return masks, classes
Example #34
 def regions_pixeldist(self):
     """ returns the regions distributions for the given image."""
     regiondist_fn = os.path.join(self.ds.path, "cpmc", "MySegmentsMat", self.name, "region_dists.mat")
     if os.path.exists(regiondist_fn):
         data = ml.loadmat(regiondist_fn)
         dist = data["dists"]
         segm_ids = data["region_ids"]
     else:
         regions = self.regions
         n_reg = np.max(regions) + 1
         gt = self.ground_truth
         prop = np.zeros((regions.shape[0], regions.shape[1], n_reg), dtype="bool")
         for i in range(n_reg):
             prop[:, :, i] = regions == i
         dist, segm_ids = segments.get_class_distributions(gt, prop, num_classes=self.ds.classnum)
         ml.savemat(regiondist_fn, {"dists": dist, "region_ids": segm_ids})
     segm_ids = list(np.array(segm_ids).ravel())
     return dist, segm_ids
Example #35
  def save_to_matlab_file(self, filename, format='5'):
    """
    Saves shape measurements to Matlab's .mat format.

    This uses the `scipy.io.matlab.savemat` function.  See that functions documentation for
    details on input options.

    >>> table = MeasurementsTable( "data/testing/seq140[autotraj].measurements" )
    >>> table.save_to_matlab_file( "data/testing/trash.mat" )    # doctest:+ELLIPSIS 
    <...MeasurementsTable object at ...>
    """
    from scipy.io.matlab import savemat
    kwargs = locals().copy()
    for k in [ 'self', 'savemat', 'filename' ]:
      del kwargs[k]
    savemat( filename, 
            { 'measurements': self.asarray() }, 
            **kwargs)
    return self
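
Several examples in this listing pass oned_as; it decides whether a 1-D numpy array becomes a 1xN or Nx1 matrix on the MATLAB side. A quick demonstration:

import numpy as np
from scipy.io import savemat, loadmat

v = np.arange(3)
savemat('row.mat', {'v': v}, oned_as='row')
savemat('col.mat', {'v': v}, oned_as='column')
print(loadmat('row.mat')['v'].shape)   # (1, 3)
print(loadmat('col.mat')['v'].shape)   # (3, 1)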
Example #36
def smoothed_mechanics():
    """
    use this function to grab the data from the smoothed mechanics and the
    derivative of the same
    """

    f_arclength = '/projects/p30144/_VG3D/deflections/direction_arclength_FR_group_data.csv'
    f_list = glob.glob(os.path.join(p_load, '*NEO.h5'))
    f_list.sort()

    for f in f_list:
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                outname = os.path.join(p_save,
                                       '{}_smooth_mechanicsX.mat'.format(root))

                Xdot, X = GLM.get_deriv(blk,
                                        blk_smooth,
                                        varlist,
                                        smoothing=[5])
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk,
                                                unit_num,
                                                fname=f_arclength)

                sio.savemat(
                    outname, {
                        'X': X,
                        'y': y,
                        'cbool': cbool,
                        'smooth': 55,
                        'arclengths': arclengths
                    })
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
Example #37
def main(argv):
    
    # Set defaults:
    n_components_embedding = 25
    comp_min = 2
    comp_max = 20 + 1
    varname = 'data'
    filename = './test'
    
    # Import files
    f = h5py.File(('%s.mat' % filename),'r')
    dataCorr = np.array(f.get('%s' % varname))

    # Prep matrix
    K = (dataCorr + 1) / 2.  
    v = np.sqrt(np.sum(K, axis=1)) 
    A = K/(v[:, None] * v[None, :])  
    del K
    A = np.squeeze(A * [A > 0])

    # Run embedding
    lambdas, vectors = eigsh(A, k=n_components_embedding)   
    lambdas = lambdas[::-1]  
    vectors = vectors[:, ::-1]  
    psi = vectors/vectors[:, 0][:, None]  
    lambdas = lambdas[1:] / (1 - lambdas[1:])  
    embedding = psi[:, 1:(n_components_embedding + 1)] * lambdas[:n_components_embedding][None, :]

    # Run kmeans clustering

    def kmeans(embedding, n_components):
        est = KMeans(n_clusters=n_components, n_jobs=-1, init='k-means++', n_init=300)
        est.fit_transform(embedding)
        labels = est.labels_
        data = labels.astype(float)
        return data

    results = list()
    for n_components in range(comp_min, comp_max):
        results.append(kmeans(embedding, n_components))

    savemat(('%s_results.mat' % filename), {'results':results})
Example #38
def main():
    fname = sys.argv[1]
    model_name = sys.argv[2]

    data = sio.loadmat(fname)
    tip = data['tip']
    tip = cleanVar(tip)

    model = loadModel(model_name)
    window_size = model.layers[0].input_shape[1] // 2

    X = make_tensor(tip, window_size)
    print('\nPredicting Contact Variable...')
    C = model.predict_classes(X)
    data['C'] = C[:, model.output_shape[1] // 2]  # index must be an int
    C = C.astype('bool')
    data['tip_scale'] = tip
    print('\nSaving Data to .mat file\n')
    sio.savemat(fname, data, oned_as='column')
    print('C Vector Saved\n')
Example #39
def cleanTip(filename, outname):
    dat = sio.loadmat(filename)
    t3d = dat['tracked_3D']

    tip = np.empty((t3d.shape[-1], 3))
    tip[:] = np.nan
    for ii in range(t3d['x'].shape[-1]):
        if len(t3d['x'][0, ii]) > 0:
            if len(t3d['x'][0, ii][0]) > 1:
                x = t3d['x'][0, ii][0, -1]
                y = t3d['y'][0, ii][0, -1]
                z = t3d['z'][0, ii][0, -1]
                tip[ii, :] = np.hstack((x, y, z))

    ########### This section is for de novo cleaning ############
    plt.title('Click to the left of the first contact')
    l = int(np.round(tip.shape[0] / 5))  # slice bounds must be ints
    plt.plot(tip[:l, :])
    x, y = plt.ginput(1, timeout=0)[0]
    plt.close('all')
    x = int(np.round(x))
    ######### ==================== ################

    # ########## This section is for if we have already manually found the start #############
    # dat_smooth = sio.loadmat(outname)
    # tip_smooth = dat_smooth['tip']
    # x = np.where(np.diff(tip_smooth[:,0])!=0)[0][0]
    # ########## -------------------- ################

    tip[:x, :] = np.nanmedian(tip, axis=0)
    tip[-1, :] = np.nanmedian(tip, axis=0)
    tip_interp = pd.DataFrame(tip).interpolate(method=INTERP_METHOD,
                                               limit=INTERP_WINDOW).values
    tip_interp_med = np.empty_like(tip_interp)

    scaler = RobustScaler()
    imp = Imputer(strategy='median')
    tip_interp_imp = imp.fit_transform(tip_interp)
    tip_out = scaler.fit_transform(tip_interp_imp)
    save_dict = {'tip': tip_out}
    sio.savemat(outname, save_dict)
Example #40
def main(fname,use_var='M',outlier_thresh=0.05,plot_tgl=True):
    fname_out = os.path.splitext(fname)[0] + '_outliers'
    if os.path.isfile(fname_out+'.mat'):
        print('File already completed. Delete outlier file if you want to recompute')
        return 0

    print('Using variable: {}\t Using outlier_thresh={}'.format(use_var,outlier_thresh))
    print('Loading {} ...'.format(os.path.basename(fname)))
    fid = sio.loadmat(fname)
    print('Loaded!')
    var = fid[use_var]
    cbool = fid['C']
    use_flags,outliers = cleanup(var,cbool,outlier_thresh=outlier_thresh,plot_tgl=plot_tgl)
    if type(use_flags)==int and use_flags == 1:
        return 1

    print('Saving to {}...'.format(fname_out))
    sio.savemat(fname_out,{'use_flags':use_flags,'outliers':outliers})
    print('Saved')

    return 0
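
fname_out above has no extension, which is fine: savemat appends '.mat' to a bare filename by default (appendmat=True). A sketch:

from scipy.io import savemat

savemat('results_outliers', {'use_flags': [1, 0, 1]})                       # writes results_outliers.mat
savemat('results_outliers.bin', {'use_flags': [1, 0, 1]}, appendmat=False)  # writes exactly this name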
Example #41
def stack_files(scanfile, conffile, outdir, modulus=10, eiger=0, matfile=1, scannumber=-1):
    """Create stacks from scans read from YAML-file `scanfile`.

    If `matfile` is true (default), write output stacks to a MAT-file.
    Otherwise writes (slowly) to a YAML-file.
    """
    if not os.path.isdir(outdir):
        # FIXME: Create the directory
        raise IOError("Output directory does not exist.")
    conf = read_experiment_conf(conffile)
    specscans = read_spec(conf['Specfile'])
    radind = read_matclean(conf['Indfile'])['radind']
    q = radind['q']
    if scannumber > 0:
        scannos = [ scannumber ]
    else:
        scans = read_yaml(scanfile)
        scannos = sorted(scans.keys())
    for scanno in scannos:
        outname = "s%03d" % scanno
        if eiger:
            stack, fnames, dvals = \
                stack_eiger(conf, scanno, specscans, radind, modulus)
        else:
            stack, fnames, dvals = \
                stack_scan(conf, scanno, specscans, radind, modulus)
        stack = stack.squeeze()
        if matfile:
            outfn = outdir+'/'+outname + ".mat"
            savemat(outfn, {outname: stack}, do_compression=True, oned_as='row')
            print("Wrote output to '%s'." % outfn)
        else:
            for pos in range(stack.shape[0]):
                outfn = outname+'.p%02d.all.ydat' % pos
                write_stack_ydat(outdir+'/'+outfn, stack[pos], fnames[pos], dvals[pos], conf)
                print("Wrote output to '%s'." % outfn)
Example #42
            chip_name, trg, inp = line.strip().split()
            print(chip_name, trg, inp)

            profile_matrix_trg,mapped_reads_trg=calculate_profile_matrix_bed_bam(bed_filename,trg,window_size=WINDOW_SIZE,use_strand=True)
            profile_matrix_inp,mapped_reads_inp=calculate_profile_matrix_bed_bam(bed_filename,inp,window_size=WINDOW_SIZE,use_strand=True)

            chip_profiles[chip_name]=    (profile_matrix_trg/float(mapped_reads_trg)).mean(0)  *1000000   
            chip_regions[chip_name]=    profile_matrix_trg/float(mapped_reads_trg)  *1000000  

            chip_profiles_bg[chip_name]= (profile_matrix_inp/float(mapped_reads_inp)).mean(0) *1000000
            chip_regions_bg[chip_name]= profile_matrix_inp/float(mapped_reads_inp) *1000000
 
        else:
            chip_name, trg = line.strip().split()
            print(chip_name, trg)


            profile_matrix_trg,mapped_reads_trg=calculate_profile_matrix_bed_bam(bed_filename,trg,window_size=WINDOW_SIZE, use_strand=True)
                        
            chip_profiles[chip_name]=(profile_matrix_trg/float(mapped_reads_trg)).mean(0)*1000000
            chip_regions[chip_name]=(profile_matrix_trg/float(mapped_reads_trg))*1000000

        print(chip_profiles[chip_name])

    if USE_INPUT:
        savemat(output_filename,{'chip_profiles':chip_profiles,'chip_regions':chip_regions,'chip_profiles_bg':chip_profiles_bg,'chip_regions_bg':chip_regions_bg,'window_size':WINDOW_SIZE}) 
    else:
        savemat(output_filename,{'chip_profiles':chip_profiles,'chip_regions':chip_regions,'window_size':WINDOW_SIZE}) 
        
Example #43
random_scores = np.zeros((len(input_coordinates), random_factor))


for idx,c in enumerate(input_coordinates):
    print(idx, c)
    try:
        input_scores[idx]=read_from_wig(c,wig_path,wig_mask=wig_mask,only_average=True)[1]

        if labels[idx] ==0:
            random_scores[idx,:]=extract_random_scores(c,exons_coordinates,random_factor)

        elif labels[idx]==1:
            random_scores[idx,:]=extract_random_scores(c,introns_coordinates,random_factor)

        else:
            random_scores[idx,:]=extract_random_scores(c,intergenic_coordinates,random_factor)
    except Exception:
        print('problem in:', idx, c, input_scores[idx], labels[idx])
    

np.save(prefix_to_add+'_scores',input_scores)
np.save(prefix_to_add+'random_scores',random_scores)
np.save(prefix_to_add+'labels',labels)
savemat(prefix_to_add+'risultati_conservation',{prefix_to_add+'labels':labels,prefix_to_add+'random_scores':random_scores,prefix_to_add+'_scores':input_scores})

Example #44
def save_annotation_track_matlab(self, filename):
    savemat(filename, {'annotation_track': self.annotation_track, 'mapping': self.annotation_names_to_prime})
    print('Annotation track saved to:', filename)
Example #45
for ilind, lind in enumerate(linds):
    shape = (7, len(t), len(lind))
    Fo = np.zeros(shape, dtype=float)
    Uo = Fo.copy()
    Vo = Fo.copy()

    flags = [[0, 0, 0, 0],
             [0, 1, 0, 0],
             [0, 0, 0, 1],
             [0, 1, 0, 1],
             [1, 0, 0, 0],
             [0, 0, 1, 0],
             [1, 0, 1, 0]]

    for i, flag in enumerate(flags):
        print(flag)
        try:
            F, U, V = octave.ut_FUV(t, t0, lind, lat, flag)
            Fo[i] = F
            Uo[i] = U
            Vo[i] = V
        except Oct2PyError:
            print('failed')

    save_args = dict(t=t, t0=t0, lat=lat, lind=lind, flags=flags,
                     Fo=Fo, Uo=Uo, Vo=Vo)

    np.savez('FUV%d.npz' % ilind, **save_args)

    savemat('FUV%d.mat' % ilind, save_args)
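
Saving the same dict as both .npz and .mat makes the format difference easy to see when loading back: loadmat promotes everything to at least 2-D, while np.load preserves shapes. A hedged sketch for the first index:

import numpy as np
from scipy.io import loadmat

npz = np.load('FUV0.npz')
mat = loadmat('FUV0.mat')
print(npz['t0'], mat['t0'].shape)       # scalar vs. a (1, 1) array
print(npz['Fo'].shape, mat['Fo'].shape)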
Example #46
def save_connectivity_matrices(scan):
    out_data = {}
    opath = scan.pkl_trk_path + ".mat"
    for lnum, label_source in enumerate(scan.track_label_items):
        # Load the mask
        # File containing the corresponding label vector
        npy_path = label_source.numpy_path
        if not os.path.exists(npy_path):
            raise ValueError(
                "\t\t++ [%s] %s does not exist" % (sid, npy_path))
        conn_ids = np.load(npy_path)
        
        # Make a prefix
        prefix = "_".join(
            ["%s_%s" % (k,v) for k,v in label_source.parameters.iteritems()]) + "_"

        # Get the region labels from the parcellation
        graphml = graphml_from_label_source(label_source)
        if graphml is None:
            raise ValueError("\t\t++ No graphml exists")
            
        graphml_data = lookup_cache[label_source.parameters['scale']] 
        regions = graphml_data['regions']
        
        # Empty connectivity matrix
        out_data[prefix+'streamline_count'] = np.zeros((len(regions),len(regions)))
        connectivity = out_data[prefix+'streamline_count']
        scalars = {}
        for scalar in scan.track_scalar_items:
            # Load the actual array
            scalars[prefix + scalar.name] = np.load(scalar.numpy_path)
            # Empty matrix for 
            out_data[prefix + scalar.name] = np.zeros((len(regions),len(regions)))
            out_data[prefix + scalar.name + "_sd"] = np.zeros((len(regions),len(regions)))
            
        # extract the streamline lengths and put them into an array
        out_data[prefix+'length'] = np.zeros((len(regions),len(regions)))
        out_data[prefix+'length_sd'] = np.zeros((len(regions),len(regions)))
        streams, hdr = nib.trackvis.read(scan.trk_file)
        lengths = np.array([len(arr) for arr in streams])
        
        # extract the streamline scalar index
        for conn, (_i, _j) in graphml_data["index_to_region_pairs"].items():
            i,j = _i-1, _j-1
            indexes = conn_ids == conn
            sl_count = np.sum(indexes)
            connectivity[i,j] = connectivity[j,i] = sl_count
            
            out_data[prefix+'length'][i,j] = out_data[prefix+'length'][j,i] = lengths[indexes].mean()
            out_data[prefix+'length_sd'][i,j] = out_data[prefix+'length_sd'][j,i] = lengths[indexes].std()
            
            # Fill in the average scalar value
            for scalar_name, scalar_data in scalars.items():
                scalar_vals = scalar_data[indexes]
                scalar_mean = scalar_vals.mean()
                out_data[scalar_name][i,j] = out_data[scalar_name][j,i] = scalar_mean
                scalar_std = scalar_vals.std()
                out_data[scalar_name+"_sd"][i,j] = out_data[scalar_name + "_sd"][j,i] = scalar_std
    
    print "saving", opath
    savemat(opath, out_data)
new_data["2"] = data["2"]
new_data["2"] = np.append(new_data["2"], data["4"])
new_data["2"] = np.append(new_data["2"], data["5"])
#new_data["2"] = np.append(new_data["2"], data["14"])
#new_data["2"] = np.append(new_data["2"], data["15"])

#new_data["3"] = data["5"]
#new_data["3"] = np.append(new_data["3"], data["9"])
#new_data["4"] = data["9"]
#new_data["3"] = np.append(new_data["3"], data["15"])
#new_data["3"] = np.append(new_data["3"], data["16"])
#new_data["3"] = np.append(new_data["3"], data["24"])
#new_data["3"] = np.append(new_data["3"], data["26"])
#
#
#
#new_data["4"] = data["17"]
#new_data["4"] = np.append(new_data["4"], data["18"])
#new_data["4"] = np.append(new_data["4"], data["19"])
#new_data["4"] = np.append(new_data["4"], data["20"])
#new_data["4"] = np.append(new_data["4"], data["21"])
#new_data["4"] = np.append(new_data["4"], data["22"])



for key in new_data.keys():
    new_data[key] = np.sort(new_data[key])
matl.savemat(target_path + source_file, new_data)

#splt.plot( new_data["2"] )
Example #48
        previous_interval = st_int
    #print (stims.size, stims_in_series, stims_in_series / stims.size)
    print ("Всего стимуляций в файле %i" % (counter-1))
    plt.title(title + " n stims = " + str(counter-1))
    plt.show(block=True)
    return stimulations
#######################################################################

main_path = '/home/ivan/Data/Ach_full/'
reading_path = main_path + 'source_data_simulations/'
saving_path = main_path + 'discrimination_simulation/'
for wavfile in sorted(os.listdir(reading_path)):
    if (wavfile[0] == "." or os.path.splitext(wavfile)[1] != ".wav"):
        continue
    saving_file = saving_path + os.path.splitext(wavfile)[0] + '_stims_descr.mat'
    if (os.path.isfile(saving_file)):
        continue
    wavcontent = read(reading_path + wavfile)
    fd = wavcontent[0]
    wavdata = wavcontent[1]
    
    wavdata = wavdata.astype(float) 
    wavdata =  2 * ( wavdata - wavdata.min() ) / (wavdata.max() - wavdata.min()) - 1  
    

    print ('###############################################')
    print (wavfile)
    stimulations = descriminate_stimulations(wavdata, fd, os.path.splitext(wavfile)[0])
        
    savemat(saving_file, stimulations)