def setup_module():
    temp_dir = None
    try:
        import scipy.io
        temp_dir = tempfile.mkdtemp()
        for i in range(0, len(mat_files)):
            mat_files[i] = os.path.join(temp_dir, mat_files[i])
        scipy.io.savemat(file_name=mat_files[0], mdict=to_julia)
        hdf5storage.savemat(file_name=mat_files[1], mdict=to_julia)

        #julia_command(script_names[0], mat_files[0], mat_files[2])
        julia_command(script_names[0], mat_files[1], mat_files[3])

        #hdf5storage.loadmat(file_name=mat_files[2],
        #                    mdict=from_julia_v7_to_v7p3)
        hdf5storage.loadmat(file_name=mat_files[3],
                            mdict=from_julia_v7p3_to_v7p3)
    except:
        pass
    else:
        ran_julia_successful[0] = True
    finally:
        for name in mat_files:
            if os.path.exists(name):
                os.remove(name)
        if temp_dir is not None and os.path.exists(temp_dir):
            os.rmdir(temp_dir)
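
# A minimal round-trip sketch (not part of the fixture) of the savemat/loadmat
# pattern this module exercises; file and variable names are hypothetical:
import numpy as np
import hdf5storage

to_save = {'a': np.arange(4.0)}
hdf5storage.savemat('roundtrip.mat', to_save, format='7.3')
back = {}
hdf5storage.loadmat('roundtrip.mat', mdict=back)
assert np.array_equal(back['a'], to_save['a'])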
Example no. 2
def save_frames(filename, data, save_options, all_param_fields):
    savemat(filename + ".mat", data, format='7.3', store_python_metadata=True)

    if not save_options.save_needed:
        return

    for data_pair in data.items():
        if data_pair[0] == "frame_results":
            continue

        data_name = data_pair[0]

        save_name = filename
        if "_" in data_name:
            save_name += "_" + data_name.split("_")[1]

        print "Saving", save_name

        images = data_pair[1]

        for (i, image_orig), param_fields in zip(enumerate(images), all_param_fields):
            image = image_orig.reshape((111,112),order='F')

            if save_options.do_images:
                fig = plt.figure() 
                im_display = plt.imshow(image, cmap=plt.cm.Greys_r, interpolation='nearest')

                ax=fig.add_subplot(111)
                ax.set_xlim([0, 112])
                ax.set_ylim([112, 0])
                
                if save_options.do_render and save_options.render_name == data_name:
                    if param_fields.model_type != 0:
                        ax.plot([param_fields.pred_x, param_fields.pred_x], [max(0, param_fields.pred_y - 10), min(111, param_fields.pred_y + 10)], 'r-', linewidth=1)
                        ax.plot([max(0, param_fields.pred_x - 10), min(111, param_fields.pred_x + 10)], [param_fields.pred_y, param_fields.pred_y], 'r-', linewidth=1)

                        if param_fields.model_type == 1 or param_fields.model_type == 2:
                            ax.plot([param_fields.cider_col, param_fields.cider_col], [0, 112], 'b-', linewidth=1)
                            ax.plot([0, 112], [param_fields.cider_row, param_fields.cider_row], 'b-', linewidth=1)
                            circle = plt.Circle((param_fields.pred_x, param_fields.pred_y), param_fields.cider_radius, fill=False, linewidth=1, color='g')
                            ax.add_artist(circle)

                fig.set_size_inches(1, 1)
                frame1 = plt.gca()
                frame1.axes.get_xaxis().set_visible(False)
                frame1.axes.get_yaxis().set_visible(False)
                plt.axis('off')
                plt.savefig(save_name + "_" + ("%06d" % i) + ".png", dpi=112,bbox_inches='tight',pad_inches=0)

                plt.close()

            if save_options.do_text:
                text_file = open(save_name + "_" + ("%06d" % i) + ".txt", 'w')
                for line in image:
                    for item in line:
                        text_file.write(str(int(item)) + ' ')
                    text_file.write('\n')
                text_file.close()

        print()
Example no. 3
def save_calib_pred(imset, idx, name, confident_masks, confidence_maps):
    global ds_path

    pred_dir = join(ds_path, 'deeplab_prediction', imset, name)
    if not isdir(pred_dir):
        os.mkdir(pred_dir)

    mats = {'masks': confident_masks, 'conf_maps': confidence_maps}

    savemat(join(pred_dir, imset + '_%06d_calib_pred.mat' % idx), mats)
Example no. 4
def save_multiarrays(fname, arrays):
    '''Save arrays (dense).'''

    save_dict = dict(arrays)
    hdf5storage.savemat(fname,
                        save_dict,
                        format='7.3',
                        oned_as='column',
                        store_python_metadata=True)

    return None
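
# Usage sketch (hypothetical arrays; assumes numpy is imported as np):
arrays = {'x': np.random.rand(3, 4), 'y': np.arange(5.0)}
save_multiarrays('arrays.mat', arrays)
loaded = hdf5storage.loadmat('arrays.mat')
assert np.allclose(loaded['x'], arrays['x'])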
Example no. 5
 def write_model(self, workdir, params):
     for param in self.params:
         if param not in params:
             raise FclassError(
                 'Parameter with name %s required by SeisCL\n' % param)
     h5mat.savemat(workdir + self.file_model,
                   params,
                   appendmat=False,
                   format='7.3',
                   store_python_metadata=True,
                   truncate_existing=True)
Example no. 6
def setup():
    global num_img, img_size, nc

    os.mkdir('calib_test')

    for idx in range(1, num_img+1):
        logits = np.random.rand(img_size, img_size, nc).astype(np.float32)
        logits[:,::2,0] = 1
        logits[::2,:,1] = 0
        savemat('calib_test/calib_test_%06d_logits.mat' % idx, {
            'logits_img': logits
        })
Example no. 7
 def set_backward(self, workdir, residuals):
     data = {}
     for n, word in enumerate(self.to_load_names):
         data[word + "res"] = residuals[n]
     h5mat.savemat(workdir + self.file_res,
                   data,
                   appendmat=False,
                   format='7.3',
                   store_python_metadata=True,
                   truncate_existing=True)
     self.csts['gradout'] = 1
     self.write_csts(workdir)
Example no. 8
def decode_matlab_file(eng,
                       code_type,
                       input_llr,
                       ref_bits,
                       num_snr,
                       num_codewords,
                       mode='soft'):
    # Static path
    # !!!: This is the full path of the decoder .m function
    decoder_path = 'InsertPathToMatlabDecoder.m'

    # Draw a random integer (independent of global seed)
    rg = Generator(PCG64())
    random_idx = int(np.round(1e10 * rg.standard_normal()))
    # Filenames
    filename_in = 'scratch/in%d.mat' % random_idx
    filename_out = 'scratch/out%d.mat' % random_idx

    # Copy
    input_llr = np.copy(input_llr)
    # Restore and reshape
    if mode == 'soft':
        input_llr = 2 * np.arctanh(input_llr)
    input_llr = np.reshape(input_llr, (num_snr, num_codewords, -1))

    # Write to input file
    hdf5storage.savemat(filename_in, {
        'llr_input': input_llr,
        'code_type': code_type
    })

    # Create input dictionary
    args = {'filename_in': filename_in, 'filename_out': filename_out}
    # Call decoder
    _ = eng.run_func(decoder_path, args)

    # Read output file
    contents = hdf5storage.loadmat(filename_out)
    rec_bits = contents['bits_out']

    # Convert to arrays
    rec_bits = np.asarray(rec_bits)

    # Compute error rates
    bler = np.mean(np.any(rec_bits != ref_bits, axis=-1), axis=-1)
    ber = np.mean(np.mean(rec_bits != ref_bits, axis=-1), axis=-1)

    # Delete files
    os.remove(filename_in)
    os.remove(filename_out)

    return bler, ber, rec_bits
Example no. 9
 def write_data(self, workdir, data):
     if 'src_pos' not in data:
         data['src_pos'] = self.src_pos
     if 'rec_pos' not in data:
         data['rec_pos'] = self.rec_pos
     if 'src' not in data:
         data['src'] = self.src
     h5mat.savemat(self.file_din,
                   data,
                   appendmat=False,
                   format='7.3',
                   store_python_metadata=True,
                   truncate_existing=True)
Example no. 10
def run(args):

    domain = args.domain
    kernel = args.kernel
    device = args.device
    rank = int(args.rank)
    maxIter = int(args.maxIter)
    interval = int(args.interval)

    print('Experiment summary: ')
    print(' - Domain name:', domain)
    print(' - Device id:', device)
    print(' - Cov Func:', kernel)
    print(' - rank:', rank)
    print(' - maxIter:', maxIter)

    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = device
    print('Using GPU', device)

    res_path = 'results'

    if not os.path.exists(res_path):
        os.makedirs(res_path)

    trial = 1
    data = process(domain)

    signature = domain + '_rank' + str(rank) + '_t' + str(trial)
    cfg = {
        'data': data,
        'signature': signature,
        'jitter': 1e-3,
        'init_std': 1,
        'epochs': maxIter,
        'interval': interval,
        'alpha': 1e-3,
        'ns': 100,
        'Q': rank,
        'kernel': kernel,
    }

    try:
        model = SGPRN(cfg, label=signature, init_type=2)
        res = model.fit()
        cfg['result'] = res
        res_save_path = os.path.join(res_path, signature)
        savemat(res_save_path, cfg, format='7.3')
        print('results saved to', res_save_path + '.mat')
    except Exception as e:
        print('Exception occurred during training:', e)
Example no. 11
def setup_module():
    teardown_module()
    matlab_command = "run('" + script_names[0] + "')"
    subprocess.check_call(
        ['matlab', '-nosplash', '-nodesktop', '-nojvm', '-r', matlab_command])
    scipy.io.loadmat(file_name=mat_files[1], mdict=types_v7)
    hdf5storage.loadmat(file_name=mat_files[0], mdict=types_v7p3)

    hdf5storage.savemat(file_name=mat_files[2], mdict=types_v7p3)
    matlab_command = "run('" + script_names[1] + "')"
    subprocess.check_call(
        ['matlab', '-nosplash', '-nodesktop', '-nojvm', '-r', matlab_command])
    scipy.io.loadmat(file_name=mat_files[3], mdict=python_v7)
    hdf5storage.loadmat(file_name=mat_files[2], mdict=python_v7p3)
Example no. 12
 def save(self, fname, key='data', dtype=np.float64):
     hdf5storage.savemat(fname, {
         key: {
             u'__bdpy_sparse_arrray': True,
             u'index': self.__index,
             u'value': self.__value.astype(dtype),
             u'shape': self.__shape,
             u'background': self.__background
         }
     },
                         format='7.3',
                         oned_as='column',
                         store_python_metadata=True)
     return None
Example no. 14
def main(argv):
    parser = argparse.ArgumentParser()
    # Positional arguments are required by default (argparse rejects
    # required=True for them), and the name must be valid so it can be
    # read back as args.outname below.
    parser.add_argument("conffile", help="config file")
    parser.add_argument("confname", help="conf name for importing")
    parser.add_argument("net_name", help="Name of the network to classify with")
    parser.add_argument("movie", help="movie to classify")
    parser.add_argument("outname", help="file to save results to")

    args = parser.parse_args()
    imp_mod = importlib.import_module(args.conffile)
    conf = imp_mod.__dict__[args.confname]
    oname = re.sub('!', '__', conf.getexpname(args.movie))

    self = PoseUNet.PoseUNet(conf, args.net_name)
    sess = self.init_net_meta(0, True)

    predList = self.classify_movie(args.movie, sess, flipud=True)

    cap = cv2.VideoCapture(args.movie)
    height = int(cap.get(cvc.FRAME_HEIGHT))
    width = int(cap.get(cvc.FRAME_WIDTH))
    rescale = conf.unet_rescale
    orig_crop_loc = conf.cropLoc[(height, width)]
    crop_loc = [int(x / rescale) for x in orig_crop_loc]
    end_pad = [
        int((height - conf.imsz[0]) / rescale) - crop_loc[0],
        int((width - conf.imsz[1]) / rescale) - crop_loc[1]
    ]
    pp = [(0, 0), (crop_loc[0], end_pad[0]), (crop_loc[1], end_pad[1]), (0, 0)]
    predScores = np.pad(predList[1], pp, mode='constant', constant_values=-1.)

    predLocs = predList[0]
    predLocs[:, :, 0] += orig_crop_loc[1]
    predLocs[:, :, 1] += orig_crop_loc[0]

    hdf5storage.savemat(args.outname, {
        'locs': predLocs,
        'scores': predScores,
        'expname': args.movie
    },
                        appendmat=False,
                        truncate_existing=True)

    print('Done Detecting:%s' % oname)
Example no. 15
def save_array(fname, array, key='data', dtype=np.float64, sparse=False):
    '''Save an array (dense or sparse).'''

    if sparse:
        # Save as a SparseArray
        s_ary = SparseArray(array.astype(dtype))
        s_ary.save(fname, key=key, dtype=dtype)
    else:
        # Save as a dense array
        hdf5storage.savemat(fname, {key: array.astype(dtype)},
                            format='7.3',
                            oned_as='column',
                            store_python_metadata=True)

    return None
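
# Usage sketch (hypothetical data; the sparse branch assumes the SparseArray
# class from this codebase, whose save() method appears in Example no. 12):
dense = np.random.rand(10, 10)
save_array('dense.mat', dense)                                # dense path
save_array('sparse.mat', dense * (dense > 0.9), sparse=True)  # sparse path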
Example no. 16
def matSave(directory, basename, data):
    # Create the data directory if it doesn't exist
    if not path.exists(directory):
        os.mkdir(directory, 0o755)
    # Find the first basename<N>.mat that doesn't already exist
    logNumber = 0
    while True:
        logNumber += 1
        logName = directory + '/' + basename + str(logNumber) + '.mat'
        if not path.exists(logName):
            break
    # Saving the data to the log file
    hdf5s.savemat(logName, data)
    print('data saved to:', logName)
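
# Usage sketch (hypothetical data; assumes numpy is imported as np):
matSave('logs', 'run', {'loss': np.array([0.3, 0.2])})
# -> writes logs/run1.mat, then logs/run2.mat on the next call, and so on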
Example no. 17
def process_data(patch_size, stride, mode):
    if mode == 'train':
        print("\nprocess training set ...\n")
        patch_num = 1
        filenames_hyper = glob.glob(
            os.path.join(opt.data_path, 'NTIRE2020_Train_Spectral', '*.mat'))
        filenames_rgb = glob.glob(
            os.path.join(opt.data_path, 'NTIRE2020_Train_Clean', '*.png'))
        filenames_hyper.sort()
        filenames_rgb.sort()
        # for k in range(1):  # make small dataset
        for k in range(len(filenames_hyper)):
            print([filenames_hyper[k], filenames_rgb[k]])
            # load hyperspectral image
            mat = h5py.File(filenames_hyper[k], 'r')
            hyper = np.float32(np.array(mat['cube']))
            hyper = np.transpose(hyper, [0, 2, 1])
            hyper = normalize(hyper, max_val=1., min_val=0.)
            # load rgb image
            rgb = cv2.imread(filenames_rgb[k])  # imread -> BGR model
            rgb = cv2.cvtColor(rgb, cv2.COLOR_BGR2RGB)
            rgb = np.transpose(rgb, [2, 0, 1])
            rgb = normalize(np.float32(rgb), max_val=255., min_val=0.)
            # create patches
            patches_hyper = Im2Patch(hyper, win=patch_size, stride=stride)
            patches_rgb = Im2Patch(rgb, win=patch_size, stride=stride)
            # add data: reassemble the patches
            for j in range(patches_hyper.shape[3]):
                print("generate training sample #%d" % patch_num)
                sub_hyper = patches_hyper[:, :, :, j]
                sub_rgb = patches_rgb[:, :, :, j]

                train_data_path_array = [
                    opt.train_data_path1, opt.train_data_path2,
                    opt.train_data_path3, opt.train_data_path4
                ]
                random.shuffle(train_data_path_array)
                train_data_path = os.path.join(
                    train_data_path_array[0],
                    'train' + str(patch_num) + '.mat')
                # Write both variables in a single call instead of reopening
                # the same file twice
                hdf5storage.savemat(train_data_path,
                                    {'rad': sub_hyper, 'rgb': sub_rgb},
                                    format='7.3')

                patch_num += 1

        print("\ntraining set: # samples %d\n" % (patch_num - 1))
Example no. 18
def convert_saved_to_matlab(name='normal'):
    data = {}
    for split_type in ['easy','hard']:
        fname = '/groups/branson/home/kabram/bransonlab/PoseTF/headTracking/{}_{}_cv_data.p'.format(name,split_type)
        with open(fname, 'rb') as f:
            all_dist, locs, info, predlocs = pickle.load(f)
            data['dist_{}_side'.format(split_type)] = all_dist[0]
            data['dist_{}_front'.format(split_type)] = all_dist[1]
            data['locs_{}_side'.format(split_type)] = locs[0]
            data['locs_{}_front'.format(split_type)] = locs[1]
            data['info_{}_side'.format(split_type)] = info[0]
            data['info_{}_front'.format(split_type)] = info[1]
            data['pred_{}_side'.format(split_type)] = predlocs[0]
            data['pred_{}_front'.format(split_type)] = predlocs[1]
    fname = '/groups/branson/home/kabram/bransonlab/PoseTF/headTracking/{}_cv_data.mat'.format(name)
    hdf5storage.savemat(fname,data)
Example no. 20
def write_mat(dictionary, filename):
    """Exports dictionary to \\*.mat file.

    Parameters
    ----------
    dictionary : dict
        A dictionary of variables.
    filename : string
        Name of \\*.mat file ('example.mat').
    """

    hdf.savemat(filename,
                dictionary,
                appendmat=True,
                store_python_metadata=True,
                action_for_matlab_incompatible='ignore')
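
# Usage sketch (hypothetical variable; assumes numpy is imported as np):
write_mat({'signal': np.sin(np.linspace(0.0, 1.0, 100))}, 'example.mat')
# With appendmat=True, passing 'example' would also resolve to example.mat.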
Example no. 21
def write_feature_fd(feature_names,
                     feature_data,
                     timestamps,
                     outpath,
                     tetrode_path,
                     channel_validity=None):
    if type(feature_names) == str:
        feature_names = [feature_names]

    feature_name_ch = [
        '{}: {}'.format(fn, n + 1) for fn in feature_names
        for n in range(feature_data.shape[1])
    ]
    feature_std = feature_data.std(axis=0, dtype='double')
    feature_av = feature_data.mean(axis=0, dtype='double')

    if channel_validity is None:
        channel_validity = [1, 1, 1, 1]

    if len(feature_names) > 1:
        raise NotImplementedError(
            'No logic for multiple features yet. External or just .fd?')

    outpath_fname = outpath / '{}_{}.fd'.format(tetrode_path.stem,
                                                feature_names[0])

    h5s.savemat(
        outpath_fname,
        {
            'ChannelValidity': np.array(channel_validity,
                                        dtype='double'),  # dead channel index
            'FD_av': feature_av,  # mean
            'FD_sd': feature_std,  # sd
            'FeatureData': feature_data,
            'FeatureIndex': np.arange(
                1, feature_data.shape[0] + 1, dtype='double'),
            'FeatureNames': feature_name_ch,
            'FeaturePar': [],
            'FeaturesToUse': feature_names,
            'FeatureTimestamps': timestamps,
            'TT_file_name': str(tetrode_path.name),
        },
        compress=False,
        truncate_existing=True,
        truncate_invalid_matlab=True,
        appendmat=False)
Example no. 22
def save_predictions(input_path,
                     preds,
                     output_path=None,
                     output_filename=None):
    # TODO: works on windows?
    input_filename = os.path.splitext(os.path.basename(input_path))[0]
    if output_filename is None:
        output_filename = input_filename + "_pred.mat"
    if not output_filename.endswith(".mat"):
        output_filename = output_filename + ".mat"
    output_dir = output_path if output_path is not None else os.getcwd()
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    output_file = os.path.join(output_dir, output_filename)
    output_data = {"preds": preds.T}
    hdf5storage.savemat(output_file, output_data)
    print(f"Saved predictions to {output_file}")
Example no. 24
    def write_csts(self, workdir=None):
        """
        Write the constants to the constants file

        @params:
        workdir (str): The directory in which to write SeisCL files

        @returns:

        """
        if workdir is None:
            workdir = self.workdir
        h5mat.savemat(os.path.join(workdir, self.file_csts),
                      self.csts,
                      appendmat=False,
                      format='7.3',
                      store_python_metadata=True,
                      truncate_existing=True)
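
# Usage sketch for write_csts (hypothetical attributes on a SeisCL-like object):
# obj.csts = {'NT': 1000, 'dt': 1e-4}
# obj.file_csts = 'SeisCL_csts.mat'
# obj.write_csts('/tmp/seiscl_run')  # writes /tmp/seiscl_run/SeisCL_csts.mat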
Example no. 25
def loadDeviceDataFromFiles(pList, config):

    for ptID in pList:

        print('loading data for patient %s ...' % ptID)

        savepath = os.path.join(config['paths']['RNS_DATA_Folder'], ptID)

        # Get converted Data and Time vectors
        [AllData, eventIdx, dneIdx] = npdh.NPdat2mat(ptID, config)

        #Update (and deidentify) ECoG Catalog:
        catalog_csv = npdh.NPgetDataPath(ptID, config, 'ECoG Catalog')
        Ecog_Events = pd.read_csv(catalog_csv)

        # Remove rows that don't have an associated .dat file
        if dneIdx:
            Ecog_Events.drop(index=dneIdx, inplace=True)
            Ecog_Events.reset_index(drop=True, inplace=True)
            print(
                'Removing %d entries from deidentified ECoG_Catalog.csv due to missing data'
                % (len(dneIdx)))

        Ecog_Events = Ecog_Events.drop(columns=['Initials', 'Device ID'])
        Ecog_Events['Patient ID'] = ptID

        # Add event index to ecog_events file, add +1 for matlab 1-indexing
        Ecog_Events['Event Start idx'] = [row[0] + 1 for row in eventIdx]
        Ecog_Events['Event End idx'] = [row[1] + 1 for row in eventIdx]

        # Save updated csv and all events
        Ecog_Events.to_csv(os.path.join(savepath, 'ECoG_Catalog.csv'),
                           index=False)

        hdf5storage.savemat(os.path.join(savepath, 'Device_Data.mat'), {
            "AllData": AllData,
            "EventIdx": eventIdx
        },
                            format='7.3',
                            oned_as='column',
                            store_python_metadata=True)

        print('complete')
Example no. 26
def train_mi_quantizer(eng,
                       input_llr,
                       mod_size,
                       num_bits,
                       num_tx,
                       num_rx,
                       train=True):
    # Static paths
    trainer_path = '/home/yanni/marius/spawc2015-master/designQuantizerFile.m'

    # Draw a random integer (independent of global seed)
    rg = Generator(PCG64())
    random_idx = int(np.round(1e10 * rg.standard_normal()))
    # Filenames
    filename_in = '/home/yanni/marius/spawc2015-master/scratch/in%d.mat' % random_idx
    filename_out = '/home/yanni/marius/spawc2015-master/quantizers/sota_mimo%dby%d_qam%d_bits%d.mat' % (
        num_rx, num_tx, mod_size, num_bits)

    # Training from scratch
    if train:
        # Copy
        input_llr = np.copy(input_llr)
        # Write to input file
        hdf5storage.savemat(filename_in, {'ref_llr': input_llr})

        # Create input dictionary
        args = {
            'filename_in': filename_in,
            'filename_out': filename_out,
            'mod_size': mod_size,
            'num_bits': num_bits
        }
        # Call decoder
        _ = eng.run_func(trainer_path, args)

        # Delete files
        os.remove(filename_in)

    # Load codebook
    contents = hdf5storage.loadmat(filename_out)
    codebook = contents['LLRs']

    return codebook
Example no. 27
def getAllScores_mean(RootDir, MaxImgNums=float('inf')):

    scoreDir = RootDir + 'Results/Scores_py/'

    score_names = [f for f in os.listdir(scoreDir) if f.endswith('.mat')]
    score_names.sort()
    method_num = len(score_names)

    meanS = {}
    for idx_m in range(method_num):
        method_name = score_names[idx_m]
        iscores = h5io.loadmat(scoreDir + method_name)["scores"]
        img_num = min(len(iscores), MaxImgNums)
        iscores_mean = np.mean(iscores[:img_num], 0)

        tmp_name = method_name[6:-4].replace('-', '_')
        meanS[tmp_name] = {'meanS': iscores_mean, 'scores': iscores}

    h5io.savemat(RootDir + 'Results/meanS_py.mat', {'meanS': meanS})
Example no. 28
def getShufmap_vid(fixsDir,
                   DataSet='DIEM20',
                   size=None,
                   maxframes=float('inf')):

    DataSet = DataSet.upper()
    if size is None:
        if DataSet in shuff_size.keys():
            size = shuff_size[DataSet]
        else:
            size = shuff_size["default"]

    fix_names = [f for f in os.listdir(fixsDir) if f.endswith('.mat')]
    fix_names.sort()
    fix_num = len(fix_names)

    # if DataSet == 'CITIUS':
    # 	fix_num = 45

    if DataSet == 'DIEM20':
        maxframes = 300

    ShufMap = np.zeros(size)
    for idx_n in range(fix_num):

        fixpts = h5io.loadmat(fixsDir + fix_names[idx_n])["fixLoc"]
        useframes = min(maxframes, fixpts.shape[3])
        fixpts = fixpts[:, :, :, :useframes]

        if fixpts.shape[:2] != size:
            # fixpts = np.array([resize_fixation(fixpts[:,:,0,i],size[0],size[1]) for i in range(useframes)]).transpose((1,2,0))
            fixpts = np.array([
                cv2.resize(fixpts[:, :, 0, i], (size[1], size[0]),
                           interpolation=cv2.INTER_NEAREST)
                for i in range(useframes)
            ]).transpose((1, 2, 0))
            fixpts = np.expand_dims(fixpts, axis=2)

        ShufMap += np.sum(fixpts[:, :, 0, :], axis=2)

    h5io.savemat('Shuffle_' + DataSet + '3.mat', {'ShufMap': ShufMap})
    return ShufMap
Example no. 29
def pred(unused_argv):
  print("Prediction")
  test_data_file = data_dir + '/' + test_file
  with tf.name_scope('predict_scope') as scope:
    with tf.device('/gpu:1'):
      train_data, train_labels = \
            load_test_from_file(
              test_data_file, 
              startline = FLAGS.startline, 
              endline = FLAGS.endline)
                                    
      config = tf.ConfigProto(log_device_placement=True, allow_soft_placement=True)
      config.gpu_options.allow_growth = True
      config=tf.contrib.learn.RunConfig(session_config=config)
      
      gene_classifier = learn.Estimator(
            model_fn=cnn_model_fn, 
            model_dir=FLAGS.model_dir,
            params={'learning_rate':FLAGS.learning_rate},
            config=config)
            
      input_fn = numpy_io.numpy_input_fn(x={'train_data':train_data}, shuffle=False)
      pred_result = gene_classifier.predict(input_fn = input_fn)
    
  shape = train_labels.shape
  print(shape)
  result = []
  for res in pred_result:
    result.append(res)
  print(len(result))
  result = np.array(result)
  # print(' '.join([str(x) for x in train_labels[20, :]]))
  # print(' '.join([str(x) for x in result[0, :]]))
  hf.savemat('../label/result.mat', 
                {'label':train_labels, 'result':result}, 
                format='7.3')
  auc_result = []
  for i in range(shape[1]):
    if max(train_labels[:, i]) < 1: continue
    auc_score = metrics.roc_auc_score(train_labels[:, i], result[:, i])
    auc_result.append(auc_score)
  print(' '.join([str(x) for x in auc_result]))
Example no. 30
 def split_file(self, mrId, name, in_split, out_split):
     if out_split == in_split:
         return
     f = hdf5storage.loadmat('./Data/' + mrId + '/' + name + '_' +
                             str(in_split).zfill(3) + '.mat')
     tmp_f = {}
     for key in f.keys():
         if key == 'data':
             data = f['data']
             (_, l) = data.shape
             new_l = int(l * out_split / in_split)
             new_data = data[:, :new_l]
             tmp_f[key] = new_data
         else:
             if not key.endswith('__'):
                 tmp_f[key] = f[key]
     hdf5storage.savemat(
         './Data/' + mrId + '/' + name + '_' + str(out_split).zfill(3) +
         '.mat', tmp_f)
     print("File created")
Example no. 31
def create_dummy_dataset(ds_type):
    global ds_path, dataset_info
    global num_fg_pix, per_class_counts

    nc = dataset_info['nc']
    img_size = dataset_info['img_size']

    ds_type = ds_type.lower()
    os.mkdir(join(ds_path, ds_type))

    for imset in ['val', 'test']:
        os.mkdir(join(ds_path, ds_type, imset))

        n_ex = eval('n_%s' % imset)
        for ex_idx in range(1, n_ex + 1):
            if ds_type == 'truth':
                fmt = imset + '_%06d_pixeltruth.mat' % ex_idx
                name = 'truth_img'
                ex = np.random.choice(
                    range(nc),
                    (dataset_info['img_size'], dataset_info['img_size']),
                    replace=True)
                ex = ex.astype(np.uint8)

                if imset == 'val':
                    num_fg_pix += (ex > 0).sum()
            else:
                fmt = imset + '_%06d_logits.mat' % ex_idx
                name = 'logits_img'
                ex = np.random.randn(img_size, img_size, nc)

                if imset == 'val':
                    # argmax over the class axis, skipping the background class
                    pred = np.argmax(ex[:, :, 1:], -1).ravel() + 1
                    for lab in np.unique(pred):
                        class_sum = (pred == lab).sum()
                        if lab not in per_class_counts:
                            per_class_counts[lab] = class_sum
                        else:
                            per_class_counts[lab] += class_sum

            savemat(join(ds_path, ds_type, imset, fmt), {name: ex})
Example no. 32
    def savedata(self, basename=None):
        """
        Saves data and parameters to mat file
        :return:
        """
        if basename is None:
            basename = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%f")

        # dictionary to save
        mdict = {u'signaldim': self.signaldim, u'dictdim': self.dictdim, u'numdata': self.numdata,
                 u'deltas': self.deltas, u'rhos': self.rhos, u'snr_db': self.snr_db,
                 u'solverNames': self.solverNames, u'ERCsolverNames': self.ERCsolverNames,
                 u'err': self.err, u'ERCsuccess': self.ERCsuccess, u'simData': self.simData,
                 u'description': self.get_description()}

        hdf5storage.savemat(basename + '.mat', mdict)
        with open(basename+".pickle", "w") as f:
            cPickle.dump(self.solvers, f)
            cPickle.dump(self.ERCsolvers, f)
        with open(basename+".txt", "w") as f:
            f.write(self.get_description())
Example no. 33
    def process_output_dict(data_dict, checkpoint_path):
        output_dict = {
            'ground_truths': ground_truths,
            'args': args.__dict__,
        }

        output_dict.update(data_dict)
        output_dict.update(ys_thetas)

        for k, v in output_dict.items():
            if torch.is_tensor(v):
                output_dict[k] = v.cpu().numpy()

        hdf5storage.savemat(os.path.join(os.path.dirname(checkpoint_path),
                                         'samples.mat'),
                            output_dict,
                            format='7.3',
                            store_python_metadata=True)
        print(
            f'Saved samples at {os.path.join(os.path.dirname(checkpoint_path), "samples.mat")}'
        )
Example no. 34
def input_conversion():

    if True:
        # unity file conversion
        data = {}
        u = pyh.Unity()
        uf = {'data': data}
        data['unityTriggers'] = np.squeeze(np.double(u.unityTriggers[0]))
        data['unityData'] = np.squeeze(np.double(u.unityData[0]))
        h5s.savemat('unityfile.mat', {'uf': uf}, format='7.3')

    if True:
        # eyelink file conversion
        data = {}
        eyedata = pyh.Eyelink()
        el = {'data': data}
        data['trial_timestamps'] = np.array(eyedata.trial_timestamps * 1000)
        data['trial_codes'] = np.uint32(np.array(eyedata.trial_codes))
        data['timestamps'] = np.transpose(
            np.uint32(np.array(eyedata.timestamps * 1000)[np.newaxis]))
        data['eye_pos'] = np.single(np.array(eyedata.eye_pos))
        h5s.savemat('eyelink.mat', {'el': el}, format='7.3')
Example no. 35
    def write_model(self, params, workdir=None):
        """
        Write model parameters to the model files

        @params:
        workdir (str): The directory in which to write SeisCL files
        param_names (dict) Dictionary with parameters name and their value

        @returns:

        """
        if workdir is None:
            workdir = self.workdir
        for param in self.params:
            if param not in params:
                raise SeisCLError('Parameter with %s not defined\n' % param)
        h5mat.savemat(os.path.join(workdir, self.file_model),
                      params,
                      appendmat=False,
                      format='7.3',
                      store_python_metadata=True,
                      truncate_existing=True)
Example no. 36
fp_out = os.path.join(fp_rtm,'output','%s_ORACLES'%vv)


# In[ ]:


dat = RL.read_lut(fp_out,zout=zout,tau=tau,ref=ref,sza=sza,
                  phase=['wc'],
                  fmt=fmt,
                  split_wvl=True,numrad=0)


# In[ ]:


if use_json:
    dat['lut_details'] = d


# In[ ]:


print('Saving matlab file')


# In[10]:


hs.savemat(fp+'{}_ORACLES_lut.mat'.format(vv),dat)

Example no. 37
# # Save the comparison data to a file

# In[136]:

import hdf5storage as hs


# In[137]:

dict_out = {u's':s,u'istar':istar,u'istar_steps':istar_steps,
            u'goci':gg,u'utcs':utcs,u'goci_ind':goci_ind,u'gaod':gaod,
            u'aero':aero,u'iaero':iaero}


# In[ ]:

import pickle
pickle.dump(dict_out,open(fp+'{}_GOCI_Aeronet_4STAR.p'.format(daystr),"wb"))


# In[138]:

hs.savemat(fp+'{}_GOCI_Aeronet_4STAR.mat'.format(daystr),dict_out)


# In[ ]:



Example no. 38
def main(argv):
	db_path_label =''
	db_path_feats ='' 
	mat_file =''
	print(argv)
	try:
		# "o:" so that -o takes the output file name as its argument
		opts, args = getopt.getopt(argv, "l:f:o:", ["label_db=", "feature_db=", "mat_file="])
	except getopt.GetoptError:
		print('feature_LDB_to_mat.py -l <label_db> -f <feature_db> -o <output_mat_file>')
		sys.exit(2)
	
	print(opts)
	print(args)
	
		
	for opt, arg in opts:
		if opt in ("-l","--label_db"): 
			db_path_label=arg
		elif opt in("-f","--feature_db"):
			db_path_feats=arg
		elif opt in("-o","--mat_file"):
			mat_file=arg
		print arg+" "+opt

	print(db_path_label)
	print(db_path_feats)
	print(mat_file)

	if not os.path.exists(db_path_label):
		raise Exception('db label not found')
	if not os.path.exists(db_path_feats):
		raise Exception('db feature not found')
		

	
	db_label=leveldb.LevelDB(db_path_label)
	db_feats=leveldb.LevelDB(db_path_feats)	
	#window_num =686
	datum = caffe_pb2.Datum()
	datum_lb = caffe_pb2.Datum()
	start=time.time();
	#ft = np.zeros((window_num, float(81)))
	#ft = np.zeros((window_num, float(100352)))
	#lb = np.zeros((window_num, float(81)))
	window_num=0
	for key in db_feats.RangeIter(include_value = False):
		window_num=window_num+1
	
	
	n=0
	for key,value in db_feats.RangeIter():
		n=n+1
		#f_size=len(value)
		datum.ParseFromString(value)
		f_size=len(datum.float_data)
		if n>0:
		   break
	n=0
	for key,value in db_label.RangeIter():
		n=n+1
		#l_size=len(value)
		datum.ParseFromString(value)
		l_size=len(datum.float_data)
		if n==1:
		   break
	ft = np.zeros((window_num, f_size))
	lb = np.zeros((window_num, l_size))
	
	
	# for im_idx in range(window_num):
	count=0
	for key in db_feats.RangeIter(include_value = False):
	 #datum.ParseFromString(db_label.Get('%d' %(im_idx)))
	 datum.ParseFromString(db_feats.Get(key));
	 datum_lb.ParseFromString(db_label.Get(key));
	 #datum.ParseFromString(db_feats.Get('%d' %(im_idx)))
	 #ft[im_idx, :]=caffe.io.datum_to_array(datum)
	 #ft[im_idx, :]=datum.float_data
	 ft[count, :]=datum.float_data
	 lb[count,:]=datum_lb.float_data
	 count=count+1
	 #print key
	 print('convert feature # : %d key is %s' % (count, key))
	print('time 1: %f' % (time.time() - start))

	data = {u'feat_label' : {
			u'feat' : ft,
			u'label' : lb,
		 }
	}
	print('save result to : %s' % mat_file)
	hdf5storage.savemat(mat_file, data, format='7.3')
	print('time 2: %f' % (time.time() - start))
	print('done!')
Example no. 39
csts['scalermsnorm']=0      #Scale each modeled and recorded traces according to its rms value, normalized
csts['scaleshot']=0         #Scale all of the traces in each shot by the shot total rms value
csts['fmin']=0              #Minimum frequency for the gradient computation
csts['fmax']=45              #Maximum frequency for the gradient computation
csts['mute']=None           #Muting matrix 5xnumber of traces. [t1 t2 t3 t4 flag] t1 to t4 are mute time with cosine tapers, flag 0: keep data in window, 1: mute data in window
csts['weight']=None         # NTxnumber of geophones or 1x number of geophones. Weight each sample, or trace, according to the value of weight for gradient calculation.

csts['gradout']=0           #Output gradient 1:yes, 0: no
csts['gradsrcout']=0        #Output source gradient 1:yes, 0: no
csts['seisout']=4           #Output seismograms 1:velocities, 2: pressure, 3: velocities and pressure, 4: velocities and stresses
csts['resout']=0            #Output residuals 1:yes, 0: no
csts['rmsout']=0            #Output rms value 1:yes, 0: no
csts['movout']=0            #Output movie 1:yes, 0: no
csts['restype']=0           #Type of costfunction 0: raw seismic trace cost function. No other available at the moment

h5mat.savemat(filenames['csts'], csts , appendmat=False, format='7.3', store_python_metadata=True, truncate_existing=True)


#_________________Model File__________________
model={}
model['vp']=np.zeros( (csts['NZ'],csts['NY'],csts['NX']))+3500  #Must contain the variables names of the chosen parametrization
model['vs']=np.zeros( (csts['NZ'],csts['NY'],csts['NX']))+2000
model['rho']=np.zeros( (csts['NZ'],csts['NY'],csts['NX']))+2000
model['taup']=np.zeros( (csts['NZ'],csts['NY'],csts['NX']))+0.02
model['taus']=np.zeros( (csts['NZ'],csts['NY'],csts['NX']))+0.02

h5mat.savemat(filenames['model'], model , appendmat=False, format='7.3', store_python_metadata=True, truncate_existing=True)



#________add src pos, src and rec_pos_______
Example no. 40
dat = RL.read_lut(fp_out,zout=zout,tau=tau,ref=ref,sza=sza,
                  phase=['wc','ic'],
                  fmt=fmt,
                  split_wvl=True)


# In[ ]:

dat = deep_convert_dict(dat)
if use_json:
    dat['lut_details'] = deep_convert_dict(d)


# In[ ]:

print('Saving matlab file')


# In[10]:

try:
    try:
        hs.savemat(fp+'{vv}_{name}_lut.mat'.format(vv=vv,name=name),dat)
    except:
        sio.savemat(fp+'{vv}_{name}_lut.mat'.format(vv=vv,name=name),dat)
except:
    import pdb
    pdb.set_trace()
    sio.savemat(fp+'{vv}_{name}_lut.mat'.format(vv=vv,name=name),dat)

Example no. 41
mu = np.arange(2.7,4.0,0.1)
sza = np.round(np.arccos(1.0/mu)*180.0/np.pi)
tau = np.array([0.1,0.2,0.3,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.3,2.6,3.0,3.5,4.0,4.5,5.0,
       6.0,7.0,8.0,9.0,10.0,12.5,15.0,17.5,20.0,22.0,25.0,27.0,30.0,35.0,40.0,50.0])
ref = np.append(np.append(np.arange(2,15),np.arange(15,30,2)),np.ceil(np.arange(30,61,3.0)))
zout = [0.2,2.0,100.0]


# In[7]:

fp_out = os.path.join(fp_rtm,'output','%s_ARISE'%vv)


# In[ ]:

dat = RL.read_lut(fp_out,zout=zout,tau=tau,ref=ref,sza=sza,
                  phase=['wc','ic'],
                  fmt='lut_sza{sza:02.0f}_tau{tau:06.2f}_ref{ref:04.1f}_{phase}_w{iwvl:1d}.dat',
                  split_wvl=True)


# In[ ]:

print('Saving matlab file')


# In[10]:

hs.savemat(fp+'{}_ARISE_lut.mat'.format(vv),dat)

Example no. 42
ar['flr'] = ar['fl_routine'] & ar['fl_QA'] & ar['fl_alt']


# ## save to file

# In[189]:


import hdf5storage as hs


# In[195]:


hs.savemat(fp+'/aod_ict_2017/{vv}/all_aod_ict_{vv}_2017.mat'.format(vv=vv),ar)


# ## Optionally load the file

# In[8]:


import hdf5storage as hs


# In[9]:


ar = hs.loadmat(fp+'/aod_ict_2017/{vv}/all_aod_ict_{vv}_2017.mat'.format(vv=vv))
Example no. 43
# In[21]:

ar['GPS_Alt'].shape


# ## Save the combined data

# In[31]:

import hdf5storage as hs


# In[33]:

hs.savemat(fp+'/aod_ict/all_aod_KORUS_ict.mat',ar)


# ## Filterout bad data

# In[22]:

ar['fl_QA'] = ar['qual_flag']==0


# In[23]:

ar['fl'] = ar['fl_QA']


# In[24]:
Example no. 44
                if viz:
                    visualize(img, regions, args)
                    sys.stdout.write('Keep visualizing? [y/n]: ')
                    choice = input().lower()
                    if choice != 'y':
                        viz = False

                # Put all regions of the image in a collection
                boxes_image = np.zeros([len(regions), 4], dtype=np.double)
                for idx_region, data in enumerate(regions):
                    _, (y0, x0, y1, x1) = data
                    # compat: [y0, x0, y1, x1] = [y, x, height, width]
                    boxes_image[idx_region] = [x0, y0, x1, y1]

                # Store the bounding boxes in the collection
                boxes[image_idx] = boxes_image
                # Store the image filename in the collection
                image_names[image_idx] = os.path.basename(image)

                bar.next()
            except:
                print('{} failed to process...'.format(image))
        bar.finish()
        
        print('Writing to file %s...' % output_path)
        hdf5storage.savemat(output_path, {'images': image_names, 'boxes': boxes}, format='7.3', oned_as='row', store_python_metadata=True)
    else:
        print('Could not find any images in %s' % args.image_path)

    return out

Example no. 45
for n in rtss.keys():
    if type(rtss[n]) is list:
        print(n)
        for i,t in enumerate(rtss[n]):
            rtss[n][i] = dict_keys_to_unicode(t)
    else:
        print('no', n)
        rtss[n] = dict_keys_to_unicode(rtss[n])


# In[94]:


hs.savemat(fp+'zen_ict/v2/{}_all_retrieved.mat'.format(vr),rtss)


# # Make Cloud plots

# ## Read in the filtered cloud retrievals

# In[95]:


from load_utils import load_ict


# In[96]:

Example no. 46
        star_aero_CRE['dn'][i,:] = s['diffuse_down']+s['direct_down']
        star_aero_CRE_clear['dn'][i,:] = sc['diffuse_down']+sc['direct_down']
        star_aero_CRE['up'][i,:] = s['diffuse_up']
        star_aero_CRE_clear['up'][i,:] = sc['diffuse_up']
        star_aero_C[i,:] = (star_aero_CRE['dn'][i,:] - star_aero_CRE['up'][i,:]) - \
                           (star_aero_CRE_clear['dn'][i,:] - star_aero_CRE_clear['up'][i,:])
        
        f_in = '{name}_{vv}_star_{i:03d}_noaero.dat'.format(name=name,vv=vv,i=i)
        sn = Rl.read_libradtran(fpp_out+f_in,zout=geo['zout'])
        f_in = '{name}_{vv}_star_{i:03d}_noaero_clear.dat'.format(name=name,vv=vv,i=i)
        snc = Rl.read_libradtran(fpp_out+f_in,zout=geo['zout'])

        star_noaero_CRE['dn'][i,:] = sn['diffuse_down']+sn['direct_down']
        star_noaero_CRE_clear['dn'][i,:] = snc['diffuse_down']+snc['direct_down']
        star_noaero_CRE['up'][i,:] = sn['diffuse_up']
        star_noaero_CRE_clear['up'][i,:] = snc['diffuse_up']
        star_noaero_C[i,:] = (star_noaero_CRE['dn'][i,:] - star_noaero_CRE['up'][i,:]) - \
                             (star_noaero_CRE_clear['dn'][i,:] - star_noaero_CRE_clear['up'][i,:])


# In[ ]:


# save the output
    star1 = {'star_noaero_CRE':star_noaero_CRE,'star_noaero_CRE_clear':star_noaero_CRE_clear,'star_noaero_C':star_noaero_C,
            'star_aero_CRE':star_aero_CRE,'star_aero_CRE_clear':star_aero_CRE_clear,'star_aero_C':star_aero_C}
    star = wu.iterate_dict_unicode(star1)
    print('saving file to: ' + fp + '{name}_CRE_{vv}.mat'.format(name=name,vv=vv))
    hs.savemat(fp+'{name}_CRE_{vv}.mat'.format(name=name,vv=vv),star)
    #hs.savemat(fp+'{name}_CRE_{vv}.mat'.format(name=name,vv=vv),star_noaero_CRE,star_noaero_CRE_clear,star_noaero_C,
     #                                                           star_aero_CRE,star_aero_CRE_clear,star_aero_C)

Example no. 47
def main(argv):

#    defaulttrackerpath = "/groups/branson/home/bransonk/tracking/code/poseTF/matlab/compute3Dfrom2D/for_redistribution_files_only/run_compute3Dfrom2D.sh"
    defaulttrackerpath = "/groups/branson/bransonlab/mayank/PoseTF/matlab/compiled/run_compute3Dfrom2D_compiled.sh"
#    defaultmcrpath = "/groups/branson/bransonlab/projects/olympiad/MCR/v91"
    defaultmcrpath = "/groups/branson/bransonlab/mayank/MCR/v92"

    parser = argparse.ArgumentParser()
    parser.add_argument("-s",dest="sfilename",
                      help="text file with list of side view videos",
                      required=True)
    parser.add_argument("-f",dest="ffilename",
                      help="text file with list of front view videos. The list of side view videos and front view videos should match up",
                      required=True)
    parser.add_argument("-d",dest="dltfilename",
                      help="text file with list of DLTs, one per fly as 'flynum,/path/to/dltfile'",
                      required=True)
    parser.add_argument("-net",dest="net_name",
                      help="Name of the net to use for tracking",
                      default=default_net_name)
    parser.add_argument("-o",dest="outdir",
                      help="temporary output directory to store intermediate computations",
                      required=True)
    parser.add_argument("-r",dest="redo",
                      help="if specified will recompute everything",
                      action="store_true")
    parser.add_argument("-rt",dest="redo_tracking",
                      help="if specified will only recompute tracking",
                      action="store_true")
    parser.add_argument("-gpu",dest='gpunum',type=int,
                        help="GPU to use [optional]")
    parser.add_argument("-makemovie",dest='makemovie',
                        help="if specified will make results movie",action="store_true")
    parser.add_argument("-trackerpath",dest='trackerpath',
                        help="Absolute path to the compiled MATLAB tracker script run_compute3Dfrom2D.sh",
                        default=defaulttrackerpath)
    parser.add_argument("-mcrpath",dest='mcrpath',
                        help="Absolute path to MCR",
                        default=defaultmcrpath)
    parser.add_argument("-ncores",dest="ncores",
                        help="Number of cores to assign to each MATLAB tracker job", type=int,
                        default=1)

    group = parser.add_mutually_exclusive_group()
    group.add_argument("-only_detect",dest='detect',action="store_true",
                        help="Do only the detection part of tracking which requires GPU")
    group.add_argument("-only_track",dest='track',action="store_true",
                        help="Do only the tracking part of the tracking which requires MATLAB")

    args = parser.parse_args(argv)
    if args.redo is None:
        args.redo = False
    if args.redo_tracking is None:
        args.redo_tracking= False

    if args.detect is False and args.track is False: 
        args.detect = True
        args.track = True
    
    args.outdir = os.path.abspath(args.outdir)
    
    with open(args.sfilename, "r") as text_file:
        smovies = text_file.readlines()
    smovies = [x.rstrip() for x in smovies]
    with open(args.ffilename, "r") as text_file:
        fmovies = text_file.readlines()
    fmovies = [x.rstrip() for x in fmovies]

    print(smovies)
    print(fmovies)
    print(len(smovies))
    print(len(fmovies))

    if args.track:
        if len(smovies) != len(fmovies):
            print("Side and front movies must match")
            raise SystemExit(0)

        # read in dltfile
        dltdict = {}
        f = open(args.dltfilename,'r')
        for l in f:
            lparts = l.split(',')
            if len(lparts) != 2:
                print("Error splitting dlt file line %s into two parts"%l)
                raise SystemExit(0)
            dltdict[float(lparts[0])] = lparts[1].strip()
        f.close()
        
        # compiled matlab command
        matscript = args.trackerpath + " " + args.mcrpath

    if args.detect:
        import numpy as np
        import tensorflow as tf
        from scipy import io
        from cvc import cvc
        import localSetup
        import PoseTools
        import multiResData
        import cv2
        import PoseUNet

        for ff in smovies+fmovies:
            if not os.path.isfile(ff):
                print("Movie %s not found"%(ff))
                raise SystemExit(0)
        if args.gpunum is not None:
            os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpunum)

    for view in range(2): # 0 for front and 1 for side
        if args.detect:
            tf.reset_default_graph() 
        if view ==1:
            from stephenHeadConfig import sideconf as conf
            extrastr = '_side'
            valmovies = smovies
            confname = 'sideconf'
        else:
            # For FRONT
            from stephenHeadConfig import conf as conf
            extrastr = '_front'
            valmovies = fmovies
            confname = 'conf'

        # for ndx in range(len(valmovies)):
        #     mname,_ = os.path.splitext(os.path.basename(valmovies[ndx]))
        #     oname = re.sub('!','__',conf.getexpname(valmovies[ndx]))
        #     pname = os.path.join(args.outdir , oname + extrastr)
        #     print(oname)
        #
        #     if args.detect and os.path.isfile(valmovies[ndx]) and \
        #                    (args.redo or not os.path.isfile(pname + '.mat')):
        #
        #         detect_cmd = 'python classifyMovie.py stephenHeadConfig {} {} {}'.format(confname, net_name, valmovies[ndx], pname+'.mat')

        # conf.batch_size = 1

        if args.detect:        
            for try_num in range(4):
                try:
                    self = PoseUNet.PoseUNet(conf, args.net_name,for_training=True)
                    sess = self.init_net_meta(0,True)
                    break
                except tf.python.framework.errors_impl.InvalidArgumentError:
                    tf.reset_default_graph()
                    print('Loading the net failed, retrying')
                    if try_num == 3:
                        raise ValueError('Could not load the network after 4 tries')

        for ndx in range(len(valmovies)):
            mname,_ = os.path.splitext(os.path.basename(valmovies[ndx]))
            oname = re.sub('!','__',conf.getexpname(valmovies[ndx]))
            pname = os.path.join(args.outdir , oname + extrastr)

            print(oname)

            # detect
            if args.detect and os.path.isfile(valmovies[ndx]) and \
               (args.redo or not os.path.isfile(pname + '.mat')):

                try:
                    predList = self.classify_movie(valmovies[ndx], sess, flipud=False)
                except KeyError:
                    continue
                # if args.makemovie:
                #     PoseTools.create_pred_movie(conf, predList, valmovies[ndx], pname + '.avi', outtype)

                cap = cv2.VideoCapture(valmovies[ndx])
                height = int(cap.get(cvc.FRAME_HEIGHT))
                width = int(cap.get(cvc.FRAME_WIDTH))
                rescale = conf.unet_rescale
                orig_crop_loc = conf.cropLoc[(height,width)]
                crop_loc = [int(x/rescale) for x in orig_crop_loc]
                end_pad = [int((height-conf.imsz[0])/rescale)-crop_loc[0],int((width-conf.imsz[1])/rescale)-crop_loc[1]]
#                crop_loc = [old_div(x,4) for x in orig_crop_loc]
#                end_pad = [old_div(height,4)-crop_loc[0]-old_div(conf.imsz[0],4),old_div(width,4)-crop_loc[1]-old_div(conf.imsz[1],4)]
                pp = [(0,0),(crop_loc[0],end_pad[0]),(crop_loc[1],end_pad[1]),(0,0)]
                predScores = np.pad(predList[1],pp,mode='constant',constant_values=-1.)

                predLocs = predList[0]
                predLocs[:,:,0] += orig_crop_loc[1]
                predLocs[:,:,1] += orig_crop_loc[0]

#io.savemat(pname + '.mat',{'locs':predLocs,'scores':predScores,'expname':valmovies[ndx]})
                hdf5storage.savemat(pname + '.mat',{'locs':predLocs,'scores':predScores,'expname':valmovies[ndx]},appendmat=False,truncate_existing=True,gzip_compression_level=0)
#                with h5py.File(pname+'.mat','w') as f:
#                    f.create_dataset('locs',data=predLocs)
#                    f.create_dataset('scores',data=predScores)
#                    f.create_dataset('expname',data=valmovies[ndx])
                del predScores, predLocs

                print('Detecting:%s'%oname)

            # track
            if args.track and view == 1:

                oname_side = re.sub('!','__',conf.getexpname(smovies[ndx]))
                oname_front = re.sub('!','__',conf.getexpname(fmovies[ndx]))
                pname_side = os.path.join(args.outdir , oname_side + '_side.mat')
                pname_front = os.path.join(args.outdir , oname_front + '_front.mat')
                # 3d trajectories
                basename_front,_ = os.path.splitext(fmovies[ndx])
                basename_side,_ = os.path.splitext(smovies[ndx])
                savefile = basename_side+'_3Dres.mat'
                #savefile = os.path.join(args.outdir , oname_side + '_3Dres.mat')
                trkfile_front = basename_front+'.trk'
                trkfile_side = basename_side+'.trk'

                redo_tracking = args.redo or args.redo_tracking
                if os.path.isfile(savefile) and os.path.isfile(trkfile_front) and \
                   os.path.isfile(trkfile_side) and not redo_tracking:
                    print("%s, %s, and %s exist, skipping tracking"%(savefile,trkfile_front,trkfile_side))
                    continue

                try:
                    flynum = int(conf.getflynum(smovies[ndx]))
                except AttributeError:
                    print('Could not find the fly number from the movie name')
                    print('{} is not in the standard format'.format(smovies[ndx]))
                    continue
                #print "Parsed fly number as %d"%flynum
                kinematfile = os.path.abspath(dltdict[flynum])

                jobid = oname_side

                scriptfile = os.path.join(args.outdir , jobid + '_track.sh')
                logfile = os.path.join(args.outdir , jobid + '_track.log')
                errfile = os.path.join(args.outdir , jobid + '_track.err')


                #print "matscript = " + matscript
                #print "pname_front = " + pname_front
                #print "pname_side = " + pname_side
                #print "kinematfile = " + kinematfile
                
                # make script to be qsubbed
                scriptf = open(scriptfile,'w')
                scriptf.write('if [ -d %s ]\n'%args.outdir)
                scriptf.write('  then export MCR_CACHE_ROOT=%s/mcrcache%s\n'%(args.outdir,jobid))
                scriptf.write('fi\n')
                scriptf.write('%s "%s" "%s" "%s" "%s" "%s" "%s"\n'%(matscript,savefile,pname_front,pname_side,kinematfile,trkfile_front,trkfile_side))
                scriptf.write('chmod g+w {}\n'.format(savefile))
                scriptf.write('chmod g+w {}\n'.format(trkfile_front))
                scriptf.write('chmod g+w {}\n'.format(trkfile_side))
                scriptf.close()
                os.chmod(scriptfile,stat.S_IRUSR|stat.S_IRGRP|stat.S_IWUSR|stat.S_IWGRP|stat.S_IXUSR|stat.S_IXGRP)

#                cmd = "ssh login1 'source /etc/profile; qsub -pe batch %d -N %s -j y -b y -o '%s' -cwd '\"%s\"''"%(args.ncores,jobid,logfile,scriptfile)
                cmd = "ssh login2 'source /etc/profile; bsub -n %d -J %s -oo '%s' -eo '%s' -cwd . '\"%s\"''"%(args.ncores,jobid,logfile,errfile,scriptfile)
                print(cmd)
                call(cmd,shell=True)
ar['flacr'] = (ar['flag_acaod']==1) & ar['fl_QA'] & ar['fl_routine']


# ## save to file

# In[38]:


import hdf5storage as hs


# In[ ]:


hs.savemat(fp+'/NAAMES3_aod_ict/all_aod_ict_{vv}_NAAMES3.mat'.format(vv=vv),ar)


# ## Optionally load the file

# In[8]:


import hdf5storage as hs


# In[9]:


ar = hs.loadmat(fp+'/aod_ict_2017/{vv}/all_aod_ict_{vv}_2017.mat'.format(vv=vv))
Example n. 49
0
def save_matrix(mat, output_path):
    # thin wrapper: write a dict of arrays to a MATLAB file (v7.3 by default)
    hdf5storage.savemat(output_path, mat)
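
# A minimal round-trip sketch for the wrapper above; the path and the array
# are illustrative, not from the original code.
import numpy as np
import hdf5storage

mat = {u'weights': np.eye(3)}
save_matrix(mat, 'weights.mat')
loaded = hdf5storage.loadmat('weights.mat')
assert np.array_equal(loaded['weights'], mat[u'weights'])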
out['utc'] = ar['Start_UTC']
out['lat'] = ar['LAT']
out['lon'] = ar['LON']
out['a0'],out['a1'],out['a2'] = ar['a0'],ar['a1'],ar['a2']


# In[266]:


fp


# In[267]:


hs.savemat(fp+'data_other/ssfr_2016_retrieved_COD.mat',out)
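
# A quick sanity check (a sketch): reload the file just written and compare one
# variable; assumes numpy is imported as np, as elsewhere in this notebook.

# In[ ]:


chk = hs.loadmat(fp+'data_other/ssfr_2016_retrieved_COD.mat')
np.allclose(chk['utc'], out['utc'], equal_nan=True)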


# In[268]:


out['days']


# # Load the results of the CRE calculations
# See the ORACLES_cld_CRE_from_SSFR jupyter notebook for details

# In[273]:


c = hs.loadmat(fp+'rtm/ORACLES_CRE_v6_irr.mat')
Example n. 51
0
#names = glob.glob(name_base + "\*.mat")
data = {}

file_name = DATA_DIR + '/EVs/'+ strategy + '_' + str(n) + '.mat'
# start from a clean combined file; ignore the error if it does not exist yet
try:
    os.remove(file_name)
except OSError:
    pass


for i in xrange(1, n + 1):

    name = name_base + str(i) + '.mat'
    data = {}
    data["ev_" + str(i)] = hdf5storage.loadmat(name)
    # savemat does not truncate an existing file by default, so each
    # ev_i variable is appended to the combined .mat file in turn
    hdf5storage.savemat(file_name, data)
    print "saved file " + str(i)
    
    #try:
    #    if(DELETE):
    #       os.remove(name)
    #except:
    #    pass
    
'''
name = name_base + str(i) + '.mat'
with h5py.File(name, 'r') as mat_file:

    #f.create_group(str(i))
    h5py.h5o.copy(mat_file.id, mat_file.name, f.id, str(i))
'''
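
# A working sketch of the h5py alternative quoted above: copy each per-EV
# file's top-level entries into a numbered group of one combined HDF5 file.
# Assumes the inputs are v7.3 .mat files (i.e. valid HDF5); 'combined.h5' is
# an illustrative output name.
import h5py

with h5py.File('combined.h5', 'w') as f:
    for i in range(1, n + 1):
        name = name_base + str(i) + '.mat'
        with h5py.File(name, 'r') as mat_file:
            grp = f.create_group(str(i))
            for key in mat_file.keys():
                # Group.copy clones the dataset or group, attributes included
                mat_file.copy(key, grp)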
    ar6['idays'] = np.append(ar6['idays'],np.zeros_like(outgas6[i]['Start_UTC'])+i)
    ar6['days'] = np.append(ar6['days'],np.zeros_like(outgas6[i]['Start_UTC'])+float(d))
    for n in nm6:
        ar6[n] = np.append(ar6[n],outgas6[i][n])


# In[96]:


len(ar6['days'])


# In[39]:


hs.savemat(fp+'/gas_ict/all_gas_ict_{vv}_2016.mat'.format(vv=v6),ar6)


# ## Load the 2017 files

# In[16]:


days = ['20170801','20170802','20170807','20170809', '20170812','20170813',
        '20170815','20170817','20170818','20170819','20170821',
        '20170824','20170826','20170828','20170830','20170831','20170902','20170903','20170904']


# In[17]:

sza = np.round(np.arccos(1.0/mu)*180.0/np.pi)
tau = np.array([0.1,0.2,0.3,0.5,0.75,1.0,1.5,2.0,2.5,3.0,4.0,5.0,
       6.0,7.0,8.0,9.0,10.0,12.5,15.0,17.5,20.0,25.0,30.0,40.0,50.0,
       60.0,70.0,80.0,100.0])
ref = np.append(np.append(np.arange(2,15),np.arange(15,30,2)),np.ceil(np.arange(30,61,2.5)))
zout= [0.2,4.0,100.0]


# In[7]:

fp_out = os.path.join(fp_rtm,'output','%s_KORUS'%vv)


# In[ ]:

dat = RL.read_lut(fp_out,zout=zout,tau=tau,ref=ref,sza=sza,
                  phase=['wc','ic'],
                  fmt='lut_sza{sza:02.0f}_tau{tau:06.2f}_ref{ref:04.1f}_{phase}_w{iwvl:1d}.dat',
                  split_wvl=True)


# In[ ]:

print 'Saving matlab file'


# In[10]:

hs.savemat(fp+'{}_KORUS_lut.mat'.format(vv),dat)

Example n. 54
0
    cap = movies.Movie(mov)
    dd = []
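    # spot-check 10 random frames: log the frame index and the max abs
    # residual between the freshly decoded frame (offset by +1) and the
    # cached ims, plus the same against py_ims when it is available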
    for ndx in range(10):
        curfr = np.random.randint(nfr)
        curim = cap.get_frame(curfr)[0][:,:,0]
        ff = curim.astype('float64')+1-ims[:,:,curfr]
        cur_i = [curfr, np.abs(ff).max()]
        if py_ims is not None:
            ffp = curim.astype('float64')-py_ims[:,:,curfr]
            cur_i.append(np.abs(ffp).max())
        # dd.append([curfr, int(np.percentile(np.abs(ff),100-0.002))])
        dd.append(cur_i)

    print dd

    cap.close()

    ##

    if not os.path.exists(py_mat):
        cap = movies.Movie(mov)
        dd = []
        py_ims = np.zeros([512, 768, nfr])
        for ndx in range(nfr):
            curfr = ndx
            curim = cap.get_frame(curfr)[0][:, :, 0]
            py_ims[:, :, ndx] = curim

        hdf5storage.savemat(py_mat,{'I':py_ims})

Example n. 55
0
datum = caffe_pb2.Datum()
mat_file = "/home/tzeng/caffe_3d/data/test_out.mat"

window_num = 0
for key in db_data.RangeIter(include_value=False):
    window_num = window_num + 1

print window_num
n = 0
# for key,value in db_data.RangeIter():
# n=n+1
# #f_size=len(value)
# datum.ParseFromString(value)
# f_size=len(datum.float_data)
# if n>0:
# break

# one row per LevelDB record, 27 values each
ft = np.zeros((window_num, 27))
lb = np.zeros((window_num, 27))
count = 0
for key, value in db_data.RangeIter():
    datum.ParseFromString(value)
    # ft[count, :]=datum.data.tostring()
    lb[count, :] = datum.float_label
    count = count + 1
print ft
data = {u"feat_label": {u"feat": ft, u"label": lb}}

print "save result to : %s" % (mat_file)
hdf5storage.savemat(mat_file, data, format="7.3")
    return out

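# normalize all dict keys to unicode before saving below; hdf5storage writes
# variable names as unicode, which is presumably why dict_keys_to_unicode exists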
for n in rtss.keys():
    if type(rtss[n]) is list:
        print n
        for i,t in enumerate(rtss[n]):
            rtss[n][i] = dict_keys_to_unicode(t)
    else:
        print 'no',n
        rtss[n] = dict_keys_to_unicode(rtss[n])


# In[50]:


hs.savemat(fp+'..//zen_ict/v3/{}_all_retrieved.mat'.format(vr),rtss)


# ## Optionally load the saved mat files

# In[23]:


rtss = hs.loadmat(fp+'..//zen_ict/v3/{}_all_retrieved.mat'.format(vr))


# In[34]:


if 'rts' not in locals():
    rts = []
def main(argv):
	#db_path_label =''
	db_path_feats ='' 
	mat_file =''
	print argv
	#try:
	opts, args = getopt.getopt(argv,'f:o:h',['feature_db=','mat_file=','help'])
	#except getopt.GetoptError:
	#	print 'feature_LDB_to_mat.py  -f <feature_db> -m <output_mat_file>'
	#	sys.exit(2)
	
	print opts
	print args
	
	
	for opt, arg in opts:
		if opt in("-f","--feature_db"):
			db_path_feats=arg
		elif opt in("-o","--mat_file"):
			mat_file=arg
		elif opt in('-h', '--help'):
			sys.exit(2)
		print arg+" "+opt

	#print(db_path_label)
	print(db_path_feats)
	print(mat_file)

	#if not os.path.exists(db_path_label):
	#	raise Exception('db label not found')
	if not os.path.exists(db_path_feats):
		raise Exception('db feature not found')
		

	
	#db_label=leveldb.LevelDB(db_path_label)
	db_feats=leveldb.LevelDB(db_path_feats)	
	#window_num =686
	datum = caffe_pb2.Datum()
	#datum_lb = caffe_pb2.Datum()
	start=time.time();
	#ft = np.zeros((window_num, float(81)))
	#ft = np.zeros((window_num, float(100352)))
	#lb = np.zeros((window_num, float(81)))
	window_num=0
	for key in db_feats.RangeIter(include_value = False):
		window_num=window_num+1
	
	f_size =1
	n=0
	for key,value in db_feats.RangeIter():
		#f_size=len(value)
		datum.ParseFromString(value)
		f_size=len(datum.float_data)
		if f_size==0:
			f_size=len(datum.data)
		#f_size=len(datum.data)
		#break
	print('data size is %d' % f_size)
	#for key,value in db_label.RangeIter():
	#	n=n+1
		#l_size=len(value)
	#	datum.ParseFromString(value)
	#	l_size=len(datum.float_data)
	#	if n==1:
	#	   break
	ft = np.zeros((window_num, int(f_size)))
	#lb = np.zeros((window_num, float(l_size)))
	
	
	# for im_idx in range(window_num):
	count=0
	for key in db_feats.RangeIter(include_value = False):
	 #datum.ParseFromString(db_label.Get('%d' %(im_idx)))
	 datum.ParseFromString(db_feats.Get(key));
	 print('%d %d %d %d' % (datum.channels, datum.height, datum.width, datum.depth))
	 #datum_lb.ParseFromString(db_label.Get(key));
	 #datum.ParseFromString(db_feats.Get('%d' %(im_idx)))
	 #ft[im_idx, :]=caffe.io.datum_to_array(datum)
	 # NB: this rebinds ft to the current record's data, so the array
	 # preallocated above is never filled and only the last record is saved
	 ft = datum.float_data
	 print len(ft)
	 if len(ft)==0:
	 	ft=datum.data
	 d_dim =len(ft)/(datum.width * datum.depth)
	 print d_dim
	 #ft[count, :]=datum.data
	 #ft=np.fromstring(datum.data, dtype = np.int).reshape(datum.width, datum.depth)
     #datum.channels, datum.height, datum.width, datum.depth
	 #lb[count,:]=datum_lb.float_data
	 count=count+1
	 #ft=np.reshape(ft,len(ft), order ='F')
	 ft=np.reshape(ft,(d_dim,datum.width, datum.depth))
	 #ft=np.reshape(ft,(12,400, 400))
     # b.reshape(2,3,order='F')
	 #print key
	 print 'convert feature # : %d key is %s' %(count,key)
	print 'time 1: %f' %(time.time() - start)

	data = {u'feat_label' : {
			u'feat' : ft,
			#u'label' : lb,
		 }
	 }
	print 'save result to : %s' %(mat_file)
	hdf5storage.savemat(mat_file,data, format='7.3') 
	print 'time 2: %f' %(time.time() - start)
	print 'done!'
Example n. 58
0
tcap_lut = {u'tau':s.tau,
            u'rad':s.sp[:,:,:,:,:,np.newaxis],
            u'sza':np.array(s.sza)[np.newaxis],
            u'irr_dn_diff':s.sp_irrdn[:,:,:,:,:,np.newaxis]*0.0,
            u'irr_dn':s.sp_irrdn[:,:,:,:,:,np.newaxis],
            u'irr_up':s.sp_irrup[:,:,:,:,:,np.newaxis],
            u'zout':s.zout,
            u'wvl':s.zenlambda,
            u'phase':['wc','ic'],
            u'ref':s.ref}


# In[55]:

hs.savemat(fp+'model\\{}_TCAP_lut.mat'.format(vv),tcap_lut)


# ### Run the retrieval via the run_zen_cld_retrieval command line
# in the python_codes directory:
# 
# > python Run_zen_cld_retrieval.py -lut C:\Users\sleblan2\Research\TCAP\model\v1_TCAP_lut.mat C:\Users\sleblan2\Research\TCAP\20130219starzen.mat -o C:\Users\sleblan2\Research\TCAP\plots\ 
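
# A sketch of launching the same retrieval from inside the notebook rather than
# a shell; it assumes only the flags shown above, and the paths are the same
# illustrative Windows paths (trailing backslash dropped for the raw strings).

# In[ ]:


import subprocess
cmd = ['python', 'Run_zen_cld_retrieval.py',
       '-lut', r'C:\Users\sleblan2\Research\TCAP\model\v1_TCAP_lut.mat',
       r'C:\Users\sleblan2\Research\TCAP\20130219starzen.mat',
       '-o', r'C:\Users\sleblan2\Research\TCAP\plots']
subprocess.call(cmd)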

# ## Check the 4STAR data

# In[8]:

ss = sio.loadmat(fp+'20130219starzen.mat')


# In[13]:
# In[28]:

ar['fl2'] = ar['fl_QA'] & ar['fl_alt_18']


# ## save to file

# In[52]:

import hdf5storage as hs


# In[53]:

hs.savemat(fp+'/aod_ict/{vv}/all_aod_ict_{vv}.mat'.format(vv=vv),ar)


# ## Optionally load the file

# In[1]:

import hdf5storage as hs


# In[8]:

ar = hs.loadmat(fp+'/aod_ict/all_aod_ict.mat')


# ## Plot a histogram of all the AOD