Example #1
def main(args):
    leds = LED_SYSTEMS.create(name=args.led_system,
                              wavevec=K,
                              **args.led_attrs)
    recoverer = METHODS.create(
        name=args.recovery_method,
        leds=leds,
        loops=args.recover_loops,
        wavelen=WAVELEN,
        sample_size=SAMPSIZE,
        quality=QUALITY,
        objective=Objective,
        NA=NA,
    )

    low_data = load_low_resolution_images(leds, args.sources_dir) \
      if not args.source_name else np_load(args.source_name)

    low_size = low_data[0].shape
    img_size = tuple(int(i / QUALITY) for i in low_data[0].shape)
    print("Recovery process started ...")
    data = recoverer.run(low_data)
    print(DURATION_FORMAT.format(recoverer.duration))

    img = pack_image(data['amplitude'], img_size, 'I', norm=True)
    img.save(args.destination_real)
    print("Result image was saved into {} file.".format(args.destination_real))

    if args.show_images:
        print("Showing recovered object and phase")
        img.show()
        if 'pupil' in data:
            pack_image(data['pupil'], low_size, norm=True).show()

    if args.real_object_filename:
        real_inten = load_image(args.real_object_filename, 'I')
        # result_inten = get_intensity(data['amplitude'])
        result_inten = array(img)
        print("Object RMSE: {:.3f}".format(
            FP.count_RMSE(real_inten, result_inten), ))
        if args.show_images:
            diff_img = pack_image(abs(real_inten - result_inten),
                                  real_inten.shape,
                                  norm=True)
            diff_img.show()

    if args.real_pupil_filename and 'pupil' in data:
        real_inten = np_load(args.real_pupil_filename)
        print("Pupil RMSE: {:.3f}".format(
            FP.count_RMSE(real_inten, data['pupil']), ))
        if args.show_images:
            diff_img = pack_image(abs(real_inten - data['pupil']),
                                  real_inten.shape,
                                  norm=True)
            diff_img.show()
    elif args.real_pupil_filename and 'pupil' not in data:
        print("Use another method to recover pupil.")

    if args.show_fourier:
        pack_image(data['ft'], img_size, norm=True).show()
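
LED_SYSTEMS and METHODS above are used as named factories. A minimal registry sketch, written purely as an assumption about the surrounding codebase (the real classes are not shown in this example):

class Registry:
    # Hypothetical name-to-class registry; not the project's actual code.
    def __init__(self):
        self._factories = {}

    def register(self, name):
        def decorator(cls):
            self._factories[name] = cls
            return cls
        return decorator

    def create(self, name, **kwargs):
        # Instantiate the class registered under `name`.
        return self._factories[name](**kwargs)

LED_SYSTEMS = Registry()
METHODS = Registry()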
Example #2
def whatyouwant(paramin):
    global dir_pri1, dir_pri2, dir_pri3
    normW2 = arange(1.4, 2.601, 0.05).tolist()
    dens_moy = arange(0.02, 0.981, 0.03)

    for d1 in range(len(dens_moy)):
        dir_sub1 = dir_pri1 + '/Norm_%.2f_DensMoy_%.2f' % (paramin,
                                                           dens_moy[d1])
        nbpatterns1 = len(os.listdir(dir_sub1))

        for p1 in range(nbpatterns1):
            d_patty1 = dir_sub1 + '/pattern_%.3i.npy' % p1
            patty1 = np_load(d_patty1)

            for w2 in range(len(normW2)):
                dir_sub2 = dir_pri2 + '/Norm_%.2f' % (normW2[w2])
                nbpatterns2 = len(os.listdir(dir_sub2)) // 2  # integer count for range()

                for p2 in range(nbpatterns2):
                    d_patty2 = dir_sub2 + '/pattern_%.3i.npy' % p2
                    patty2 = np_load(d_patty2)
                    d_patty0 = dir_sub2 + '/states_%.3i.jpeg' % p2

                    if (corrcoef(patty1, patty2)[0, 1] > 0.99) \
                    and (abs(norm(patty1) - norm(patty2)) / norm(patty2) < 0.0005):
                        direction = dir_pri3 + '/C_W%.2fD%.2fP%.3i_D_W%.2fP%.3i' % (
                            paramin, dens_moy[d1], p1, normW2[w2], p2)
                        # subprocess.getoutput replaces the Python 2
                        # commands.getoutput, which no longer exists.
                        subprocess.getoutput('cp ' + d_patty2 + " " +
                                             direction + '.npy')
                        subprocess.getoutput('cp ' + d_patty0 + " " +
                                             direction + '.jpeg')
                        print(direction, abs(norm(patty1) -
                                             norm(patty2)) / norm(patty2))
    return 1
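
Shelling out to cp works but breaks on paths with spaces; a sketch of the same copy step using the standard library (a drop-in alternative, not what the original author used):

from shutil import copyfile

# Shell-free copies of the matched pattern and its state image.
copyfile(d_patty2, direction + '.npy')
copyfile(d_patty0, direction + '.jpeg')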
Example #3
    def __init__(self, db_path, ext):
        self.db_path = db_path
        self.ext = ext
        if self.ext == ".npy":
            self.loader = lambda x: np_load(x)
        else:
            self.loader = lambda x: np_load(x)["feat"]
        if db_path.endswith(".lmdb"):
            self.db_type = "lmdb"
            self.env = lmdb_open(
                db_path,
                subdir=isdir(db_path),
                readonly=True,
                lock=False,
                readahead=False,
                meminit=False,
            )
        elif db_path.endswith(".pth"):  # Assume a key,value dictionary
            self.db_type = "pth"
            self.feat_file = torch_load(db_path)
            self.loader = lambda x: x
            print("HybridLoader: ext is ignored")
        elif db_path.endswith("h5"):
            self.db_type = "h5"
            self.loader = lambda x: np_array(x).astype("float32")
        else:
            self.db_type = "dir"
Example #4
def whatyouwant(paramin):
    global dir_pri1, dir_pri2, dir_pri3
    dens_moy = arange(0.02, 0.981, 0.03)

    for d1 in range(len(dens_moy)):
        dir_sub1 = dir_pri1 + '/Norm_%.2f_DensMoy_%.2f' % (paramin,
                                                           dens_moy[d1])
        nbpatterns1 = len(os.listdir(dir_sub1))

        for p1 in range(nbpatterns1):
            d_patty1 = dir_sub1 + '/pattern_%.3i.npy' % p1
            patty1 = np_load(d_patty1)

            dir_sub2 = dir_pri2 + '/Norm_0.50_DensMoy_0.02'
            nbpatterns2 = len(os.listdir(dir_sub2)) // 2  # integer count for range()

            for p2 in range(nbpatterns2):
                d_patty2 = dir_sub2 + '/pattern_%.3i.npy' % p2
                patty2 = np_load(d_patty2)
                d_patty0 = dir_sub2 + '/states_%.3i.jpeg' % p2

                if (corrcoef(patty1, patty2)[0, 1] > 0.7) \
                and (abs(norm(patty1) - norm(patty2)) / norm(patty2) < 10.05):
                    direction = dir_pri3 + '/C_W%.2fD%.2fP%.3i_Pica_%.3i' % (
                        paramin, dens_moy[d1], p1, p2)
                    subprocess.getoutput('cp ' + d_patty2 + " " +
                                         direction + '.npy')
                    # toimage is from the old scipy.misc API (removed in
                    # SciPy 1.2); list() is needed because zip is lazy in
                    # Python 3.
                    toimage(
                        array(list(zip(*reversed(patty2.reshape(1, len(
                            patty2)))))).T).save(direction + '.jpeg')
                    print(direction, abs(norm(patty1) -
                                         norm(patty2)) / norm(patty2))
    return 1
Example #5
    def load(self):
        path = join(self.directory, self.name + ".npy")

        if exists(path):
            arr = np_load(file=path)
            self.num_steps = arr[0]
            self.action_num = arr[1]

        path = join(self.directory, self.name + "_eps.npy")
        if exists(path):
            self.eps = np_load(file=path)
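
The class only defines load() in this snippet; a sketch of a matching save(), under the assumption that num_steps and action_num are stored as the first two entries of the main array (mirroring the reads above):

    def save(self):
        # Hypothetical counterpart to load(), using the same file names.
        from numpy import array, save as np_save
        np_save(join(self.directory, self.name + ".npy"),
                array([self.num_steps, self.action_num]))
        np_save(join(self.directory, self.name + "_eps.npy"), self.eps)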
Example #6
def getFakeData(dataSet, fakeClasses, instances):
    '''
    64x64 images in imageArray
    '''
    fakeClasses = sorted(fakeClasses)
    imageArray = np.empty((1, 4096))
    labelArray = np.empty((1))
    for i in fakeClasses:
        tempImageArray = np_load('../../DCGAN/results/' + 'compressed' + '/' +
                                 dataSet + '/' + dataSet + '_' + str(i) + '_' +
                                 str(instances) + '.npy')
        tempImageArray = tempImageArray.reshape(tempImageArray.shape[0], -1)
        tempLabelArray = np.empty(tempImageArray.shape[0])
        tempLabelArray.fill(i)
        imageArray = np.concatenate([imageArray, tempImageArray])
        labelArray = np.concatenate([labelArray, tempLabelArray])
    imageArray = imageArray[1:]
    labelArray = labelArray[1:]

    # random shuffling of images and labels
    p = np.random.permutation(imageArray.shape[0])
    imageArray = imageArray[p]
    labelArray = labelArray[p]

    labelArray = labelArray.astype('int')
    imageArray = imageArray / 255.0
    return imageArray, labelArray
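
A hypothetical call; the dataset name, classes, and instance count below are illustrative and must match existing .npy files from the DCGAN step:

# Illustrative usage only -- the arguments are assumptions.
imageArray, labelArray = getFakeData('mnist', fakeClasses=[0, 1, 2],
                                     instances=500)
print(imageArray.shape, labelArray[:10])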
Example #7
def ReadData(data_dir):
    labelfolders = [f for f in os.scandir(data_dir) if f.is_dir()]

    counter = -1
    dataset = []
    img_counter = 0
    for labelfolder in labelfolders:
        counter += 1
        label = (counter, labelfolder.name)

        files = [
            f.path for f in os.scandir(labelfolder.path)
            if not f.is_dir() and f.name.endswith('depth.npy')
        ]
        depth_frames = []
        for f in files:
            depth_frame = np_load(f).astype(np.float64)  # np.float was removed in NumPy 1.24
            depth_frames.append(depth_frame)
            img_counter += 1

        dataset.append((label, depth_frames))

    t_min, t_max = DetermineMinMaxDistance(dataset)
    print('Range in data: min=' + str(t_min) + "mm max=" + str(t_max) + "mm")

    return (img_counter, dataset)
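
DetermineMinMaxDistance is called above but not shown; a plausible sketch, assuming it simply scans every depth frame for the global range (the real helper may differ):

def DetermineMinMaxDistance(dataset):
    # Hypothetical helper: global min/max depth over all frames of all labels.
    t_min, t_max = float('inf'), float('-inf')
    for _label, depth_frames in dataset:
        for frame in depth_frames:
            t_min = min(t_min, frame.min())
            t_max = max(t_max, frame.max())
    return t_min, t_max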
Example #8
def whatyouwant(paramin):
    global dir_pri1, dir_pri2, dir_pri3, patient
    dens_moy = arange(0.02,0.981,0.03)

    ofi1 = open(dir_pri2 + '/TC%ia.dat' %patient,'r')
    ofi2 = open(dir_pri2 + '/TC%ib.dat' %patient,'r')
    A = array([line.split() for line in ofi1]).astype(float)
    B = array([line.split() for line in ofi2]).astype(float)
    C = concatenate([A,B], axis=1)
    ofi1.close()
    ofi2.close()

    nruter_a = []
    for d1 in range(len(dens_moy)):
        dir_sub1   = dir_pri1 + '/Norm_%.2f_DensMoy_%.2f' %(paramin, dens_moy[d1])
        nbpatterns = len(os.listdir(dir_sub1)) - 1
        nbtimeshap = size(C, axis=1)
        nruter_b   = zeros((nbpatterns, nbtimeshap))

        for p1 in range(nbpatterns):
            d_patty = dir_sub1 + '/pattern_%.3i.npy' %p1
            patty = np_load(d_patty)

            for p2 in range(nbtimeshap):
                nruter_b[p1,p2] = corrcoef(patty, C[:,p2])[0,1]
        nruter_a.append(nruter_b)
    nruter = nruter_a[0]
    for ll in range(1, len(nruter_a)):
        nruter = append(nruter, nruter_a[ll], axis=0)

    return nruter
Example #9
    def __init__(self,
                 data_dir: Path,
                 split: str,
                 input_field_name: str,
                 output_field_name: str,
                 load_into_memory: bool) \
            -> None:
        """Initialization of a Clotho dataset object.

        :param data_dir: Data directory with Clotho dataset files.
        :type data_dir: pathlib.Path
        :param split: The split to use (`development`, `validation`)
        :type split: str
        :param input_field_name: Field name for the input values
        :type input_field_name: str
        :param output_field_name: Field name for the output (target) values.
        :type output_field_name: str
        :param load_into_memory: Load the dataset into memory?
        :type load_into_memory: bool
        """
        super(ClothoDataset, self).__init__()
        the_dir: Path = data_dir.joinpath(split)

        self.examples: List[Path] = sorted(the_dir.iterdir())
        self.input_name: str = input_field_name
        self.output_name: str = output_field_name
        self.load_into_memory: bool = load_into_memory

        if load_into_memory:
            self.examples: List[ndarray] = [
                np_load(str(f), allow_pickle=True) for f in self.examples
            ]
Example #10
    def __init__(self, data_dir: Path, split: AnyStr, input_field_name: AnyStr,
                 output_field_name: AnyStr, load_into_memory: bool):
        """Initialization of a Clotho dataset object.

        :param data_dir: Directory with data.
        :type data_dir: pathlib.Path
        :param split: Split to use (i.e. 'development', 'evaluation')
        :type split: str
        :param input_field_name: Field name of the clotho data\
                                 to be used as input data to the\
                                 method.
        :type input_field_name: str
        :param output_field_name: Field name of the clotho data\
                                 to be used as output data to the\
                                 method.
        :type output_field_name: str
        :param load_into_memory: Load all data into memory?
        :type load_into_memory: bool
        """
        super(ClothoDataset, self).__init__()
        the_dir: Path = data_dir.joinpath(split)

        self.examples: List[Path] = sorted(the_dir.iterdir())
        self.input_name: str = input_field_name
        self.output_name: str = output_field_name
        self.load_into_memory: bool = load_into_memory

        if load_into_memory:
            self.examples: List[recarray] = [
                np_load(str(f), allow_pickle=True) for f in self.examples
            ]
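
Both dataset variants above follow the torch Dataset pattern; a hedged usage sketch, assuming ClothoDataset subclasses torch.utils.data.Dataset (as the super().__init__() call suggests) and that the split directory exists:

from pathlib import Path
from torch.utils.data import DataLoader

# Illustrative field names -- adjust to the actual Clotho .npy layout.
dataset = ClothoDataset(data_dir=Path('data'), split='development',
                        input_field_name='features',
                        output_field_name='words_ind',
                        load_into_memory=False)
# A custom collate_fn may be needed for variable-length caption targets.
loader = DataLoader(dataset, batch_size=16, shuffle=True)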
Example #11
def read_sparse_csc_matrix(filepath):
    """
    Read the data, indices, indptr, and shape arrays from a ``.npz`` file on disk
    at ``filepath``, and return an instantiated ``scipy.sparse.csc_matrix``.
    """
    npz_file = np_load(filepath)
    return csc_matrix((npz_file['data'], npz_file['indices'], npz_file['indptr']),
                      shape=npz_file['shape'])
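
The inverse operation is not part of this snippet; a minimal sketch of a writer producing the .npz layout that read_sparse_csc_matrix expects:

from numpy import savez as np_savez

def write_sparse_csc_matrix(matrix, filepath):
    # Persist the four arrays needed to rebuild a scipy.sparse.csc_matrix.
    np_savez(filepath, data=matrix.data, indices=matrix.indices,
             indptr=matrix.indptr, shape=matrix.shape)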
Example #12
def predict_image(path_of_image, groupStage):
    path_of_model = os_path.join("./CUSTOMIZE_4_USER/MODEL_TRAINING",
                                 groupStage, groupStage + ".pth")
    path_of_feature = os_path.join("./CUSTOMIZE_4_USER/MODEL_TRAINING",
                                   groupStage, groupStage + ".npz")

    start_time = time()
    model = NeuralNet(input_size, hidden_size, num_classes).to(device)
    model.load_state_dict(load(path_of_model))

    data = np_load(path_of_feature)
    [h_max, s_max, v_max] = data['data_max']
    [h_min, s_min, v_min] = data['data_min']

    img = imread(path_of_image)
    img = resize(img, (6000, 4000))
    img = img[500:-500, 750:-750, :]
    img = cvtColor(img, COLOR_BGR2HSV)
    hchan, schan, vchan = split(img)
    h_hist = calcHist([img], [0], None, [256], [0, 256]).reshape(256, )
    s_hist = calcHist([img], [1], None, [256], [0, 256]).reshape(256, )
    v_hist = calcHist([img], [2], None, [256], [0, 256]).reshape(256, )

    hMean = np_mean(hchan) / 255
    DPV_h_max = np_sum(np_absolute(h_hist - h_max)) / (HEIGHT * WIDTH)
    DPV_h_min = np_sum(np_absolute(h_hist - h_min)) / (HEIGHT * WIDTH)

    sMean = np_mean(schan) / 255
    DPV_s_max = np_sum(np_absolute(s_hist - s_max)) / (HEIGHT * WIDTH)
    DPV_s_min = np_sum(np_absolute(s_hist - s_min)) / (HEIGHT * WIDTH)

    vMean = np_mean(vchan) / 255
    DPV_v_max = np_sum(np_absolute(v_hist - v_max)) / (HEIGHT * WIDTH)
    DPV_v_min = np_sum(np_absolute(v_hist - v_min)) / (HEIGHT * WIDTH)

    correlation = np_corrcoef(h_hist, s_hist)[0][1]

    #image_feature = np_array((hMean, DPV_h_max, DPV_h_min, sMean, DPV_s_max, DPV_s_min, vMean, DPV_v_max, DPV_v_min))
    image_feature = np_array((hMean, DPV_h_max, DPV_h_min, sMean, DPV_s_max,
                              DPV_s_min, correlation))
    image_feature = from_numpy(image_feature).to(device).float().view(
        1, input_size)

    with no_grad():
        out_predict = model(image_feature)
        _, predicted_result = torch_max(out_predict.data, 1)
        original = Tensor([[1, 33, 66, 99]])

    # Round xx.xx %
    percentage_result = np_round(
        mm(out_predict.view(1, num_classes), original.view(num_classes,
                                                           1)).item(), 2)

    # Processed time
    processedTime = np_round(time() - start_time, 2)
    #print("Time  ",processedTime)

    return percentage_result, processedTime
Example #13
    def __getitem__(self,
                    item: int) \
            -> Tuple[ndarray, ndarray]:
        ex: Union[Path, recarray] = self.examples[item]
        ex: recarray = np_load(str(ex), allow_pickle=True)
        features = ex['features'].item()
        file_name = ex['file_name'].item()
        tag = self.tags[file_name]
        return features, tag
Example #14
    def load(self):
        from numpy import load as np_load
        from os.path import exists, join

        path = self.__directory
        if exists(path):
            path = join(self.__directory, "memory_meta.npy")
            arr = np_load(file=path)
            self.__pos = arr[0]
            self.__capacity = arr[1]
            self.__episodes = [None] * self.__capacity

            for i in range(len(self.__episodes)):
                path = join(self.__directory, f"memory_{i:05d}.npy")
                if exists(path):
                    self.__episodes[i] = np_load(file=path)
                else:
                    break
        self.loaded = True
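
The save side is not included in this example; a sketch of a plausible counterpart writing the files load() reads (an assumption, not the original code):

    def save(self):
        # Hypothetical mirror of load(): metadata first, then one file per
        # stored episode.
        from numpy import array, save as np_save
        from os.path import join

        np_save(join(self.__directory, "memory_meta.npy"),
                array([self.__pos, self.__capacity]))
        for i, episode in enumerate(self.__episodes):
            if episode is None:
                break
            np_save(join(self.__directory, f"memory_{i:05d}.npy"), episode)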
Example #15
    def __getitem__(self,item: int):
        ex: Union[Path, recarray] = self.examples[item]
        if not self.load_into_memory:
            ex: recarray = np_load(str(ex), allow_pickle=True)

        in_e, ou_e = [ex[i].item() for i in [self.input_name, self.output_name]]
        out_len = len(ou_e)
        all_ref = get_all_ref(ex['file_name'].item(), self.data_dir)

        filename = str(ex['file_name'].item())
        return in_e, ou_e, out_len
Example #16
def showImageMatrix(dataSet, primaryClass, helperClass, primaryInstances, helperInstances, showImage):
    '''
    Inputs:

    dataSet : str : Dataset for which samples were generated
    primaryClass, helperClass : int : Classes used to generate the samples
    primaryInstances, helperInstances : int : Numbers of instances used
    showImage : int : Show the plot interactively when set to 1

    Outputs:

    5x5 image matrix
    '''
    fileName = resultDir+'results/MMD'+'/'+'compressed'+'/'+dataSet+'/'+ dataSet + '_' \
            + str(primaryClass) + '_' + str(helperClass) + '_' + str(primaryInstances) + '_' \
            + str(helperInstances) + '.npy'
            
    images = np_load(fileName)
    
    # get random list of images to be displayed
    randomList = np.random.randint(0,1000,(25))
    imageList = images[randomList]
    
    
    
    # need to generalise this snippet
    fig, axes = plt.subplots(5,5)
    fig.tight_layout()
    fig.subplots_adjust(wspace=-0.7, hspace=-0.1)
    plt.axis('off')
    
    numOutputChannels = getChannels(dataSet)
    if numOutputChannels==3:
        imageList = np.transpose(imageList,(0,2,3,1))
        
    for i in range(5):
        for j in range(5):
            # The original serialized each tile into an in-memory buffer via
            # PIL; that buffer was never used, so the tile is shown directly.
            axes[i, j].imshow((imageList[i * 5 + j]).astype('uint8'),
                              cmap='gray')
            axes[i, j].axis('off')
            axes[i, j].set_xticklabels([])
            axes[i, j].set_yticklabels([])
            axes[i, j].set_aspect("equal")
    
    plotFileName = resultDir+'results'+'/'+'MMD/samples'+'/'+dataSet+'/'+dataSet+ '_' + str(primaryClass) + '_' + str(helperClass) + '_' + str(primaryInstances) + '_' \
            + str(helperInstances)
    plt.savefig(plotFileName, bbox_inches='tight')
    if showImage==1:
        plt.show()
Example #17
def getFakeData(dataSet, fakeClasses, instances, mmdFlag = 0, numHelperInstances=1000):
    
    '''
    Output image pixels between (-1,1)
    '''
    dataFolder = resultDir+'results/nonMMD/compressed/'+dataSet+'/'+dataSet
    if mmdFlag==1:
        dataFolder = resultDir+'results/MMDall/compressed/'+dataSet+'/'+dataSet
        
    fakeClasses =  sorted(fakeClasses)
    imageArray, labelArray = getInitialArray(dataSet)
    
    for i in fakeClasses:
        if mmdFlag==1:
            tempImageArray = np_load(dataFolder+'_'+str(i)+'_'+str(getHelperClass(dataSet,i))+'_'+str(instances)+'_'+str(numHelperInstances)+'.npy')
        elif mmdFlag==0:
            tempImageArray = np_load(dataFolder+'_'+str(i)+'_'+str(instances)+'.npy')
        
        
        tempImageArray = tempImageArray.reshape(tempImageArray.shape[0],-1)
        
        tempLabelArray = np.empty(tempImageArray.shape[0])
        tempLabelArray.fill(i)        
        
        #print tempImageArray.shape, imageArray.shape
        imageArray = np.concatenate([imageArray, tempImageArray])
        labelArray = np.concatenate([labelArray, tempLabelArray])
    imageArray = imageArray[1:]
    labelArray = labelArray[1:]
    
    
    # random shuffling of images and labels
    p = np.random.permutation(imageArray.shape[0])
    imageArray = imageArray[p]
    labelArray = labelArray[p]

    return imageArray, labelArray
Example #18
    def __init__(self, data_dir: Path,
                 split: AnyStr,
                 input_field_name: AnyStr,
                 output_field_name: AnyStr,
                 fileid_field_name,
                 load_into_memory: bool,
                 mapping_index_dict,
                 has_gt_text: bool) \
            -> None:
        """Initialization of a Clotho dataset object.

        :param data_dir: Directory with data.
        :type data_dir: pathlib.Path
        :param split: Split to use (i.e. 'development', 'evaluation')
        :type split: str
        :param input_field_name: Field name of the clotho data\
                                 to be used as input data to the\
                                 method.
        :type input_field_name: str
        :param output_field_name: Field name of the clotho data\
                                 to be used as output data to the\
                                 method.
        :type output_field_name: str
        :param load_into_memory: Load all data into memory?
        :type load_into_memory: bool
        :param fileid_field_name: Field name holding the file id (or None).
        :type fileid_field_name: str
        :param mapping_index_dict: Optional mapping from output word indices
                                   to a reduced vocabulary (or None).
        :type mapping_index_dict: dict
        :param has_gt_text: Is it the development subset, for which we have
                            GT text?
        :type has_gt_text: bool
        """
        super(ClothoDataset, self).__init__()
        the_dir: Path = data_dir.joinpath(split)

        self.examples: List[Path] = sorted(the_dir.iterdir())
        self.input_name: str = input_field_name
        self.output_name: str = output_field_name
        self.load_into_memory: bool = load_into_memory
        self.fileid_field_name = fileid_field_name
        self.mapping_index_dict = mapping_index_dict
        self.has_gt_text: bool = has_gt_text

        if load_into_memory:
            self.examples: List[recarray] = [
                np_load(str(f), allow_pickle=True) for f in self.examples
            ]
Example #19
    def __init__(self, data_dir: Path,
                 split: AnyStr,
                 input_field_name: AnyStr,
                 output_field_name: AnyStr,
                 load_into_memory: bool) \
            -> None:
        super(ClothoDataset, self).__init__()
        the_dir: Path = data_dir.joinpath(split)

        self.examples: List[Path] = sorted(the_dir.iterdir())
        self.input_name: str = input_field_name
        self.output_name: str = output_field_name
        self.load_into_memory: bool = load_into_memory

        if load_into_memory:
            self.examples: List[recarray] = [np_load(str(f), allow_pickle=True)
                                             for f in self.examples]
Example #20
def gather_captions_to_text(caption_dir, out_fpath):
    i = 0
    with open(out_fpath, 'wt') as fh:  # context manager guarantees the file is closed
        for npy_fpath in glob.glob(caption_dir + '/*.npy'):
            recarray = np_load(str(npy_fpath), allow_pickle=True)
            word_indices_list = recarray['words_ind'][0]
            word_str_list = [index2word[w] for w in word_indices_list]
            # word_str = ' '.join(word_str_list).replace('<sos> ', '')
            word_str = ' '.join(word_str_list)
            fh.write(word_str + '\n')
            i += 1
    print("wrote %d lines to file" % i)
Example #21
    def __getitem__(self, item: int) -> Tuple[ndarray, ndarray]:
        """Gets an example from the dataset.

        :param item: Index of the item.
        :type item: int
        :return: Input and output values.
        :rtype: numpy.ndarray. numpy.ndarray
        """
        ex: recarray = self.examples[item]
        if not self.load_into_memory:
            ex: recarray = np_load(str(ex), allow_pickle=True)

        in_e, ou_e = [
            ex[i].item() for i in [self.input_name, self.output_name]
        ]

        return in_e, ou_e
Example #22
def mnist_train(b: Model) -> None:
  o = build_outpath(b)
  c = build_cattrs(b)

  with np_load(build_path(b, c.dataset), allow_pickle=True) as f:
    b.x_train, b.y_train = f['x_train'], f['y_train']
    b.x_test, b.y_test = f['x_test'], f['y_test']

  b.x_train = b.x_train.reshape(b.x_train.shape[0], 28, 28, 1).astype('float32') / 255
  b.y_train = to_categorical(b.y_train, 10)

  b.x_test = b.x_test.reshape(b.x_test.shape[0], 28, 28, 1).astype('float32') / 255
  b.y_test = to_categorical(b.y_test, 10)


  print('x_train shape:', b.x_train.shape)
  print(b.x_train.shape[0], 'train samples')
  print(b.x_test.shape[0], 'test samples')

  model = Sequential()
  b.model = model
  model.add(Conv2D(32, kernel_size=(3, 3), activation = 'relu', input_shape = (28,28,1)))
  model.add(Conv2D(64, (3, 3), activation = 'relu'))
  model.add(MaxPool2D(pool_size = (2,2)))
  model.add(Dropout(0.25))
  model.add(Flatten())
  model.add(Dense(128, activation = 'relu'))
  model.add(Dropout(0.5))
  model.add(Dense(10, activation = 'softmax'))

  model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

  callbacks = [
    ModelCheckpoint(
      monitor='val_accuracy',
      filepath=join(o, "checkpoint.ckpt"),
      save_weights_only=True,
      save_best_only=True,
      verbose=True)]
  model.fit(b.x_train, b.y_train,
      batch_size=32,
      epochs=c.num_epoches,
      verbose=0,
      callbacks=callbacks,
      validation_split=0.2)
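
The file opened with np_load above is a Keras-style mnist.npz archive; a sketch of how such a file can be produced with numpy.savez (illustrative, not part of the original pipeline):

import numpy as np

# Dummy arrays standing in for the real MNIST data.
x_train = np.zeros((60000, 28, 28), dtype='uint8')
y_train = np.zeros(60000, dtype='uint8')
x_test = np.zeros((10000, 28, 28), dtype='uint8')
y_test = np.zeros(10000, dtype='uint8')

np.savez('mnist.npz', x_train=x_train, y_train=y_train,
         x_test=x_test, y_test=y_test)
# np_load('mnist.npz', allow_pickle=True) then exposes f['x_train'], etc.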
Example #23
def showImageMatrix(dataSet, cls, instances):
    '''
    Inputs:

    dataSet : str : Dataset for which samples were generated
    cls : int : Class for which samples were generated
    instances : int : Number of instances used from the original dataset

    Outputs:

    5x5 image matrix
    '''
    fileName = '../DCGAN/results/compressed' + '/' + dataSet + '/' + dataSet + '_' + str(
        cls) + '_' + str(instances) + '.npy'
    images = np_load(fileName)

    # get random list of images to be displayed
    randomList = np.random.randint(0, 1000, (25))
    imageList = images[randomList]


    # need to generalise this snippet
    fig, axes = plt.subplots(5, 5)
    fig.tight_layout()
    fig.subplots_adjust(wspace=-0.7, hspace=-0.1)
    plt.axis('off')

    for i in range(5):
        for j in range(5):
            # The in-memory StringIO save was dead code (and breaks under
            # Python 3, where PIL needs a binary buffer); imshow uses the
            # PIL image directly.
            image = PIL.Image.fromarray(np.uint8(imageList[i * 5 + j]))
            axes[i, j].imshow(image, cmap='Greys_r')
            axes[i, j].axis('off')
            axes[i, j].set_xticklabels([])
            axes[i, j].set_yticklabels([])
            axes[i, j].set_aspect("equal")

    plotFileName = 'results' + '/' + 'samples' + '/' + dataSet + '/' + dataSet + '_' + str(
        cls) + '_' + str(instances) + '.png'
    plt.savefig(plotFileName, bbox_inches='tight')
    if params.showImage == 1:
        plt.show()
Example #24
    def __getitem__(self, item: int):
        """Gets an example from the dataset.

        :param item: Index of the item.
        :type item: int
        :return: Input and output values.
        :rtype: numpy.ndarray. numpy.ndarray
        """
        ex: Union[Path, recarray] = self.examples[item]
        if not self.load_into_memory:
            ex: recarray = np_load(str(ex), allow_pickle=True)

        in_e, ou_e = [
            ex[i].item() for i in [self.input_name, self.output_name]
        ]

        all_ref = get_all_ref(ex['file_name'].item(), self.data_dir)

        filename = str(ex['file_name'].item())
        out_len = len(ou_e)
        return in_e, ou_e, all_ref, filename, out_len
Example #25
    def learn_manifold(self, X, manifold_out_file_name=None):
        self.debug_string_out.clear()
        self.print_and_remember("Learning manifold(" + self.manifold_learner +
                                ")" + str(datetime.now()))
        learn_time = time()

        if manifold_out_file_name is not None and isfile(
                manifold_out_file_name
        ):  # check whether the learned manifold already exists
            manifold_feats = np_load(manifold_out_file_name, allow_pickle=True)
            self.print_and_remember("Manifold loaded(" +
                                    manifold_out_file_name + ")")
        elif self.manifold_learner == 'UMAP':
            manifold_feats = UMAP(random_state=0,
                                  metric=self.dist_metric,
                                  n_components=self.manifold_dimension,
                                  n_neighbors=self.num_of_neighbours,
                                  min_dist=float(
                                      self.min_dist)).fit_transform(X)
        elif self.manifold_learner == 'LLE':
            manifold_feats = LocallyLinearEmbedding(
                n_components=self.manifold_dimension,
                n_neighbors=self.num_of_neighbours).fit_transform(X)
        elif self.manifold_learner == 'tSNE':
            manifold_feats = TSNE(n_components=self.manifold_dimension,
                                  random_state=0,
                                  verbose=0).fit_transform(X)
        elif self.manifold_learner == 'isomap':
            manifold_feats = Isomap(
                n_components=self.manifold_dimension,
                n_neighbors=self.num_of_neighbours).fit_transform(X)
        else:
            # Without this branch, manifold_feats would be unbound below.
            raise ValueError("Unknown manifold learner: " +
                             self.manifold_learner)
        self.print_and_remember(
            "Time to learn manifold: " +
            str(funcH.getElapsedTimeFormatted(time() - learn_time)))
        if manifold_out_file_name is not None:
            np_save(manifold_out_file_name, manifold_feats, allow_pickle=True)
            self.print_and_remember("Manifold saved(" +
                                    manifold_out_file_name + ")")
        return manifold_feats, self.debug_string_out
Example #26
    def __getitem__(self,
                    item: int) \
            -> Tuple[List[ndarray], List[ndarray], List[Path]]:
        """Gets an example from the dataset.

        :param item: Index of the item.
        :type item: int
        :return: Input and output values, and the Path of the file.
        :rtype: list[numpy.ndarray], list[numpy.ndarray], list[Path]
        """
        ex = self.examples[item] if self.multiple_captions_mode \
            else [self.examples[item]]

        if not self.load_into_memory:
            ex = [np_load(str(i), allow_pickle=True) for i in ex]

        x, y, file_names = [], [], []
        for ex_i in ex:
            x.append(ex_i[self.input_name])
            y.append(ex_i[self.output_name])
            file_names.append(Path(ex_i.file_name[0]))
        return x, y, file_names
Example #27
    def __getitem__(self,
                    item: int) \
            -> Tuple[ndarray, ndarray]:
        """Gets an example from the dataset.

        :param item: Index of the item.
        :type item: int
        :return: Input and output values.
        :rtype: numpy.ndarray. numpy.ndarray
        """
        ex: Union[Path, recarray] = self.examples[item]
        if not self.load_into_memory:
            ex: recarray = np_load(str(ex), allow_pickle=True)
        if self.fileid_field_name is not None:
            fid_e = [ex[i].item() for i in [self.fileid_field_name]]
        if self.has_gt_text:
            in_e, ou_e = [
                ex[i].item() for i in [self.input_name, self.output_name]
            ]
            # print("dataset class, text with 5k words:", ou_e, len(ou_e))
            if self.mapping_index_dict is not None:
                ou_e = [
                    self.mapping_index_dict[ind] for ind in ou_e
                    if ind in self.mapping_index_dict
                ]
                ou_e = array(ou_e, dtype=int)
                # print("               text with 1k words:", ou_e, len(ou_e))
            if self.fileid_field_name is not None:
                return in_e, ou_e, fid_e
            else:
                return in_e, ou_e

        else:
            in_e = [ex[i].item() for i in [self.input_name]]
            if self.fileid_field_name is not None:
                return in_e, fid_e
            else:
                return in_e
Example #28
def whatyouwant(paramin):
    global dir_pri1, dir_pri2, dir_pri3, patient, NormW

    ofi1 = open(dir_pri2 + '/TC%ia.dat' %patient,'r')
    ofi2 = open(dir_pri2 + '/TC%ib.dat' %patient,'r')
    A = array([line.split() for line in ofi1]).astype(float)
    B = array([line.split() for line in ofi2]).astype(float)
    C = concatenate([A,B], axis=1)
    ofi1.close()
    ofi2.close()

    dir_sub1   = dir_pri1 + '/Norm_%.2f_DensMoy_%.2f' %(NormW, paramin)
    nbpatterns = len(os.listdir(dir_sub1)) - 1
    nbtimeshap = size(C, axis=1)
    nruter     = zeros((nbpatterns, nbtimeshap))

    for p1 in range(nbpatterns):
        d_patty = dir_sub1 + '/pattern_%.3i.npy' %p1
        patty = np_load(d_patty)

        for p2 in range(nbtimeshap):
            nruter[p1,p2] = corrcoef(patty, C[:,p2])[0,1]

    return nruter
Example #29
def compute_tsne_positions(activations_per_point: array, perplexity: int, name: str) -> array:
    """
    Computes t-SNE positions from a dataset.

    `activations_per_point`: n_obs x n_dims
    """
    logger.info(f"TSNE from data of size {activations_per_point.shape}")

    t_sne_positions_path = Path(TSNE_SAVE_DIR, f"t-sne positions {name} perp={perplexity}.npy")
    if t_sne_positions_path.exists():
        logger.info("Loading...")
        t_sne_positions = np_load(t_sne_positions_path)
    else:
        logger.info("Computing...")
        t_sne_positions = TSNE(
            n_components=2,  # 2D
            perplexity=perplexity,
            # Recommended args
            n_iter=1_000,
            learning_rate=200,
            method="barnes_hut",
        ).fit_transform(activations_per_point)
        np_save(t_sne_positions_path, t_sne_positions)
    return t_sne_positions
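
A hedged usage sketch; TSNE_SAVE_DIR and the logger are assumed to be configured elsewhere in the module:

import numpy as np

# Illustrative call on random data; perplexity 30 is a common default.
activations = np.random.rand(500, 128)
positions = compute_tsne_positions(activations, perplexity=30, name='demo')
print(positions.shape)  # (500, 2)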
Example #30
    args.add_argument('-m', '--methods', help='Dimensionality reduction methods (comma-sep)',
                      default='svd,nmf,plsa,lda,slda')
    args.add_argument('-l', '--label', help='Value being predicted', required=True)
    args = args.parse_args()
    return args


if __name__=="__main__":
    args = interface()

    cv_dir = args.input_dir
    techniques = get_methods(args.methods)
    techniques = [t[1].upper() for t in techniques]
    num_folds = args.num_folds * args.cv_iters
    dim_steps = [int(d) for d in args.dims.split(',')]
    metadata_values = np_load(path.join(cv_dir, 'labels.npy'))
    metadata_category = args.label

    mdl = RandomForestClassifier()
    qual = roc_auc_score
    plot_data = []
    cv_scores = zeros(num_folds)
    output = open(args.output_file + '.csv', 'w')

    for technique in techniques:
        print(technique)
        tech_data = []
        for d in dim_steps:
            for c in range(num_folds):
                prefix = 'CV_%d_%s_%d_' % (c, technique, d)
                prefix = path.join(cv_dir, prefix)
Example #31
    system('chmod u+x %s' % (filename))


if __name__=="__main__":
    args = interface()

    data_matrix_file = args.input_file
    labels_file = args.input_labels
    output_dir = args.output_dir
    
    dim_steps = [int(x) for x in args.dims.split(',')]
    output_dir = args.output_dir
    scripts_dir = args.scripts_dir
    tag = args.tag

    labels = np_load(labels_file) 
    cv_folds = BalancedKFold(labels, args.num_folds, n_iter=args.cv_iters)

    i = 0
    for k, (training, testing) in enumerate(cv_folds):
        file_prefix = path.join(output_dir, 'CV_%d_' % k)
        training_file = file_prefix+'training.npy'
        np_save(training_file, training)
        testing_file = file_prefix+'testing.npy'
        np_save(testing_file, testing)
        
        for num_dims in dim_steps:
            if i >= args.max_scripts:
                ind = i % args.max_scripts
                script_file = path.join(scripts_dir, '%d.sh' % ind)
                add_to_bash_script(script_file, data_matrix_file, labels_file, output_dir,
Example #32
			def load(self, directory, name):
				filename = path.join(directory, name)
				if '.npz' not in name:
					filename += '.npz'

				err = 0
				try:
					data = np_load(filename)
				except Exception:
					data = {}


				if 'A_equil' in data:
					A_equil = data['A_equil'].astype(lib.pyfloat)
				elif path.exists(path.join(directory, 'A_equil.npy')):
					A_equil = np_load(path.join(directory,
						'A_equil.npy')).astype(lib.pyfloat)
				else:
					err = 1


				if not err and 'LLT' in data:
					LLT = data['LLT'].astype(lib.pyfloat)
					LLT_ptr = LLT.ctypes.data_as(lib.ok_float_p)
				elif path.exists(path.join(directory, 'LLT.npy')):
					LLT = np_load(path.join(directory, 'LLT.npy')).astype(
								lib.pyfloat)
					LLT_ptr = LLT.ctypes.data_as(lib.ok_float_p)
				elif lib.direct:
					# Direct solvers require the LLT factorization.
					err = 1
					LLT_ptr = None
				else:
					LLT_ptr = c_void_p()


				if not err and 'd' in data:
					d = data['d'].astype(lib.pyfloat)
				elif path.exists(path.join(directory, 'd.npy')):
					d = np_load(path.join(directory, 'd.npy')).astype(
								lib.pyfloat)
				else:
					err = 1

				if not err and 'e' in data:
					e = data['e'].astype(lib.pyfloat)
				elif path.exists(path.join(directory, 'e.npy')):
					e = np_load(path.join(directory, 'e.npy')).astype(
								lib.pyfloat)
				else:
					err = 1

				if err:
					snippet = '`LLT`, ' if lib.direct else ''
					raise ValueError('Minimal requirements to load solver '
							   'not met. Specified file must contain '
							   'at least one .npz file with entries `A_equil`,'
							   ' {}`d`, and `e`, or the specified folder must'
							   ' contain .npy files of the same names.'.format(
							   snippet))

				if 'z' in data:
					z = data['z'].astype(lib.pyfloat)
				else:
					z = zeros(self.m + self.n, dtype=lib.pyfloat)

				if 'z12' in data:
					z12 = data['z12'].astype(lib.pyfloat)
				else:
					z12 = zeros(self.m + self.n, dtype=lib.pyfloat)

				if 'zt' in data:
					zt = data['zt'].astype(lib.pyfloat)
				else:
					zt = zeros(self.m + self.n, dtype=lib.pyfloat)

				if 'zt12' in data:
					zt12 = data['zt12'].astype(lib.pyfloat)
				else:
					zt12 = zeros(self.m + self.n, dtype=lib.pyfloat)

				if 'zprev' in data:
					zprev = data['zprev'].astype(lib.pyfloat)
				else:
					zprev = zeros(self.m + self.n, dtype=lib.pyfloat)

				if 'rho' in data:
					rho = lib.pyfloat(data['rho'])
				else:
					rho = 1.

				order = lib.enums.CblasRowMajor if \
					A_equil.flags.c_contiguous else lib.enums.CblasColMajor

				if self.c_solver is not None:
					self.__unregister_solver()

				self.__register_solver(lib, lib.pogs_load_solver(
						A_equil.ctypes.data_as(lib.ok_float_p), LLT_ptr,
						d.ctypes.data_as(lib.ok_float_p),
						e.ctypes.data_as(lib.ok_float_p),
						z.ctypes.data_as(lib.ok_float_p),
						z12.ctypes.data_as(lib.ok_float_p),
						zt.ctypes.data_as(lib.ok_float_p),
						zt12.ctypes.data_as(lib.ok_float_p),
						zprev.ctypes.data_as(lib.ok_float_p),
						rho, self.m, self.n, order))
Example #33
    def start(self):
        self.syn0 = np_load("%s.syn0.npy" % self.path)
        self.syn1neg = np_load("%s.syn1neg.npy" % self.path)
        with open("%s.vocab" % self.path, "rb") as f:  # pickle requires binary mode
            self.vocab = pickle.load(f)
        self.cum_table = np_load("%s.cum_table.npy" % self.path)
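
A sketch of the matching persistence step, assuming the same naming scheme (hypothetical; the original save code is not shown):

    def stop(self):
        # Hypothetical counterpart: persist the arrays and vocab that
        # start() reloads.
        from numpy import save as np_save
        np_save("%s.syn0.npy" % self.path, self.syn0)
        np_save("%s.syn1neg.npy" % self.path, self.syn1neg)
        np_save("%s.cum_table.npy" % self.path, self.cum_table)
        with open("%s.vocab" % self.path, "wb") as f:
            pickle.dump(self.vocab, f)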
Example #34
def get_config_dict():
    """Return the config dict (create the default one if needed)

    Returns
    -------
    config_dict: dict
        Dictionary gathering the parameters of the software
    """
    # dynamic import to avoid loop
    module = __import__(
        "pyleecan.definitions",
        globals=globals(),
        locals=locals(),
        fromlist=["USER_DIR", "CONF_PATH"],
        level=0,
    )
    USER_DIR = module.USER_DIR
    CONF_PATH = module.CONF_PATH

    logger = getLogger("Pyleecan")

    # Initialization to make sure all the parameters exist
    init_user_dir()
    config_dict = default_config_dict.copy()
    if isfile(CONF_PATH):
        with open(CONF_PATH, "r") as config_file:
            update_dict(source=config_dict, update=load(config_file))
    else:
        logger.debug("Creating missing config_dict in " + CONF_PATH)
    save_config_dict(config_dict)

    # Load the color_dict
    color_path = join(USER_DIR, "Plot", config_dict["PLOT"]["COLOR_DICT_NAME"])
    def_color_path = join(USER_DIR, "Plot", "pyleecan_color.json")
    if not isfile(color_path):  # Default colors
        logger.warning("Unable to load colors from " + color_path +
                       ", using default colors")
        color_path = join(USER_DIR, "Plot", "pyleecan_color.json")

    # Load default to make sure that the keys are all there
    with open(def_color_path, "r") as color_file:
        config_dict["PLOT"]["COLOR_DICT"] = load(color_file)
    if color_path != def_color_path:
        with open(color_path, "r") as color_file:
            update_dict(source=config_dict["PLOT"]["COLOR_DICT"],
                        update=load(color_file))

    # Register the colormap
    cmap_name = config_dict["PLOT"]["COLOR_DICT"]["COLOR_MAP"]
    cmap_path = None  # defined up front so the except branch cannot fail
    if "." not in cmap_name:
        cmap_path = join(USER_DIR, "Plot", cmap_name) + ".npy"
    try:
        get_cmap(name=cmap_name)
    except Exception:
        if cmap_path is None or not isfile(cmap_path):  # Default colormap
            config_dict["PLOT"]["COLOR_DICT"]["COLOR_MAP"] = "RdBu_r"
        else:
            cmap = np_load(cmap_path)
            cmp = ListedColormap(cmap)
            register_cmap(name=config_dict["PLOT"]["COLOR_DICT"]["COLOR_MAP"],
                          cmap=cmp)

    return config_dict
Example #35
    args.add_argument('-n', '--cv-fold-id', help='CV fold id', type=int, required=True)
    args.add_argument('-m', '--methods', help='Dimensionality reduction methods (comma-sep)',
                      default='svd,nmf,plsa,lda,slda')
    args = args.parse_args()
    return args


if __name__=="__main__":
    args = interface()
    
    num_dims = args.num_dims
    prefix = 'CV_%d' % (args.cv_fold_id)
    techniques = get_methods(args.methods)
    output_dir = args.output_dir

    data_matrix = np_load(args.input_file)
    labels = np_load(args.input_labels)
    testing = np_load(args.testing_vector)
    training = np_load(args.training_vector)

    test_labels = labels[testing]
    training_labels = labels[training]
    test_matrix = data_matrix[testing, :]
    training_matrix = data_matrix[training, :]

    for technique, technique_name in techniques:
        dim_redux = technique(n_components=num_dims)
        file_label = '%s_%s_%s_' % (prefix,
                                    technique_name,
                                    str(num_dims))
        file_prefix = path.join(output_dir, file_label)