Example #1
    def TranslateData(self, batch, i):
        """Applies translations to data at index i in batch."""
        sizeX = self.sizeX  # side length of the full, uncropped image
        sizex = self.sizex  # side length of the translated crop
        batchsize = batch[i].shape[1]
        # Integer division keeps the crop offsets aligned to whole pixels.
        shift = (sizeX - sizex) // 2
        offset_x = np.array([
            random.choice(self.translate_range_x) + shift
            for k in range(batchsize)
        ]).reshape(1, -1)
        offset_y = np.array([
            random.choice(self.translate_range_y) + shift
            for k in range(batchsize)
        ]).reshape(1, -1)
        num_channels = self.num_channels

        d = batch[i]

        if self.offset_x is None:
            self.offset_x = cm.CUDAMatrix(offset_x)
        else:
            self.offset_x.overwrite(offset_x)
        if self.offset_y is None:
            self.offset_y = cm.CUDAMatrix(offset_y)
        else:
            self.offset_y.overwrite(offset_y)
        if self.translated_d is None or self.translated_d.shape[1] != batchsize:
            self.translated_d = cm.empty((sizex**2 * num_channels, batchsize))
        d.generate_translations(sizeX,
                                sizex,
                                self.offset_x,
                                self.offset_y,
                                target=self.translated_d)
        batch[i] = self.translated_d
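
The generate_translations call above runs a custom GPU kernel from the cudamat build these snippets rely on. As a reference point, here is a minimal CPU sketch of the same crop-at-offset operation; the (num_channels, sizeX, sizeX) column layout is an assumption, and translate_cpu is illustrative, not part of the class.

import numpy as np

def translate_cpu(d, sizeX, sizex, offset_x, offset_y, num_channels=1):
    """Crop a sizex x sizex window from each sizeX x sizeX image in d.

    d has shape (num_channels * sizeX * sizeX, batchsize); offset_x and
    offset_y are (1, batchsize) arrays of top-left crop positions.
    """
    batchsize = d.shape[1]
    out = np.empty((num_channels * sizex * sizex, batchsize), dtype=d.dtype)
    for k in range(batchsize):
        img = d[:, k].reshape(num_channels, sizeX, sizeX)
        ox, oy = int(offset_x[0, k]), int(offset_y[0, k])
        out[:, k] = img[:, oy:oy + sizex, ox:ox + sizex].reshape(-1)
    return out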
Example #2
    def SetDataStats(self, i, stats_file):
        """Load stats for normalizing the data."""
        assert os.path.exists(
            stats_file), 'Stats file %s not found.' % stats_file
        stats = np.load(stats_file)
        self.normalize[i] = True
        self.means[i] = cm.CUDAMatrix(stats['mean'].reshape(-1, 1))
        # Add a small constant so division by the stds is always safe.
        self.stds[i] = cm.CUDAMatrix(1e-10 + stats['std'].reshape(-1, 1))
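
The matching Normalize step is not shown in this snippet. Below is a minimal CPU sketch of how such column statistics are typically applied, assuming a (dims, batchsize) batch and the (dims, 1) mean and std vectors loaded above; normalize_cpu is illustrative only.

import numpy as np

def normalize_cpu(batch, mean, std):
    # batch: (dims, batchsize); mean, std: (dims, 1), as in stats['mean'].reshape(-1, 1).
    # The small constant mirrors the 1e-10 added to the stds above.
    return (batch - mean) / (std + 1e-10)

x = np.random.randn(4, 8)
x_norm = normalize_cpu(x, x.mean(axis=1, keepdims=True), x.std(axis=1, keepdims=True))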
Example #3
    def ShuffleData(self):
        """Shuffle the data in self.data in place by swapping random column pairs."""
        indices = np.arange(self.datasize)
        np.random.shuffle(indices)
        half = self.datasize // 2
        # Pair the two halves of the permutation; swap_columns exchanges
        # column indices1[k] with column indices2[k] for every k.
        indices1 = indices[:half]
        indices2 = indices[half:2 * half]
        indices1_gpu = cm.CUDAMatrix(indices1.reshape(1, -1))
        indices2_gpu = cm.CUDAMatrix(indices2.reshape(1, -1))
        for d in self.data:
            d.swap_columns(indices1_gpu, indices2_gpu, target=d)
        indices1_gpu.free_device_memory()
        indices2_gpu.free_device_memory()
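
For reference, a CPU sketch of the same half-swap shuffle, assuming each dataset is a (dims, datasize) NumPy array; shuffle_columns_cpu is illustrative, not part of the class.

import numpy as np

def shuffle_columns_cpu(arrays, datasize, rng=np.random):
    indices = rng.permutation(datasize)
    half = datasize // 2
    idx1, idx2 = indices[:half], indices[half:2 * half]
    for d in arrays:
        # Swap column idx1[k] with column idx2[k] for every k.
        tmp = d[:, idx1].copy()
        d[:, idx1] = d[:, idx2]
        d[:, idx2] = tmp

data = [np.arange(12, dtype=float).reshape(3, 4)]
shuffle_columns_cpu(data, 4)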
Example #4
    def LoadData(self):
        """Load data from parent cache."""

        # Ask parent for data.
        data_cpu = self.parent.Get(self._maxpos)
        datasize = data_cpu[0].shape[0]
        assert datasize <= self._maxpos, (
            'GPU cache can only store %d datapoints, but parent gave it %d.' %
            (self._maxpos, datasize))

        self.datasize = datasize
        for i, d in enumerate(data_cpu):
            if sp.issparse(d):
                mat = d.toarray().T
            else:
                mat = d.T
            size = mat.shape[0] * mat.shape[1]
            #print("size",size,self.allocated_memory_size[i])
            if size > self.allocated_memory_size[i]:
                # If need more space, then allocate new matrix on the GPU.
                self.data[i] = cm.CUDAMatrix(mat)
                self.allocated_memory_size[i] = mat.shape[0] * mat.shape[1]
            else:
                # Overwrite old memory. It is ok if size of mat is less than the total
                # space that has been allocated.
                self.data[i].overwrite(mat)
        self.Normalize()
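
The grow-or-overwrite policy above can be read in isolation: reallocate on the GPU only when the incoming matrix needs more space than what is already allocated. Below is a sketch, assuming the same cudamat build used by these snippets (which provides CUDAMatrix.overwrite) and an already-initialized GPU context; upload_to_gpu is illustrative, not an existing helper.

import cudamat as cm

def upload_to_gpu(buffers, allocated, i, mat):
    """Store mat in buffers[i], reusing existing GPU memory when it fits."""
    size = mat.shape[0] * mat.shape[1]
    if size > allocated[i]:
        buffers[i] = cm.CUDAMatrix(mat)  # needs more space: allocate a new device matrix
        allocated[i] = size
    else:
        buffers[i].overwrite(mat)  # fits: overwrite the previously allocated memory
    return buffers[i]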
Example #5
    def __init__(self, mat, typ='numpy'):
        self.typ = typ
        if not isinstance(mat, (np.ndarray, np.matrix, np.float64)):
            # Anything that is not a NumPy object is assumed to already be a
            # CUDAMatrix living on the GPU.
            self.typ = 'cuda'
            self.mat = mat
        elif typ == 'numpy':
            self.mat = mat
        elif typ == 'cuda':
            self.mat = cm.CUDAMatrix(mat)
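
The enclosing class is not shown in this snippet. Below is a standalone sketch of the backend-detection rule the constructor applies; detect_backend and the test values are illustrative only.

import numpy as np

def detect_backend(mat, typ='numpy'):
    # Anything that is not a NumPy object is treated as an already-uploaded
    # device matrix; otherwise the caller's typ decides where the data lives.
    if not isinstance(mat, (np.ndarray, np.matrix, np.float64)):
        return 'cuda'
    return typ

assert detect_backend(np.zeros((2, 2))) == 'numpy'
assert detect_backend(np.zeros((2, 2)), 'cuda') == 'cuda'
assert detect_backend(object()) == 'cuda'  # stands in for a cm.CUDAMatrix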
Example #6
    def LoadParams(self, proto, t_op=None, tied_to=None):
        """Load the parameters for this edge.

    Load the parameters if present in self.proto. Otherwise initialize them
    appropriately.
    """
        param_names = [param.name for param in proto.param]
        for param in proto.param:
            assert param.dimensions, 'Empty dimensions'
            if tied_to:
                if self.transpose:
                    self.params[param.name] = tied_to.params[param.name].T
                else:
                    self.params[param.name] = tied_to.params[param.name]
                mat = self.params[param.name]
            else:
                if param.mat:
                    mat = util.ParameterAsNumpy(param)
                else:
                    mat = self.InitializeParameter(param)
                self.params[param.name] = cm.CUDAMatrix(mat)
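
For reference, a plain-Python sketch of the load-or-initialize branch above, without protobuf or the GPU; Param and the default initializer stand in for the proto message and InitializeParameter and are not the actual API used here.

import numpy as np
from collections import namedtuple

Param = namedtuple('Param', ['name', 'dimensions', 'mat'])

def load_params(proto_params, init=lambda p: np.zeros(p.dimensions)):
    params = {}
    for param in proto_params:
        assert param.dimensions, 'Empty dimensions'
        # Use the stored matrix when present, otherwise initialize it.
        mat = param.mat if param.mat is not None else init(param)
        params[param.name] = np.asarray(mat)
    return params

params = load_params([Param('weight', (4, 3), None),
                      Param('bias', (4, 1), np.ones((4, 1)))])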