示例#1
0
    def setup(self, model, dataset, algorithm):
        """
        Prepare every configured dataset for windowing.

        Notes
        -----
        The `dataset` argument is ignored; the datasets to process come
        from `self._center`, `self._randomize` and `self._randomize_once`.
        """
        dataset = None

        # Apply a fixed central window to the auxiliary datasets
        # (e.g. validation sets) exactly once.
        window = CentralWindow(self._window_shape)
        for aux in self._center:
            window.apply(aux)

        # Datasets that receive an initial random window now.
        to_randomize = self._randomize + self._randomize_once

        # Keep a zero-padded topological copy of each dataset's pixels so
        # later windows can be re-drawn from the untouched originals.
        originals = {}
        for ds in to_randomize:
            topo = ds.get_topological_view().astype('float32')
            originals[ds] = _zero_pad(topo, self._pad_randomized)
        self._original = originals

        # Draw the first randomly positioned (possibly flipped) window for
        # each image.
        self.randomize_datasets(to_randomize)
示例#2
0
    def setup(self, model, dataset, algorithm):
        """
        Window the auxiliary datasets centrally, the rest randomly.

        Raises
        ------
        ValueError
            If a centrally-windowed dataset's axis ordering differs from
            ``self.axes``.

        Notes
        -----
        The `dataset` argument is ignored.
        """
        dataset = None

        # Central windowing of the auxiliary datasets (e.g. validation
        # sets); each must already use the expected axis ordering.
        window = CentralWindow(self._window_shape)
        for aux in self._center:
            if tuple(aux.view_converter.axes) != self.axes:
                raise ValueError("Expected axes: %s Actual axes: %s" % (str(aux.view_converter.axes), str(self.axes)))
            window.apply(aux)

        # Initial random windowing: cache a zero-padded copy of each
        # dataset's original pixels, keyed by the dataset object.
        chosen = self._randomize + self._randomize_once
        cached = {}
        for ds in chosen:
            view = ds.get_topological_view().astype('float32')
            cached[ds] = _zero_pad(view, self._pad_randomized)
        self._original = cached
        self.randomize_datasets(chosen)
示例#3
0
    def setup(self, model, dataset, algorithm):
        """
        Set up central and random windowing of the configured datasets.

        Notes
        -----
        The `dataset` argument is ignored.
        """
        dataset = None

        # Fixed central windowing for the auxiliary datasets
        # (e.g. validation sets).
        central = CentralWindow(self._window_shape)
        for held_out in self._center:
            central.apply(held_out)

        # Initial random windowing: zero-pad each dataset's original pixels
        # so any window position stays in bounds, keyed by dataset.
        selected = self._randomize + self._randomize_once
        self._original = dict(
            [(ds, _zero_pad(ds.get_topological_view().astype('float32'),
                            self._pad_randomized))
             for ds in selected])

        # Extract one randomly positioned, possibly flipped window per image.
        self.randomize_datasets(selected)
示例#4
0
    def setup(self, model, dataset, algorithm):
        """
        Window the auxiliary datasets centrally and the rest randomly,
        validating axis ordering first.

        Raises
        ------
        ValueError
            If any dataset in ``self._center`` does not use ``self.axes``.

        Notes
        -----
        The `dataset` argument is ignored.
        """
        dataset = None

        # Central windowing of the auxiliary datasets (e.g. validation
        # sets); refuse datasets with an unexpected axis ordering.
        cw = CentralWindow(self._window_shape)
        for held in self._center:
            if tuple(held.view_converter.axes) != self.axes:
                raise ValueError(
                    "Expected axes: %s Actual axes: %s" %
                    (str(held.view_converter.axes), str(self.axes)))
            cw.apply(held)

        # Initial random windowing over zero-padded copies of the originals.
        now = self._randomize + self._randomize_once
        originals = {}
        for held in now:
            pixels = held.get_topological_view().astype('float32')
            originals[held] = _zero_pad(pixels, self._pad_randomized)
        self._original = originals
        self.randomize_datasets(now)
示例#5
0
    def setup(self, model, dataset, algorithm):
        """
        Centrally window the auxiliary datasets, then randomly window the
        training set via the per-monitor hook.

        Raises
        ------
        ValueError
            If an auxiliary dataset's axis ordering differs from
            ``self.axes``.
        """
        # Central windowing of the auxiliary datasets (e.g. validation
        # sets), after checking each one's axis ordering.
        window = CentralWindow(self._window_shape)
        for other in self._other_datasets:
            if tuple(other.view_converter.axes) != self.axes:
                raise ValueError("Expected axes: %s Actual axes: %s" % (str(other.view_converter.axes), str(self.axes)))
            window.apply(other)

        # Snapshot the training pixels, then do the first random windowing
        # by reusing the same code path as every later epoch.
        self._original = dataset.get_topological_view()
        self.on_monitor(model, dataset, algorithm)
示例#6
0
    def setup(self, model, dataset, algorithm):
        """
        Optionally window the auxiliary datasets centrally, then draw the
        initial random windows for the randomized datasets.
        """
        # Central windowing only happens when a centre shape was configured.
        if self._center_shape is not None:
            window = CentralWindow(self._center_shape)
            for aux in self._center:
                window.apply(aux)

        # Remember the untouched topological view of every dataset that will
        # be randomly windowed, then draw the first windows.
        to_randomize = self._randomize + self._randomize_once
        self._original = {ds: ds.get_topological_view()
                          for ds in to_randomize}
        self.randomize_datasets(to_randomize)
    def setup(self, model, dataset, algorithm):
        """
        Set up optional central windowing plus the first random windowing.
        """
        # Skip central windowing entirely when no centre shape is set.
        if self._center_shape is not None:
            preprocessor = CentralWindow(self._center_shape)
            for item in self._center:
                preprocessor.apply(item)

        # Cache the original topological views keyed by dataset, then
        # randomize every registered dataset once.
        pending = self._randomize + self._randomize_once
        originals = {}
        for item in pending:
            originals[item] = item.get_topological_view()
        self._original = originals
        self.randomize_datasets(pending)
示例#8
0
    def randomize_datasets(self, datasets):
        """
        Overwrite each dataset's topological view with randomly transformed
        windows of its cached original images.

        For every example a random affine transform (scale, translation,
        shear, rotation) is sampled and composed with centring translations
        so it acts about the window centre; the warped result has shape
        ``self._window_shape``.  When ``self._preprocess`` is set, the
        assembled preprocessing pipeline is applied to each dataset
        afterwards, fitting only on the first dataset.

        Parameters
        ----------
        datasets : iterable
            Datasets whose original views were cached in ``self._original``
            by ``setup``.
        """
        # Translations that move the origin to the window centre and back;
        # composing them around the sampled transform makes it rotate/scale
        # about the centre pixel instead of the top-left corner.
        center_shift = np.array(self._window_shape) / 2. - 0.5
        tform_center = skimage.transform.SimilarityTransform(
            translation=-center_shift)
        tform_uncenter = skimage.transform.SimilarityTransform(
            translation=center_shift)
        if self._preprocess is not None:
            pipeline = preprocessing.Pipeline()
            # Window the rotations to get rid of the uniform background.
            if self._central_window_shape is not None:
                # NOTE(review): Python 2 debug print left in the code.
                print 'adding window'
                pipeline.items.append(CentralWindow(
                    self._central_window_shape))

            # User-supplied preprocessing steps run after the window.
            for item in self._preprocess:
                pipeline.items.append(item)

        # NOTE(review): im_shape is computed but never used below.
        im_shape = (self._window_shape[0], self._window_shape[1], 1)

        for d_idx, dataset in enumerate(datasets):

            data = self._original[dataset]
            # Randomly window the cached original data.
            print data.shape  # NOTE(review): debug print left in.
            # One output window per example; keeps the last (channel) axis,
            # which assumes data is in b01c-like order -- TODO confirm.
            arr = np.empty((data.shape[0], self._window_shape[0],
                            self._window_shape[1], data.shape[3]),
                           dtype=np.float32)
            for idx, example in enumerate(data):
                # Sample fresh transform parameters for each example.
                scale_x = np.random.uniform(1 - self._scale_diff,
                                            1 + self._scale_diff)
                scale_y = np.random.uniform(1 - self._scale_diff,
                                            1 + self._scale_diff)
                # NOTE(review): translations are drawn around 1, not 0 --
                # confirm this unit offset is intended.
                translation_x = np.random.uniform(1 - self._translation,
                                                  1 + self._translation)
                translation_y = np.random.uniform(1 - self._translation,
                                                  1 + self._translation)
                shear = np.random.uniform(0. - self._shear, 0. + self._shear)
                rotation = np.random.uniform(0, 360)
                tform = AffineTransform(scale=(scale_x, scale_y),
                                        rotation=np.deg2rad(rotation),
                                        translation=(translation_x,
                                                     translation_y),
                                        shear=shear)
                # Conjugate by the centring translations (see above).
                tform = tform_center + tform + tform_uncenter
                img = warp(example, tform, output_shape=self._window_shape)
                arr[idx] = img

            dataset.set_topological_view(arr, axes=dataset.view_converter.axes)
            # Assumes self._randomize is in order [train, valid/test]: only
            # the first dataset may fit the preprocessor (can_fit=True).
            if self._preprocess is not None:
                can_fit = True
                if d_idx == 1:
                    can_fit = False

                dataset.apply_preprocessor(preprocessor=pipeline,
                                           can_fit=can_fit)
示例#9
0
    def setup(self, model, dataset, algorithm):
        """
        Prepare every configured dataset for window flipping.

        Notes
        -----
        The `dataset` argument is ignored.
        """
        dataset = None

        # Central windowing of the auxiliary datasets (e.g. validation sets).
        cw = CentralWindow(self._window_shape)
        for aux in self._center:
            cw.apply(aux)

        # Zero-pad and stash the original pixels of every dataset that gets
        # random windows, then draw the initial windows.
        targets = self._randomize + self._randomize_once
        self._original = {
            ds: _zero_pad(ds.get_topological_view().astype('float32'),
                          self._pad_randomized)
            for ds in targets}
        self.randomize_datasets(targets)
示例#10
0
    def setup(self, model, dataset, algorithm):
        """
        Initialise the windowing state for all registered datasets.

        Notes
        -----
        The `dataset` argument is ignored.
        """
        dataset = None

        # Auxiliary datasets (e.g. validation sets) get a fixed central
        # window.
        preprocessor = CentralWindow(self._window_shape)
        for extra in self._center:
            preprocessor.apply(extra)

        # Store a zero-padded copy of each randomized dataset's pixels and
        # window them once now.
        chosen = self._randomize + self._randomize_once
        padded = {}
        for extra in chosen:
            view = extra.get_topological_view().astype('float32')
            padded[extra] = _zero_pad(view, self._pad_randomized)
        self._original = padded
        self.randomize_datasets(chosen)