Example 1
    def extract_features(self, key, image_array, meta):  # key is a placeholder
        start = tic()
        output_file = self.config["paths"]["features"] + "/{}_{}_{}.npz"
        output_file = output_file.format(meta["Metadata_Plate"],
                                         meta["Metadata_Well"],
                                         meta["Metadata_Site"])

        batch_size = self.config["profile"]["batch_size"]
        image_key, image_names, outlines = self.dset.getImagePaths(meta)
        total_crops = self.profile_crop_generator.prepare_image(
            self.sess, image_array, meta, False)
        if total_crops == 0:
            print("No cells to profile:", output_file)
            return
        num_features = self.config["train"]["model"]["params"]["feature_dim"]
        repeats = "channel_repeats" in self.config["dataset"]["images"].keys()
        
        # Extract features
        crops = next(self.profile_crop_generator.generate(self.sess))[0]  # single image crop generator yields one batch
        feats = self.feat_extractor.predict(crops, batch_size=batch_size)
        if repeats:
            feats = np.reshape(feats, (self.num_channels, total_crops, num_features))
            feats = np.concatenate(feats, axis=-1)
            
        # Save features
        while len(feats.shape) > 2:  # 2D mean spatial pooling
            feats = np.mean(feats, axis=1)

        np.savez_compressed(output_file, f=feats)
        toc(image_key + " (" + str(total_crops) + " cells)", start)
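
The while loop above implements the "2D mean spatial pooling" mentioned in the comment: convolutional feature maps are averaged over their spatial axes until only one vector per crop remains. Below is a toy sketch with made-up shapes (not taken from any config) showing that reduction:

    import numpy as np

    # Toy illustration of the spatial mean pooling loop in Example 1:
    # (crops, height, width, channels) is averaged over the spatial axes
    # until only one feature vector per crop remains.
    feats = np.random.rand(10, 8, 8, 672)    # arbitrary toy shape
    while len(feats.shape) > 2:
        feats = np.mean(feats, axis=1)       # (10, 8, 8, 672) -> (10, 8, 672) -> (10, 672)
    print(feats.shape)                       # (10, 672)
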
Example 2
    def extract_features(self, key, image_array, meta):
        output_file = self.config["profiling"]["output_dir"] + "/{}_{}_{}.npz"
        output_file = output_file.format(meta["Metadata_Plate"],
                                         meta["Metadata_Well"],
                                         meta["Metadata_Site"])

        batch_size = self.config["validation"]["minibatch"]
        image_key, image_names, outlines = self.dset.getImagePaths(meta)
        total_crops = self.crop_generator.prepare_image(
            self.sess, image_array, meta,
            self.config["validation"]["sample_first_crops"])

        # Initialize data buffer (num_features is assumed to be defined
        # elsewhere, e.g. derived from the model configuration)
        data = np.zeros(shape=(self.num_channels, total_crops, num_features))
        b = 0
        start = tic()

        # Extract features in batches
        batches = []
        for batch in self.crop_generator.generate(self.sess):
            crops = batch[0]
            if self.config["model"]["type"] == "inception_resnet":
                feats = self.sess.run(self.endpoints['PreLogitsFlatten'],
                                      feed_dict={self.raw_crops: crops})
            if self.config["model"]["type"] in ["convnet", "mixup"]:
                feats = self.feat_extractor((batch[0], 0))
            # The reshape assumes every batch holds exactly batch_size crops
            feats = np.reshape(feats,
                               (self.num_channels, batch_size, num_features))
            data[:, b * batch_size:(b + 1) * batch_size, :] = feats
            b += 1
            batches.append(batch)

        # Concatenate features of all channels
        data = np.moveaxis(data, 0, 1)
        data = np.reshape(data, (data.shape[0], data.shape[1] * data.shape[2]))

        # Save features
        np.savez_compressed(output_file, f=data)
        toc(image_key + " (" + str(data.shape[0]) + " cells)", start)

        # Save crops TODO: parameterize saving crops or a sample of them.
        if False:
            batch_data = {"total_crops": total_crops, "batches": batches}
            with open(output_file.replace(".npz", ".pkl"), "wb") as batch_file:
                pickle.dump(batch_data, batch_file)
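
The moveaxis/reshape step near the end of Example 2 turns the per-channel buffer into one row of concatenated features per cell. A toy sketch of that rearrangement, using made-up sizes (nothing here comes from the actual config):

    import numpy as np

    # A (channels, cells, features) buffer is rearranged into a single
    # (cells, channels * features) matrix, i.e. one concatenated row per cell.
    channels, cells, features = 3, 4, 5              # arbitrary toy sizes
    data = np.arange(channels * cells * features, dtype=float)
    data = data.reshape(channels, cells, features)

    data = np.moveaxis(data, 0, 1)                   # -> (cells, channels, features)
    data = np.reshape(data, (data.shape[0], data.shape[1] * data.shape[2]))
    print(data.shape)                                # (4, 15)
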
Example 3
    def extract_features(self, key, image_array, meta):  # key is a placeholder
        start = tic()
        output_file = self.config["paths"]["features"] + "/{}/{}_{}.npz"
        output_file = output_file.format(meta["Metadata_Plate"],
                                         meta["Metadata_Well"],
                                         meta["Metadata_Site"])
        os.makedirs(self.config["paths"]["features"] +
                    "/{}".format(meta["Metadata_Plate"]),
                    exist_ok=True)

        batch_size = self.config["profile"]["batch_size"]
        image_key, image_names, outlines = self.dset.get_image_paths(meta)
        crop_locations = self.profile_crop_generator.prepare_image(
            K.get_session(), image_array, meta, False)
        total_crops = len(crop_locations)
        if total_crops == 0:
            print("No cells to profile:", output_file)
            return
        repeats = self.config["train"]["model"][
            "crop_generator"] == "repeat_channel_crop_generator"

        # Extract features
        # The single-image crop generator yields exactly one batch
        crops = next(self.profile_crop_generator.generate(K.get_session()))[0]
        feats = self.feat_extractor.predict(crops, batch_size=batch_size)
        if repeats:
            feats = np.reshape(feats, (self.num_channels, total_crops, -1))
            feats = np.concatenate(feats, axis=-1)

        # Save features
        while len(feats.shape) > 2:  # 2D mean spatial pooling
            feats = np.mean(feats, axis=1)

        key_values = {k: meta[k] for k in meta.keys()}
        key_values["Metadata_Model"] = self.config["train"]["model"]["name"]
        np.savez_compressed(output_file,
                            features=feats,
                            metadata=key_values,
                            locations=crop_locations)
        toc(image_key + " (" + str(total_crops) + " cells)", start)
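
Example 3 writes the features together with the image metadata and the crop locations into a single compressed archive. A minimal sketch of how such an archive could be read back (the file name is hypothetical; allow_pickle=True is needed because the metadata dict is stored as a pickled object array):

    import numpy as np

    # Read one of the .npz archives written in Example 3. The keys match the
    # savez_compressed call above: "features", "metadata", "locations".
    with np.load("plate/well_site.npz", allow_pickle=True) as archive:
        features = archive["features"]           # one feature vector per cell
        metadata = archive["metadata"].item()    # back to a plain dict
        locations = archive["locations"]         # crop locations from prepare_image
    print(features.shape, metadata.get("Metadata_Model"))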