Exemplo n.º 1
0
    def __init__(self):
        """Build the inference-time SpectralNet wrapper.

        Reads configuration from the GPU / GPU_MEMORY_FRACTION / DATA_SET /
        BATCH_SIZE environment variables, configures the Keras/TF session,
        reconstructs the SpectralNet graph in eval mode, and loads the fitted
        clustering estimator from disk.
        """
        # Inference only: freeze Keras into test-phase behavior.
        K.set_learning_phase(0)

        run_args = Args(os.environ['GPU'],
                        float(os.environ['GPU_MEMORY_FRACTION']),
                        os.environ['DATA_SET'],
                        int(os.environ['BATCH_SIZE']))

        # Keep a module-level handle on the default graph so later predict
        # calls (possibly on другой thread) can re-enter it.
        global graph
        graph = tf.get_default_graph()

        self.params = get_spectralnet_config(run_args)
        ktf.set_session(get_session(run_args.gpu_memory_fraction))

        self.batch_size = run_args.batch_size
        # Same batch size for every input head of the network.
        self.batch_sizes = {
            'Unlabeled': self.batch_size,
            'Labeled': self.batch_size,
            'Orthonorm': self.batch_size,
        }

        num_clusters = self.params['n_clusters']
        # No labeled supervision at inference time: empty one-hot matrix.
        labeled_onehot = np.empty((0, num_clusters))

        # SpectralNet is driven through three named Keras inputs.
        # NOTE(review): input shape is taken from n_clusters here — presumably
        # the upstream features are already n_clusters-dimensional; confirm.
        head_shape = [num_clusters]
        net_inputs = {
            'Unlabeled': Input(shape=head_shape, name='UnlabeledInput'),
            'Labeled': Input(shape=head_shape, name='LabeledInput'),
            'Orthonorm': Input(shape=head_shape, name='OrthonormInput'),
        }

        # Placeholder for ground-truth labels (unused content at inference,
        # but required by the SpectralNet constructor).
        labels_ph = tf.placeholder(tf.float32,
                                   shape=(None, num_clusters),
                                   name='y_true')

        weights_dir = os.path.join(self.params['model_path'], 'spectral_net')

        # Rebuild the network in eval mode (train=False, no siamese net,
        # no training data) and let it restore its weights from weights_dir.
        self.spectral_net = networks.SpectralNet(net_inputs,
                                                 self.params['arch'],
                                                 self.params.get('spec_reg'),
                                                 labels_ph,
                                                 labeled_onehot,
                                                 num_clusters,
                                                 self.params['affinity'],
                                                 self.params['scale_nbr'],
                                                 self.params['n_nbrs'],
                                                 self.batch_sizes,
                                                 weights_dir,
                                                 siamese_net=None,
                                                 train=False,
                                                 x_train=None)

        # Fitted clustering estimator (filename typo 'aglo' is intentional —
        # it must match the artifact written at training time).
        self.clustering_algo = joblib.load(
            os.path.join(self.params['model_path'], 'spectral_net',
                         'clustering_aglo.sav'))
Exemplo n.º 2
0
from core.util import get_session

# add directories in src/ to path
# sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'..')))

# Command-line configuration for the autoencoder evaluation script.
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=str, help='gpu number to use', default='')
parser.add_argument('--gpu_memory_fraction',
                    type=float,
                    help='gpu percentage to use',
                    # Fix: give a float default directly instead of the string
                    # '1.0' (argparse only coerced it because string defaults
                    # are passed through `type`).
                    default=1.0)
parser.add_argument('--dset', type=str, help='dataset to use', default='mnist')
args = parser.parse_args()
params = get_autoencoder_config(args)

# Cap the TF session's GPU memory before any model is built.
K.set_session(get_session(args.gpu_memory_fraction))

# Spectral-format dataset; keys inspected below are
# data['spectral']['train_and_test'][0] (train) and [2] (test).
data = load_spectral_data(params['data_path'], args.dset)


def get_reconstruction_mse(x, model=None):
    """Return the mean squared reconstruction error of ``x``.

    Args:
        x: array-like batch of input samples.
        model: object exposing ``predict_embedding`` and
            ``predict_reconstruction``. Defaults to the module-level
            autoencoder ``ae`` (backward-compatible with the original
            global-only behavior).

    Returns:
        Scalar mean of the element-wise squared differences between ``x``
        and its round-trip reconstruction.
    """
    # Generalized: allow an explicit model instead of only the global `ae`.
    autoencoder = ae if model is None else model
    x_embedded = autoencoder.predict_embedding(x)
    x_recon = autoencoder.predict_reconstruction(x_embedded)
    return np.mean(np.square(x - x_recon))


# RUN Train
# RUN Train
# Split the spectral dataset: index 0 is the training inputs, index 2 the
# test inputs (indices 1/3 presumably hold the labels — confirm upstream).
x_train = data['spectral']['train_and_test'][0]
x_test = data['spectral']['train_and_test'][2]
# Architecture JSON is dataset-specific; weights live under the model path.
json_path = 'pretrain_weights/ae_{}.json'.format(args.dset)
weights_path = '{}/ae_{}_weights.h5'.format(params['model_path'], args.dset)