def lda(args):
    """Fit a single LDA model and report mean/std metrics."""
    data = get_data(args)
    mod = Aggregator(LDA, 1, data, args.size, categories=args.categories)
    mod.train()
    mod.get_metrics(data)
    mod.mean(display_scores=True)
    mod.std(display_scores=True)

def pca(args):
    """Fit a single PCA model and report mean/std metrics."""
    data = get_data(args)
    mod = Aggregator(PCA, 1, data, args.size)
    mod.train()
    mod.get_metrics(data)
    mod.mean(display_scores=True)
    mod.std(display_scores=True)

def unsup_ae(args):
    """Train an aggregate of unsupervised autoencoders (Keras models)."""
    data = get_data(args)
    mod = Aggregator(UnsupNN, args.number, data, args.size,
                     reconstruct_loss=args.reconstruct_loss,
                     reconstruct_weight=args.reconstruct_weight,
                     enc_regularizer_weight=args.enc_regularizer_weight,
                     dec_regularizer_weight=args.dec_regularizer_weight,
                     lr=args.lr,
                     lr_decay=args.lr_decay,
                     encoder_regularizer=args.encoder_regularizer)
    aggregated_keras(mod, data, args.model_path)

def sup_cats_ae(args):
    """Train an aggregate of supervised, category-aware autoencoders
    (Keras models) and additionally report their own mean/std metrics."""
    data = get_data(args)
    mod = Aggregator(SupNN, args.number, data, args.size,
                     reconstruct_loss=args.reconstruct_loss,
                     reconstruct_weight=args.reconstruct_weight,
                     enc_regularizer_weight=args.enc_regularizer_weight,
                     dec_regularizer_weight=args.dec_regularizer_weight,
                     lr=args.lr,
                     lr_decay=args.lr_decay,
                     encoder_regularizer=args.encoder_regularizer)
    aggregated_keras(mod, data, args.model_path)
    mod.get_own_metrics(data, categories=True)
    mod.mean(display_scores=True)
    mod.std(display_scores=True)

def mds(args):
    """Train an aggregate of MDS models."""
    data = get_data(args)
    mod = Aggregator(MDS, args.number, data, args.size,
                     n_components=args.size,
                     metric=args.metric,
                     n_init=args.n_init,
                     max_iter=args.max_iter,
                     verbose=args.verbose,
                     eps=args.eps,
                     n_jobs=args.n_jobs,
                     random_state=args.random_state,
                     dissimilarity=args.dissimilarity)
    aggregated(mod, data)

def tsne(args):
    """Train an aggregate of t-SNE models."""
    data = get_data(args)
    mod = Aggregator(TSNE, args.number, data, args.size,
                     perplexity=args.perplexity,
                     early_exaggeration=args.early_exaggeration,
                     learning_rate=args.learning_rate,
                     n_iter=args.n_iter,
                     n_iter_without_progress=args.n_iter_without_progress,
                     min_grad_norm=args.min_grad_norm,
                     metric=args.metric,
                     init=args.init,
                     verbose=args.verbose,
                     random_state=args.random_state,
                     method=args.method,
                     angle=args.angle)
    _aggregated(mod, data, False, args.model_path)

def get_weights(args):
    """Load saved semi-supervised autoencoder aggregates and print their
    per-feature weights as CSV (one table per weight component)."""
    data = get_data(args)
    weights1 = OrderedDict([('model', []), ('param', [])])
    weights2 = OrderedDict([('model', []), ('param', [])])
    for feat in data.columns:
        weights1[feat] = []
        weights2[feat] = []
    # binary autoencoders (categories=False)
    if args.bin_ae_path is not None:
        for w in args.bin_ae_weight:
            model_path = args.bin_ae_path % w
            assert os.path.exists(model_path), \
                'model %s doesn\'t exist!' % model_path
            agg = Aggregator(SemisupNN, 5, data, 2, categories=False,
                             reconstruct_loss='mse',
                             reconstruct_weight=float(w))
            agg.load_models(model_path)
            for mod in agg.models:
                weights1['model'].append('bin_ae')
                weights1['param'].append(w)
                weights2['model'].append('bin_ae')
                weights2['param'].append(w)
                for name, (w1, w2) in zip(data.columns,
                                          mod.get_feature_weights()):
                    weights1[name].append(w1)
                    weights2[name].append(w2)
    # category autoencoders (categories=True)
    if args.cats_ae_path is not None:
        for w in args.cats_ae_weight:
            model_path = args.cats_ae_path % w
            assert os.path.exists(model_path), \
                'model %s doesn\'t exist!' % model_path
            agg = Aggregator(SemisupNN, 5, data, 2, categories=True,
                             reconstruct_loss='mse',
                             reconstruct_weight=float(w))
            agg.load_models(model_path)
            for mod in agg.models:
                weights1['model'].append('cats_ae')
                weights1['param'].append(w)
                weights2['model'].append('cats_ae')
                weights2['param'].append(w)
                for name, (w1, w2) in zip(data.columns,
                                          mod.get_feature_weights()):
                    weights1[name].append(w1)
                    weights2[name].append(w2)
    print('Weights 1')
    for k, v in weights1.items():
        print(k, ','.join([str(vv) for vv in v]), sep=',')
    print('Weights 2')
    for k, v in weights2.items():
        print(k, ','.join([str(vv) for vv in v]), sep=',')
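
# ---------------------------------------------------------------------------
# Illustrative only: a minimal sketch of how these handlers could be wired to
# an argparse sub-command dispatcher. The sub-command names, the shared
# --size/--number options, and the _build_parser helper are hypothetical and
# not part of this module; each handler additionally expects whatever
# model-specific options it reads from `args` (e.g. --perplexity for tsne,
# --bin-ae-path for get_weights), which are omitted here.
#
# import argparse
#
# def _build_parser():
#     parser = argparse.ArgumentParser(
#         description='Dimensionality-reduction runners')
#     sub = parser.add_subparsers(dest='command', required=True)
#     for name, handler in [('lda', lda), ('pca', pca),
#                           ('unsup_ae', unsup_ae),
#                           ('sup_cats_ae', sup_cats_ae),
#                           ('mds', mds), ('tsne', tsne),
#                           ('get_weights', get_weights)]:
#         p = sub.add_parser(name)
#         p.add_argument('--size', type=int, default=2)    # embedding dimension
#         p.add_argument('--number', type=int, default=5)  # models per Aggregator
#         p.set_defaults(func=handler)
#     return parser
#
# if __name__ == '__main__':
#     _args = _build_parser().parse_args()
#     _args.func(_args)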