Example #1
def detect(systemName):
    # Hyperparameters tuned for InCode on this system
    tuning_file = os.path.join(ROOT_DIR, 'experiments', 'tuning', 'results',
                               'incode', systemName + '.csv')

    params = detection_utils.get_optimal_hyperparameters(tuning_file)

    # Run InCode's detection strategy with the tuned ATFD, LAA and FDP thresholds
    return detect_with_params(systemName, params['ATFD'], params['LAA'],
                              params['FDP'])
Example #2
def predict(antipattern, systemName):
	# Voting policy k tuned for this antipattern and system
	tuning_file = os.path.join(ROOT_DIR, 'experiments', 'tuning', 'results', 'vote', antipattern, systemName + '.csv')
	params = detection_utils.get_optimal_hyperparameters(tuning_file)
	k = params['Policy']

	toolsPredictions = asci_utils.get_tools_predictions(antipattern, systemName)

	# An instance is flagged if at least k of the tools predict it
	return (np.sum(toolsPredictions, axis=0) >= k).astype(float)
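To make the voting rule above concrete, here is a toy illustration (made-up tool predictions, not project data) of the k-of-n threshold:

import numpy as np

# 3 tools voting on 4 candidate instances (rows = tools, columns = instances)
toolsPredictions = np.array([[1, 0, 1, 0],
                             [1, 1, 0, 0],
                             [0, 1, 1, 0]])
k = 2  # an instance is flagged when at least k tools agree

votes = np.sum(toolsPredictions, axis=0)   # [2, 2, 2, 0]
print((votes >= k).astype(float))          # [1. 1. 1. 0.]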
Example #3
def detect(systemName):
	# Alpha threshold tuned for HIST (feature envy) on this system
	tuning_file = os.path.join(ROOT_DIR, 'experiments', 'tuning', 'results', 'hist', 'feature_envy', systemName + '.csv')

	params = detection_utils.get_optimal_hyperparameters(tuning_file)

	core_metrics_map = cm.getFECoreMetrics(systemName)

	# Flag the entities whose core-metric ratio exceeds Alpha
	return [entityName for entityName, ratio in core_metrics_map.items() if ratio[0] > params['Alpha']]
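The Alpha thresholding above reduces to a simple dictionary filter; a toy illustration with hypothetical entity names and ratios (not HIST output):

core_metrics_map = {'pkg.ClassA.foo()': (0.82,), 'pkg.ClassB.bar()': (0.10,)}
params = {'Alpha': 0.5}

print([entityName for entityName, ratio in core_metrics_map.items() if ratio[0] > params['Alpha']])
# ['pkg.ClassA.foo()']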
Example #4
def predict(antipattern, system):
    tuning_file = os.path.join(ROOT_DIR, "experiments", "tuning", "results",
                               "smad", antipattern, system + ".csv")

    params = detection_utils.get_optimal_hyperparameters(tuning_file)
    X = detection_utils.getInstances(antipattern, system)

    # Start from a fresh TensorFlow (1.x) graph
    tf.reset_default_graph()

    # Build the SMAD model with the tuned hidden-layer sizes
    model = md.SMAD(shape=params["Dense sizes"], input_size=X.shape[-1])

    # Aggregate the predictions of the 10 trained instances of the model
    return smad_utils.ensemble_prediction(
        model=model,
        save_paths=[
            smad_utils.get_save_path(antipattern, system, i) for i in range(10)
        ],
        input_x=X)
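The snippet above delegates aggregation to smad_utils.ensemble_prediction, whose internals are not shown here. As a rough, hypothetical sketch of one way such an ensemble could be computed with the TensorFlow 1.x API (the model.output tensor name is an assumption, not the repository's API):

import numpy as np
import tensorflow as tf

def ensemble_prediction_sketch(model, save_paths, input_x):
    # Hypothetical sketch: restore each saved checkpoint and average the outputs.
    predictions = []
    saver = tf.train.Saver()
    for path in save_paths:
        with tf.Session() as session:
            saver.restore(session, path)
            predictions.append(session.run(model.output, feed_dict={model.input_x: input_x}))
    return np.mean(predictions, axis=0)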
Example #5
		# Evaluate training and test loss for the current epoch
		loss_train = session.run(model.loss, feed_dict={model.input_x: x_train, model.input_y: y_train, model.gamma: gamma})
		loss_test  = session.run(model.loss, feed_dict={model.input_x: x_test, model.input_y: y_test, model.gamma: gamma})
		losses_train.append(loss_train)
		losses_test.append(loss_test)
	return losses_train, losses_test

if __name__ == "__main__":
	args = parse_args()

	# Use the "optimal" hyper-parameters found via tuning if unspecified
	hyper_parameters = None
	for key in ['learning_rate', 'beta', 'gamma', 'dense_sizes']:
		if args.__dict__[key] is None:
			if hyper_parameters is None:
				tuning_file = os.path.join(ROOT_DIR, 'experiments', 'tuning', 'results', 'smad', args.antipattern, args.test_system + '.csv')
				hyper_parameters = detection_utils.get_optimal_hyperparameters(tuning_file)
			# e.g. 'learning_rate' -> 'Learning rate', the column name used in the tuning CSV
			args.__dict__[key] = hyper_parameters[' '.join(key.split('_')).capitalize()]

	# Remove the test system from the training set
	systems = data_utils.getSystems()
	systems.remove(args.test_system)
	systems = list(systems)

	# Get training data: concatenate instances and labels from every remaining system
	# (reduce comes from functools in Python 3)
	x_train = reduce(lambda x1, x2: np.concatenate((x1, x2), axis=0), [detection_utils.getInstances(args.antipattern, s) for s in systems])
	y_train = reduce(lambda x1, x2: np.concatenate((x1, x2), axis=0), [detection_utils.getLabels(args.antipattern, s) for s in systems])
	
	# Get test data
	x_test = detection_utils.getInstances(args.antipattern, args.test_system)
	y_test = detection_utils.getLabels(args.antipattern, args.test_system)
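As a side note on the training-data assembly above: folding np.concatenate with reduce repeatedly copies intermediate arrays; a single concatenation over the list is equivalent (same detection_utils helpers assumed):

x_train = np.concatenate([detection_utils.getInstances(args.antipattern, s) for s in systems], axis=0)
y_train = np.concatenate([detection_utils.getLabels(args.antipattern, s) for s in systems], axis=0)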