Example #1
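This excerpt starts after argument parsing and feature extraction. A minimal sketch of the setup it assumes (the nltk_trainer module paths and the variable descriptions below are assumptions, not part of the excerpt):

import sys
from nltk.classify import MaxentClassifier, NaiveBayesClassifier
from nltk.classify.util import accuracy
from nltk_trainer.classification.multi import MultiBinaryClassifier  # assumed module path
from nltk_trainer.classification import scoring  # assumed module path

# assumed to be built earlier in the script:
# args        - parsed argparse namespace
# labels      - list of known label names
# train_feats - list of (featureset, label) pairs for training
# test_feats  - list of (featureset, label) pairs for evaluation
train_kwargs = {}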
if args.classifier == 'Maxent':
	# forward Maxent training options; only pass algorithm if a specific one was chosen
	if args.algorithm != 'Maxent':
		train_kwargs['algorithm'] = args.algorithm

	trainf = MaxentClassifier.train
	train_kwargs['max_iter'] = args.max_iter
	train_kwargs['min_ll'] = args.min_ll
	train_kwargs['min_lldelta'] = args.min_lldelta
	train_kwargs['trace'] = args.trace
else:
	trainf = NaiveBayesClassifier.train

if args.multi and args.binary:
	if args.trace:
		print('training a multi-binary %s classifier' % args.algorithm)
	
	classifier = MultiBinaryClassifier.train(labels, train_feats, trainf, **train_kwargs)
else:
	if args.trace:
		print('training a %s classifier' % args.algorithm)
	
	classifier = trainf(train_feats, **train_kwargs)
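For reference, a minimal sketch of the data trainf consumes in the single-label case (toy featuresets; the example_* names are illustrative, not from the script):

# NLTK classifiers train on (featureset, label) pairs, where a featureset is a
# dict mapping feature names to values
example_train_feats = [
	({'contains(great)': True, 'contains(boring)': False}, 'pos'),
	({'contains(great)': False, 'contains(boring)': True}, 'neg'),
]
example_classifier = NaiveBayesClassifier.train(example_train_feats)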

################
## evaluation ##
################

if not args.no_eval:
	if not args.no_accuracy:
		print('accuracy: %f' % accuracy(classifier, test_feats))
	
	if args.multi and args.binary and not args.no_masi_distance:
		print('average masi distance: %f' % scoring.avg_masi_distance(classifier, test_feats))
##############
## training ##
##############
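# make_classifier_builder inspects the parsed args and returns a training
# callable; it is used below simply as trainf(train_feats)
# (requires: import nltk_trainer.classification.args)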
trainf = nltk_trainer.classification.args.make_classifier_builder(args)

if args.cross_fold:
	# run N-fold cross-validation via nltk-trainer's scoring module, then exit
	if args.multi and args.binary:
		raise NotImplementedError("cross-fold is not supported for multi-binary classifiers")
	scoring.cross_fold(train_feats, trainf, accuracy, folds=args.cross_fold,
		trace=args.trace, metrics=not args.no_eval, informative=args.show_most_informative)
	sys.exit(0)

if args.multi and args.binary:
	if args.trace:
		print('training multi-binary %s classifier' % args.classifier)
	classifier = MultiBinaryClassifier.train(labels, train_feats, trainf)
else:
	classifier = trainf(train_feats)

################
## evaluation ##
################
if not args.no_eval:
	if not args.no_accuracy:
		try:
			print('accuracy: %f' % accuracy(classifier, test_feats))
		except ZeroDivisionError:
			print('accuracy: 0')

	if args.multi and args.binary and not args.no_masi_distance:
		print('average masi distance: %f' % scoring.avg_masi_distance(classifier, test_feats))
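Once trained, the classifier can label new featuresets directly; a minimal single-label usage sketch (sample_feats is an illustrative name, not from the script):

# classify() is the standard NLTK ClassifierI method for a single featureset
sample_feats = {'contains(great)': True, 'contains(boring)': False}
print('predicted label: %s' % classifier.classify(sample_feats))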