Example #1
def crossValidate(exampleBuilder, corpusElements, examples, options, timer):
    parameterOptimizationSet = None
    constantParameterOptimizationSet = None
    if options.paramOptData != None:
        print >> sys.stderr, "Separating parameter optimization set"
        parameterOptimizationDivision = Example.makeCorpusDivision(corpusElements, float(options.paramOptData))
        exampleSets = Example.divideExamples(examples, parameterOptimizationDivision)
        constantParameterOptimizationSet = exampleSets[0]
        parameterOptimizationSet = constantParameterOptimizationSet
        optDocs = 0
        for k,v in parameterOptimizationDivision.iteritems():
            if v == 0:
                del corpusElements.documentsById[k]
                optDocs += 1
        print >> sys.stderr, "  Documents for parameter optimization:", optDocs
    discardedParameterCombinations = []

    print >> sys.stderr, "Dividing data into folds"
    corpusFolds = Example.makeCorpusFolds(corpusElements, options.folds[0])
    exampleSets = Example.divideExamples(examples, corpusFolds)
    
    keys = exampleSets.keys()
    keys.sort()
    evaluations = []
    for key in keys:
        testSet = exampleSets[key]
        for example in testSet:
            example[3]["visualizationSet"] = key + 1
        trainSet = []
        for key2 in keys:
            if key != key2:
                trainSet.extend(exampleSets[key2])
        print >> sys.stderr, "Fold", str(key + 1)
        # Create classifier object
        if options.output != None:
            if not os.path.exists(options.output+"/fold"+str(key+1)):
                os.mkdir(options.output+"/fold"+str(key+1))
#                if not os.path.exists(options.output+"/fold"+str(key+1)+"/classifier"):
#                    os.mkdir(options.output+"/fold"+str(key+1)+"/classifier")
            classifier = Classifier(workDir = options.output + "/fold"+str(key + 1))
        else:
            classifier = Classifier()
        classifier.featureSet = exampleBuilder.featureSet
        # Optimize ####################
        # Check whether there is need for included param opt set
        if parameterOptimizationSet == None and options.folds[1] == 0: # 8-1-1 folds
            assert(len(keys) > 1)
            if keys.index(key) == 0:
                parameterOptimizationSetKey = keys[-1]
            else:
                parameterOptimizationSetKey = keys[keys.index(key)-1]
            parameterOptimizationSet = exampleSets[parameterOptimizationSetKey]
            trainSet = []
            for key2 in keys:
                if key2 != key and key2 != parameterOptimizationSetKey:
                    trainSet.extend(exampleSets[key2])

        if parameterOptimizationSet != None: # constant external parameter optimization set
            evaluationArgs = {"classSet":exampleBuilder.classSet}
            if options.parameters != None:
                paramDict = splitParameters(options.parameters)
                bestResults = classifier.optimize([trainSet], [parameterOptimizationSet], paramDict, Evaluation, evaluationArgs, combinationsThatTimedOut=discardedParameterCombinations)
            else:
                bestResults = classifier.optimize([trainSet], [parameterOptimizationSet], evaluationClass=Evaluation, evaluationArgs=evaluationArgs, combinationsThatTimedOut=discardedParameterCombinations)
        else: # nested x-fold parameter optimization
            assert (options.folds[1] >= 2)
            optimizationFolds = Example.makeExampleFolds(trainSet, options.folds[1])
            optimizationSets = Example.divideExamples(trainSet, optimizationFolds)
            optimizationSetList = []
            optSetKeys = optimizationSets.keys()
            optSetKeys.sort()
            for optSetKey in optSetKeys:
                optimizationSetList.append(optimizationSets[optSetKey])
            evaluationArgs = {"classSet":exampleBuilder.classSet}
            if options.parameters != None:
                paramDict = splitParameters(options.parameters)
                bestResults = classifier.optimize(optimizationSetList, optimizationSetList, paramDict, Evaluation, evaluationArgs, combinationsThatTimedOut=discardedParameterCombinations)
            else:
                bestResults = classifier.optimize(optimizationSetList, optimizationSetList, evaluationClass=Evaluation, evaluationArgs=evaluationArgs, combinationsThatTimedOut=discardedParameterCombinations)
        
        # Classify
        print >> sys.stderr, "Classifying test data"
        bestParams = bestResults[2]
        if bestParams.has_key("timeout"):
            del bestParams["timeout"]
        print >> sys.stderr, "Parameters:", bestParams
        print >> sys.stderr, "Training",
        startTime = time.time()
        classifier.train(trainSet, bestParams)
        print >> sys.stderr, "(Time spent:", time.time() - startTime, "s)"
        print >> sys.stderr, "Testing",
        startTime = time.time()
        predictions = classifier.classify(testSet)
        if options.output != None:
            pdict = []
            fieldnames = ["class","prediction","id","fold"]
            for p in predictions:
                if "typed" in exampleBuilder.styles:
                    pdict.append( {"class":exampleBuilder.classSet.getName(p[0][1]), "prediction":exampleBuilder.classSet.getName(p[1]), "id":p[0][0], "fold":key} )
                else:
                    pdict.append( {"class":p[0][1], "prediction":p[1], "id":p[0][0], "fold":key} )
            TableUtils.addToCSV(pdict, options.output +"/predictions.csv", fieldnames)
        print >> sys.stderr, "(Time spent:", time.time() - startTime, "s)"
        
        # Calculate statistics
        evaluation = Evaluation(predictions, classSet=exampleBuilder.classSet)
        print >> sys.stderr, evaluation.toStringConcise()
        print >> sys.stderr, timer.toString()
        evaluations.append(evaluation)
        
        # Save example sets
        if options.output != None:
            print >> sys.stderr, "Saving example sets to", options.output
            # write the current fold's test and training examples
            Example.writeExamples(testSet, options.output +"/fold"+str(key+1) + "/examplesTest.txt")
            Example.writeExamples(trainSet, options.output +"/fold"+str(key+1) + "/examplesTrain.txt")
            if parameterOptimizationSet == None:
                for k,v in optimizationSets.iteritems():
                    Example.writeExamples(v, options.output +"/fold"+str(key+1) + "/examplesOptimizationSet" + str(k) + ".txt")
            else:
                Example.writeExamples(parameterOptimizationSet, options.output +"/fold"+str(key+1) + "/examplesOptimizationSetPredefined.txt")
            TableUtils.writeCSV(bestResults[2], options.output +"/fold"+str(key+1) + "/parameters.csv")
            evaluation.saveCSV(options.output +"/fold"+str(key+1) + "/results.csv")
            print >> sys.stderr, "Compressing folder"
            zipTree(options.output, "fold"+str(key+1))
        
        parameterOptimizationSet = constantParameterOptimizationSet
    
    print >> sys.stderr, "Cross-validation Results"
    for i in range(len(evaluations)):
        print >> sys.stderr, evaluations[i].toStringConcise("  Fold "+str(i)+": ")
    averageResult = Evaluation.average(evaluations)
    print >> sys.stderr, averageResult.toStringConcise("  Avg: ")
    pooledResult = Evaluation.pool(evaluations)
    print >> sys.stderr, pooledResult.toStringConcise("  Pool: ")
    if options.output != None:
        for i in range(len(evaluations)):
            evaluations[i].saveCSV(options.output+"/results.csv", i)
        averageResult.saveCSV(options.output+"/results.csv", "Avg")
        pooledResult.saveCSV(options.output+"/results.csv", "Pool")
        averageResult.saveCSV(options.output+"/resultsAverage.csv")
        pooledResult.saveCSV(options.output+"/resultsPooled.csv")
    # Visualize
    if options.visualization != None:
        # 'sentences' is a module-level list built by the calling script, not a parameter of crossValidate
        visualize(sentences, pooledResult.classifications, options, exampleBuilder)
    
    # Save interactionXML
    if options.resultsToXML != None:
        classSet = None
        if "typed" in exampleBuilder.styles:
            classSet = exampleBuilder.classSet
        Example.writeToInteractionXML(pooledResult.classifications, corpusElements, options.resultsToXML, classSet)
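
Example #1 performs leave-one-fold-out cross-validation: the corpus is split into options.folds[0] folds, each fold is used once as the test set while the remaining folds form the training set, and classifier parameters are tuned either on a predefined optimization set (options.paramOptData), on the fold preceding the test fold (the "8-1-1" scheme), or by nested x-fold optimization. The fold bookkeeping itself does not depend on the project-specific classes (Example, Classifier, Evaluation); a minimal, self-contained sketch of just that part, with illustrative function names that are not in the original code:

import random

def makeFolds(examples, numFolds):
    # Randomly assign examples to numFolds sets, returned as {foldIndex: [examples]}.
    folds = dict((i, []) for i in range(numFolds))
    for example in examples:
        folds[random.randint(0, numFolds - 1)].append(example)
    return folds

def leaveOneFoldOut(folds):
    # Yield (foldIndex, trainSet, testSet), using each fold exactly once as the test set.
    keys = sorted(folds.keys())
    for key in keys:
        testSet = folds[key]
        trainSet = []
        for key2 in keys:
            if key2 != key:
                trainSet.extend(folds[key2])
        yield key, trainSet, testSet

if __name__ == "__main__":
    folds = makeFolds(range(100), 10)
    for key, trainSet, testSet in leaveOneFoldOut(folds):
        print("fold %d: %d train / %d test examples" % (key + 1, len(trainSet), len(testSet)))

Note that the original code makes the division at document level (Example.makeCorpusFolds followed by Example.divideExamples), so all examples from one document land in the same fold; the sketch above assigns individual examples directly for brevity.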
Example #2
    else:
        options.folds = (int(options.folds),int(options.folds))

    if options.output != None:
        if os.path.exists(options.output):
            print >> sys.stderr, "Output directory exists, removing", options.output
            shutil.rmtree(options.output)
        os.makedirs(options.output)
#        if not os.path.exists(options.output+"/classifier"):
#            os.mkdir(options.output+"/classifier")

    print >> sys.stderr, "Importing modules"
    exec "from ExampleBuilders." + options.exampleBuilder + " import " + options.exampleBuilder + " as ExampleBuilder"
    exec "from Classifiers." + options.classifier + " import " + options.classifier + " as Classifier"
    exec "from Evaluators." + options.evaluator + " import " + options.evaluator + " as Evaluation"
    
    # Load corpus and make sentence graphs
    corpusElements = loadCorpus(options.input, options.parse, options.tokenization)
    sentences = []
    for sentence in corpusElements.sentences:
        sentences.append( [sentence.sentenceGraph,None,None] )
    
    # Build examples
    exampleBuilder = ExampleBuilder(**splitParameters(options.exampleBuilderParameters))
    examples = buildExamples(exampleBuilder, sentences, options)
    
    crossValidate(exampleBuilder, corpusElements, examples, options, timer)
    print >> sys.stderr, timer.toString()
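
The three exec lines in Example #2 build import statements from the command-line options; the naming convention is that each module under ExampleBuilders, Classifiers and Evaluators defines a class with the same name as the module. The same lookup can be written without exec; a sketch using the standard importlib module (the helper name loadClass is illustrative, not part of the original script):

import importlib

def loadClass(packageName, className):
    # Import packageName.className and return the class of the same name from it.
    module = importlib.import_module(packageName + "." + className)
    return getattr(module, className)

# Usage corresponding to the exec lines above:
# ExampleBuilder = loadClass("ExampleBuilders", options.exampleBuilder)
# Classifier = loadClass("Classifiers", options.classifier)
# Evaluation = loadClass("Evaluators", options.evaluator)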
        
        
                
Example #3
    optparser.add_option("-y", "--parameters", default=None, dest="parameters", help="Parameters for the classifier")
    optparser.add_option("-b", "--exampleBuilder", default="SimpleDependencyExampleBuilder", dest="exampleBuilder", help="Example Builder Class")
    optparser.add_option("-e", "--evaluator", default="BinaryEvaluator", dest="evaluator", help="Prediction evaluator class")
    optparser.add_option("-v", "--visualization", default=None, dest="visualization", help="Visualization output directory. NOTE: If the directory exists, it will be deleted!")
    optparser.add_option("-m", "--resultsToXML", default=None, dest="resultsToXML", help="Results in analysis xml. NOTE: for edges, pairs, not interactions")
    (options, args) = optparser.parse_args()
    
    if options.output != None:
        if os.path.exists(options.output):
            print >> sys.stderr, "Output directory exists, removing", options.output
            shutil.rmtree(options.output)
        os.mkdir(options.output)
        if not os.path.exists(options.output+"/classifier"):
            os.mkdir(options.output+"/classifier")
    
    classifierParamDict = splitParameters(options.parameters)

    print >> sys.stderr, "Importing modules"
    exec "from ExampleBuilders." + options.exampleBuilder + " import " + options.exampleBuilder + " as ExampleBuilder"
    exec "from Classifiers." + options.classifier + " import " + options.classifier + " as Classifier"
    exec "from Evaluators." + options.evaluator + " import " + options.evaluator + " as Evaluation"
    
    trainExamples = []
    exampleSets = [[],[]]
    if not classifierParamDict.has_key("predefined"):
        print >> sys.stderr, "No-predefined model"
        exampleBuilder = ExampleBuilder(**splitParameters(options.exampleBuilderParameters))
        # Load corpus and make sentence graphs
        trainCorpusElements = loadCorpus(options.input, options.parse, options.tokenization)
        sentences = []
        for sentence in trainCorpusElements.sentences:
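
The optparser.add_option(...) calls at the top of Example #3 are the tail end of a standard optparse option block; the construction of the parser itself is not included in the snippet. A minimal, self-contained sketch of the full pattern (the usage string is an assumption, while the three options are copied from the snippet):

from optparse import OptionParser

optparser = OptionParser(usage="%prog [options]")
optparser.add_option("-y", "--parameters", default=None, dest="parameters",
                     help="Parameters for the classifier")
optparser.add_option("-b", "--exampleBuilder", default="SimpleDependencyExampleBuilder",
                     dest="exampleBuilder", help="Example Builder Class")
optparser.add_option("-e", "--evaluator", default="BinaryEvaluator",
                     dest="evaluator", help="Prediction evaluator class")
(options, args) = optparser.parse_args()
print("exampleBuilder: %s, evaluator: %s" % (options.exampleBuilder, options.evaluator))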
Example #4
def crossValidate(exampleBuilder, corpusElements, examples, options, timer):
    parameterOptimizationSet = None
    constantParameterOptimizationSet = None
    if options.paramOptData != None:
        print >> sys.stderr, "Separating parameter optimization set"
        parameterOptimizationDivision = Example.makeCorpusDivision(
            corpusElements, float(options.paramOptData))
        exampleSets = Example.divideExamples(examples,
                                             parameterOptimizationDivision)
        constantParameterOptimizationSet = exampleSets[0]
        parameterOptimizationSet = constantParameterOptimizationSet
        optDocs = 0
        for k, v in parameterOptimizationDivision.iteritems():
            if v == 0:
                del corpusElements.documentsById[k]
                optDocs += 1
        print >> sys.stderr, "  Documents for parameter optimization:", optDocs
    discardedParameterCombinations = []

    print >> sys.stderr, "Dividing data into folds"
    corpusFolds = Example.makeCorpusFolds(corpusElements, options.folds[0])
    exampleSets = Example.divideExamples(examples, corpusFolds)

    keys = exampleSets.keys()
    keys.sort()
    evaluations = []
    for key in keys:
        testSet = exampleSets[key]
        for example in testSet:
            example[3]["visualizationSet"] = key + 1
        trainSet = []
        for key2 in keys:
            if key != key2:
                trainSet.extend(exampleSets[key2])
        print >> sys.stderr, "Fold", str(key + 1)
        # Create classifier object
        if options.output != None:
            if not os.path.exists(options.output + "/fold" + str(key + 1)):
                os.mkdir(options.output + "/fold" + str(key + 1))


#                if not os.path.exists(options.output+"/fold"+str(key+1)+"/classifier"):
#                    os.mkdir(options.output+"/fold"+str(key+1)+"/classifier")
            classifier = Classifier(workDir=options.output + "/fold" +
                                    str(key + 1))
        else:
            classifier = Classifier()
        classifier.featureSet = exampleBuilder.featureSet
        # Optimize ####################
        # Check whether there is need for included param opt set
        if parameterOptimizationSet == None and options.folds[1] == 0:  # 8-1-1 folds
            assert (len(keys) > 1)
            if keys.index(key) == 0:
                parameterOptimizationSetKey = keys[-1]
            else:
                parameterOptimizationSetKey = keys[keys.index(key) - 1]
            parameterOptimizationSet = exampleSets[parameterOptimizationSetKey]
            trainSet = []
            for key2 in keys:
                if key2 != key and key2 != parameterOptimizationSetKey:
                    trainSet.extend(exampleSets[key2])

        if parameterOptimizationSet != None:  # constant external parameter optimization set
            evaluationArgs = {"classSet": exampleBuilder.classSet}
            if options.parameters != None:
                paramDict = splitParameters(options.parameters)
                bestResults = classifier.optimize(
                    [trainSet], [parameterOptimizationSet],
                    paramDict,
                    Evaluation,
                    evaluationArgs,
                    combinationsThatTimedOut=discardedParameterCombinations)
            else:
                bestResults = classifier.optimize(
                    [trainSet], [parameterOptimizationSet],
                    evaluationClass=Evaluation,
                    evaluationArgs=evaluationArgs,
                    combinationsThatTimedOut=discardedParameterCombinations)
        else:  # nested x-fold parameter optimization
            assert (options.folds[1] >= 2)
            optimizationFolds = Example.makeExampleFolds(
                trainSet, options.folds[1])
            optimizationSets = Example.divideExamples(trainSet,
                                                      optimizationFolds)
            optimizationSetList = []
            optSetKeys = optimizationSets.keys()
            optSetKeys.sort()
            for optSetKey in optSetKeys:
                optimizationSetList.append(optimizationSets[optSetKey])
            evaluationArgs = {"classSet": exampleBuilder.classSet}
            if options.parameters != None:
                paramDict = splitParameters(options.parameters)
                bestResults = classifier.optimize(
                    optimizationSetList,
                    optimizationSetList,
                    paramDict,
                    Evaluation,
                    evaluationArgs,
                    combinationsThatTimedOut=discardedParameterCombinations)
            else:
                bestResults = classifier.optimize(
                    optimizationSetList,
                    optimizationSetList,
                    evaluationClass=Evaluation,
                    evaluationArgs=evaluationArgs,
                    combinationsThatTimedOut=discardedParameterCombinations)

        # Classify
        print >> sys.stderr, "Classifying test data"
        bestParams = bestResults[2]
        if bestParams.has_key("timeout"):
            del bestParams["timeout"]
        print >> sys.stderr, "Parameters:", bestParams
        print >> sys.stderr, "Training",
        startTime = time.time()
        classifier.train(trainSet, bestParams)
        print >> sys.stderr, "(Time spent:", time.time() - startTime, "s)"
        print >> sys.stderr, "Testing",
        startTime = time.time()
        predictions = classifier.classify(testSet)
        if options.output != None:
            pdict = []
            fieldnames = ["class", "prediction", "id", "fold"]
            for p in predictions:
                if "typed" in exampleBuilder.styles:
                    pdict.append({
                        "class": exampleBuilder.classSet.getName(p[0][1]),
                        "prediction": exampleBuilder.classSet.getName(p[1]),
                        "id": p[0][0],
                        "fold": key
                    })
                else:
                    pdict.append({
                        "class": p[0][1],
                        "prediction": p[1],
                        "id": p[0][0],
                        "fold": key
                    })
            TableUtils.addToCSV(pdict, options.output + "/predictions.csv",
                                fieldnames)
        print >> sys.stderr, "(Time spent:", time.time() - startTime, "s)"

        # Calculate statistics
        evaluation = Evaluation(predictions, classSet=exampleBuilder.classSet)
        print >> sys.stderr, evaluation.toStringConcise()
        print >> sys.stderr, timer.toString()
        evaluations.append(evaluation)

        # Save example sets
        if options.output != None:
            print >> sys.stderr, "Saving example sets to", options.output
            # write the current fold's test and training examples
            Example.writeExamples(
                testSet,
                options.output + "/fold" + str(key + 1) + "/examplesTest.txt")
            Example.writeExamples(
                trainSet,
                options.output + "/fold" + str(key + 1) + "/examplesTrain.txt")
            if parameterOptimizationSet == None:
                for k, v in optimizationSets.iteritems():
                    Example.writeExamples(
                        v, options.output + "/fold" + str(key + 1) +
                        "/examplesOptimizationSet" + str(k) + ".txt")
            else:
                Example.writeExamples(
                    parameterOptimizationSet, options.output + "/fold" +
                    str(key + 1) + "/examplesOptimizationSetPredefined.txt")
            TableUtils.writeCSV(
                bestResults[2],
                options.output + "/fold" + str(key + 1) + "/parameters.csv")
            evaluation.saveCSV(options.output + "/fold" + str(key + 1) +
                               "/results.csv")
            print >> sys.stderr, "Compressing folder"
            zipTree(options.output, "fold" + str(key + 1))

        parameterOptimizationSet = constantParameterOptimizationSet

    print >> sys.stderr, "Cross-validation Results"
    for i in range(len(evaluations)):
        print >> sys.stderr, evaluations[i].toStringConcise("  Fold " +
                                                            str(i) + ": ")
    averageResult = Evaluation.average(evaluations)
    print >> sys.stderr, averageResult.toStringConcise("  Avg: ")
    pooledResult = Evaluation.pool(evaluations)
    print >> sys.stderr, pooledResult.toStringConcise("  Pool: ")
    if options.output != None:
        for i in range(len(evaluations)):
            evaluations[i].saveCSV(options.output + "/results.csv", i)
        averageResult.saveCSV(options.output + "/results.csv", "Avg")
        pooledResult.saveCSV(options.output + "/results.csv", "Pool")
        averageResult.saveCSV(options.output + "/resultsAverage.csv")
        pooledResult.saveCSV(options.output + "/resultsPooled.csv")
    # Visualize
    if options.visualization != None:
        # 'sentences' is a module-level list built by the calling script, not a parameter of crossValidate
        visualize(sentences, pooledResult.classifications, options,
                  exampleBuilder)

    # Save interactionXML
    if options.resultsToXML != None:
        classSet = None
        if "typed" in exampleBuilder.styles:
            classSet = exampleBuilder.classSet
        Example.writeToInteractionXML(pooledResult.classifications,
                                      corpusElements, options.resultsToXML,
                                      classSet)
Example #5
    else:
        options.folds = (int(options.folds), int(options.folds))

    if options.output != None:
        if os.path.exists(options.output):
            print >> sys.stderr, "Output directory exists, removing", options.output
            shutil.rmtree(options.output)
        os.makedirs(options.output)
#        if not os.path.exists(options.output+"/classifier"):
#            os.mkdir(options.output+"/classifier")

    print >> sys.stderr, "Importing modules"
    exec "from ExampleBuilders." + options.exampleBuilder + " import " + options.exampleBuilder + " as ExampleBuilder"
    exec "from Classifiers." + options.classifier + " import " + options.classifier + " as Classifier"
    exec "from Evaluators." + options.evaluator + " import " + options.evaluator + " as Evaluation"

    # Load corpus and make sentence graphs
    corpusElements = loadCorpus(options.input, options.parse,
                                options.tokenization)
    sentences = []
    for sentence in corpusElements.sentences:
        sentences.append([sentence.sentenceGraph, None, None])

    # Build examples
    exampleBuilder = ExampleBuilder(
        **splitParameters(options.exampleBuilderParameters))
    examples = buildExamples(exampleBuilder, sentences, options)

    crossValidate(exampleBuilder, corpusElements, examples, options, timer)
    print >> sys.stderr, timer.toString()
Example #6
        help="Results in analysis xml. NOTE: for edges, pairs, not interactions"
    )
    (options, args) = optparser.parse_args()

    mainTimer = Timer()
    print >> sys.stderr, __file__ + " start, " + mainTimer.toString()

    if options.output != None:
        if os.path.exists(options.output):
            print >> sys.stderr, "Output directory exists, removing", options.output
            shutil.rmtree(options.output)
        os.mkdir(options.output)
        if not os.path.exists(options.output + "/classifier"):
            os.mkdir(options.output + "/classifier")

    classifierParamDict = splitParameters(options.parameters)

    print >> sys.stderr, "Importing modules"
    exec "from ExampleBuilders." + options.exampleBuilder + " import " + options.exampleBuilder + " as ExampleBuilder"
    exec "from Classifiers." + options.classifier + " import " + options.classifier + " as Classifier"
    exec "from Evaluators." + options.evaluator + " import " + options.evaluator + " as Evaluation"

    testCorpusElements = None
    exampleSets = [[], []]
    if not classifierParamDict.has_key("predefined"):
        print >> sys.stderr, "No-predefined model"
        exampleBuilder = ExampleBuilder(
            **splitParameters(options.exampleBuilderParameters))
        # Load corpus and make sentence graphs
        trainCorpusElements = loadCorpus(options.input, options.parse,
                                         options.tokenization)