Example 1
def main():
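    """Command-line entry point: parse the arguments and run a paternity test."""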
    parser = argparse.ArgumentParser(description=DESCRIPTION,
                                     epilog=EPILOG,
                                     formatter_class=ParagraphFormatter)
    utils.add_db_arguments(parser)
    utils.add_tokenizer_argument(parser)
    utils.add_query_arguments(parser)
    parser.add_argument('parent',
                        help=PARENT_LABEL_HELP,
                        metavar='PARENT_LABEL')
    parser.add_argument('child', help=CHILD_LABEL_HELP, metavar='CHILD_LABEL')
    parser.add_argument('unrelated',
                        help=UNRELATED_LABEL_HELP,
                        metavar='UNRELATED_LABEL')
    parser.add_argument('max_works',
                        help=MAX_WORKS_HELP,
                        metavar='MAXIMUM',
                        type=int)
    parser.add_argument('output_dir',
                        help=OUTPUT_DIR_HELP,
                        metavar='DIRECTORY')
    args = parser.parse_args()
    catalogue = utils.get_catalogue(args)
    data_store = utils.get_data_store(args)
    tokenizer = utils.get_tokenizer(args)
    try:
        test = taclextra.paternity.PaternityTest(data_store, catalogue,
                                                 tokenizer, args.parent,
                                                 args.child, args.unrelated,
                                                 args.max_works,
                                                 args.output_dir)
        test.process()
    except Exception as e:
        parser.error(e)
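The try/except block above funnels any failure from the test run into argparse's own error handling: parser.error() prints the usage line and the message to stderr and then exits with status 2, so the script fails like any other command-line misuse. A minimal, self-contained sketch of that behaviour (the program name and the raised message are invented for illustration):

import argparse

parser = argparse.ArgumentParser(prog='demo')
try:
    raise ValueError('something went wrong')
except Exception as e:
    # Prints the usage line and "demo: error: something went wrong"
    # to stderr, then exits with status 2.
    parser.error(str(e))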
Example 2
def generate_supplied_diff_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to run a diff query using
    the supplied results sets."""
    parser = subparsers.add_parser(
        'sdiff', description=constants.SUPPLIED_DIFF_DESCRIPTION,
        epilog=constants.SUPPLIED_DIFF_EPILOG,
        formatter_class=ParagraphFormatter, help=constants.SUPPLIED_DIFF_HELP)
    parser.set_defaults(func=supplied_diff)
    utils.add_common_arguments(parser)
    utils.add_tokenizer_argument(parser)
    utils.add_db_arguments(parser, True)
    utils.add_supplied_query_arguments(parser)
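A generator like generate_supplied_diff_subparser only registers the sub-command; the actual dispatch happens in the top-level script via the func attribute stored by parser.set_defaults(). The following self-contained sketch shows that pattern with an invented 'hello' sub-command and handler; tacl's own top-level wiring follows the same idea, though details such as the handler signature may differ.

import argparse


def hello(args):
    """Handler attached to the 'hello' sub-command via set_defaults."""
    print('Hello, {}!'.format(args.name))


def generate_hello_subparser(subparsers):
    parser = subparsers.add_parser('hello', help='Print a greeting.')
    parser.set_defaults(func=hello)
    parser.add_argument('name', metavar='NAME')


def main():
    parser = argparse.ArgumentParser(description='Sub-command dispatch demo.')
    subparsers = parser.add_subparsers(title='subcommands')
    generate_hello_subparser(subparsers)
    args = parser.parse_args()
    # Dispatch to whichever handler the chosen sub-command registered.
    if hasattr(args, 'func'):
        args.func(args)
    else:
        parser.print_help()


if __name__ == '__main__':
    main()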
Example 3
def generate_supplied_diff_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to run a diff query using
    the supplied results sets."""
    parser = subparsers.add_parser(
        'sdiff',
        description=constants.SUPPLIED_DIFF_DESCRIPTION,
        epilog=constants.SUPPLIED_DIFF_EPILOG,
        formatter_class=ParagraphFormatter,
        help=constants.SUPPLIED_DIFF_HELP)
    parser.set_defaults(func=supplied_diff)
    utils.add_common_arguments(parser)
    utils.add_tokenizer_argument(parser)
    utils.add_db_arguments(parser, True)
    utils.add_supplied_query_arguments(parser)
Example 4
def main():
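    """Command-line entry point: parse the arguments and generate a lifetime report."""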
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    utils.add_db_arguments(parser)
    utils.add_tokenizer_argument(parser)
    utils.add_query_arguments(parser)
    parser.add_argument('output', help=HELP_OUTPUT, metavar='DIRECTORY')
    args = parser.parse_args()
    data_store = utils.get_data_store(args)
    catalogue = utils.get_catalogue(args)
    tokenizer = utils.get_tokenizer(args)
    output_dir = os.path.abspath(args.output)
    reporter = lifetime.LifetimeReporter(data_store, catalogue, tokenizer,
                                         output_dir)
    reporter.process()
Example 5
def generate_normalise_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to generate a normalised
    corpus from an unnormalised corpus."""
    parser = subparsers.add_parser(
        'normalise', description=constants.NORMALISE_DESCRIPTION,
        epilog=constants.NORMALISE_EPILOG, formatter_class=ParagraphFormatter,
        help=constants.NORMALISE_HELP)
    parser.set_defaults(func=normalise_corpus)
    utils.add_tokenizer_argument(parser)
    utils.add_common_arguments(parser)
    parser.add_argument('corpus', help=constants.NORMALISE_CORPUS_HELP,
                        metavar='CORPUS')
    parser.add_argument('mapping', help=constants.NORMALISE_MAPPING_HELP,
                        metavar='MAPPING')
    parser.add_argument('output', help=constants.NORMALISE_OUTPUT_HELP,
                        metavar='OUTPUT')
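The positional 'corpus', 'mapping' and 'output' arguments use metavar purely to control how they appear in the usage and help text; the attribute on the parsed namespace still takes its name from the argument itself. A small standalone illustration (the program and argument names are invented):

import argparse

parser = argparse.ArgumentParser(prog='normalise-demo')
parser.add_argument('corpus', metavar='CORPUS', help='Path to the corpus.')
args = parser.parse_args(['texts/'])
print(args.corpus)    # 'texts/'
parser.print_usage()  # usage: normalise-demo [-h] CORPUS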
Example 6
def generate_lifetime_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to make a lifetime report."""
    parser = subparsers.add_parser(
        'lifetime', description=constants.LIFETIME_DESCRIPTION,
        epilog=constants.LIFETIME_EPILOG, formatter_class=ParagraphFormatter,
        help=constants.LIFETIME_HELP)
    parser.set_defaults(func=lifetime_report)
    utils.add_tokenizer_argument(parser)
    utils.add_common_arguments(parser)
    utils.add_query_arguments(parser)
    parser.add_argument('results', help=constants.LIFETIME_RESULTS_HELP,
                        metavar='RESULTS')
    parser.add_argument('label', help=constants.LIFETIME_LABEL_HELP,
                        metavar='LABEL')
    parser.add_argument('output', help=constants.REPORT_OUTPUT_HELP,
                        metavar='OUTPUT')
Example 7
def generate_normalise_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to generate a normalised
    corpus from an unnormalised corpus."""
    parser = subparsers.add_parser('normalise',
                                   description=constants.NORMALISE_DESCRIPTION,
                                   epilog=constants.NORMALISE_EPILOG,
                                   formatter_class=ParagraphFormatter,
                                   help=constants.NORMALISE_HELP)
    parser.set_defaults(func=normalise_corpus)
    utils.add_tokenizer_argument(parser)
    utils.add_common_arguments(parser)
    parser.add_argument('corpus',
                        help=constants.NORMALISE_CORPUS_HELP,
                        metavar='CORPUS')
    parser.add_argument('mapping',
                        help=constants.NORMALISE_MAPPING_HELP,
                        metavar='MAPPING')
    parser.add_argument('output',
                        help=constants.NORMALISE_OUTPUT_HELP,
                        metavar='OUTPUT')
Example 8
def generate_lifetime_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to make a lifetime report."""
    parser = subparsers.add_parser('lifetime',
                                   description=constants.LIFETIME_DESCRIPTION,
                                   epilog=constants.LIFETIME_EPILOG,
                                   formatter_class=ParagraphFormatter,
                                   help=constants.LIFETIME_HELP)
    parser.set_defaults(func=lifetime_report)
    utils.add_tokenizer_argument(parser)
    utils.add_common_arguments(parser)
    utils.add_query_arguments(parser)
    parser.add_argument('results',
                        help=constants.LIFETIME_RESULTS_HELP,
                        metavar='RESULTS')
    parser.add_argument('label',
                        help=constants.LIFETIME_LABEL_HELP,
                        metavar='LABEL')
    parser.add_argument('output',
                        help=constants.REPORT_OUTPUT_HELP,
                        metavar='OUTPUT')
Example 9
def generate_results_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to manipulate CSV
    results data."""
    parser = subparsers.add_parser(
        'results', description=constants.RESULTS_DESCRIPTION,
        epilog=constants.RESULTS_EPILOG, formatter_class=ParagraphFormatter,
        help=constants.RESULTS_HELP)
    utils.add_common_arguments(parser)
    parser.set_defaults(func=results)
    be_group = parser.add_argument_group('bifurcated extend')
    be_group.add_argument('-b', '--bifurcated-extend',
                          dest='bifurcated_extend', metavar='CORPUS',
                          help=constants.RESULTS_BIFURCATED_EXTEND_HELP)
    be_group.add_argument('--max-be-count', dest='bifurcated_extend_size',
                          help=constants.RESULTS_BIFURCATED_EXTEND_MAX_HELP,
                          metavar='COUNT', type=int)
    normalise_group = parser.add_argument_group('denormalise')
    normalise_group.add_argument(
        '--denormalise', dest='denormalise_mapping',
        help=constants.RESULTS_DENORMALISE_MAPPING_HELP, metavar='MAPPING')
    normalise_group.add_argument(
        '--denormalised-corpus', dest='denormalised_corpus',
        help=constants.RESULTS_DENORMALISE_CORPUS_HELP, metavar='CORPUS')
    parser.add_argument('-e', '--extend', dest='extend',
                        help=constants.RESULTS_EXTEND_HELP, metavar='CORPUS')
    parser.add_argument('--excise', help=constants.RESULTS_EXCISE_HELP,
                        metavar='NGRAM', type=str)
    parser.add_argument('-l', '--label', dest='label',
                        help=constants.RESULTS_LABEL_HELP, metavar='LABEL')
    parser.add_argument('--min-count', dest='min_count',
                        help=constants.RESULTS_MINIMUM_COUNT_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--max-count', dest='max_count',
                        help=constants.RESULTS_MAXIMUM_COUNT_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--min-count-work', dest='min_count_work',
                        help=constants.RESULTS_MINIMUM_COUNT_WORK_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--max-count-work', dest='max_count_work',
                        help=constants.RESULTS_MAXIMUM_COUNT_WORK_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--min-size', dest='min_size',
                        help=constants.RESULTS_MINIMUM_SIZE_HELP,
                        metavar='SIZE', type=int)
    parser.add_argument('--max-size', dest='max_size',
                        help=constants.RESULTS_MAXIMUM_SIZE_HELP,
                        metavar='SIZE', type=int)
    parser.add_argument('--min-works', dest='min_works',
                        help=constants.RESULTS_MINIMUM_WORK_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--max-works', dest='max_works',
                        help=constants.RESULTS_MAXIMUM_WORK_HELP,
                        metavar='COUNT', type=int)
    parser.add_argument('--ngrams', dest='ngrams',
                        help=constants.RESULTS_NGRAMS_HELP, metavar='NGRAMS')
    parser.add_argument('--reciprocal', action='store_true',
                        help=constants.RESULTS_RECIPROCAL_HELP)
    parser.add_argument('--reduce', action='store_true',
                        help=constants.RESULTS_REDUCE_HELP)
    parser.add_argument('--relabel', help=constants.RESULTS_RELABEL_HELP,
                        metavar='CATALOGUE')
    parser.add_argument('--remove', help=constants.RESULTS_REMOVE_HELP,
                        metavar='LABEL', type=str)
    parser.add_argument('--sort', action='store_true',
                        help=constants.RESULTS_SORT_HELP)
    utils.add_tokenizer_argument(parser)
    parser.add_argument('-z', '--zero-fill', dest='zero_fill',
                        help=constants.RESULTS_ZERO_FILL_HELP,
                        metavar='CORPUS')
    parser.add_argument('results', help=constants.RESULTS_RESULTS_HELP,
                        metavar='RESULTS')
    unsafe_group = parser.add_argument_group(
        constants.RESULTS_UNSAFE_GROUP_TITLE,
        constants.RESULTS_UNSAFE_GROUP_DESCRIPTION)
    unsafe_group.add_argument('--add-label-count', action='store_true',
                              help=constants.RESULTS_ADD_LABEL_COUNT_HELP)
    unsafe_group.add_argument('--add-label-work-count', action='store_true',
                              help=constants.RESULTS_ADD_LABEL_WORK_COUNT_HELP)
    unsafe_group.add_argument('--collapse-witnesses', action='store_true',
                              help=constants.RESULTS_COLLAPSE_WITNESSES_HELP)
    unsafe_group.add_argument('--group-by-ngram', dest='group_by_ngram',
                              help=constants.RESULTS_GROUP_BY_NGRAM_HELP,
                              metavar='CATALOGUE')
    unsafe_group.add_argument('--group-by-witness', action='store_true',
                              help=constants.RESULTS_GROUP_BY_WITNESS_HELP)
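The 'bifurcated extend', 'denormalise' and unsafe groups above are created with parser.add_argument_group(), which only changes how options are sectioned in the --help output; parsing and the resulting namespace are unaffected. A minimal standalone illustration of the same technique (the option names here are invented for the example):

import argparse

parser = argparse.ArgumentParser(description='Argument group demo.')
filter_group = parser.add_argument_group(
    'filtering', 'Options that restrict which rows are kept.')
filter_group.add_argument('--min-count', metavar='COUNT', type=int,
                          help='Keep only rows occurring at least COUNT times.')
filter_group.add_argument('--max-count', metavar='COUNT', type=int,
                          help='Keep only rows occurring at most COUNT times.')
# Grouped options land on the same namespace as ungrouped ones.
args = parser.parse_args(['--min-count', '2'])
print(args.min_count, args.max_count)  # 2 None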
Example 10
def generate_results_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to manipulate CSV
    results data."""
    parser = subparsers.add_parser('results',
                                   description=constants.RESULTS_DESCRIPTION,
                                   epilog=constants.RESULTS_EPILOG,
                                   formatter_class=ParagraphFormatter,
                                   help=constants.RESULTS_HELP)
    utils.add_common_arguments(parser)
    parser.set_defaults(func=results)
    be_group = parser.add_argument_group('bifurcated extend')
    be_group.add_argument('-b',
                          '--bifurcated-extend',
                          dest='bifurcated_extend',
                          metavar='CORPUS',
                          help=constants.RESULTS_BIFURCATED_EXTEND_HELP)
    be_group.add_argument('--max-be-count',
                          dest='bifurcated_extend_size',
                          help=constants.RESULTS_BIFURCATED_EXTEND_MAX_HELP,
                          metavar='COUNT',
                          type=int)
    normalise_group = parser.add_argument_group('denormalise')
    normalise_group.add_argument(
        '--denormalise',
        dest='denormalise_mapping',
        help=constants.RESULTS_DENORMALISE_MAPPING_HELP,
        metavar='MAPPING')
    normalise_group.add_argument(
        '--denormalised-corpus',
        dest='denormalised_corpus',
        help=constants.RESULTS_DENORMALISE_CORPUS_HELP,
        metavar='CORPUS')
    parser.add_argument('-e',
                        '--extend',
                        dest='extend',
                        help=constants.RESULTS_EXTEND_HELP,
                        metavar='CORPUS')
    parser.add_argument('--excise',
                        help=constants.RESULTS_EXCISE_HELP,
                        metavar='NGRAM',
                        type=str)
    parser.add_argument('-l',
                        '--label',
                        dest='label',
                        help=constants.RESULTS_LABEL_HELP,
                        metavar='LABEL')
    parser.add_argument('--min-count',
                        dest='min_count',
                        help=constants.RESULTS_MINIMUM_COUNT_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--max-count',
                        dest='max_count',
                        help=constants.RESULTS_MAXIMUM_COUNT_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--min-count-work',
                        dest='min_count_work',
                        help=constants.RESULTS_MINIMUM_COUNT_WORK_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--max-count-work',
                        dest='max_count_work',
                        help=constants.RESULTS_MAXIMUM_COUNT_WORK_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--min-size',
                        dest='min_size',
                        help=constants.RESULTS_MINIMUM_SIZE_HELP,
                        metavar='SIZE',
                        type=int)
    parser.add_argument('--max-size',
                        dest='max_size',
                        help=constants.RESULTS_MAXIMUM_SIZE_HELP,
                        metavar='SIZE',
                        type=int)
    parser.add_argument('--min-works',
                        dest='min_works',
                        help=constants.RESULTS_MINIMUM_WORK_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--max-works',
                        dest='max_works',
                        help=constants.RESULTS_MAXIMUM_WORK_HELP,
                        metavar='COUNT',
                        type=int)
    parser.add_argument('--ngrams',
                        dest='ngrams',
                        help=constants.RESULTS_NGRAMS_HELP,
                        metavar='NGRAMS')
    parser.add_argument('--reciprocal',
                        action='store_true',
                        help=constants.RESULTS_RECIPROCAL_HELP)
    parser.add_argument('--reduce',
                        action='store_true',
                        help=constants.RESULTS_REDUCE_HELP)
    parser.add_argument('--relabel',
                        help=constants.RESULTS_RELABEL_HELP,
                        metavar='CATALOGUE')
    parser.add_argument('--remove',
                        help=constants.RESULTS_REMOVE_HELP,
                        metavar='LABEL',
                        type=str)
    parser.add_argument('--sort',
                        action='store_true',
                        help=constants.RESULTS_SORT_HELP)
    utils.add_tokenizer_argument(parser)
    parser.add_argument('-z',
                        '--zero-fill',
                        dest='zero_fill',
                        help=constants.RESULTS_ZERO_FILL_HELP,
                        metavar='CORPUS')
    parser.add_argument('results',
                        help=constants.RESULTS_RESULTS_HELP,
                        metavar='RESULTS')
    unsafe_group = parser.add_argument_group(
        constants.RESULTS_UNSAFE_GROUP_TITLE,
        constants.RESULTS_UNSAFE_GROUP_DESCRIPTION)
    unsafe_group.add_argument('--add-label-count',
                              action='store_true',
                              help=constants.RESULTS_ADD_LABEL_COUNT_HELP)
    unsafe_group.add_argument('--add-label-work-count',
                              action='store_true',
                              help=constants.RESULTS_ADD_LABEL_WORK_COUNT_HELP)
    unsafe_group.add_argument('--collapse-witnesses',
                              action='store_true',
                              help=constants.RESULTS_COLLAPSE_WITNESSES_HELP)
    unsafe_group.add_argument('--group-by-ngram',
                              dest='group_by_ngram',
                              help=constants.RESULTS_GROUP_BY_NGRAM_HELP,
                              metavar='CATALOGUE')
    unsafe_group.add_argument('--group-by-witness',
                              action='store_true',
                              help=constants.RESULTS_GROUP_BY_WITNESS_HELP)