Example #1
def test_parse_arguments4():
    """Testing specfied arguments"""
    args = ["--absentees", "maria", "--round-robin", "--group-size", "3"]
    parsed_args = parse_arguments.parse_arguments(args)
    assert parsed_args.group_size == 3
    assert parsed_args.grouping_method == constants.ALGORITHM_ROUND_ROBIN
    assert parsed_args.absentees == ["maria"]
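For context, here is a minimal argparse-based sketch of a parse_arguments that would satisfy these assertions. The flag and attribute names come from the test itself; the defaults and the stand-in value for constants.ALGORITHM_ROUND_ROBIN are assumptions:

import argparse

def parse_arguments(args):
    # Sketch: only the options exercised by this test are defined.
    parser = argparse.ArgumentParser()
    parser.add_argument("--group-size", dest="group_size", type=int, default=2)
    parser.add_argument("--absentees", dest="absentees", nargs="*", default=[])
    parser.add_argument("--round-robin", dest="grouping_method",
                        action="store_const", const="rrobin")  # assumed value of constants.ALGORITHM_ROUND_ROBIN
    return parser.parse_args(args)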
Example #2
def test_parse_arguments2():
    """Testing specfied arguments"""
    args = ["--debug", "--students-file", "students.csv", "--random"]
    parsed_args = parse_arguments.parse_arguments(args)
    assert parsed_args.logging_level == logging.DEBUG
    assert parsed_args.students_file == "students.csv"
    assert parsed_args.grouping_method == "random"
Example #3
def test_parse_arguments1():
    """General testing of arguments - if arguments exists"""
    args = []
    parsed_args = parse_arguments.parse_arguments(args)
    assert parsed_args.logging_level == logging.ERROR
    assert parsed_args.group_size == defaults.DEFAULT_GRPSIZE
    assert parsed_args.students_file == defaults.DEFAULT_CSVFILE
    assert parsed_args.grouping_method != group_random
Example #4
def main():
    args, _ = parse_arguments()
    input_path = Path(args.input_path)
    output_path = Path(args.output_path)
    pretrained_ocr = Path(args.finetuned_ocr)
    bsheet_pages = args.balancesheet_page_nums
    profitloss_pages = args.profitloss_page_nums
    # Bail out early if no page numbers were supplied at all.
    if bsheet_pages is None and profitloss_pages is None:
        print("Enter the page numbers.")
        return

    # Collect page numbers from "[1,2,3]"-style argument strings.
    page_numbers = []
    if bsheet_pages is not None:
        page_numbers.extend(int(p_no) for p_no in bsheet_pages.strip('[]').split(','))
    if profitloss_pages is not None:
        page_numbers.extend(int(p_no) for p_no in profitloss_pages.strip('[]').split(','))

    # The flags arrive as strings, so compare against the literal 'True'.
    double_page = args.double_page == 'True'
    next_page = args.next_page == 'True'

    table_extractor = PDFToCSV(input_path, output_path, pretrained_ocr,
                               page_numbers, double_page, next_page)
    table_extractor.pdf_to_jpg()
    table_extractor.image_to_txt()
    table_extractor.txt_to_csv()
    table_extractor.combine_csv_files()
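The bracket-stripping logic above is a natural candidate for a small helper. A minimal sketch, assuming the same "[1,2,3]" string format (parse_page_numbers is a hypothetical name, not part of the original code):

def parse_page_numbers(raw):
    # Hypothetical helper: turn a "[1,2,3]"-style string into [1, 2, 3].
    return [int(p) for p in raw.strip('[]').split(',') if p.strip()]

With it, each branch above reduces to a single page_numbers.extend(parse_page_numbers(...)) call.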
Example #5
        sys.stderr.write('\n')
        acc, top5, test_loss = test_model(dataset, checkpoint.model, args)

        checkpoint.save_results({'epoch': checkpoint.epoch, 'acc': acc, 'top5': top5, 'loss': test_loss,
                                 'ingoing': ingoing, 'outgoing': outgoing, 'a': last_a,
                                 'norm': l2_norm(checkpoint.model),
                                 'pruned_param_count': checkpoint.model.compute_params_count(args.pruning_type),
                                 'pruned_flops_count': checkpoint.model.compute_flops_count(),
                                 'epoch_duration': duration})
        checkpoint.epoch += 1
        checkpoint.scheduler.step()
        checkpoint.save()


if __name__ == '__main__':
    arguments = parse_arguments()
    torch.manual_seed(arguments.seed)
    np.random.seed(arguments.seed)
    if arguments.fix_a is None and arguments.reg_type == "swd" and arguments.pruning_iterations != 1:
        raise ValueError('Progressive a is not compatible with iterative pruning')
    if arguments.no_ft and arguments.pruning_iterations != 1:
        raise ValueError("You can't specify a pruning_iterations value if there is no fine-tuning at all")
    get_mask = get_mask_function(arguments.pruning_type)
    _dataset = get_dataset(arguments)
    _targets = [int((n + 1) * (arguments.target / arguments.pruning_iterations)) for n in
                range(arguments.pruning_iterations)]

    # Train model
    print('Train model!')
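For concreteness, the _targets expression spreads the final pruning target evenly across iterations. A quick check with assumed example values target=90 and pruning_iterations=3:

target, pruning_iterations = 90, 3  # assumed example values
_targets = [int((n + 1) * (target / pruning_iterations)) for n in range(pruning_iterations)]
assert _targets == [30, 60, 90]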
Example #6
def just_do_it():
    options = parse_arguments.parse_arguments()

    track.track(options.video_path, options.show_result)

    print("End of game, have a nice day!")
Example #7
def main():

    #parsing the arguments
    args, _ = parse_arguments()

    #setup logging
    #output_dir = Path('/content/drive/My Drive/image-captioning/output')
    output_dir = Path(args.output_directory)
    output_dir.mkdir(parents=True, exist_ok=True)
    logfile_path = Path(output_dir / "output.log")
    setup_logging(logfile=logfile_path)

    #setup and read config.ini
    #config_file = Path('/content/drive/My Drive/image-captioning/config.ini')
    config_file = Path('../config.ini')
    reading_config(config_file)

    #tensorboard
    tensorboard_logfile = Path(output_dir / 'tensorboard')
    tensorboard_writer = SummaryWriter(tensorboard_logfile)

    #load dataset
    #dataset_dir = Path('/content/drive/My Drive/Flickr8k_Dataset')
    dataset_dir = Path(args.dataset)
    images_path = Path(dataset_dir / Config.get("images_dir"))
    captions_path = Path(dataset_dir / Config.get("captions_dir"))
    training_loader, validation_loader, testing_loader = data_loaders(
        images_path, captions_path)

    #load the model (encoder, decoder, optimizer)
    embed_size = Config.get("encoder_embed_size")
    hidden_size = Config.get("decoder_hidden_size")
    batch_size = Config.get("training_batch_size")
    epochs = Config.get("epochs")
    feature_extraction = Config.get("feature_extraction")
    raw_captions = read_captions(captions_path)
    id_to_word, word_to_id = dictionary(raw_captions, threshold=5)
    vocab_size = len(id_to_word)
    encoder = Encoder(embed_size, feature_extraction)
    decoder = Decoder(embed_size, hidden_size, vocab_size, batch_size)

    #load pretrained embeddings
    #pretrained_emb_dir = Path('/content/drive/My Drive/word2vec')
    pretrained_emb_dir = Path(args.pretrained_embeddings)
    pretrained_emb_file = Path(pretrained_emb_dir /
                               Config.get("pretrained_emb_path"))
    pretrained_embeddings = load_pretrained_embeddings(pretrained_emb_file,
                                                       id_to_word)

    #load the optimizer
    learning_rate = Config.get("learning_rate")
    optimizer = adam_optimizer(encoder, decoder, learning_rate)

    #loss function
    criterion = cross_entropy

    #load checkpoint
    checkpoint_file = Path(output_dir / Config.get("checkpoint_file"))
    checkpoint_captioning = load_checkpoint(checkpoint_file)

    #using available device(gpu/cpu)
    encoder = encoder.to(Config.get("device"))
    decoder = decoder.to(Config.get("device"))
    pretrained_embeddings = pretrained_embeddings.to(Config.get("device"))

    start_epoch = 1
    if checkpoint_captioning is not None:
        start_epoch = checkpoint_captioning['epoch'] + 1
        encoder.load_state_dict(checkpoint_captioning['encoder'])
        decoder.load_state_dict(checkpoint_captioning['decoder'])
        optimizer.load_state_dict(checkpoint_captioning['optimizer'])
        logger.info(
            'Initialized encoder, decoder and optimizer from loaded checkpoint'
        )

    del checkpoint_captioning

    #image captioning model
    model = ImageCaptioning(encoder, decoder, optimizer, criterion,
                            training_loader, validation_loader, testing_loader,
                            pretrained_embeddings, output_dir,
                            tensorboard_writer)

    #training and testing the model
    if args.training:
        validate_every = Config.get("validate_every")
        model.train(epochs, validate_every, start_epoch)
    elif args.testing:
        #images_path already points at the images directory (set above)
        model.testing(id_to_word, images_path)
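The checkpoint handling above implies a loader that returns None when no checkpoint exists, and otherwise a dict with 'epoch', 'encoder', 'decoder' and 'optimizer' entries. A minimal sketch under those assumptions (not the project's actual implementation):

from pathlib import Path
import torch

def load_checkpoint(checkpoint_file: Path):
    # Return None when there is nothing to resume from, matching the
    # `if checkpoint_captioning is not None` check above.
    if not checkpoint_file.is_file():
        return None
    # Assumed layout: {'epoch': int, 'encoder': state_dict,
    #                  'decoder': state_dict, 'optimizer': state_dict}
    return torch.load(checkpoint_file, map_location='cpu')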
Example #8
def test_parse_arguments5():
    """Testing specfied arguments"""
    args = ["--num-group", "3"]
    parsed_args = parse_arguments.parse_arguments(args)
    assert parsed_args.num_group == 3
Example #9
def test_parse_gatorgrouper_arguments3():
    """Testing specfied arguments"""
    args = ["--verbose", "--round-robin"]
    parsed_args = parse_arguments.parse_arguments(args)
    assert parsed_args.logging_level == logging.INFO
    assert parsed_args.grouping_method == constants.ALGORITHM_ROUND_ROBIN
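Taken together, Examples 2, 3 and 9 imply a verbosity mapping for parse_arguments: ERROR by default, INFO with --verbose, and DEBUG with --debug. A minimal sketch of that mapping, assuming boolean debug/verbose flags on the parsed namespace:

import logging

def logging_level(debug, verbose):
    # Assumed mapping implied by the assertions in the tests above.
    if debug:
        return logging.DEBUG
    if verbose:
        return logging.INFO
    return logging.ERROR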