Example #1
def weak_supervision(model, iterations):
	"""
	Train with weak supervision on the ICDAR 2013 dataset.
	:param model: Path to the model pre-trained on Synth-Text with the function train_synth
	:param iterations: Number of iterations to train on ICDAR 2013
	:return: None
	"""

	from train_weak_supervision import get_initial_model_optimizer, generate_target, train, save_model

	model, optimizer = get_initial_model_optimizer(model)

	"""
	Steps - 
		1) Using the pre-trained model generate the targets
		2) Fine-tune the model on icdar 2013 dataset using weak-supervision
		3) Saving the model and again repeating process 1-3
		4) Saving the final model	
	"""

	for iteration in range(int(iterations)):

		print('Generating for iteration:', iteration)
		generate_target(model, iteration)

		print('Fine-tuning for iteration:', iteration)
		model, optimizer = train(model, optimizer, iteration)

		print('Saving for iteration:', iteration)
		save_model(model, optimizer, 'intermediate', iteration)

	save_model(model, optimizer, 'final')
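
A minimal usage sketch for Example #1; the checkpoint path and iteration count below are hypothetical placeholders, not values taken from the repository:

# Hypothetical invocation; 'models/synthtext_pretrained.pkl' is a placeholder path.
weak_supervision('models/synthtext_pretrained.pkl', iterations=10)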
Example #2
def weak_supervision(model, iterations):
    """
    Train with weak supervision on the ICDAR 2013 dataset.
    :param model: Path to the model pre-trained on Synth-Text with the function train_synth
    :param iterations: Number of iterations to train on ICDAR 2013
    :return: None
    """

    from train_weak_supervision import get_initial_model_optimizer, generate_target, train, save_model, test
    import config
    from tensorboardX import SummaryWriter
    writer = SummaryWriter()

    # TODO: Check the effect of reusing the Synth-Text optimizer state versus starting from a fresh optimizer

    model, optimizer = get_initial_model_optimizer(model)
    print('Number of parameters in the model:',
          sum(p.numel() for p in model.parameters() if p.requires_grad))

    """
	Steps - 
		1) Using the pre-trained model generate the targets
		2) Fine-tune the model on icdar 2013 dataset using weak-supervision
		3) Saving the model and again repeating process 1-3
		4) Saving the final model	
	"""

    for iteration in range(config.start_iteration, int(iterations)):

        if iteration not in config.skip_iterations:

            print('Generating for iteration:', iteration)
            generate_target(model, iteration)

            print('Testing for iteration:', iteration)
            f_score_test, precision_test, recall_test = test(model, iteration, writer)
            print(
                'Test Results for iteration:', iteration,
                ' | F-score: ', f_score_test,
                ' | Precision: ', precision_test,
                ' | Recall: ', recall_test
            )

        print('Fine-tuning for iteration:', iteration)
        model, optimizer, loss, accuracy = train(
            model, optimizer, iteration, writer)

        print('Saving for iteration:', iteration)
        save_model(model, optimizer, 'intermediate', iteration,
                   loss=loss, accuracy=accuracy)
        print('====================================')

    save_model(model, optimizer, 'final')
    writer.close()
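
Example #2 threads a tensorboardX SummaryWriter through test() and train(). A minimal sketch of the kind of scalar logging those functions presumably perform; the tag names here are assumptions, not taken from the repository:

# Hypothetical logging calls; 'test/f_score' and 'train/loss' are assumed tag names.
writer.add_scalar('test/f_score', f_score_test, iteration)
writer.add_scalar('train/loss', loss, iteration)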
Example #3
def weak_supervision(model, iterations):
	"""
	Train with weak supervision on the ICDAR 2013 dataset.
	:param model: Path to the model pre-trained on Synth-Text with the function train_synth
	:param iterations: Number of iterations to train on ICDAR 2013
	:return: None
	"""

	from train_weak_supervision import get_initial_model_optimizer, generate_target, train, save_model, test
	from train_weak_supervision import config

	seed(config)  # seed() is not defined in this snippet; see the sketch after this example

	# TODO: Check the effect of reusing the Synth-Text optimizer state versus starting from a fresh optimizer

	model, optimizer = get_initial_model_optimizer(model)

	pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)

	print('Number of parameters in the model:', pytorch_total_params)

	"""
	Steps - 
		1) Using the pre-trained model generate the targets
		2) Fine-tune the model on icdar 2013 dataset using weak-supervision
		3) Saving the model and again repeating process 1-3
		4) Saving the final model	
	"""

	for iteration in range(int(iterations)):

		# Target generation and testing are commented out in this variant;
		# only the fine-tuning step runs each iteration.
		# print('Generating for iteration:', iteration)
		# generate_target(model, iteration)
		#
		# print('Testing for iteration:', iteration)
		# f_score_test = test(model)
		# print('Test Results for iteration:', iteration, ' | F-score: ', f_score_test)

		print('Fine-tuning for iteration:', iteration)
		model, optimizer, loss, accuracy = train(model, optimizer, iteration)

		print('Saving for iteration:', iteration)
		save_model(model, optimizer, 'intermediate', iteration, loss=loss, accuracy=accuracy)

	save_model(model, optimizer, 'final')
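
Example #3 calls seed(config) without showing its definition. A minimal sketch of such a helper, assuming config.seed holds an integer seed; this is an assumption, not the repository's actual implementation:

import random

import numpy as np
import torch


def seed(config):
	# Hypothetical helper: fix all RNG seeds so runs are reproducible.
	random.seed(config.seed)
	np.random.seed(config.seed)
	torch.manual_seed(config.seed)
	torch.backends.cudnn.deterministic = True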