conf.model = utils.abspath(conf.model)
conf.unetxst_homographies = utils.abspath(conf.unetxst_homographies) if conf.unetxst_homographies is not None else conf.unetxst_homographies
conf.model_weights = utils.abspath(conf.model_weights) if conf.model_weights is not None else conf.model_weights
conf.output_dir = utils.abspath(conf.output_dir)

# load network architecture module
architecture = utils.load_module(conf.model)

# get max_samples_training random training samples
n_inputs = len(conf.input_training)
files_train_input = [utils.get_files_in_folder(folder) for folder in conf.input_training]
files_train_label = utils.get_files_in_folder(conf.label_training)
_, idcs = utils.sample_list(files_train_label, n_samples=conf.max_samples_training)
files_train_input = [np.take(f, idcs) for f in files_train_input]
files_train_label = np.take(files_train_label, idcs)
image_shape_original_input = utils.load_image(files_train_input[0][0]).shape[0:2]
image_shape_original_label = utils.load_image(files_train_label[0]).shape[0:2]
print(f"Found {len(files_train_label)} training samples")

# get max_samples_validation random validation samples
files_valid_input = [utils.get_files_in_folder(folder) for folder in conf.input_validation]
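# The block above draws one set of random indices from the label list and applies
# it with np.take to every parallel per-camera input list, so inputs and labels
# stay aligned. A minimal, self-contained sketch of that idea (illustrative only;
# the real utils.sample_list may differ in signature and shuffling behaviour):
import numpy as np

def _sample_list_sketch(items, n_samples, seed=42):
    """Return up to n_samples randomly chosen elements of items plus the chosen indices."""
    rng = np.random.default_rng(seed)
    n = min(n_samples, len(items))
    idcs = rng.choice(len(items), size=n, replace=False)
    return np.take(items, idcs), idcs
# Applying the same idcs to each camera list keeps the samples paired:
#   files_train_input = [np.take(f, idcs) for f in files_train_input]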
def evalate_children():
    true_positive = 0
    false_positive = 0
    true_negative = 0
    false_negative = 0
    total = 0

    # create the experiment output folder if it does not exist yet
    try:
        os.mkdir(f'experiments/{config.experiment_name}')
    except FileExistsError:
        pass

    with open(f'experiments/{config.experiment_name}/child_evaluation.csv', 'w') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        writer.writerow(config.csv_header)

        # child blogs (positive class)
        files = utils.get_child_files_in_folder('blogs/10s')
        for idx, file in enumerate(files):
            print(f'[{idx+1}/{len(files)}] Getting file {file}')
            try:
                text = parse_xml(file)
            except (xmltodict.expat.ExpatError, AttributeError):
                # unparsable file: move it out of the corpus and skip it
                file_name = file.split('/')[2]
                os.replace(file, f'trash/{file_name}')
                continue

            print(f'Getting response for file {file}')
            actual_age = file.split('/')[2].split('.')[2]
            try:
                categories = get_nlu_reponse(text)
                predicted_category = categories[0]['label']
            except Exception:
                # NLU call failed: move the file to trash and skip it
                file_name = file.split('/')[2]
                os.replace(file, f'trash/{file_name}')
                continue

            if predicted_category == '/Child':
                true_positive += 1
                result = 'true_positive'
            else:
                false_negative += 1
                result = 'false_negative'

            row = [file, actual_age, 'Child', predicted_category, result]
            writer.writerow(row)
            csvfile.flush()
            total += 1

        # adult blogs (negative class)
        for adult_folder in adult_folders:
            files = utils.get_files_in_folder(f'blogs/{adult_folder}')
            for idx, file in enumerate(files):
                print(f'[{idx+1}/{len(files)}] Getting file {file}')
                try:
                    text = parse_xml(file)
                except (xmltodict.expat.ExpatError, AttributeError):
                    file_name = file.split('/')[2]
                    os.replace(file, f'trash/{file_name}')
                    continue

                print(f'Getting response for file {file}')
                actual_age = file.split('/')[2].split('.')[2]
                try:
                    categories = get_nlu_reponse(text)
                    predicted_category = categories[0]['label']
                except Exception:
                    file_name = file.split('/')[2]
                    os.replace(file, f'trash/{file_name}')
                    continue

                if predicted_category == '/Child':
                    false_positive += 1
                    result = 'false_positive'
                else:
                    true_negative += 1
                    result = 'true_negative'

                row = [file, actual_age, 'Adult', predicted_category, result]
                writer.writerow(row)
                csvfile.flush()
                total += 1

    # write summary statistics
    with open(f'experiments/{config.experiment_name}/child_results.txt', 'w') as result_file:
        result_file.write(f'Total: {total}\n')
        result_file.write(f'True positives: {true_positive} [{true_positive / total * 100}%]\n')
        result_file.write(f'True negatives: {true_negative} [{true_negative / total * 100}%]\n')
        result_file.write(f'False positives: {false_positive} [{false_positive / total * 100}%]\n')
        result_file.write(f'False negatives: {false_negative} [{false_negative / total * 100}%]\n')
        result_file.write(f'Accuracy: {true_positive + true_negative} [{(true_positive + true_negative) / total * 100}%]\n')
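# The summary file above only reports raw confusion-matrix counts and accuracy.
# A small hypothetical helper (not part of the original script) showing how
# precision, recall and F1 would follow from the same four counters:
def _precision_recall_f1_sketch(tp, fp, tn, fn):
    """Derive precision, recall and F1 from confusion-matrix counts."""
    precision = tp / (tp + fp) if (tp + fp) else 0.0
    recall = tp / (tp + fn) if (tp + fn) else 0.0
    f1 = 2 * precision * recall / (precision + recall) if (precision + recall) else 0.0
    return precision, recall, f1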
conf.input_testing = [utils.abspath(path) for path in conf.input_testing]
conf.one_hot_palette_input = utils.abspath(conf.one_hot_palette_input)
conf.one_hot_palette_label = utils.abspath(conf.one_hot_palette_label)
conf.model = utils.abspath(conf.model)
conf.unetxst_homographies = utils.abspath(conf.unetxst_homographies) if conf.unetxst_homographies is not None else conf.unetxst_homographies
conf.model_weights = utils.abspath(conf.model_weights)
conf.prediction_dir = utils.abspath(conf.prediction_dir)

# load network architecture module
architecture = utils.load_module(conf.model)

# get max_samples_testing samples
files_input = [utils.get_files_in_folder(folder) for folder in conf.input_testing]
_, idcs = utils.sample_list(files_input[0], n_samples=conf.max_samples_testing)
files_input = [np.take(f, idcs) for f in files_input]
n_inputs = len(conf.input_testing)
n_samples = len(files_input[0])
image_shape_original = utils.load_image(files_input[0][0]).shape[0:2]
print(f"Found {n_samples} samples")

# parse one-hot-conversion.xml
conf.one_hot_palette_input = utils.parse_convert_xml(conf.one_hot_palette_input)
conf.one_hot_palette_label = utils.parse_convert_xml(conf.one_hot_palette_label)
n_classes_input = len(conf.one_hot_palette_input)
n_classes_label = len(conf.one_hot_palette_label)
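# n_classes_input / n_classes_label above are simply the number of entries in the
# parsed palettes, one entry per one-hot class. A minimal sketch of how such a
# palette could be used to one-hot encode an RGB segmentation image; the real
# conversion lives in the utils/data pipeline and may differ, and the assumed
# palette structure (a list of RGB colour lists per class) is an assumption:
import numpy as np

def _one_hot_encode_sketch(img, palette):
    """img: (H, W, 3) uint8 RGB image; palette: list of RGB colour lists, one per class."""
    h, w, _ = img.shape
    one_hot = np.zeros((h, w, len(palette)), dtype=np.float32)
    for cls, colours in enumerate(palette):
        mask = np.zeros((h, w), dtype=bool)
        for colour in colours:
            # mark every pixel whose colour belongs to this class
            mask |= np.all(img == np.array(colour, dtype=img.dtype), axis=-1)
        one_hot[..., cls] = mask.astype(np.float32)
    return one_hot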
conf.input_validation = [utils.abspath(path) for path in conf.input_validation]
conf.label_validation = utils.abspath(conf.label_validation)
conf.one_hot_palette_input = utils.abspath(conf.one_hot_palette_input)
conf.one_hot_palette_label = utils.abspath(conf.one_hot_palette_label)
conf.model = utils.abspath(conf.model)
conf.unetxst_homographies = utils.abspath(conf.unetxst_homographies) if conf.unetxst_homographies is not None else conf.unetxst_homographies
conf.model_weights = utils.abspath(conf.model_weights)

# load network architecture module
architecture = utils.load_module(conf.model)

# get max_samples_validation random validation samples
files_input = [utils.get_files_in_folder(folder) for folder in conf.input_validation]
files_label = utils.get_files_in_folder(conf.label_validation)
_, idcs = utils.sample_list(files_label, n_samples=conf.max_samples_validation)
files_input = [np.take(f, idcs) for f in files_input]
files_label = np.take(files_label, idcs)
n_inputs = len(conf.input_validation)
n_samples = len(files_label)
image_shape_original_input = utils.load_image(files_input[0][0]).shape[0:2]
image_shape_original_label = utils.load_image(files_label[0]).shape[0:2]
print(f"Found {n_samples} samples")

# parse one-hot-conversion.xml
conf.one_hot_palette_input = utils.parse_convert_xml(conf.one_hot_palette_input)
conf.one_hot_palette_label = utils.parse_convert_xml(