                     count_aflw, duplicate_until=-1)
    '''
    # Log found ratios.
    if os.path.isfile(detector_log_path):
        file = open(detector_log_path, 'a')
        file.write("%.2f,%d,%f,%f,%d,%f,%f\n"
                   % (confidence_threshold, count_aflw, t_ratio_aflw,
                      f_ratio_aflw, count_p04 - count_aflw, t_ratio_p04,
                      f_ratio_p04))
    else:
        file = open(detector_log_path, 'w')
        file.write('threshold,count_aflw,t_ratio_aflw,f_ratio_aflw,count_p04,t_ratio_p04,f_ratio_p04\n')
        file.write("%.2f,%d,%f,%f,%d,%f,%f\n"
                   % (confidence_threshold, count_aflw, t_ratio_aflw,
                      f_ratio_aflw, count_p04 - count_aflw, t_ratio_p04,
                      f_ratio_p04))
    file.close()
    '''

    # Assign classes.
    class_assign(destination_dir, num_splits_tilt, num_splits_pan)

    # Split dataset.
    split_dataset(destination_dir, test_ratio, validation_ratio)

    # Get normalization parameters.
    find_norm_parameters(destination_dir)

    # OPTIONAL: Save dataset as numpy arrays (for uploading to Google Colab).
    store_dataset_arrays(destination_dir)
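
# The helper functions called above (class_assign, split_dataset,
# find_norm_parameters, store_dataset_arrays) are defined elsewhere in this
# repository. As a rough illustration of what the normalization step
# typically computes, the hedged sketch below derives a per-dataset pixel
# mean and standard deviation from a flat directory of grayscale .jpg crops.
# The function name, directory layout, and returned statistics are
# assumptions made for illustration only, not the repository's actual
# implementation.
import glob
import os

import cv2
import numpy as np


def find_norm_parameters_sketch(dataset_dir):
    # Accumulate first- and second-order pixel statistics over every crop.
    pixel_sum = 0.0
    pixel_sq_sum = 0.0
    pixel_count = 0
    for path in glob.glob(os.path.join(dataset_dir, '*.jpg')):
        img = cv2.imread(path, cv2.IMREAD_GRAYSCALE).astype(np.float64)
        pixel_sum += img.sum()
        pixel_sq_sum += np.square(img).sum()
        pixel_count += img.size
    if pixel_count == 0:
        raise ValueError("No images found in " + dataset_dir)
    # Var(X) = E[X^2] - E[X]^2, evaluated over the whole dataset.
    mean = pixel_sum / pixel_count
    std = np.sqrt(pixel_sq_sum / pixel_count - mean ** 2)
    return mean, std
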
def main_ssd():
    '''
    This function acts as a testbench for the function clean_pointing04,
    using it to perform the basic processing of the Pointing'04 dataset
    with an SSD head detector and the set of default values defined below.
    '''
    # Source path.
    pointing04_dir = 'original/HeadPoseImageDatabase/'

    # Destination path.
    destination_dir = 'clean/pointing04/'

    # Detector model path.
    head_detector_path = 'models/head-detector.h5'

    # Detection parameters.
    in_size = 512
    out_size = 64
    confidence_threshold = 0.75

    # Output parameters.
    grayscale_output = True
    downscaling_interpolation = cv2.INTER_LINEAR

    # Number of splits for class assignment.
    num_splits_tilt = 8
    num_splits_pan = 8

    # Ratios for train/test and train/validation splits.
    test_ratio = 0.2
    validation_ratio = 0.2

    # Detector model.
    detector = ssd_512(image_size=(in_size, in_size, 3), n_classes=1,
                       min_scale=0.1, max_scale=1, mode='inference')
    detector.load_weights(head_detector_path)

    # Create the output directory, replacing it if it already exists.
    try:
        os.mkdir(destination_dir)
        print("Directory", destination_dir, "created.")
    except FileExistsError:
        print("Directory", destination_dir, "already exists.")
        shutil.rmtree(destination_dir)
        os.mkdir(destination_dir)

    # Actual cleaning.
    clean_pointing04(pointing04_dir, destination_dir, detector,
                     confidence_threshold, out_size, grayscale_output,
                     downscaling_interpolation)

    # Assign classes.
    class_assign(destination_dir, num_splits_tilt, num_splits_pan)

    # Split dataset.
    split_dataset(destination_dir, test_ratio, validation_ratio)

    # Get normalization parameters.
    find_norm_parameters(destination_dir)

    # OPTIONAL: Save dataset as numpy arrays (for uploading to Google Colab).
    store_dataset_arrays(destination_dir)
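
# A minimal sketch of how a single image could be pushed through the SSD
# detector configured above and filtered by confidence_threshold. It assumes
# the decoded-output convention of the ssd_keras implementation in inference
# mode (rows of [class_id, confidence, xmin, ymin, xmax, ymax]) and skips any
# model-specific preprocessing; detect_heads_sketch is a hypothetical helper,
# not part of the pipeline above.
import cv2
import numpy as np


def detect_heads_sketch(detector, image_bgr, in_size, confidence_threshold):
    # Resize to the detector's expected input size and add a batch dimension.
    resized = cv2.resize(image_bgr, (in_size, in_size))
    batch = np.expand_dims(resized.astype(np.float32), axis=0)
    # Keep only the boxes whose confidence clears the threshold.
    y_pred = detector.predict(batch)
    return [box for box in y_pred[0] if box[1] >= confidence_threshold]
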
def main_haar():
    '''
    This function acts as a testbench for the function clean_pointing04,
    using it to perform the basic processing of the Pointing'04 dataset
    with a Haar cascade face detector and the set of default values
    defined below.
    '''
    # Source path.
    pointing04_dir = '../original/HeadPoseImageDatabase/'

    # Destination path.
    destination_dir = 'clean/pointing04_haar_area_color/'

    # Detector model paths.
    frontal_detector_path = 'models/haarcascade_frontalface_alt.xml'
    profile_detector_path = 'models/haarcascade_profileface.xml'

    # Detection parameters.
    out_size = 64

    # Output parameters.
    grayscale_output = True
    downscaling_interpolation = cv2.INTER_AREA

    # Number of splits for class assignment.
    num_splits_tilt = 8
    num_splits_pan = 8

    # Ratios for train/test and train/validation splits.
    test_ratio = 0.2
    validation_ratio = 0.2

    # Detector model.
    detector = HaarFaceDetector(frontal_detector_path, profile_detector_path)

    # Create the output directory, replacing it if it already exists.
    try:
        os.mkdir(destination_dir)
        print("Directory", destination_dir, "created.")
    except FileExistsError:
        print("Directory", destination_dir, "already exists.")
        shutil.rmtree(destination_dir)
        os.mkdir(destination_dir)

    # Actual cleaning.
    clean_pointing04(pointing04_dir, destination_dir, detector, out_size,
                     grayscale_output, downscaling_interpolation)

    # Assign classes.
    class_assign(destination_dir, num_splits_tilt, num_splits_pan)

    # Split dataset.
    split_dataset(destination_dir, test_ratio, validation_ratio)

    # Get normalization parameters.
    find_norm_parameters(destination_dir)

    # OPTIONAL: Save dataset as numpy arrays (for uploading to Google Colab).
    store_dataset_arrays(destination_dir)
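
# HaarFaceDetector is defined elsewhere in this repository. A plausible
# minimal sketch of such a wrapper is given below, assuming it combines a
# frontal and a profile cv2.CascadeClassifier and falls back to the profile
# cascade when the frontal one finds nothing; the class body is an
# illustration, not the repository's actual implementation.
import cv2


class HaarFaceDetectorSketch:

    def __init__(self, frontal_path, profile_path):
        self.frontal = cv2.CascadeClassifier(frontal_path)
        self.profile = cv2.CascadeClassifier(profile_path)

    def detect(self, image_bgr):
        # Haar cascades operate on grayscale images.
        gray = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2GRAY)
        faces = self.frontal.detectMultiScale(gray, scaleFactor=1.1,
                                              minNeighbors=5)
        if len(faces) == 0:
            faces = self.profile.detectMultiScale(gray, scaleFactor=1.1,
                                                  minNeighbors=5)
        # Each detection is an (x, y, w, h) rectangle.
        return faces


if __name__ == '__main__':
    # Run the Haar-based pipeline by default; call main_ssd() instead to use
    # the SSD head detector.
    main_haar()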