Пример #1
0
def main():
    """Entry point: build the Clotho dataset (examples and features) only."""
    cmd_args = get_argument_parser().parse_args()

    # Resolve the YAML settings file from the CLI arguments and load it.
    settings_path = Path(
        cmd_args.file_dir, f'{cmd_args.config_file}.{cmd_args.file_ext}')
    settings = load_yaml_file(settings_path)

    init_loggers(verbose=cmd_args.verbose,
                 settings=settings['dirs_and_files'])

    # Two log channels: top-level progress and indented sub-steps.
    logger_main = logger.bind(is_caption=False, indent=0)
    logger_sec = logger.bind(is_caption=False, indent=1)

    logger_main.info(datetime.now().strftime('%Y-%m-%d %H:%M'))
    logger_main.info('Doing only dataset creation')
    logger_main.info('Starting Clotho dataset creation')

    dirs_and_files = settings['dirs_and_files']

    logger_sec.info('Creating examples')
    create_dataset(
        settings_dataset=settings['dataset_creation_settings'],
        settings_dirs_and_files=dirs_and_files)
    logger_sec.info('Examples created')

    logger_sec.info('Extracting features')
    extract_features(
        root_dir=dirs_and_files['root_dirs']['data'],
        settings_data=dirs_and_files['dataset'],
        settings_features=settings['feature_extraction_settings'])
    logger_sec.info('Features extracted')

    logger_main.info('Dataset created')
Пример #2
0
def main():
    """Entry point: create the dataset when enabled in the workflow settings."""
    parsed = get_argument_parser().parse_args()

    config_path = Path(
        parsed.file_dir, f'{parsed.config_file}.{parsed.file_ext}')
    settings = load_yaml_file(config_path)

    init_loggers(verbose=parsed.verbose,
                 settings=settings['dirs_and_files'])

    # Top-level progress logger and indented sub-step logger.
    logger_main = logger.bind(is_caption=False, indent=0)
    logger_inner = logger.bind(is_caption=False, indent=1)

    # Guard clause: nothing else to do when dataset creation is disabled.
    if not settings['workflow']['dataset_creation']:
        return

    logger_main.info('Starting creation of dataset')

    logger_inner.info('Creating examples')
    dataset_multiprocess.create_dataset(
        settings_dataset=settings['dataset_creation_settings'],
        settings_dirs_and_files=settings['dirs_and_files'])
    logger_inner.info('Examples created')

    logger_inner.info('Extracting features')
    dataset_multiprocess.extract_features(
        root_dir=settings['dirs_and_files']['root_dirs']['data'],
        settings_data=settings['dirs_and_files']['dataset'],
        settings_features=settings['feature_extraction_settings'])
    logger_inner.info('Features extracted')
    logger_main.info('Creation of dataset ended')
Пример #3
0
def main():
    """Entry point: run the workflow stages enabled in the settings file.

    Stages (each toggled under ``settings['workflow']``):
    dataset creation, test-split feature extraction, and the DNN method
    (training / evaluation / testing).
    """
    cli = get_argument_parser().parse_args()

    settings = load_yaml_file(
        Path(cli.file_dir, f'{cli.config_file}.{cli.file_ext}'))

    init_loggers(verbose=cli.verbose,
                 settings=settings['dirs_and_files'])

    # Top-level progress logger and indented sub-step logger.
    logger_main = logger.bind(is_caption=False, indent=0)
    logger_inner = logger.bind(is_caption=False, indent=1)

    workflow = settings['workflow']
    dirs_and_files = settings['dirs_and_files']

    if workflow['dataset_creation']:
        logger_main.info('Starting creation of dataset')

        logger_inner.info('Creating examples')
        dataset.create_dataset(
            settings_dataset=settings['dataset_creation_settings'],
            settings_dirs_and_files=dirs_and_files)
        logger_inner.info('Examples created')

        logger_inner.info('Extracting features')
        dataset.extract_features(
            root_dir=dirs_and_files['root_dirs']['data'],
            settings_data=dirs_and_files['dataset'],
            settings_features=settings['feature_extraction_settings'])
        logger_inner.info('Features extracted')
        logger_main.info('Creation of dataset ended')

    if workflow['dnn_testing']:
        # Build the test-split features only if they are not on disk yet.
        feature_dirs = dirs_and_files['dataset']['features_dirs']
        test_split_feat_dir = Path(
            dirs_and_files['root_dirs']['data']).joinpath(
                feature_dirs['output'], feature_dirs['test'])
        if test_split_feat_dir.exists():
            logger_inner.info('Found existing test data')
        else:
            logger_main.info('Starting creation of test dataset')
            logger_inner.info('Extracting features')
            dataset.extract_features_test(
                root_dir=dirs_and_files['root_dirs']['data'],
                settings_data=dirs_and_files['dataset'],
                settings_features=settings['feature_extraction_settings'],
                settings_audio=settings['dataset_creation_settings']['audio'])
            logger_inner.info('Features extracted')
            logger_main.info('Creation of test dataset ended')

    # Any DNN stage being enabled triggers the method itself.
    if any(workflow[key] for key in
           ('dnn_training', 'dnn_evaluation', 'dnn_testing')):
        method.method(settings)
Пример #4
0
def main():
    """Entry point: load YAML settings, set up logging, and run the method."""
    parsed_args = get_argument_parser().parse_args()

    yaml_path = Path(parsed_args.file_dir,
                     f'{parsed_args.config_file}.{parsed_args.file_ext}')
    settings = load_yaml_file(yaml_path)

    # NOTE: this variant reads its logging settings from the 'logging' key,
    # unlike the sibling entry points that use 'dirs_and_files'.
    init_loggers(verbose=parsed_args.verbose, settings=settings['logging'])

    method.method(settings)
Пример #5
0
def main():
    """Entry point: load settings, init job-aware logging, and run the method."""
    parsed = get_argument_parser().parse_args()

    settings = load_yaml_file(Path(
        parsed.file_dir, f'{parsed.config_file}.{parsed.file_ext}'))

    # The job id is threaded through both logging and the method call,
    # presumably to separate artifacts of concurrently running jobs.
    init_loggers(verbose=parsed.verbose,
                 settings=settings['dirs_and_files'],
                 job_id=parsed.job_id)

    method.method(settings, parsed.job_id)
def main():
    args = get_argument_parser().parse_args()

    file_dir = args.file_dir
    config_file = args.config_file
    file_ext = args.file_ext
    verbose = args.verbose

    settings = file_io.load_yaml_file(
        Path(file_dir, f'{config_file}.{file_ext}'))

    printing.init_loggers(verbose=verbose, settings=settings['dirs_and_files'])

    logger_main = logger.bind(is_caption=False, indent=0)

    logger_main.info('Starting method only')
    method(settings)
    logger_main.info('Method\'s done')
    training_epochs = 50
    log_interval = 100
    checkpoint_save_interval = 5

    device = torch.device('cuda:0')

    args = get_argument_parser().parse_args()

    file_dir = args.file_dir
    config_file = args.config_file
    file_ext = args.file_ext
    verbose = args.verbose

    print("load settings start")

    settings = load_yaml_file(Path(file_dir, f'{config_file}.{file_ext}'))

    settings_training = settings['dnn_training_settings']['training'],
    settings_data = settings['dnn_training_settings']['data'],
    settings_io = settings['dirs_and_files']

    indices_list = _load_indices_file(
        settings['dirs_and_files'], settings['dnn_training_settings']['data'])

    data_path_evaluation = Path(
        settings_io['root_dirs']['data'],
        settings_io['dataset']['features_dirs']['output'],
        settings_io['dataset']['features_dirs']['evaluation'])

    validation_data = get_clotho_loader(
        settings_io['dataset']['features_dirs']['evaluation'],